Pierre-Yves Barriat 7 лет назад
Родитель
Commit
5439bdaf6b

+ 107 - 0
3.2.2/runtime/classic/config-run.xml

@@ -30,6 +30,113 @@
         <Properties>executable</Properties>
     </Translation>
 
+    <Platform name="lemaitre3">
+        <Description>
+            HOST:        lemaitre3.cism.ucl.ac.be
+            ARCH:        linux_x86_64
+            CPU MODEL:   SkyLake
+            MAINTAINER:  Pierre-Yves Barriat [pierre-yves.barriat@uclouvain.be]
+        </Description>
+
+        <Translation name="LEMAITRE3_CFG">
+            <Description>Platform dependent configuration (lemaitre3)</Description>
+            <Template>platform/lemaitre3.cfg.tmpl</Template>
+            <Target>ecconf.cfg</Target>
+            <Properties></Properties>
+        </Translation>
+
+        <Parameter name="BUILD_ARCH">
+            <Description>Architecture used when building EC-Earth</Description>
+            <Type>STRING</Type>
+            <Value>ecconf</Value>
+        </Parameter>
+
+        <Parameter name="ECEARTH_SRC_DIR">
+            <Description>Base directory for EC-Earth sources</Description>
+            <Type>PATH</Type>
+            <Value>${HOME}/modeles/ecearth/ecearth_3.2.2/sources</Value>
+        </Parameter>
+
+        <Parameter name="RUN_DIR">
+            <Description>EC-Earth run directory</Description>
+            <Type>PATH</Type>
+            <Value>/scratch/ucl/elic/${USER}/ecearth/run/${exp_name}</Value>
+        </Parameter>
+
+        <Parameter name="INI_DATA_DIR">
+            <Description>Initial data directory used by EC-Earth</Description>
+            <Type>PATH</Type>
+            <Value>/scratch/ucl/elic/pbarriat/DATA</Value>
+        </Parameter>
+
+        <Parameter name="PROC_PER_NODE">
+            <Description>Number of processors per node</Description>
+            <Type>INTEGER</Type>
+            <Value>24</Value>
+        </Parameter>
+
+        <Parameter name="RESUBMIT_JOB">
+            <Description>Automatically resubmit the job? [true|false]</Description>
+            <Type>BOOLEAN</Type>
+            <Value>true</Value>
+        </Parameter>
+
+        <Parameter name="RESUBMIT_OPT">
+            <Description>Additional options for the submit command</Description>
+            <Type>STRING</Type>
+            <Value></Value>
+        </Parameter>
+
+        <Parameter name="GRIBEX_DEFINITION_PATH">
+            <Description>GRIBEX definition Template path</Description>
+            <Type>PATH</Type>
+            <Value>/scratch/ucl/elic/pbarriat/DATA/OPT/gribex/lib</Value>
+        </Parameter>
+
+        <Parameter name="GRIBAPI_BASE_DIR">
+            <Description>GRIB API base directory</Description>
+            <Type>PATH</Type>
+            <Value>/opt/cecisw/arch/easybuild/2018a/software/grib_api/1.16.0-intel-2018a</Value>
+        </Parameter>
+
+        <Parameter name="GRIBAPI_BIN_SUBDIR">
+            <Description>GRIB API bin directory relative to the base directory</Description>
+            <Type>PATH</Type>
+            <Value>bin</Value>
+        </Parameter>
+
+        <Parameter name="GRIBAPI_DEFINITION_SUBDIR">
+            <Description>GRIB API definition directory relative to the base directory</Description>
+            <Type>PATH</Type>
+            <Value>share/grib_api/definitions</Value>
+        </Parameter>
+
+        <Parameter name="GRIBAPI_SAMPLES_SUBDIR">
+            <Description>GRIB API samples directory relative to the base directory</Description>
+            <Type>PATH</Type>
+            <Value>share/grib_api/ifs_samples/grib1</Value>
+        </Parameter>
+
+        <Parameter name="ADD_TO_LD_LIBRARY_PATH">
+            <Description>Additional shared library paths needed at runtime</Description>
+            <Type>STRING</Type>
+            <Value></Value>
+        </Parameter>
+
+        <Parameter name="PRE_LOAD_MODULES_CMD">
+            <Description>Command run before any modules are loaded</Description>
+            <Type>STRING</Type>
+            <Value>module purge</Value>
+        </Parameter>
+
+        <Parameter name="MODULE_LIST">
+            <Description>List of modules to load</Description>
+            <Type>STRING</Type>
+            <Value>Boost netCDF-Fortran/4.4.4-intel-2018a grib_api</Value>
+        </Parameter>
+
+    </Platform>
+
     <Platform name="zenobe">
         <Description>
             HOST:        zenobe.hpc.cenaero.be

+ 0 - 24
3.2.2/runtime/classic/ece-ifs+nemo.sh.tmpl

@@ -1,27 +1,3 @@
-#!/usr/bin/env bash
-
-#PBS -q large
-#PBS -r y
-#PBS -W group_list=ecearth
-#PBS -l walltime=23:50:00
-#PBS -l select=6:ncpus=24:mem=63000mb:mpiprocs=24:ompthreads=1
-#PBS -M pierre-yves.barriat@uclouvain.be
-#PBS -m abe
-
-exec > ${PBS_O_WORKDIR-$PWD}/${PBS_JOBNAME-"local"}_${PBS_JOBID-"id"}.log
-echo "------------------ Job Info --------------------"
-echo "jobid : $PBS_JOBID"
-echo "jobname : $PBS_JOBNAME"
-echo "job type : $PBS_ENVIRONMENT"
-echo "submit dir : $PBS_O_WORKDIR"
-echo "queue : $PBS_O_QUEUE"
-echo "user : $PBS_O_LOGNAME"
-echo "threads : $OMP_NUM_THREADS"
-
-set -ue
-
-# Directories
-start_dir=${PBS_O_WORKDIR-$PWD}
 
 # librunscript defines some helper functions
 . ${start_dir}/librunscript.sh

+ 23 - 0
3.2.2/runtime/classic/lemaitre3.sh.tmpl

@@ -0,0 +1,23 @@
+#!/bin/bash
+# Submission script for Lemaitre3
+#SBATCH --job-name=ece3
+#SBATCH --time=23:50:00 # hh:mm:ss
+#
+#SBATCH --ntasks=384
+#SBATCH --mem-per-cpu=4096 # 4GB
+#SBATCH --partition=Def
+#
+#SBATCH --mail-user=noname@uclouvain.be
+#SBATCH --mail-type=ALL
+
+exec > ${SLURM_SUBMIT_DIR-$PWD}/${SLURM_JOB_NAME-"local"}_${SLURM_JOB_ID-"id"}.log
+echo "------------------ Job Info --------------------"
+echo "jobid : $SLURM_JOB_ID"
+echo "jobname : $SLURM_JOB_NAME"
+echo "submit dir : $SLURM_SUBMIT_DIR"
+echo "queue : $MPIRUN_PARTITION"
+
+set -ue 
+
+# Directories
+start_dir=${SLURM_SUBMIT_DIR-$PWD}

+ 144 - 0
3.2.2/runtime/classic/platform/lemaitre3.cfg.tmpl

@@ -0,0 +1,144 @@
+# Platform dependent configuration functions for the 'lemaitre3' machine
+#(lemaitre3.cism.ucl.ac.be)
+
+function configure()
+{
+    # This function should configure all settings/modules needed to
+    # later prepare the EC-Earth run directory and set variables used
+    # in the run script
+
+
+    # Configure paths for building/running EC-Earth
+    ecearth_src_dir=[[[PLT:ACTIVE:ECEARTH_SRC_DIR]]]
+    run_dir=[[[PLT:ACTIVE:RUN_DIR]]]
+    ini_data_dir=[[[PLT:ACTIVE:INI_DATA_DIR]]]
+
+    # File for standard output.
+    # NOTE: This will be modified for restart jobs!
+    stdout_file=${SLURM_SUBMIT_DIR-$PWD}/$(basename ${SLURM_JOB_NAME})_${SLURM_JOB_ID-"id"}.log
+
+    # Resubmit this job for automatic restarts? [true/false]
+    # Also, add options for the resubmit command here.
+    resubmit_job=[[[PLT:ACTIVE:RESUBMIT_JOB]]]
+    resubmit_opt="[[[PLT:ACTIVE:RESUBMIT_OPT]]]"
+
+    # Configure GRIBEX paths
+    export LOCAL_DEFINITION_TEMPLATES=[[[PLT:ACTIVE:GRIBEX_DEFINITION_PATH]]]/gribtemplates
+    export ECMWF_LOCAL_TABLE_PATH=[[[PLT:ACTIVE:GRIBEX_DEFINITION_PATH]]]/gribtables
+
+    # Configure grib api paths
+    export GRIB_DEFINITION_PATH=[[[PLT:ACTIVE:GRIBAPI_BASE_DIR]]]/[[[PLT:ACTIVE:GRIBAPI_DEFINITION_SUBDIR]]]
+    export GRIB_SAMPLES_PATH=[[[PLT:ACTIVE:GRIBAPI_BASE_DIR]]]/[[[PLT:ACTIVE:GRIBAPI_SAMPLES_SUBDIR]]]
+    export GRIB_BIN_PATH=[[[PLT:ACTIVE:GRIBAPI_BASE_DIR]]]/[[[PLT:ACTIVE:GRIBAPI_BIN_SUBDIR]]]
+
+    # Configure number of processors per node
+    proc_per_node=[[[PLT:ACTIVE:PROC_PER_NODE]]]
+
+    # Configure and load modules
+    pre_load_modules_cmd="[[[PLT:ACTIVE:PRE_LOAD_MODULES_CMD]]]"
+    module_list="[[[PLT:ACTIVE:MODULE_LIST]]]"
+
+    if [ -n "${pre_load_modules_cmd}" ]
+    then
+        ${pre_load_modules_cmd}
+    fi
+    module load ${module_list}
+
+    # Add directories to the shared library search path
+    export LD_LIBRARY_PATH=${LD_LIBRARY_PATH:+${LD_LIBRARY_PATH}:}"[[[PLT:ACTIVE:ADD_TO_LD_LIBRARY_PATH]]]"
+
+}
+
+function launch()
+{
+    # Compute and check the node distribution
+    info "======================="
+    info "Node/proc distribution:"
+    info "-----------------------"
+    info "IFS:   ${ifs_numproc}"
+    info "NEMO:  ${nem_numproc}"
+    info "XIOS:  ${xio_numproc}"
+    info "======================="
+
+    cmd="mpirun"
+
+    while (( "$#" ))
+    do
+        nranks=$1
+        executable=./$(basename $2)
+        shift
+        shift
+
+        cmd+=" -n $nranks $executable"
+
+        while (( "$#" )) && [ "$1" != "--" ]
+        do
+            cmd+=" $1"
+            shift
+        done
+        shift || true
+
+        (( "$#" )) && cmd+=" :"
+    done
+
+    #export OMP_NUM_THREADS=1
+    pwd
+    echo $cmd
+    #exit
+    $cmd
+}
+
+function finalise2()
+{
+    # This function should execute of any post run functionality, e.g.
+    # platform dependent cleaning or a resubmit
+
+    if ${resubmit_job} && [ $(date -d "${leg_end_date}" +%s) -lt $(date -d "${run_end_date}" +%s) ]
+    then
+        info "Resubmitting job for leg $((leg_number+1))"
+        # Need to go to start_dir to find the run script
+        cd ${SLURM_SUBMIT_DIR}  # same as $start_dir
+
+        cp ./${SLURM_JOB_NAME} ./$(basename ${SLURM_JOB_NAME}).$$
+     
+        sed "s:force_run_from_scratch=true:force_run_from_scratch=false:" \
+            <./$(basename ${SLURM_JOB_NAME}).$$ \
+            >./$(basename ${SLURM_JOB_NAME})
+
+        cp -f ./${SLURM_JOB_NAME} ./$(basename ${SLURM_JOB_NAME}).$$
+
+        sed "s:special_restart=true:special_restart=false:" \
+            <./$(basename ${SLURM_JOB_NAME}).$$ \
+            >./$(basename ${SLURM_JOB_NAME})
+
+        \rm -f ./$(basename ${SLURM_JOB_NAME}).$$
+
+        # Submit command
+        set -x
+        sbatch ./$(basename ${SLURM_JOB_NAME})
+        set +x
+    else
+        info "Not resubmitting."
+    fi
+}
+
+function finalise()
+{
+    # This function should execute of any post run functionality, e.g.
+    # platform dependent cleaning or a resubmit
+
+    if ${resubmit_job} && [ $(date -d "${leg_end_date}" +%s) -lt $(date -d "${run_end_date}" +%s) ]
+    then
+        info "Resubmitting job for leg $((leg_number+1))"
+        # Need to go to start_dir to find the run script
+        cd ${start_dir}
+        # Submit command
+        # Note: This does not work if you specify a job name with sbatch -J jobname!
+        sbatch -N ${SLURM_JOB_NUM_NODES}                                                 \
+               -o ${run_dir}/$(basename ${stdout_file}).$(printf %03d $((leg_number+1))) \
+               -e ${run_dir}/$(basename ${stdout_file}).$(printf %03d $((leg_number+1))) \
+               -d ${SLURM_JOB_ID}                                                        \
+               ${resubmit_opt}                                                           \
+               ./${SLURM_JOB_NAME}
+    fi
+}

+ 24 - 0
3.2.2/runtime/classic/zenobe.sh.tmpl

@@ -0,0 +1,24 @@
+#!/usr/bin/env bash
+
+#PBS -q large
+#PBS -r y
+#PBS -W group_list=ecearth
+#PBS -l walltime=23:50:00
+#PBS -l select=6:ncpus=24:mem=63000mb:mpiprocs=24:ompthreads=1
+#PBS -M noname@uclouvain.be
+#PBS -m abe
+
+exec > ${PBS_O_WORKDIR-$PWD}/${PBS_JOBNAME-"local"}_${PBS_JOBID-"id"}.log
+echo "------------------ Job Info --------------------"
+echo "jobid : $PBS_JOBID"
+echo "jobname : $PBS_JOBNAME"
+echo "job type : $PBS_ENVIRONMENT"
+echo "submit dir : $PBS_O_WORKDIR"
+echo "queue : $PBS_O_QUEUE"
+echo "user : $PBS_O_LOGNAME"
+echo "threads : $OMP_NUM_THREADS"
+
+set -ue 
+
+# Directories
+start_dir=${PBS_O_WORKDIR-$PWD}