!===============================================
! compiler information
!===============================================
! Machine-specific settings: compiler names, wrappers and flag groups
! used by the pycasso build scripts on this IBM/XLF system.

! Do not include compiler settings from standard file,
! e.g. pycasso-compiler-xlf-12.1.rc,
! since the names of the mpi wrappers differ per machine ...
! (AJS, 2012-05)

! ** compiler names

! Refuse to build with an older XLF: OpenMP problems were seen before 13.1.
! check ...
#if "fortran/ibm/13.1" not in "${LOADEDMODULES}"
#error Problems with OpenMP found for older XLF compiler, enable newer version using: module load fortran/ibm/13.1
#endif

! standard compiler:
compiler.fc : xlf95

! some files require f77 compiler:
compiler.f77 : xlf

! idem for thread-safe compilation, as required for compilation with OpenMP support ;
! note that OpenMP also requires compiler flag '-qsmp=omp'
! (the '_r' suffix selects the thread-safe XL compiler invocation)
!
compiler.fc.openmp : ${compiler.fc}_r

! compilers for MPI programs; ensure that it uses xlf9*, and xlf ...
mpi.compiler.fc : mpfort -compiler ${compiler.fc}
mpi.compiler.fc.openmp : mpfort -compiler ${compiler.fc.openmp}

! ** compiler flags

! specify flag to retrieve version if different from '-v'
compiler.getversion_flag : -qversion

! to define macros (XLF passes preprocessor defines via -WF,-D<name>):
compiler.defineflag : -WF,-D

! default compiler flags:
!  o -w : Suppresses informational, language-level and warning messages.
compiler.flags.default.fflags : -w
compiler.flags.default.ldflags :

! promote default reals to 8 bytes:
compiler.flags.real8.fflags : -qrealsize=8
compiler.flags.real8.ldflags :

! no extra flags needed for MPI (handled by the mpfort wrapper):
compiler.flags.mpi.fflags :
compiler.flags.mpi.ldflags :

! OpenMP support:
compiler.flags.openmp.fflags : -qsmp=omp
compiler.flags.openmp.ldflags :

! optimization levels, from none to very fast:
compiler.flags.optim-none.fflags : -O0
compiler.flags.optim-none.ldflags :

compiler.flags.optim-strict.fflags : -qstrict
compiler.flags.optim-strict.ldflags :

compiler.flags.optim-fast.fflags : -O3 -qarch=auto
compiler.flags.optim-fast.ldflags : -O3 -qarch=auto

compiler.flags.optim-vfast.fflags : -O5 -qarch=auto
compiler.flags.optim-vfast.ldflags : -O5 -qarch=auto

! 24 Mar 2011 - P. Le Sager : Added sigtrap to use default trap
! handler (and to get no core files) and -g to get traceback with line
! number. Added overflow and NaN to list of traps.
!
! It is **NOT** recommended to add -qinitauto=FF with -qflttrap=nanq,
! since there are several uninitialized variables by design in TM5
! that are just fine (like in unbounded do statement!)
! Prior to 24/3/2011:
!compiler.flags.check-all.fflags : -qcheck -qundef -qflttrap=zerodivide:invalid:enable
compiler.flags.check-all.fflags : -qsigtrap -qflttrap=enable:inv:ov:zero:nanq -C -g
compiler.flags.check-all.ldflags :

! debugging: symbol info and full source paths for tracebacks:
compiler.flags.debug.fflags : -qdbg -qfullpath
compiler.flags.debug.ldflags :

!===============================================
! libraries
!===============================================

! root of the SARA software tree; library paths below are built from this:
SARA : /sara/sw

! Z library (used for compression in HDF4)
compiler.lib.z.fflags :
compiler.lib.z.libs : -lz

! JPEG library (used for compression in HDF4)
compiler.lib.jpeg.fflags :
compiler.lib.jpeg.libs : -ljpeg

! SZ library (used for compression in HDF4)
! NOTE(review): paths use ${SARA_SZIP_ROOT}, presumably set by the szip
! module environment — confirm the module exports this variable.
SZIP_MODULE : szip/2.1
compiler.lib.sz.fflags : -I${SARA_SZIP_ROOT}/include
compiler.lib.sz.libs : -L${SARA_SZIP_ROOT}/lib -lsz
! check ...
#if "${SZIP_MODULE}" not in "${LOADEDMODULES}"
#error Please load module '${SZIP_MODULE}' ...
#endif

! HDF4 library; use version without netcdf support ("no-netcdf")
! to avoid problems when compiling in combination with NetCDF libraries:
HDF4_MODULE : hdf4nn/4.2
!compiler.lib.hdf4.fflags : -I${SARA_HDF4_INCLUDE}
!compiler.lib.hdf4.libs : -L${SARA_HDF4_LIB}/lib -lmfhdf -ldf
compiler.lib.hdf4.fflags : -I${SARA}/${HDF4_MODULE}/include
compiler.lib.hdf4.libs : -L${SARA}/${HDF4_MODULE}/lib -lmfhdf -ldf
! check ...
#if "${HDF4_MODULE}" not in "${LOADEDMODULES}"
#error Please load module '${HDF4_MODULE}' ...
#endif

! HDF5 library (serial):
HDF5_MODULE : hdf5/1.8.6
!compiler.lib.hdf5.fflags : -I${SARA_HDF5_INCLUDE}/include
!compiler.lib.hdf5.libs : -L${SARA_HDF5_LIB}/lib -lhdf5_hl -lhdf5
compiler.lib.hdf5.fflags : -I${SARA}/${HDF5_MODULE}/include
compiler.lib.hdf5.libs : -L${SARA}/${HDF5_MODULE}/lib -lhdf5_hl -lhdf5
! HDF5 library with parallel features:
! NOTE(review): the loaded-module check below tests HDF5_PAR_MODULE
! (hdf5-mp/1.8.2) while the include/lib paths use HDF5_PAR_MODULE2
! (hdf5/1.8.2pp) — presumably the module and the install prefix differ
! on this machine; confirm this is intentional.
HDF5_PAR_MODULE : hdf5-mp/1.8.2
HDF5_PAR_MODULE2 : hdf5/1.8.2pp
!HDF5_PAR_MODULE : hdf5-mp/1.8.7
!compiler.lib.hdf5_par.fflags : -I${SARA_HDF5_INCLUDE}/include
!compiler.lib.hdf5_par.libs : -L${SARA_HDF5_LIB}/lib -lhdf5_hl -lhdf5 -lgpfs -lcurl
compiler.lib.hdf5_par.fflags : -I${SARA}/${HDF5_PAR_MODULE2}/include
compiler.lib.hdf5_par.libs : -L${SARA}/${HDF5_PAR_MODULE2}/lib -lhdf5_hl -lhdf5 -lgpfs -lcurl
! check: parallel runs need the parallel HDF5 module, serial runs the serial one ...
#if "${par.mpi}" in ["T","True"] :
#if "${HDF5_PAR_MODULE}" not in "${LOADEDMODULES}"
#error Please load module '${HDF5_PAR_MODULE}' ...
#endif
#else
#if "${HDF5_MODULE}" not in "${LOADEDMODULES}"
#error Please load module '${HDF5_MODULE}' ...
#endif
#endif

!! NetCDF library (NetCDF-3, no longer used):
!NETCDF_MODULE : netcdf/4.0.1
!compiler.lib.netcdf.fflags : -I${SARA_NETCDF_ROOT}/include
!compiler.lib.netcdf.libs : -L${SARA_NETCDF_ROOT}/lib -lnetcdff -lnetcdf

! NetCDF4 library (serial):
NETCDF4_MODULE : netcdf/4.1.2
!compiler.lib.netcdf4.fflags : -I${SARA_NETCDF_ROOT}/include
!compiler.lib.netcdf4.libs : -L${SARA_NETCDF_ROOT}/lib -lnetcdff -lnetcdf
compiler.lib.netcdf4.fflags : -I${SARA}/${NETCDF4_MODULE}/include
compiler.lib.netcdf4.libs : -L${SARA}/${NETCDF4_MODULE}/lib -lnetcdff -lnetcdf

! NetCDF4 library with parallel features:
! ~ oldest properly working version:
!NETCDF4_PAR_MODULE : netcdf-mp/4.0.1
! ~ newer versions using 'NF90_MPIIO' creation mode;
!   ensure that you have the latest 'mdf.F90' module
!NETCDF4_PAR_MODULE : netcdf-mp/4.1.2
NETCDF4_PAR_MODULE : netcdf-mp/4.1.3
!compiler.lib.netcdf4_par.fflags : -I${SARA_NETCDF_ROOT}/include
!compiler.lib.netcdf4_par.libs : -L${SARA_NETCDF_ROOT}/lib -lnetcdff -lnetcdf
compiler.lib.netcdf4_par.fflags : -I${SARA}/${NETCDF4_PAR_MODULE}/include
compiler.lib.netcdf4_par.libs : -L${SARA}/${NETCDF4_PAR_MODULE}/lib -lnetcdff -lnetcdf
! check: parallel runs need the parallel NetCDF4 module, serial runs the serial one ...
#if "${par.mpi}" in ["T","True"] :
#if "${NETCDF4_PAR_MODULE}" not in "${LOADEDMODULES}"
#error Please load module '${NETCDF4_PAR_MODULE}' ...
#endif
#else
#if "${NETCDF4_MODULE}" not in "${LOADEDMODULES}"
#error Please load module '${NETCDF4_MODULE}' ...
#endif
#endif

! MPI library: no explicit flags needed, the mpfort wrapper handles them:
compiler.lib.mpi.fflags :
compiler.lib.mpi.libs :

! GRIB library (disabled on this machine):
!GRIBEX_HOME : /usr
!compiler.lib.grib.fflags : -I${GRIBEX_HOME}/include
!compiler.lib.grib.libs : -L${GRIBEX_HOME}/lib -lgribex

!
! Lapack library: ESSL/PESSL/BLACS combinations per parallel mode:
!  -lessl -lblacs                    # serial
!  -lesslsmp -lblacssmp              # parallel with OpenMP
!  -lessl -lpessl -lblacs            # parallel with MPI
!  -lesslsmp -lpesslsmp -lblacssmp   # parallel with MPI and OpenMP
!
! library name suffix: 'smp' for OpenMP builds, empty otherwise:
#if "${par.openmp}" in ["T","True"] :
my.essl.ext : smp
#else
my.essl.ext :
#endif
! parallel ESSL only for MPI builds:
#if "${par.mpi}" in ["T","True"] :
my.pessl : -lpessl${my.essl.ext}
#else
my.pessl :
#endif
!
! ESSL-based variant, kept for reference:
!compiler.lib.lapack.fflags :
!compiler.lib.lapack.libs : -lessl${my.essl.ext} ${my.pessl} -lblacs${my.essl.ext}

! LAPACK library (reference implementation build for XLF 13.1):
LAPACK_HOME : /sara/sw/lapack/3.4.1/CCI/xlf13.1
compiler.lib.lapack.fflags : -I${LAPACK_HOME}/include
compiler.lib.lapack.libs : -L${LAPACK_HOME}/lib -llapack !-L/home/ivdvelde/install/lapack-3.4.1/BLAS/SRC -lessl

! TotalView memory debugging (disabled):
!TV_VERSION : 8.9.0-1
!TV_ARCH : rs6000
!TV_HOME : ${TV_DIR}.${TV_VERSION}/${TV_ARCH}
!TV_HOME_MR : /ms_perm/JRC/jrr/opt/totalview.${TV_VERSION}/${TV_ARCH}
!compiler.lib.tv.fflags :
!compiler.lib.tv.libs : -L${TV_HOME_MR}/lib -L${TV_HOME}/lib ${TV_HOME}/lib/aix_malloctype64_5.o

! UDUNITS library (unit conversions):
UDUNITS_HOME : /sara/sw/udunits/1.12.4
compiler.lib.udunits.fflags : -I${UDUNITS_HOME}/include
compiler.lib.udunits.libs : -L${UDUNITS_HOME}/lib -ludunits

!===============================================
! settings for LoadLeveler queue
!===============================================

! queue system (bsub,loadleveler)
queue : loadleveler

! options passed directly to the submit command:
queue.ll.submit.options :

!------------------------------------------------
! ** queue options with same value for all steps:
!------------------------------------------------
! space separated list of LoadLeveler keywords applied to every job step:
queue.ll.options.default : job_name notification initialdir account_no

! job name:
queue.ll.option.default.job_name : ${job.name}

! when to send emails : always | error | start | never | complete*
queue.ll.option.default.notification : never

! run directory:
queue.ll.option.default.initialdir : ${rundir}

! account (to overwrite default one attached to $USER)
queue.ll.option.default.account_no : ${my.queue.account}

!------------------------------------------------
! ** queue options with different values per step:
!------------------------------------------------

! list options per step:
queue.ll.options.init : step_name output error job_type node_usage queue
!queue.ll.options.run : step_name dependency output error job_type node_usage node tasks_per_node environment wall_clock_limit queue
queue.ll.options.run : step_name dependency output error job_type node_usage node tasks_per_node wall_clock_limit queue
queue.ll.options.done : step_name dependency output error job_type node_usage queue

! ~ step init options: serial step on a shared node
queue.ll.option.init.step_name : init
queue.ll.option.init.output :
queue.ll.option.init.error :
queue.ll.option.init.job_type : serial
queue.ll.option.init.node_usage : shared
queue.ll.option.init.queue :

! ~ step run options

! set number of tasks, trap non-MPI case:
#if "${par.mpi}" in ["T","True"] :
my.ll.ntask : ${par.ntask}
#else
my.ll.ntask : 1
#endif
! set number of threads, trap non-OpenMP case:
#if "${par.openmp}" in ["T","True"] :
my.ll.nthread : ${par.nthread}
#else
my.ll.nthread : 1
#endif

queue.ll.option.run.step_name : run
! run only after a successful init step (if present in the job):
#if "init" in "${job.steps}".split() :
queue.ll.option.run.dependency : (init == 0)
#else :
queue.ll.option.run.dependency :
#endif
queue.ll.option.run.output :
queue.ll.option.run.error :
! parallel job settings; single node is sufficient here:
#if ${my.ll.ntask} > 1 :
queue.ll.option.run.job_type : parallel
queue.ll.option.run.node : 1 ! ok up to 32 tasks ...
queue.ll.option.run.tasks_per_node : ${my.ll.ntask} ! ok up to 32 tasks ...
#else
queue.ll.option.run.job_type : serial
queue.ll.option.run.node : ! dummy ...
queue.ll.option.run.tasks_per_node : ! dummy ...
#endif
! request exclusive node for the run step:
queue.ll.option.run.node_usage : not_shared
!queue.ll.option.run.node_usage : shared
! ensure correct environment:
! NOTE(review): the value below starts with '!', so the OMP_NUM_THREADS
! assignment appears to be commented out and the option is empty — confirm
! whether threads are set elsewhere.
queue.ll.option.run.environment : !OMP_NUM_THREADS = ${my.ll.nthread}
queue.ll.option.run.wall_clock_limit : ${loadleveler.wall_clock_limit}
queue.ll.option.run.queue :

! ~ step done options: serial clean-up step, runs after a successful run step
queue.ll.option.done.step_name : done
#if "run" in "${job.steps}".split() :
queue.ll.option.done.dependency : (run == 0)
#else :
queue.ll.option.done.dependency :
#endif
queue.ll.option.done.job_type : serial
queue.ll.option.done.node_usage : shared
queue.ll.option.done.output :
queue.ll.option.done.error :
queue.ll.option.done.queue :

!===============================================
! maker
!===============================================

! make command;
! the setup script will insert the 'build.jobs' specified in the expert.rc
! or passed as argument to the setup script:
!
maker : gmake -j %{build.jobs}

!===============================================
! MPI runner
!===============================================

! Parallel Operating Environment (POE)
! Common arguments:
!  -procs procs          # number of processors
!  -cmdfile              # file with commands
!  -hostfile             # file with host names
!  -labelio {yes | no}   # label standard output lines with pe id
!
mpirun.command : poe
mpirun.args : -labelio yes -procs ${par.ntask} -cmdfile ${mpirun.cmdfile} -hostfile ${mpirun.hostfile}

! name of command and host files (empty for not written):
mpirun.cmdfile : ${my.basename}.cmdfile
mpirun.hostfile : ${my.basename}.hostfile

!===============================================
! debugger
!===============================================

! debugger type: totalview | idb | kdbg
debugger : totalview

! command for debugger:
debugger.command : totalview -searchPath=${build.sourcedir}
!debugger.command : totalview

!===============================================
! model data
!===============================================

! the user scratch directory:
!my.scratch : ${TMPDIR}
my.scratch : /scratch/shared/${USER}

! main archive:
my.archdir : /scratch/shared/krol/
!my.archdir : /gpfs/mcn1/tmpprojects/tm5meteo

! base path to various data files:
my.data.dir : ${my.archdir}/input

! local temporary meteo archive:
!my.meteo.dir : ${my.archdir}/METEO
!my.meteo.dir : /gpfs/mcn1/tmpprojects/tm5meteo
!my.meteo.dir : /scratch/shared/ivdvelde/meteo-ei
!my.meteo.dir : /archive/ivdvelde/carbontracker/meteo-ei-nc
my.meteo.dir : /archive/ivdvelde/carbontracker/meteo-ei-nc-eur
!my.meteo.dir : /archive/ARCHIVED_kelder/TM/meteo
! meteo files are stored packed ('F' = not unpacked):
my.meteo.unpacked : F

! permanent archives to search for meteo files:
!my.meteo.search : ecfs:/nlh/TM/meteo
!my.meteo.search : ec:ecfs[nlh]:TM/meteo
!my.meteo.search : /gpfs/mcn1/tmpprojects/tm5meteo /archive/krol/METEO /archive/ARCHIVED_kelder/TM/meteo /archive/noije/TM/meteo
!my.meteo.search : /archive/ARCHIVED_kelder/TM/meteo
my.meteo.search : /archive/ivdvelde/carbontracker/meteo-ei-nc

! extra install tasks: none on this machine
my.install.tasks :