!===============================================
! compiler information
!===============================================

! template settings for Intel ifort compiler:
#include base/${my.branch}/rc/pycasso-compiler-ifort-sara.rc

! Problem with (non-standard) system routines 'Exit_' and 'Sleep_' on ECMWF:
! these do not have the underscores there. Flag '-qnoextname' resets to the
! official names again, but makes linking with HDF fail.
! Therefore, a macro __ecmwf__ is now defined to distinguish in the code
! between the various XLF implementations.
!
!my.default.fflags : -WF,-D__ecmwf__
my.default.fflags :
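!
! Illustration only: with '-WF,-D__ecmwf__' enabled, source code could
! select the proper routine names via the preprocessor; a minimal sketch,
! assuming cpp-style preprocessing of the Fortran sources:
!
!   #ifdef __ecmwf__
!     call Exit ( status )     ! ECMWF xlf: no trailing underscore
!   #else
!     call Exit_ ( status )    ! other XLF implementations
!   #endif
!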
!===============================================
! libraries
!===============================================

! location of installed user libraries:
APPS_HOME : /hpc/sw/modules/modulefiles/libraries

! addressing mode:
AMODE : LP64

! Z library (used for compression in HDF):
compiler.lib.z.fflags :
compiler.lib.z.libs : -lz

! JPEG library (used for compression in HDF):
compiler.lib.jpeg.fflags :
compiler.lib.jpeg.libs : -ljpeg

! SZ library (used for compression in HDF):
SZIP_VERSION : szip/intel/2.1
SZIP_HOME : ${APPS_HOME}/${SZIP_VERSION}
compiler.lib.sz.fflags : -I${SURFSARA_SZIP_INCLUDE}
compiler.lib.sz.libs : -L${SURFSARA_SZIP_LIB} -lsz

! HDF4 library:
HDF_VERSION : hdf4/intel/4.2.9
HDF_HOME : ${APPS_HOME}/${HDF_VERSION}
compiler.lib.hdf4.fflags : -I${SURFSARA_HDF4_INCLUDE}
compiler.lib.hdf4.libs : -L${SURFSARA_HDF4_LIB} -lmfhdf -ldf -ljpeg -lz

! HDF5 library:
HDF5_VERSION : hdf5/serial/intel/1.8.10-patch1
HDF5_HOME : ${APPS_HOME}/${HDF5_VERSION}
compiler.lib.hdf5.fflags : -I${SURFSARA_HDF5_INCLUDE}
compiler.lib.hdf5.libs : -L${SURFSARA_HDF5_LIB} -lhdf5 -lz

! HDF5 library with parallel features enabled:
HDF5_PAR_VERSION : hdf5/impi/intel/1.8.9
HDF5_PAR_HOME : ${APPS_HOME}/${HDF5_PAR_VERSION}
compiler.lib.hdf5_par.fflags : -I${SURFSARA_HDF5_INCLUDE}
compiler.lib.hdf5_par.libs : -L${SURFSARA_HDF5_LIB} -lhdf5_fortran -lhdf5 -lz

! NetCDF library:
NETCDF_VERSION : netcdf/serial/intel/4.1.3
NETCDF_HOME : ${APPS_HOME}/${NETCDF_VERSION}
compiler.lib.netcdf.fflags : -I${SURFSARA_NETCDF_INCLUDE}
compiler.lib.netcdf.libs : -L${SURFSARA_NETCDF_LIB} -lnetcdff -lnetcdf

! NetCDF4 library:
NETCDF4_VERSION : netcdf/serial/intel/4.1.3
NETCDF4_HOME : ${APPS_HOME}/${NETCDF4_VERSION}
compiler.lib.netcdf4.fflags : -I${SURFSARA_NETCDF_INCLUDE}
compiler.lib.netcdf4.libs : -L${SURFSARA_NETCDF_LIB} -lnetcdff -lnetcdf

! NetCDF4 library with parallel features enabled:
NETCDF4_PAR_VERSION : netcdf/impi/intel/4.1.3
NETCDF4_PAR_HOME : ${APPS_HOME}/${NETCDF4_PAR_VERSION}
compiler.lib.netcdf4_par.fflags : -I${SURFSARA_NETCDF_INCLUDE}
compiler.lib.netcdf4_par.libs : -L${SURFSARA_NETCDF_LIB} -lnetcdff -lnetcdf -lhdf5_hl -lhdf5 -lz -lcurl

!>>> DEPRECATED !!
! Parallel-NetCDF library:
PNETCDF_VERSION : pnetcdf/1.0.1
PNETCDF_HOME : /opt/pnetcdf/lib
!PNETCDF_HOME : ${APPS_HOME}/${PNETCDF_VERSION}/${AMODE}
compiler.lib.pnetcdf.fflags : -I${PNETCDF_HOME}/include
compiler.lib.pnetcdf.libs : -L${PNETCDF_HOME}/lib -lpnetcdf
!<<<

! UDUNITS version 1 library:
intel.usr.base : /Users/ivdvelde
UDUNITS_VERSION : udunits-1.12.11/
UDUNITS_HOME : ${intel.usr.base}/${UDUNITS_VERSION}
compiler.lib.udunits.fflags : -I/home/ivdvelde/local/udunits/include
compiler.lib.udunits.libs : -L/home/ivdvelde/local/udunits/lib -ludunits

! MPI library: automatically included when using 'mpxlf90':
compiler.lib.mpi.fflags :
compiler.lib.mpi.libs :

! GRIB library: the EMOSLIB variable should be the real(8) version:
!   EMOSLIB = -L/usr/local/lib/metaps/lib/000370 -lemos.R64.D64.I32
compiler.lib.grib.fflags :
compiler.lib.grib.libs : -lemos

!
! Lapack library:
!   -lessl    -lblacs                  # serial
!   -lesslsmp -lblacssmp               # parallel with OpenMP
!   -lessl    -lpessl    -lblacs       # parallel with MPI
!   -lesslsmp -lpesslsmp -lblacssmp    # parallel with MPI and OpenMP
!
#if "${par.openmp}" in ["T","True"] :
compiler.lib.lapack.libs : -L$(MKLROOT)/lib/intel64 -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -lpthread -lm
#else
compiler.lib.lapack.libs : -L$(MKLROOT)/lib/intel64 -lmkl_intel_lp64 -lmkl_sequential -lmkl_core -lpthread -lm
#endif
!#if "${par.mpi}" in ["T","True"] :
!my.pessl : -lpessl${my.essl.ext}
!#else
!my.pessl :
!#endif
compiler.lib.lapack.fflags : -I$(MKLROOT)/include
!compiler.lib.lapack.libs : -lessl${my.essl.ext} ${my.pessl} -lblacs${my.essl.ext}

! TotalView memory debugging:
!TV_VERSION : 8.9.0-1
!TV_ARCH : rs6000
!TV_HOME : ${TV_DIR}.${TV_VERSION}/${TV_ARCH}
!TV_HOME_MR : /ms_perm/JRC/jrr/opt/totalview.${TV_VERSION}/${TV_ARCH}
!compiler.lib.tv.fflags :
!compiler.lib.tv.libs : -L${TV_HOME_MR}/lib -L${TV_HOME}/lib ${TV_HOME}/lib/aix_malloctype64_5.o

!===============================================
! settings for SLURM job manager
!===============================================

#include base/${my.branch}/rc/pycasso-queue-slurm.rc

!===============================================
! maker
!===============================================

! make command;
! the setup script will insert the 'build.jobs' specified in the expert.rc
! or passed as argument to the setup script,
! e.g. with 'build.jobs : 8' this expands to 'gmake -j 8':
!
maker : gmake -j %{build.jobs}

!===============================================
! MPI runner
!===============================================

! Parallel Operating Environment (POE)
! Common arguments:
!   -procs procs          # number of processors
!   -cmdfile <file>
!   -hostfile <file>
!   -labelio {yes | no}   # label standard output lines with pe id
!
mpirun.command : srun
mpirun.args : -l
! -n ${par.ntask}

! name of command and host files (empty for not written):
!mpirun.cmdfile : ${my.basename}.cmdfile
!mpirun.hostfile : ${my.basename}.hostfile

!===============================================
! debugger
!===============================================

! debugger type: totalview | idb | kdbg
debugger : totalview

! command for debugger:
debugger.command : totalview -searchPath=${build.sourcedir}
!debugger.command : totalview

!===============================================
! model data
!===============================================

! the user scratch directory:
!my.scratch : ${TMPDIR}
my.scratch : /scratch/shared/krol

! base path to various data files:
my.data.dir : /scratch/shared/krol/DATA/

! run time meteo archive:
! o user scratch:
!my.meteo.dir : ${my.scratch}/tmm-buf/${my.meteo.class}
my.meteo.dir : ${my.scratch}/tm5_meteo_in/
! o shared scratch for JRC users:
!my.meteo.dir : ${my.scratch}/../jrd/tmm-buf/${my.meteo.class}

! permanent archives to search for meteo files:
my.meteo.search : ${my.scratch}/METEO /projects/huygens_projects/tm5meteo/mcn1/FILESET/fileset_mt5meteo/

! extra install tasks:
my.install.tasks :
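!
! Illustration only (not read by the model): with the values above,
! rc-variable substitution yields, for example:
!   my.meteo.dir : /scratch/shared/krol/tm5_meteo_in/
!   HDF_HOME     : /hpc/sw/modules/modulefiles/libraries/hdf4/intel/4.2.9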