!===============================================
! compiler information
!===============================================

!#if "${par.prof}" in ["T","True"] :
!COMPIL : intel_c12.0.3_bullxmpi
!#else
COMPIL : intel_c12.0.4_impi4.0.2.003
!#endif

! compiler specific settings
#include rc/compiler-ifort-12.0.4.rc

my.default.fflags : -i4 -r8 -O2 -ip -xSSE4.2

! switch to mpif90 if using bullx
mpi.compiler.fc        : mpiifort
mpi.compiler.fc.openmp : mpiifort

!===============================================
! libraries
!
! *.lib.*.fflags : include flags used when compiling Fortran code
! *.lib.*.libs   : library flags used when linking the executable
!===============================================

! root location of installed user libraries
APPS_HOME : /nfs_ltc/tm5_test/users/sager/BACKUP/LIBS
SUBDIR    : COMPILATION_${COMPIL}

! Z library (used for compression in HDF)
compiler.lib.z.fflags :
compiler.lib.z.libs   : -lz

! JPEG library (used for compression in HDF)
JPEG_VERSION : 6b
JPEG_LIB_DIR : ${APPS_HOME}/JPEG-6B/${SUBDIR}/jpeg-6b
compiler.lib.jpeg.fflags : -I${JPEG_LIB_DIR}/include
compiler.lib.jpeg.libs   : -L${JPEG_LIB_DIR}/lib -ljpeg

! SZ library (used for compression in HDF)
SZIP_VERSION : SZIP.2.1/${SUBDIR}
SZIP_HOME    : ${APPS_HOME}/${SZIP_VERSION}/szip-2.1
compiler.lib.sz.fflags : -I${SZIP_HOME}/include
compiler.lib.sz.libs   : -L${SZIP_HOME}/lib -lsz

! HDF4 library (without netcdf interface)
HDF_VERSION : HDF.4.2.9
HDF_HOME    : ${APPS_HOME}/${HDF_VERSION}/hdf4.2.9
compiler.lib.hdf4.fflags : -I${HDF_HOME}/include
compiler.lib.hdf4.libs   : -L${HDF_HOME}/lib -lmfhdf -ldf

! HDF5 library
HDF5_VERSION : hdf5-1.8.7
HDF5_HOME    : ${APPS_HOME}/${HDF5_VERSION}/hdf5-1.8.7
compiler.lib.hdf5.fflags : -I${HDF5_HOME}/include
compiler.lib.hdf5.libs   : -L${HDF5_HOME}/lib -lhdf5_hl -lhdf5

! HDF5 library with parallel features enabled
HDF5_PAR_VERSION : HDF5.1.8.11/COMPILATION_intel_c12.0.3_impi4.0.2.003
HDF5_PAR_HOME    : ${APPS_HOME}/${HDF5_PAR_VERSION}/hdf5-1.8.11
compiler.lib.hdf5_par.fflags : -I${HDF5_PAR_HOME}/include
compiler.lib.hdf5_par.libs   : -L${HDF5_PAR_HOME}/lib -lhdf5_hl -lhdf5

! NetCDF library
NETCDF_VERSION : NETCDF.3.6.1/${SUBDIR}
NETCDF_HOME    : ${APPS_HOME}/${NETCDF_VERSION}/netcdf-3.6.1
compiler.lib.netcdf.fflags : -I${NETCDF_HOME}/include
compiler.lib.netcdf.libs   : -L${NETCDF_HOME}/lib -lnetcdf

! NetCDF4 library
NETCDF4_VERSION : NETCDF.4.3.0/netcdf-4.3.0-C
NETCDF4_HOME    : ${APPS_HOME}/${NETCDF4_VERSION}
compiler.lib.netcdf4.fflags : -I${NETCDF4_HOME}/include
compiler.lib.netcdf4.libs   : -L${NETCDF4_HOME}/lib -lnetcdff -lnetcdf

! NetCDF4 library with parallel IO enabled
NETCDF4_PAR_VERSION : NETCDF.4.3.0/netcdf-4.3.0-C
NETCDF4_PAR_HOME    : ${APPS_HOME}/${NETCDF4_PAR_VERSION}
compiler.lib.netcdf4_par.fflags : -I${NETCDF4_PAR_HOME}/include
compiler.lib.netcdf4_par.libs   : -L${NETCDF4_PAR_HOME}/lib -lnetcdff -lnetcdf

! Parallel-NetCDF library
PNETCDF_VERSION : PNETCDF.1.2.0/${SUBDIR}
PNETCDF_HOME    : ${APPS_HOME}/${PNETCDF_VERSION}/parallel-netcdf-1.2.0
compiler.lib.pnetcdf.fflags : -I${PNETCDF_HOME}/include
compiler.lib.pnetcdf.libs   : -L${PNETCDF_HOME}/lib -lpnetcdf

! MPI library
compiler.lib.mpi.fflags :
compiler.lib.mpi.libs   :

! GRIB library: the EMOSLIB variable should be the real(8) version:
!   EMOSLIB = -L/usr/local/lib/metaps/lib/000370 -lemos.R64.D64.I32
compiler.lib.grib.fflags :
compiler.lib.grib.libs   : ${APPS_HOME}/EMOS

! UDUNITS v1.x library (not available)
UDUNITS_VERSION : udunits/1.12.11
UDUNITS_HOME    : /nfs_ltc/tm5_test/users/sager/BACKUP/LIBS/UDUNITS-1.12.11/udunits-1.12.11-ifort
compiler.lib.udunits.fflags : -I${UDUNITS_HOME}/include
compiler.lib.udunits.libs   : -L${UDUNITS_HOME}/lib -ludunits

! UDUNITS v2.x library
compiler.lib.udunits2.fflags :
compiler.lib.udunits2.libs   :

! Lapack library:
!   -lessl    -lblacs                # serial
!   -lesslsmp -lblacssmp             # parallel with OpenMP
!   -lessl    -lpessl    -lblacs     # parallel with MPI
!   -lesslsmp -lpesslsmp -lblacssmp  # parallel with MPI and OpenMP
!
#if "${par.openmp}" in ["T","True"] :
my.essl.ext : smp
#else
my.essl.ext :
#endif
#if "${par.mpi}" in ["T","True"] :
my.pessl : -lpessl${my.essl.ext}
#else
my.pessl :
#endif
!
compiler.lib.lapack.fflags :
compiler.lib.lapack.libs   : -lessl${my.essl.ext} ${my.pessl} -lblacs${my.essl.ext}
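
! Example of how the compiler.lib.* keys above are consumed. A sketch
! only, assuming the usual TM5 convention that the expert rcfile selects
! the libraries to link through a list key; the key name 'my.tm5.libs'
! and the values shown are assumptions, not settings read from this file:
!
!   my.tm5.libs : netcdf4 hdf5 sz jpeg z
!
! For each name in that list, the build scripts add
! compiler.lib.<name>.fflags to the Fortran compile flags and append
! compiler.lib.<name>.libs to the link line, in the listed order.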
!===============================================
! settings for SLURM job manager
!===============================================

#include rc/queue-slurm-knmi.rc

!===============================================
! maker
!===============================================

! the setup script will insert the 'build.jobs' specified in the expert.rc,
! or passed as an argument to the setup script, and will add the Makefile
! and target to the command:
maker : gmake -j %{build.jobs}

!===============================================
! MPI runner
!===============================================

! Use the now-recommended launch method for Intel MPI;
! requires: export I_MPI_PMI_LIBRARY=/path/to/slurm/pmi/library/libpmi.so --> /usr/lib64/libpmi.so
mpirun.command : srun
mpirun.args    : -n ${par.ntask}

! name of command and host files (leave empty if not needed)
mpirun.cmdfile  :
mpirun.hostfile :

! BEFORE:
!mpirun.command : mpiexec.hydra
!!mpirun.command : mpirun     ! if using bullxmpi
!mpirun.args : -bootstrap slurm -np ${par.ntask} -machinefile ${mpirun.hostfile}
!mpirun.hostfile : mpd.host

!===============================================
! debugger
!===============================================

! debugger type: totalview | idb | kdbg
debugger :

! command for debugger:
debugger.command :

!===============================================
! model data
!===============================================

! the user scratch directory:
!my.scratch : /nfs_stc/tm5_oper/tm5/TEMP/
my.scratch : /nfs_ltc/tm5_test/users/sager/SCRATCH

! base path to various data files:
!my.data.dir : /nfs_ltc/tm5_test/users/sager/BACKUP/TM5_DATA/data
my.data.dir : /nfs_ltc/tm5_test/users/sager/DATA/bench4_data/TM56_input

! permanent archives to search for meteo files
my.meteo.search :

! extra install tasks
my.install.tasks :
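
! Example of the MPI launch that follows from the 'MPI runner' settings
! above. A sketch with assumed values: 24 stands for ${par.ntask}, and
! 'tm5.x' / 'tm5_run.rc' are placeholder names for the built executable
! and its runtime rcfile:
!
!   export I_MPI_PMI_LIBRARY=/usr/lib64/libpmi.so
!   srun -n 24 tm5.x tm5_run.rc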