- !queue :
- !
- ! machine/compiler specific settings
- !
- ! The SURFSARA_* environment variables used below are set by loading the appropriate modules into the environment (an illustrative module example follows the base settings below).
- #include base/${my.branch}/rc/pycasso-queue-slurm-sara.rc
- intel.opt.base : ${SURFSARA_IMPI_ROOT}
- intel.usr.base : /Users/krol
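- !
- ! illustrative only: the exact module names and versions depend on the
- ! Cartesius software stack, but something along the lines of
- !   module load fortran/intel mpi/impi hdf4 hdf5 netcdf szip udunits
- ! makes the SURFSARA_* variables used below available.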
- ! template settings for GNU Fortran compiler:
- !#include base/${my.branch}/rc/pycasso-compiler-gfortran-4.1.rc
- ! compilers with MPI enabled:
- ! o same compilers, flags defined in options:
- !mpi.compiler.fc : ${compiler.fc}
- !mpi.compiler.fc.openmp : ${compiler.fc.openmp}
- ! o use wrapper:
- !mpi.compiler.fc : openmpif90
- !mpi.compiler.fc.openmp : openmpif90
- ! template settings for Intel fortran compiler:
- #include base/${my.branch}/rc/pycasso-compiler-ifort-12.1.rc
- ! compilers with MPI enabled:
- ! o same compilers, flags defined in options:
- !mpi.compiler.fc : ${compiler.fc}
- !mpi.compiler.fc.openmp : ${compiler.fc.openmp}
- mpi.bin : ${intel.opt.base}/bin64/
- mpi.compiler.fc : ${mpi.bin}/mpiifort
- mpi.compiler.fc.openmp : ${mpi.bin}/mpiifort
- ! template settings for Intel Fortran compiler:
- !include base/branches/pycasso/rc/compiler.ifort-12.1.rc
- ! template settings for IBM xlf compiler:
- !include base/branches/pycasso/rc/compiler.xlf-12.1.rc
- !
- ! libraries
- !
- my.macports :
- ! Z library (used for compression in HDF4)
- compiler.lib.z.fflags :
- compiler.lib.z.libs : -lz
- ! JPEG library (used for compression in HDF4)
- JPEG_HOME : ${intel.usr.base}/jpegsrc.v6b/
- compiler.lib.jpeg.fflags :
- compiler.lib.jpeg.libs : -L${JPEG_HOME}/lib -ljpeg
- ! SZ library (used for compression in HDF4)
- SZIP_HOME : ${intel.usr.base}/szip-2.1/
- compiler.lib.sz.fflags : -I${SURFSARA_SZIP_INCLUDE}
- compiler.lib.sz.libs : -L${SURFSARA_SZIP_LIB} -lsz -Wl,-rpath -Wl,${SURFSARA_SZIP_LIB}
- ! HDF4 library:
- HDF4_HOME : ${intel.usr.base}/hdf-4.2.5/
- compiler.lib.hdf4.fflags : -I${SURFSARA_HDF4_INCLUDE}
- compiler.lib.hdf4.libs : -L${SURFSARA_HDF4_LIB} -lmfhdf -ldf -ljpeg -lz
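- ! (link order matters here: -lmfhdf depends on -ldf, which in turn uses the
- !  jpeg and z compression libraries, so those are listed last)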
- ! HDF5 library:
- HDF5_HOME : ${intel.usr.base}/hdf5-1.8.5-patch1
- compiler.lib.hdf5.fflags : -I${SURFSARA_HDF5_INCLUDE}
- compiler.lib.hdf5.libs : -L${SURFSARA_HDF5_LIB} -lhdf5_hl -lhdf5_fortran -lhdf5 -Wl,-rpath -Wl,${SURFSARA_HDF5_LIB}
- ! HDF5 library with parallel features:
- HDF5_MPI_HOME : ${intel.usr.base}/mpich2-1.3.1/hdf5-1.8.5-patch1
- compiler.lib.hdf5_par.fflags : -I${SURFSARA_HDF5_INCLUDE}
- compiler.lib.hdf5_par.libs : -L${SURFSARA_HDF5_LIB} -lhdf5_hl -lhdf5_fortran -lhdf5 -Wl,-rpath -Wl,${SURFSARA_HDF5_LIB}
- ! NetCDF library:
- ! NOTE WP: This is the "classic" netcdf3 library compiled with gfortran
- !NETCDF_HOME : ${my.macports}
- NETCDF_HOME : ${intel.usr.base}/netcdf-4.1.1
- compiler.lib.netcdf.fflags : -I${SURFSARA_NETCDF_INCLUDE}
- compiler.lib.netcdf.libs : -L${SURFSARA_NETCDF_LIB} -lnetcdf -lnetcdff
- ! NetCDF4 library:
- NETCDF4_HOME : ${intel.usr.base}/netcdf4-4.1.1
- compiler.lib.netcdf4.fflags : -I${SURFSARA_NETCDF_INCLUDE}
- compiler.lib.netcdf4.libs : -L${SURFSARA_NETCDF_LIB} -lnetcdf -lnetcdff
- ! NetCDF4 library with parallel features:
- NETCDF4_MPI_HOME : ${intel.usr.base}/mpich2-1.3.1/netcdf4-4.1.1
- compiler.lib.netcdf4_par.fflags : -I${SURFSARA_NETCDF_INCLUDE}
- compiler.lib.netcdf4_par.libs : -L${SURFSARA_NETCDF_LIB} -lnetcdff -lnetcdf -lcurl -lhdf5_hl -lhdf5 -lz
- ! MPI library:
- !MPI_HOME : ${intel.usr.base}/mpich2-1.3.1/suite
- compiler.lib.mpi.fflags : -I${SURFSARA_IMPI_INCLUDE}
- compiler.lib.mpi.libs : -L${SURFSARA_IMPI_LIB} -lmpi
- ! GRIB library:
- GRIBEX_HOME : /usr
- compiler.lib.grib.fflags : -I${GRIBEX_HOME}/include
- compiler.lib.grib.libs : -L${GRIBEX_HOME}/lib -lgribex
- ! Lapack library: (note that MKL is an environment variable set through the module environment)
- !MKL : ${intel.opt.base}/Frameworks/mkl/
- !compiler.lib.lapack.fflags : -I${MKL_INC}
- !compiler.lib.lapack.libs : -L${MKL}/ -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -lguide -lpthread
- !compiler.lib.lapack.libs : -L${MKL}/ -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -lpthread
- !
- !
- ! UDUNITS version 1 library:
- UDUNITS_VERSION : udunits-1.12.11/
- UDUNITS_HOME : ${intel.usr.base}/${UDUNITS_VERSION}
- compiler.lib.udunits.fflags : -I/home/ivdvelde/local/udunits/include
- compiler.lib.udunits.libs : -L/home/ivdvelde/local/udunits/lib -ludunits
- !
- ! queue
- !
- ! template settings for LoadLeveler queue:
- !include base/branches/pycasso/rc/queue.loadleveler.rc
- ! template settings for BSUB queue:
- !include base/branches/pycasso/rc/queue.bsub.rc
- ! template settings for QSUB queue:
- !include base/branches/pycasso/rc/queue.qsub.rc
- !
- ! jobs
- !
- ! shell to use in job scripts:
- ! o python scripts:
- job.shell.python : /usr/bin/env python
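- ! (this value is typically used as the interpreter line of the generated
- !  job scripts, i.e. they start with "#! /usr/bin/env python")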
- !
- ! maker
- !
- ! make command;
- ! the setup script will insert the 'build.jobs' specified in the expert.rc
- ! or passed as argument to the setup script:
- !
- maker : make -j %{build.jobs}
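- ! for example, with build.jobs set to 4 the code is compiled with "make -j 4"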
- !
- ! MPI runner
- !
- ! command for running MPI parallel programs
- !
- ! o MPI run (part of MPI distribution)
- !
- ! command and arguments; no command file written, thus executable and arguments are added
- ! on this system MPI programs are launched through SLURM's srun, regardless of the compiler:
- mpirun.command : srun
- mpirun.args :
- !
- ! name of command file; if empty, then executable and arguments are added to the command line
- mpirun.cmdfile :
- !
- ! name of host file:
- mpirun.hostfile :
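- !
- ! since the command file is left empty, the executable and its arguments are
- ! appended to the command line, so a parallel run is launched roughly as
- !   srun tm5.x tm5-run.rc
- ! (executable and rcfile names are illustrative)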
- !!
- !! o POE = Parallel Operating Environment (IBM machines on ECMWF, SARA)
- !!
- !! command and arguments; the call to executable is written to the command file:
- !mpirun.command : poe
- !mpirun.args : -labelio yes -procs ${par.ntask} -cmdfile ${mpirun.cmdfile} -hostfile ${mpirun.hostfile}
- !!
- !! name of command and host files (empty for not written):
- !mpirun.cmdfile : ${my.basename}.cmdfile
- !mpirun.hostfile : ${my.basename}.hostfile
- !
- ! debugger
- !
- ! debugger type: totalview | idb | kdbg
- debugger : kdbg
- ! command for debugger:
- ! o KDE debugger around gdb (Linux systems using gfortran)
- debugger.command : kdbg
- ! o Intel debugger (for systems with Intel compiler)
- !debugger.command : idb -gui
- ! o TotalView (IBM)
- !debugger.command : totalview -searchPath=${build.sourcedir}
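- ! (a debug run then starts the executable under the chosen debugger,
- !  e.g. "kdbg tm5.x"; the executable name is illustrative)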
- !
- ! model data
- !
- ! the user scratch directory:
- my.scratch : /scratch/shared/${USER}
- !my.scratch : ${TEMP}
- ! main archive:
- !my.archdir : /ms_perm/TM
- my.archdir : /Volumes/Storage/TM5
- ! base path to various data files:
- my.data.dir : ${my.archdir}/input
- ! local temporary meteo archive:
- my.meteo.dir : ${my.scratch}/meteo-buffer/
- ! permanent archives to search for meteo files:
- #if "${my.meteo.format}" == "tm5-nc"
- !my.meteo.search : /data/TM5/peters/NO-TAPE-YET/TM/meteo-nc/
- my.meteo.search : /archive/ivdvelde/carbontracker/meteo-ei-nc
- #else
- !my.meteo.search : ${my.archdir}/METEO/
- my.meteo.search : /archive/ivdvelde/carbontracker/meteo-ei-nc
- #endif
- ! extra install tasks:
- my.install.tasks :
- !my.install.tasks : meteo.cartesius
- !meteo.cartesius.install.dir : ${my.meteo.dir}
- !meteo.cartesius.install.arch : ${my.meteo.search}
- !meteo.cartesius.install.files : glb600x400/ec-ei-fc012up2tr3-tropo25-glb600x400-mfuv_20000101_00p03.nc
- !meteo.cartesius.install.rsync : glb600x400/*<yyyy><mm>*