- queue :
- !
- ! machine/compiler-specific settings
- !
- ! The environment variables below are set by loading the proper modules into the environment.
- intel.opt.base : /opt/intel/
- !${INTEL_OPT_BASE}
- intel.usr.base : /Users/ivar/local/intel_13.0/
- !${INTEL_USR_BASE}
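- ! Illustrative sketch (not active): if the module environment exports the
- ! INTEL_OPT_BASE and INTEL_USR_BASE variables hinted at above, the hard-coded
- ! paths could instead refer to those variables, e.g.:
- !intel.opt.base : ${INTEL_OPT_BASE}
- !intel.usr.base : ${INTEL_USR_BASE}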
- ! template settings for GNU Fortran compiler:
- !#include base/${my.branch}/rc/pycasso-compiler-gfortran-4.1.rc
- ! compilers with MPI enabled:
- ! o same compilers, flags defined in options:
- !mpi.compiler.fc : ${compiler.fc}
- !mpi.compiler.fc.openmp : ${compiler.fc.openmp}
- ! o use wrapper:
- !mpi.compiler.fc : openmpif90
- !mpi.compiler.fc.openmp : openmpif90
- ! template settings for Intel fortran compiler:
- #include base/${my.branch}/rc/pycasso-compiler-ifort-12.1.rc
- ! compilers with MPI enabled:
- ! o same compilers, flags defined in options:
- !mpi.compiler.fc : ${compiler.fc}
- !mpi.compiler.fc.openmp : ${compiler.fc.openmp}
- mpi.bin : ${intel.usr.base}/mpich2-1.3.1/suite/bin/
- mpi.compiler.fc : ${mpi.bin}/mpif90
- mpi.compiler.fc.openmp : ${mpi.bin}/mpif90
- ! template settings for Intel Fortran compiler:
- !include base/branches/pycasso/rc/compiler.ifort-12.1.rc
- ! template settings for IBM xlf compiler:
- !include base/branches/pycasso/rc/compiler.xlf-12.1.rc
- !
- ! libraries
- !
- my.macports :
- ! Z library (used for compression in HDF4)
- compiler.lib.z.fflags :
- !compiler.lib.z.libs : -L${intel.usr.base}/zlib-1.2.7/lib -lz
- compiler.lib.z.libs : -lz
- ! JPEG library (used for compression in HDF4)
- JPEG_HOME : ${intel.usr.base}/jpegsrc.v6b/
- compiler.lib.jpeg.fflags :
- compiler.lib.jpeg.libs : -L${JPEG_HOME}/lib -ljpeg
- ! SZ library (used for compression in HDF4)
- SZIP_HOME : ${intel.usr.base}/szip-2.1/
- compiler.lib.sz.fflags : -I${SZIP_HOME}/include
- compiler.lib.sz.libs : -L${SZIP_HOME}/lib -lsz -Wl,-rpath -Wl,${SZIP_HOME}/lib
- ! HDF4 library:
- HDF4_HOME : ${intel.usr.base}/hdf-4.2.9/
- compiler.lib.hdf4.fflags : -I${HDF4_HOME}/include
- !compiler.lib.hdf4.libs : -L${HDF4_HOME}/lib -lmfhdf -ldf -ljpeg -lz -lsz -Wl,-rpath -Wl,${SZIP_HOME}/lib
- compiler.lib.hdf4.libs : -L${HDF4_HOME}/lib -lmfhdf -ldf -ljpeg -lz
- ! HDF5 library:
- HDF5_HOME : ${intel.usr.base}/hdf5-1.8.5-patch1
- compiler.lib.hdf5.fflags : -I${HDF5_HOME}/include
- compiler.lib.hdf5.libs : -L${HDF5_HOME}/lib -lhdf5_hl -lhdf5_fortran -lhdf5 -Wl,-rpath -Wl,${HDF5_HOME}/lib
- !HDF5_HOME : ${intel.usr.base}/mpich2-1.3.1/hdf5-1.8.5-patch1
- !compiler.lib.hdf5.fflags :
- !compiler.lib.hdf5.libs : -L${HDF5_MPI_HOME}/lib -lhdf5_hl -lhdf5
- ! HDF5 library with parallel features:
- HDF5_MPI_HOME : ${intel.usr.base}/mpich2-1.3.1/hdf5-1.8.5-patch1
- compiler.lib.hdf5_par.fflags :
- compiler.lib.hdf5_par.libs : -L${HDF5_MPI_HOME}/lib -lhdf5_hl -lhdf5
- ! NetCDF library:
- ! NOTE WP: This is the "classic" netcdf3 library compiled with gfortran
- !NETCDF_HOME : ${my.macports}
- NETCDF_HOME : ${intel.usr.base}/netcdf4-4.1.1
- compiler.lib.netcdf.fflags : -I${NETCDF_HOME}/include
- compiler.lib.netcdf.libs : -L${NETCDF_HOME}/lib -lnetcdf
- ! NetCDF4 library:
- NETCDF4_HOME : ${intel.usr.base}/netcdf4-4.1.1
- compiler.lib.netcdf4.fflags : -I${NETCDF4_HOME}/include
- compiler.lib.netcdf4.libs : -L${NETCDF4_HOME}/lib -lnetcdf
- ! NetCDF4 library with parallel features:
- NETCDF4_MPI_HOME : ${intel.usr.base}/mpich2-1.3.1/netcdf4-4.1.1
- compiler.lib.netcdf4_par.fflags : -I${NETCDF4_MPI_HOME}/include
- compiler.lib.netcdf4_par.libs : -L${NETCDF4_MPI_HOME}/lib -lnetcdf
- ! MPI library:
- MPI_HOME : ${intel.usr.base}/mpich2-1.3.1/suite
- compiler.lib.mpi.fflags : -I${MPI_HOME}/include
- compiler.lib.mpi.libs : -L${MPI_HOME}/lib -lmpich
- ! GRIB library:
- GRIBEX_HOME : /usr
- compiler.lib.grib.fflags : -I${GRIBEX_HOME}/include
- compiler.lib.grib.libs : -L${GRIBEX_HOME}/lib -lgribex
- ! Lapack library: (note that MKL is an environment variable set through the module environment)
- !MKL : ${intel.opt.base}/Frameworks/mkl/
- MKL : /opt/
- MKL_INC : /opt/
- compiler.lib.lapack.fflags : -I${MKL_INC}
- !compiler.lib.lapack.libs : -L${MKL}/ -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -lguide -lpthread
- compiler.lib.lapack.libs : -L${MKL}/ -lmkl_intel_lp64 -lmkl_intel_thread -lmkl_core -lpthread
- !
- !
- ! UDUNITS version 1 library:
- UDUNITS_VERSION : udunits-1.12.11/
- UDUNITS_HOME : ${intel.usr.base}/${UDUNITS_VERSION}
- compiler.lib.udunits.fflags : -I${UDUNITS_HOME}/include
- compiler.lib.udunits.libs : -L${UDUNITS_HOME}/lib -ludunits
- !
- ! queue
- !
- ! template settings for LoadLeveler queue:
- !include base/branches/pycasso/rc/queue.loadleveler.rc
- ! template settings for BSUB queue:
- !include base/branches/pycasso/rc/queue.bsub.rc
- ! template settings for QSUB queue:
- !include base/branches/pycasso/rc/queue.qsub.rc
- !
- ! jobs
- !
- ! shell to use in job scripts:
- ! o python scripts:
- job.shell.python : /usr/bin/env python
- !
- ! maker
- !
- ! make command;
- ! the setup script will insert the 'build.jobs' value specified in the expert.rc
- ! or passed as an argument to the setup script:
- !
- maker : make -j %{build.jobs}
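- ! Example (assumed value): with 'build.jobs : 4' taken from the expert.rc or
- ! the setup script arguments, the maker line above expands to 'make -j 4'.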
- !
- ! MPI runner
- !
- ! command for running MPI parallel programs
- !
- ! o MPI run (part of MPI distribution)
- !
- ! command and arguments; no command file is written, so the executable and its arguments are appended to the command line
- ! For gfortran
- mpirun.command : ${intel.usr.base}/mpich2-1.3.1/suite/bin/mpirun
- ! For ifort
- !mpirun.command : mprun
- mpirun.args : -np ${par.ntask}
- !
- ! name of command file; if empty, then executable and arguments are added to the command line
- mpirun.cmdfile :
- !
- ! name of host file:
- mpirun.hostfile :
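- ! Example (hypothetical values): with 'par.ntask : 4' and the command and host
- ! files left empty, the run command assembled from the settings above would
- ! resemble:
- !   mpirun -np 4 <executable> <arguments>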
- !!
- !! o POE = Parallel Operating Environment (IBM machines on ECMWF, SARA)
- !!
- !! command and arguments; the call to executable is written to the command file:
- !mpirun.command : poe
- !mpirun.args : -labelio yes -procs ${par.ntask} -cmdfile ${mpirun.cmdfile} -hostfile ${mpirun.hostfile}
- !!
- !! name of command and host files (empty for not written):
- !mpirun.cmdfile : ${my.basename}.cmdfile
- !mpirun.hostfile : ${my.basename}.hostfile
- !
- ! debugger
- !
- ! debugger type: totalview | idb | kdbg
- debugger : kdbg
- ! command for debugger:
- ! o KDE debugger around gdb (Linux systems using gfortran)
- debugger.command : kdbg
- ! o Intel debugger (for systems with Intel compiler)
- !debugger.command : idb -gui
- ! o TotalView (IBM)
- !debugger.command : totalview -searchPath=${build.sourcedir}
- !
- ! model data
- !
- ! the user scratch directory:
- my.scratch : /Volumes/DataRaid/TM5/scratch/${USER}
- !my.scratch : ${TEMP}
- ! main archive:
- !my.archdir : /ms_perm/TM
- my.archdir : /Volumes/Storage/TM5
- ! base path to various data files:
- my.data.dir : ${my.archdir}/input
- ! local temporary meteo archive:
- my.meteo.dir : ${my.meteo.search}
- ! permanent archives to search for meteo files:
- #if "${my.meteo.format}" == "tm5-nc"
- !my.meteo.search : /data/TM5/peters/NO-TAPE-YET/TM/meteo-nc/
- my.meteo.search : /Volumes/DataRaid/TM5/METEO/meteo-ei-nc/
- #else
- my.meteo.search : ${my.archdir}/METEO/
- #endif
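- ! Example: with 'my.meteo.format : tm5-nc', the #if block above selects the
- ! local netcdf meteo archive; for any other format, ${my.archdir}/METEO/ is
- ! searched instead.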
- ! extra install tasks:
- my.install.tasks :