!
! machine/compiler specific settings
!

! template settings for GNU Fortran compiler:
#include base/${my.branch}/rc/pycasso-compiler-gfortran-4.1.rc

! compilers with MPI enabled:
! o same compilers, flags defined in options:
!mpi.compiler.fc            :  ${compiler.fc}
!mpi.compiler.fc.openmp     :  ${compiler.fc.openmp}
! o use wrapper:
mpi.compiler.fc             :  openmpif90
mpi.compiler.fc.openmp      :  openmpif90

! template settings for Intel Fortran compiler:
!#include base/${my.branch}/rc/pycasso-compiler-ifort-11.1.rc

! compilers with MPI enabled:
! o same compilers, flags defined in options:
!mpi.compiler.fc            :  ${compiler.fc}
!mpi.compiler.fc.openmp     :  ${compiler.fc.openmp}
!mpi.compiler.fc            :  ifort
!mpi.compiler.fc.openmp     :  ifort

! template settings for Intel Fortran compiler:
!include base/branches/pycasso/rc/compiler.ifort-11.1.rc

! template settings for IBM xlf compiler:
!include base/branches/pycasso/rc/compiler.xlf-12.1.rc


!
! libraries
!

my.macports                  :  /opt/local

! Z library (used for compression in HDF4):
compiler.lib.z.fflags        :
compiler.lib.z.libs          :  -lz

! JPEG library (used for compression in HDF4):
compiler.lib.jpeg.fflags     :
compiler.lib.jpeg.libs       :  -L${my.macports}/lib -ljpeg

! SZ library (used for compression in HDF4):
SZIP_HOME                    :  ${my.macports}
compiler.lib.sz.fflags       :  -I${SZIP_HOME}/include
compiler.lib.sz.libs         :  -L${SZIP_HOME}/lib -lsz -Wl,-rpath -Wl,${SZIP_HOME}/lib

! HDF4 library:
HDF4_HOME                    :  ${my.macports}
compiler.lib.hdf4.fflags     :  -I${my.macports}/include
compiler.lib.hdf4.libs       :  -L${HDF4_HOME}/lib -lmfhdf -ldf -ljpeg -lz

! HDF5 library:
HDF5_HOME                    :  ${my.macports}
compiler.lib.hdf5.fflags     :  -I${HDF5_HOME}/include
compiler.lib.hdf5.libs       :  -L${HDF5_HOME}/lib -lhdf5_hl -lhdf5_fortran -lhdf5 -Wl,-rpath -Wl,${HDF5_HOME}/lib

! HDF5 library with parallel features:
HDF5_MPI_HOME                :  ${my.macports}
compiler.lib.hdf5_par.fflags :
compiler.lib.hdf5_par.libs   :  -L${HDF5_MPI_HOME}/lib -lhdf5_hl -lhdf5

! NetCDF library:
! NOTE WP: This is the "classic" NetCDF-3 library compiled with gfortran.
!NETCDF_HOME                 :  ${my.macports}
NETCDF_HOME                  :  /usr/local/
compiler.lib.netcdf.fflags   :  -I${NETCDF_HOME}/include
compiler.lib.netcdf.libs     :  -L${NETCDF_HOME}/lib -lnetcdf

! NetCDF4 library:
NETCDF4_HOME                 :  ${my.macports}
compiler.lib.netcdf4.fflags  :  -I${NETCDF4_HOME}/include
compiler.lib.netcdf4.libs    :  -L${NETCDF4_HOME}/lib -lnetcdf

! NetCDF4 library with parallel features:
NETCDF4_MPI_HOME             :  ${my.macports}
compiler.lib.netcdf4_par.fflags :  -I${NETCDF4_MPI_HOME}/include
compiler.lib.netcdf4_par.libs   :  -L${NETCDF4_MPI_HOME}/lib -lnetcdf

! MPI library:
MPI_HOME                     :  ${my.macports}
compiler.lib.mpi.fflags      :  -I${MPI_HOME}/include
compiler.lib.mpi.libs        :  -L${MPI_HOME}/lib -lmpi

! GRIB library:
GRIBEX_HOME                  :  /usr
compiler.lib.grib.fflags     :  -I${GRIBEX_HOME}/include
compiler.lib.grib.libs       :  -L${GRIBEX_HOME}/lib -lgribex

! LAPACK library:
LAPACK_HOME                  :  /usr
compiler.lib.lapack.fflags   :  -I${LAPACK_HOME}/include
compiler.lib.lapack.libs     :  -L${LAPACK_HOME}/lib -llapack


!
! queue
!

! template settings for LoadLeveler queue:
!include base/branches/pycasso/rc/queue.loadleveler.rc

! template settings for BSUB queue:
!include base/branches/pycasso/rc/queue.bsub.rc

! template settings for QSUB queue:
!include base/branches/pycasso/rc/queue.qsub.rc


!
! jobs
!

! shell to use in job scripts:
! o python scripts:
job.shell.python             :  /usr/bin/env python


!
! maker
!

! make command;
! the setup script will insert the 'build.jobs' specified in the expert.rc
! or passed as an argument to the setup script:
maker                        :  gmake -j %{build.jobs}
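
! Illustrative expansion (not part of the configuration): with
! 'build.jobs : 4' taken from the expert.rc or from the setup-script
! arguments, the maker line above is assumed to expand to
!
!   gmake -j 4
!
! Likewise, the compiler.lib.<name>.fflags/libs pairs from the libraries
! section above are assumed to be gathered by the build scripts into the
! compile and link commands; a hypothetical link step using the NetCDF and
! HDF5 settings from this file (executable name is made up) could look like
!
!   gfortran -o tm5.x *.o \
!     -L/usr/local/lib -lnetcdf \
!     -L/opt/local/lib -lhdf5_hl -lhdf5_fortran -lhdf5 -Wl,-rpath -Wl,/opt/local/lib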
!
! MPI runner
!

! command for running MPI parallel programs
!
! o MPI run (part of MPI distribution)
!
! command and arguments; no command file is written, so the executable
! and its arguments are added to the command line.
! For gfortran:
mpirun.command     :  openmpirun
! For ifort:
!mpirun.command     :  mprun
mpirun.args        :  -np ${par.ntask}
!
! name of command file; if empty, the executable and arguments are
! added to the command line:
mpirun.cmdfile     :
!
! name of host file:
mpirun.hostfile    :

!!
!! o POE = Parallel Operating Environment (IBM machines at ECMWF, SARA)
!!
!! command and arguments; the call to the executable is written to the command file:
!mpirun.command     :  poe
!mpirun.args        :  -labelio yes -procs ${par.ntask} -cmdfile ${mpirun.cmdfile} -hostfile ${mpirun.hostfile}
!!
!! names of command and host files (left empty if not written):
!mpirun.cmdfile     :  ${my.basename}.cmdfile
!mpirun.hostfile    :  ${my.basename}.hostfile


!
! debugger
!

! debugger type: totalview | idb | kdbg
debugger             :  kdbg

! command for debugger:
! o KDE debugger around gdb (Linux systems using gfortran):
debugger.command     :  kdbg
! o Intel debugger (for systems with Intel compiler):
!debugger.command     :  idb -gui
! o TotalView (IBM):
!debugger.command     :  totalview -searchPath=${build.sourcedir}


!
! model data
!

! the user scratch directory:
my.scratch           :  /data/TM5/scratch/${USER}
!my.scratch           :  ${TEMP}

! main archive:
!my.archdir           :  /ms_perm/TM
my.archdir           :  /data/TM5

! base path to various data files:
my.data.dir          :  ${my.archdir}/input

! local temporary meteo archive:
my.meteo.dir         :  ${my.archdir}/METEO/meteo-buffer

! permanent archives to search for meteo files:
my.meteo.search      :  /Volumes/Storage/TM5/METEO
!my.meteo.search      :  ecfs:/nlh/TM/meteo
!my.meteo.search      :  ec:ecfs[nlh]:TM/meteo
!my.meteo.search      :  ${my.archdir}/METEO

! extra install tasks:
my.install.tasks     :
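
! Illustrative expansion (not part of the configuration): with
! 'par.ntask : 4', the 'MPI runner' settings above (command plus args,
! empty command file) are assumed to be combined by the run scripts into a
! launch command of the form (executable name is made up)
!
!   openmpirun -np 4 tm5.x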