!
! machine/compiler specific settings
!

! Intel Fortran Compiler
#include base/${my.branch}/rc/pycasso-compiler-ifort-12.0.rc

! compilers with MPI enabled:
! o same compilers, flags defined in options:
!mpi.compiler.fc                  : ${compiler.fc}
!mpi.compiler.fc.openmp           : ${compiler.fc.openmp}
! o use wrapper:
mpi.compiler.fc                   : mpif90
mpi.compiler.fc.openmp            : mpif90


!
! libraries
!
! (the ${..._HOME} variables below are assumed to come from the environment;
!  see the note at the end of this file)

! Z library (used for compression in HDF):
compiler.lib.z.fflags             :
compiler.lib.z.libs               : -lz

! JPEG library (used for compression in HDF):
compiler.lib.jpeg.fflags          :
compiler.lib.jpeg.libs            : -ljpeg

! SZ library (used for compression in HDF):
compiler.lib.sz.fflags            :
compiler.lib.sz.libs              : -L${SZIP_HOME}/lib -lsz -Wl,-rpath -Wl,${SZIP_HOME}/lib

! HDF4 library:
compiler.lib.hdf4.fflags          : -I${HDF4_HOME}/include
compiler.lib.hdf4.libs            : -L${HDF4_HOME}/lib -lmfhdf -ldf

! HDF5 library:
compiler.lib.hdf5.fflags          :
compiler.lib.hdf5.libs            : -L${HDF5_HOME}/lib -lhdf5_hl -lhdf5 -Wl,-rpath -Wl,${HDF5_HOME}/lib

!! HDF5 library with parallel features:
!compiler.lib.hdf5_par.fflags     :
!compiler.lib.hdf5_par.libs       : -L${HDF5_MPI_HOME}/lib -lhdf5_hl -lhdf5

! NetCDF library:
compiler.lib.netcdf.fflags        : -I${NETCDF_HOME}/include
compiler.lib.netcdf.libs          : -L${NETCDF_HOME}/lib -lnetcdf

! NetCDF4 library:
compiler.lib.netcdf4.fflags       : -I${NETCDF4_HOME}/include
compiler.lib.netcdf4.libs         : -L${NETCDF4_HOME}/lib -lnetcdf

!! NetCDF4 library with parallel features:
!compiler.lib.netcdf4_par.fflags  : -I${NETCDF4_MPI_HOME}/include
!compiler.lib.netcdf4_par.libs    : -L${NETCDF4_MPI_HOME}/lib -lnetcdf

!! Parallel-NetCDF library (deprecated, but used for older TM5 versions):
!compiler.lib.pnetcdf.fflags      : -I${PNETCDF_MPI_HOME}/include
!compiler.lib.pnetcdf.libs        : -L${PNETCDF_MPI_HOME}/lib -lpnetcdf

! MPI library: no flags, the 'mpif90' wrapper is used instead
compiler.lib.mpi.fflags           :
compiler.lib.mpi.libs             :

!! GRIB library:
!compiler.lib.grib.fflags         :
!compiler.lib.grib.libs           : ${EMOS_LIB}
!
!! LAPACK library:
!compiler.lib.lapack.fflags       :
!compiler.lib.lapack.libs         : ${LAPACK_LIB}


!
! maker
!

! make command;
! the setup script will insert the 'build.jobs' value specified in the expert.rc
! or passed as argument to the setup script
! (see the expansion example at the end of this file):
!
maker                             : gmake -j %{build.jobs}


!
! MPI runner
!

! command and arguments; no command file is written, so the executable and its
! arguments are appended to the command line
! (see the expansion example at the end of this file):
mpirun.command                    : mpirun
mpirun.args                       : -np ${par.ntask}

! name of command file; if empty, the executable and arguments are added to the command line:
mpirun.cmdfile                    :

! name of host file:
mpirun.hostfile                   :


!
! debugger
!

! debugger type: totalview | idb | kdbg
debugger                          : idb

! command for debugger:
debugger.command                  : idb


!
! model data
!

! the user scratch directory:
my.scratch                        : ${SCRATCH}

! base path to various data files:
my.data.dir                       : ${SCRATCH}/TM/INPUT/

! local temporary meteo archive, shared by all users:
my.meteo.dir                      : /localHD/shared/TM/tmm-buf/${my.meteo.class}

! permanent archives to search for meteo files:
my.meteo.search                   : /store2/NOBACKUP/TM5/INPUT/meteo

! extra install tasks:
my.install.tasks                  :
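

!
! notes (illustration only; not part of the machine configuration above)
!

! The ${SZIP_HOME}, ${HDF4_HOME}, ${HDF5_HOME}, ${NETCDF_HOME}, and
! ${NETCDF4_HOME} variables referenced in the library settings are assumed to
! be environment variables provided by the machine, for example via a module
! system. The paths below are hypothetical examples, not the actual locations:
!
!   export SZIP_HOME=/opt/szip/2.1
!   export HDF4_HOME=/opt/hdf4/4.2
!   export HDF5_HOME=/opt/hdf5/1.8
!   export NETCDF_HOME=/opt/netcdf/3.6
!   export NETCDF4_HOME=/opt/netcdf4/4.1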
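
! For the 'maker' setting, the setup script substitutes the number of parallel
! build jobs for '%{build.jobs}'; with an assumed value of build.jobs=8 the
! resulting build command would be:
!
!   gmake -j 8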
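
! For the MPI runner, since 'mpirun.cmdfile' is empty the run command is
! assembled from 'mpirun.command', 'mpirun.args', and the executable with its
! arguments; with assumed values par.ntask=4 and an executable named 'tm5.x'
! this would look like:
!
!   mpirun -np 4 tm5.x [arguments]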