!===============================================
! compiler information
!===============================================

queue                               : 

!
! compiler specific settings : gfortran-4.x.x
!

! fortran compiler:
compiler.fc                         : gfortran

! same compiler for openmp, since this is enabled via the flags:
compiler.fc.openmp                  : ${compiler.fc}

!! compilers for MPI programs:
mpi.compiler.fc                     : mpif90
mpi.compiler.fc.openmp              : mpif90

mpirun.command                      : mpirun
mpirun.args                         : -np ${par.ntask}

! define flags:
compiler.flags.mpi.fflags           : 
compiler.flags.mpi.ldflags          : 
compiler.flags.openmp.fflags        : -fopenmp
compiler.flags.openmp.ldflags       : -fopenmp
compiler.flags.default.fflags       : -ffree-line-length-none -fimplicit-none -cpp
compiler.flags.default.ldflags      : 
compiler.flags.real8.fflags         : -fdefault-real-8
compiler.flags.real8.ldflags        : 
compiler.flags.optim-none.fflags    : -O0
compiler.flags.optim-none.ldflags   : 
compiler.flags.optim-strict.fflags  : 
compiler.flags.optim-strict.ldflags : 
compiler.flags.optim-fast.fflags    : -O3 -ffast-math -march=native -funroll-loops -fno-protect-parens -flto
compiler.flags.optim-fast.ldflags   : 
compiler.flags.optim-vfast.fflags   : -O5
compiler.flags.optim-vfast.ldflags  : 
compiler.flags.check-all.fflags     : -fbounds-check -ffpe-trap=zero,invalid   ! -Wall
compiler.flags.check-all.ldflags    : 
compiler.flags.debug.fflags         : -Wall -fmax-errors=0 -g -fdump-core -fbacktrace -O0
compiler.flags.debug.ldflags        : 

! my.default.fflags : 

!===============================================
! libraries
!===============================================

LIB_HOME : /home/swmaint/tm5

compiler.lib.z.fflags               : 
compiler.lib.z.libs                 : -lz

compiler.lib.jpeg.fflags            : -I${LIB_HOME}/include
compiler.lib.jpeg.libs              : -L${LIB_HOME}/lib -ljpeg

compiler.lib.sz.fflags              : 
compiler.lib.sz.libs                : 
!compiler.lib.sz.fflags             : -I${LIB_HOME}/include
!compiler.lib.sz.libs               : -L${LIB_HOME}/lib -lsz

compiler.lib.hdf4.fflags            : -I${LIB_HOME}/include
compiler.lib.hdf4.libs              : -L${LIB_HOME}/lib -lmfhdf -ldf

compiler.lib.hdf5.fflags            : -I${LIB_HOME}/include
compiler.lib.hdf5.libs              : -L${LIB_HOME}/lib -lhdf5_hl -lhdf5

compiler.lib.hdf5_par.fflags        : -I${LIB_HOME}/include
compiler.lib.hdf5_par.libs          : -L${LIB_HOME}/lib -lhdf5_hl -lhdf5

compiler.lib.netcdf.fflags          : -I${LIB_HOME}/include
compiler.lib.netcdf.libs            : -L${LIB_HOME}/lib -lnetcdf

compiler.lib.netcdf4.fflags         : -I${LIB_HOME}/include
compiler.lib.netcdf4.libs           : -L${LIB_HOME}/lib -lnetcdff -lnetcdf

compiler.lib.netcdf4_par.fflags     : -I${LIB_HOME}/include
compiler.lib.netcdf4_par.libs       : -L${LIB_HOME}/lib -lnetcdff -lnetcdf

compiler.lib.pnetcdf.fflags         : -I${LIB_HOME}/include
compiler.lib.pnetcdf.libs           : -L${LIB_HOME}/lib -lpnetcdf

compiler.lib.mpi.fflags             : 
compiler.lib.mpi.libs               : 

compiler.lib.grib.fflags            : 
compiler.lib.grib.libs              : ${LIB_HOME}/EMOS

!
! LAPACK library:
!   -lessl    -lblacs                     # serial
!   -lesslsmp -lblacssmp                  # parallel with OpenMP
!   -lessl    -lpessl    -lblacs          # parallel with MPI
!   -lesslsmp -lpesslsmp -lblacssmp       # parallel with MPI and OpenMP
!
! #if "${par.openmp}" in ["T","True"] :
! my.essl.ext : smp
! #else
! my.essl.ext : 
! #endif
! #if "${par.mpi}" in ["T","True"] :
! my.pessl : -lpessl${my.essl.ext}
! #else
! my.pessl : 
! #endif
!
compiler.lib.lapack.fflags          : 
!compiler.lib.lapack.libs           : -lessl${my.essl.ext} ${my.pessl} -lblacs${my.essl.ext}
!compiler.lib.lapack.libs           : ${LAPACKLIB}

!===============================================
! make
!===============================================

! the setup script will insert the 'build.jobs' specified in the expert.rc
! or passed as argument to the setup script:
!
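! Illustration only: if the setup script inserts for example 'build.jobs : 4',
! the expanded command below becomes:
!
!   gmake -j 4
!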
maker : gmake -j %{build.jobs}

!===============================================
! MPI runner
!===============================================

!
! Parallel Operating Environment (POE)
! Common arguments:
!   -procs procs         # number of processors
!   -cmdfile
!   -hostfile
!   -labelio {yes | no}  # label standard output lines with pe id
!
! mpirun.command : poe
! mpirun.args    : -labelio yes -procs ${par.ntask} -cmdfile ${mpirun.cmdfile} -hostfile ${mpirun.hostfile}
!
!
! name of command and host files (empty for not written):
! mpirun.cmdfile  : ${my.basename}.cmdfile
! mpirun.hostfile : ${my.basename}.hostfile
!

!===============================================
! debugger
!===============================================

! debugger type: totalview | idb | kdbg
!debugger : totalview

! command for debugger:
!debugger.command : totalview -searchPath=${build.sourcedir}
!debugger.command : totalview

!===============================================
! model data
!===============================================

! the user scratch directory:
my.scratch : ${HOME}/scratch

! base path to various data files:
my.data.dir : ${HOME}/tm5_data/TM56_input

! run time meteo archive:
! o user scratch:
my.meteo.dir : ${HOME}/tm5_data/tm5_meteo_in/
! o shared scratch for JRC users:
!my.meteo.dir : ${my.scratch}/../jrd/tmm-buf/${my.meteo.class}

! permanent archives to search for meteo files:
#if "${my.meteo.format}" == "tm5-nc"
my.meteo.search : ecfs:/nlh/TM/meteo-nc
#else
my.meteo.search : ecfs:/nlh/TM/meteo
#endif

! extra install tasks:
my.install.tasks : 
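! Illustration only: the '#if' / '#else' / '#endif' lines in the meteo section
! above are resolved when the rc file is pre-processed; with for example
! 'my.meteo.format : tm5-nc' the effective setting becomes:
!
!   my.meteo.search : ecfs:/nlh/TM/meteo-nc
!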