zenobe.cfg.tmpl

# Platform dependent configuration functions for the 'zenobe' machine
# (zenobe.hpc.cenaero.be)

function configure()
{
    # This function should configure all settings/modules needed to
    # later prepare the EC-Earth run directory and set variables used
    # in the run script

    # Configure paths for building/running EC-Earth
    ecearth_src_dir=[[[PLT:ACTIVE:ECEARTH_SRC_DIR]]]
    run_dir=[[[PLT:ACTIVE:RUN_DIR]]]
    ini_data_dir=[[[PLT:ACTIVE:INI_DATA_DIR]]]

    # File for standard output.
    # NOTE: This will be modified for restart jobs!
    stdout_file=${PBS_O_WORKDIR-$PWD}/${PBS_JOBNAME-"local"}_${PBS_JOBID-"id"}.log

    # Resubmit this job for automatic restarts? [true/false]
    # Also, add options for the resubmit command here.
    resubmit_job=[[[PLT:ACTIVE:RESUBMIT_JOB]]]
    resubmit_opt="[[[PLT:ACTIVE:RESUBMIT_OPT]]]"

    # Configure GRIBEX paths
    export LOCAL_DEFINITION_TEMPLATES=[[[PLT:ACTIVE:GRIBEX_DEFINITION_PATH]]]/gribtemplates
    export ECMWF_LOCAL_TABLE_PATH=[[[PLT:ACTIVE:GRIBEX_DEFINITION_PATH]]]/gribtables

    # Configure grib api paths
    export GRIB_DEFINITION_PATH=[[[PLT:ACTIVE:GRIBAPI_BASE_DIR]]]/[[[PLT:ACTIVE:GRIBAPI_DEFINITION_SUBDIR]]]
    export GRIB_SAMPLES_PATH=[[[PLT:ACTIVE:GRIBAPI_BASE_DIR]]]/[[[PLT:ACTIVE:GRIBAPI_SAMPLES_SUBDIR]]]
    export GRIB_BIN_PATH=[[[PLT:ACTIVE:GRIBAPI_BASE_DIR]]]/[[[PLT:ACTIVE:GRIBAPI_BIN_SUBDIR]]]

    # Configure number of processors per node
    proc_per_node=[[[PLT:ACTIVE:PROC_PER_NODE]]]

    # Configure and load modules
    pre_load_modules_cmd="[[[PLT:ACTIVE:PRE_LOAD_MODULES_CMD]]]"
    module_list="[[[PLT:ACTIVE:MODULE_LIST]]]"
    if [ -n "${pre_load_modules_cmd}" ]
    then
        ${pre_load_modules_cmd}
    fi
    module load ${module_list}

    # Add directories to the shared library search path
    export LD_LIBRARY_PATH=${LD_LIBRARY_PATH:+${LD_LIBRARY_PATH}:}"[[[PLT:ACTIVE:ADD_TO_LD_LIBRARY_PATH]]]"
}
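# Illustrative sketch only (the values below are made-up assumptions, not part
# of the template): after the [[[PLT:ACTIVE:...]]] placeholders have been
# substituted for the zenobe platform, configure() contains plain shell
# assignments and exports, for example
#
#   proc_per_node=24
#   run_dir=/scratch/$USER/ecearth/run
#   export GRIB_DEFINITION_PATH=<gribapi_base_dir>/<definitions_subdir>
#
# so the run script sees ordinary variables, with all platform-specific
# choices resolved at template-expansion time.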
function launch()
{
    # Print the node/proc distribution
    info "======================="
    info "Node/proc distribution:"
    info "-----------------------"
    info "IFS: ${ifs_numproc}"
    info "NEMO: ${nem_numproc}"
    info "XIOS: ${xio_numproc}"
    info "======================="

    # Build an MPMD mpirun command from the argument list, which consists of
    # "<nranks> <executable> [args]" groups separated by "--"
    cmd="mpirun"
    while (( "$#" ))
    do
        # Get number of MPI ranks and executable name
        nranks=$1
        executable=./$(basename "$2")
        shift
        shift
        cmd+=" -n $nranks $executable"

        # Add any arguments to executable
        while (( "$#" )) && [ "$1" != "--" ]
        do
            cmd+=" $1"
            shift
        done
        shift || true

        # Add colon if more executables follow
        (( "$#" )) && cmd+=" :"
    done

    #export OMP_NUM_THREADS=1

    pwd
    echo $cmd
    #exit
    $cmd
}
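# Example (illustrative only; executable names, rank counts and arguments are
# assumptions, the real values come from the EC-Earth run script): a call like
#
#   launch 1 xios_server.exe -- 128 nemo.exe -- 256 ifs_master -v ecmwf -e ECE3
#
# is turned by the loop above into a single MPMD command of the form
#
#   mpirun -n 1 ./xios_server.exe : -n 128 ./nemo.exe : \
#          -n 256 ./ifs_master -v ecmwf -e ECE3
#
# i.e. each "--"-separated group becomes one "-n <nranks> <executable> [args]"
# segment, joined with colons.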
function finalise()
{
    # This function should execute any post-run functionality, e.g.
    # platform dependent cleaning or a resubmit

    if ${resubmit_job} && [ $(date -d "${leg_end_date}" +%s) -lt $(date -d "${run_end_date}" +%s) ]
    then
        info "Resubmitting job for leg $((leg_number+1))"

        # Need to go to start_dir to find the run script
        cd ${PBS_O_WORKDIR} # same as $start_dir

        # Edit the run script in place so that options meant only for the
        # first leg are switched off before resubmission
        cp ./${PBS_JOBNAME} ./${PBS_JOBNAME}.$$
        sed "s:force_run_from_scratch=true:force_run_from_scratch=false:" \
            <./${PBS_JOBNAME}.$$ \
            >./${PBS_JOBNAME}
        cp -f ./${PBS_JOBNAME} ./${PBS_JOBNAME}.$$
        sed "s:special_restart=true:special_restart=false:" \
            <./${PBS_JOBNAME}.$$ \
            >./${PBS_JOBNAME}
        rm -f ./${PBS_JOBNAME}.$$

        # Submit command
        set -x
        qsub ./${PBS_JOBNAME}
        set +x
    else
        info "Not resubmitting."
    fi
}
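# Illustrative example (the dates are assumptions, not defaults): with
# leg_end_date="2001-01-01" and run_end_date="2010-01-01", the test in
# finalise() compares the two dates as epoch seconds,
#
#   [ $(date -d "2001-01-01" +%s) -lt $(date -d "2010-01-01" +%s) ]   # true
#
# so, provided resubmit_job is true, the job rewrites its own run script
# (force_run_from_scratch and special_restart set to false) and resubmits it
# with qsub for the next leg; once leg_end_date reaches run_end_date the
# comparison fails and the chain stops.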