dmi-hpcdev.job.tmpl 1.7 KB

  1. #!/bin/bash
  2. #PBS -q hpc
  3. #PBS -N run_ece3
  4. #PBS -l select=10
  5. ####PBS -l select=9:ncpus=36:mpiprocs=36 # IFS+NEMO+OASIS+RUNOFF-map: 4+3+1+1 nodes
  6. #PBS -l place=scatter
  7. #PBS -l walltime=4:00:00
  8. #PBS -j oe
  9. #PBS -o out/run_ece3.out.001
  10. #################################################################################
  11. # HOW-TO: #
  12. # - copy this template into your rundir/classic dir: #
  13. # cp dmi-hpcdev.job.tmpl ../run_ece-esm.job #
  14. # - set #PBS -l select=Total number of nodes used in the config-run.xml, in #
  15. # the same order as the executables in the "launch" command of the #
  16. # script to submit. An example for 4 models of the GCM: #
  17. # 1 XIOS core, 3 x 36 NEMO cores, 1 RUNOFF core, 6 x 36 IFS cores #
  18. # - set #PBS -o (name of the log file) to the path/name you want #
  19. # - replace "./script.sh" below with the script to run. For a GCM run it is:
  20. # ./ece-esm.sh
  21. # #
  22. # - submit this script: #
  23. # qsub hpcdev.job #
  24. ###################### IMPORTANT ################################################
  25. # If you change the name of this script, you need to set the #PBS -N directive #
  26. # to the name of this script for automatic resubmission to work. Remember that #
  27. # name cannot be longer than 15 letters. #
  28. #################################################################################
  29. cd $PBS_O_WORKDIR
  30. mkdir -p out
  31. ./script.sh
  32. exit