! master.F90 (2.4 KB)
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
! Math and Computer Science Division, Argonne National Laboratory !
!-----------------------------------------------------------------------
! CVS master.F90,v 1.7 2004-04-23 05:43:11 jacob Exp
! CVS MCT_2_8_0
!BOP -------------------------------------------------------------------
!
! !ROUTINE: master -- driver for simple concurrent coupled model
!
! !DESCRIPTION: Provide a simple example of using MCT to connect to
! components executing concurrently in a single executable.
!
! !INTERFACE:
!
  15. program master
  16. !
  17. ! !USES:
  18. !
  19. implicit none
  20. include "mpif.h"
  21. !
  22. !EOP ___________________________________________________________________
  23. ! local variables
  24. character(len=*), parameter :: mastername='master.F90'
  25. integer, parameter :: ncomps = 2 ! Must know total number of
  26. ! components in coupled system
  27. integer, parameter :: AtmID = 1 ! pick an id for the atmosphere
  28. integer, parameter :: CplID = 2 ! pick an id for the coupler
  29. ! MPI variables
  30. integer :: splitcomm, rank, nprocs,compid, myID, ierr,color
  31. integer :: anprocs,cnprocs
  32. !-----------------------------------------------------------------------
  33. ! The Main program.
  34. ! We are implementing a single-executable, concurrent-execution system.
  35. !
  36. ! This small main program carves up MPI_COMM_WORLD and then starts
  37. ! each component on its own processor set.
  38. ! Initialize MPI
  39. call MPI_INIT(ierr)
  40. ! Get basic MPI information
  41. call MPI_COMM_SIZE(MPI_COMM_WORLD,nprocs,ierr)
  42. call MPI_COMM_RANK(MPI_COMM_WORLD,rank,ierr)
  43. ! Create MPI communicators for each component
  44. !
  45. ! each component will run on half the processors
  46. !
  47. ! set color
  48. if (rank .lt. nprocs/2) then
  49. color = 0
  50. else
  51. color = 1
  52. endif
  53. ! Split MPI_COMM_WORLD into communicators for each component.
  54. call MPI_COMM_SPLIT(MPI_COMM_WORLD,color,0,splitcomm,ierr)
  55. ! Start the components
  56. select case (color)
  57. case(0)
  58. call model(splitcomm,ncomps,AtmID)
  59. case(1)
  60. call coupler(splitcomm,ncomps,CplID)
  61. case default
  62. print *, "color error, color = ", color
  63. end select
  64. ! Components are done
  65. call MPI_FINALIZE(ierr)
  66. end program master