///////////////////////////////////////////////////////////////////////////////////////
/// \file parallel.cpp
/// \brief Functionality for parallel computation
///
/// \author Joe Siltberg
/// $Date: 2013-10-10 10:20:33 +0200 (Thu, 10 Oct 2013) $
///
///////////////////////////////////////////////////////////////////////////////////////

// mpi.h needs to be the first include (or specifically, before stdio.h),
// due to a bug in the MPI-2 standard (both stdio.h and the C++ MPI API
// define SEEK_SET, SEEK_CUR and SEEK_END(!))
#ifdef HAVE_MPI
#include <mpi.h>
#endif

#include "config.h"
#include "parallel.h"
#include "shell.h"
#include <memory>
#include <string>

namespace GuessParallel {

bool parallel = false;

#ifdef HAVE_MPI

/// A class whose only purpose is to terminate the MPI library when deleted
class FinalizeCaller {
public:
	~FinalizeCaller() {
		MPI_Finalize();
	}
};

/// The auto pointer will delete the object some time after main() is finished
std::auto_ptr<FinalizeCaller> destructor;

#endif

void init(int& argc, char**& argv) {
#ifdef HAVE_MPI

	// Only initialize MPI if the command line options explicitly
	// ask for a parallel run (with the -parallel option).
	// In most cases it wouldn't hurt to initialize MPI even if
	// it's not used, but apparently some implementations start
	// by going to the user's home directory if the program isn't
	// launched by an mpiexec/mpirun launcher.

	// Unfortunately, since MPI initialization must be done before
	// we parse our options with CommandLineArguments, we need to
	// look for the -parallel option here by ourselves.

	// The file-global variable 'parallel' is initialized to false above.
	for (int i = 0; i < argc; ++i) {
		if (std::string(argv[i]) == "-parallel") {
			parallel = true;
			break;
		}
	}

	// ecev3 - Spinup?
	bool spinup = false;
	for (int i = 0; i < argc; ++i) {
		if (std::string(argv[i]) == "-islpjgspinup") {
			spinup = true;
			break;
		}
	}

	if (parallel && (spinup || ECEARTHWITHCRUNCEP)) {
		// We won't use OASIS-MCT in this case, so we must initialise MPI.
		dprintf("Parallel spinup of LPJ_GUESS - calling MPI_Init\n");
		int ierr = MPI_Init(&argc, &argv);
		dprintf("LPJ_GUESS returned from calling MPI_Init with code %i\n", ierr);

		// Make sure the MPI_Finalize function is called at program termination
		destructor = std::auto_ptr<FinalizeCaller>(new FinalizeCaller());
	}
#endif
}

int get_rank() {
#ifdef HAVE_MPI
	int rank;
	int ierr = MPI_Comm_rank(MPI_COMM_WORLD, &rank);
	return rank;
#else
	return 0;
#endif
}

int get_rank_specific(int lc) {
#ifdef HAVE_MPI
	int rank;
#ifdef OPEN_MPI
	// Open MPI uses opaque pointers for MPI_Comm, so the Fortran
	// communicator handle passed in must be converted with MPI_Comm_f2c.
	MPI_Comm comm = MPI_Comm_f2c((MPI_Fint) lc);
#else
	// MPICH-based implementations use integer handles, so the Fortran
	// handle can be used directly.
	int comm = lc;
#endif
	//dprintf("LPJ_GUESS - get_rank_specific() with communicator %i\n",lc);
	int ierr = MPI_Comm_rank(comm, &rank);
	//dprintf("LPJ_GUESS - get_rank_specific() returned error: %i and rank %i\n",ierr,rank);
	return rank;
#else
	return 0;
#endif
}

int get_global_rank() {
#ifdef HAVE_MPI
	int rank;
	MPI_Comm_rank(MPI_COMM_WORLD, &rank);
	return rank;
#else
	return 0;
#endif
}

int get_num_processes() {
#ifdef HAVE_MPI
	if (parallel) {
		int size;
		MPI_Comm_size(MPI_COMM_WORLD, &size);
		return size;
	}
	else {
		return 1;
	}
#else
	return 1;
#endif
}

int get_num_local_processes(int lc) {
#ifdef HAVE_MPI
	int size;
#ifdef OPEN_MPI
	MPI_Comm comm = MPI_Comm_f2c((MPI_Fint) lc);
#else
	int comm = lc;
#endif
	MPI_Comm_size(comm, &size);
	return size;
#else
	return 1;
#endif
}

int get_num_global_processes() {
#ifdef HAVE_MPI
	int size;
	MPI_Comm_size(MPI_COMM_WORLD, &size);
	return size;
#else
	return 1;
#endif
}

}
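
// ----------------------------------------------------------------------------
// Illustrative usage sketch (kept as a comment; not part of the build).
// It shows the expected call order: GuessParallel::init() must run before any
// of the rank/size queries, since it decides whether MPI_Init is called at all.
// The main() driver and the run_my_gridcells() helper below are hypothetical
// and only illustrate how calling code might partition work across processes.
//
//   int main(int argc, char** argv) {
//       GuessParallel::init(argc, argv);                  // calls MPI_Init only
//                                                         // for a -parallel run
//       int rank = GuessParallel::get_rank();             // 0 in a serial run
//       int nprocs = GuessParallel::get_num_processes();  // 1 in a serial run
//
//       // e.g. give each process an interleaved subset of the grid cells
//       run_my_gridcells(rank, nprocs);                   // hypothetical helper
//
//       return 0;  // the static FinalizeCaller then calls MPI_Finalize
//   }
// ----------------------------------------------------------------------------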