impi-2021.7.0-intel-compilers-2022.1.0.eb

name = 'impi'
version = '2021.7.0'

homepage = 'https://software.intel.com/content/www/us/en/develop/tools/mpi-library.html'
description = "Intel MPI Library, compatible with MPICH ABI"

toolchain = {'name': 'intel-compilers', 'version': '2022.1.0'}

# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html
source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/18926/']
sources = ['l_mpi_oneapi_p_%(version)s.8711_offline.sh']
checksums = ['4eb1e1487b67b98857bc9b7b37bcac4998e0aa6d1b892b2c87b003bf84fb38e9']
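
# UCX backs the OFI 'mlx' provider that Intel MPI typically selects on InfiniBand fabrics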
dependencies = [('UCX', '1.12.1')]

# set all the MPI compiler wrappers (mpicc & co.) to use the Intel compilers by default
# set_mpi_wrappers_all = True

modextravars = {
    # to enable SLURM integration with srun (site-specific)
    'I_MPI_PMI_LIBRARY': 'libpmi2.so',
    'I_MPI_FABRICS': 'shm:ofi',
    # set this if mpirun gives you a floating point exception (SIGFPE), see
    # https://software.intel.com/en-us/forums/intel-clusters-and-hpc-technology/topic/852307
    # 'I_MPI_HYDRA_TOPOLIB': 'ipl',
}

# may be needed if you enable I_MPI_PMI_LIBRARY above
osdependencies = ['slurm-libpmi']

moduleclass = 'mpi'
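
Once this easyconfig has been installed (e.g. with eb impi-2021.7.0-intel-compilers-2022.1.0.eb --robot) and the resulting module is loaded, a minimal MPI program is a quick way to confirm that the compiler wrappers and the SLURM integration above actually work. The sketch below is a generic MPI hello world, not part of the easyconfig itself; the srun invocation in the comments assumes the I_MPI_PMI_LIBRARY setting from modextravars, and the exact module name depends on your site's naming scheme.

/* hello_mpi.c - minimal sanity check for the Intel MPI installation.
 * Build (after loading the impi module):  mpiicc hello_mpi.c -o hello_mpi
 * Run under SLURM (assumes I_MPI_PMI_LIBRARY is set as above):
 *   srun -n 4 ./hello_mpi
 */
#include <mpi.h>
#include <stdio.h>

int main(int argc, char *argv[])
{
    int rank, size, name_len;
    char name[MPI_MAX_PROCESSOR_NAME];

    MPI_Init(&argc, &argv);
    MPI_Comm_rank(MPI_COMM_WORLD, &rank);
    MPI_Comm_size(MPI_COMM_WORLD, &size);
    MPI_Get_processor_name(name, &name_len);

    /* each rank reports where it landed, so you can see the process placement */
    printf("Hello from rank %d of %d on %s\n", rank, size, name);

    MPI_Finalize();
    return 0;
}

If srun aborts with a PMI-related error, check that libpmi2.so is actually available on the compute nodes (the slurm-libpmi OS dependency above exists for exactly this reason).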