# EasyBuild easyconfig for Intel MPI (impi) 2021.7.0 built on top of the
# intel-compilers 2022.1.0 toolchain. Installed from Intel's standalone
# oneAPI offline installer.
name = 'impi'
version = '2021.7.0'

homepage = 'https://software.intel.com/content/www/us/en/develop/tools/mpi-library.html'
description = "Intel MPI Library, compatible with MPICH ABI"

toolchain = {'name': 'intel-compilers', 'version': '2022.1.0'}

# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html
source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/18926/']
sources = ['l_mpi_oneapi_p_%(version)s.8711_offline.sh']
checksums = ['4eb1e1487b67b98857bc9b7b37bcac4998e0aa6d1b892b2c87b003bf84fb38e9']

dependencies = [('UCX', '1.12.1')]

# set up all the mpi commands to default to intel compilers
# set_mpi_wrappers_all = True

modextravars = {
    # to enable SLURM integration with srun (site-specific)
    'I_MPI_PMI_LIBRARY': 'libpmi2.so',
    'I_MPI_FABRICS': 'shm:ofi',
    # set this if mpirun gives you a floating point exception (SIGFPE), see
    # https://software.intel.com/en-us/forums/intel-clusters-and-hpc-technology/topic/852307
    # 'I_MPI_HYDRA_TOPOLIB': 'ipl',
}

# may be needed if you enable I_MPI_PMI_LIBRARY above.
# NOTE(review): the original spelled this [('slurm-libpmi')] — without a
# trailing comma that is not a tuple, just a parenthesized string, so the
# plain-string form below is value-identical and less misleading.
osdependencies = ['slurm-libpmi']

moduleclass = 'mpi'