Browse Source

New 2021.02 with oneAPI

Super Pierre-Yves Barriat 3 years ago
parent
commit
e6283ca897
41 changed files with 5594 additions and 46 deletions
  1. 33 0
      foss-2018b/Blitz++-0.10-GCCcore-7.3.0.eb
  2. 24 0
      foss-2018b/netCDF-C++4-4.2-foss-2018b.eb
  3. 33 0
      foss-2019b/Blitz++-0.10-GCCcore-8.3.0.eb
  4. 31 0
      foss-2020b/Blitz++-1.0.2-GCCcore-10.2.0.eb
  5. 24 0
      foss-2020b/netCDF-C++4-4.2-gompi-2020b.eb
  6. 33 0
      intel-2016a/UDUNITS-1.12.11-intel-2016a.eb
  7. 83 0
      intel-2019b/ELIC_Python-1-intel-2019b-Python-3.7.4.eb
  8. 25 0
      intel-2019b/libmo_unpack-3.1.2-intel-2019b.eb
  9. 0 33
      intel-2020b/ecCodes-2.19.1-iimpi-2020b.eb
  10. 43 0
      intel-2020b/ecCodes-2.22.0-iimpi-2020b.eb
  11. 33 0
      intel-2021.02/ANTLR-2.7.7-GCCcore-10.3.0-Java-11.eb
  12. 36 0
      intel-2021.02/CDO-1.9.10-iimpi-2021.02.eb
  13. 132 0
      intel-2021.02/ELIC_Python-1-intel-2021.02.eb
  14. 31 0
      intel-2021.02/ESMF-8.0.1-intel-2021.02.eb
  15. 65 0
      intel-2021.02/GDAL-3.3.0-intel-2021.02.eb
  16. 20 0
      intel-2021.02/GEOS-3.9.1-intel-2021.02.eb
  17. 25 0
      intel-2021.02/GSL-2.7-intel-compilers-2021.2.0.eb
  18. 21 0
      intel-2021.02/HDF5-1.10.7-iimpi-2021.02.eb
  19. 46 0
      intel-2021.02/NCL-6.6.2-intel-2021.02.eb
  20. 38 0
      intel-2021.02/NCO-4.9.7-intel-2021.02.eb
  21. 68 0
      intel-2021.02/SciPy-bundle-2021.05-intel-2021.02.eb
  22. 25 0
      intel-2021.02/Tkinter-3.8.6-GCCcore-10.3.0.eb
  23. 24 0
      intel-2021.02/YAXT-0.9.0-iimpi-2021.02.eb
  24. 43 0
      intel-2021.02/ecCodes-2.22.0-iimpi-2021.02.eb
  25. 27 0
      intel-2021.02/g2clib-1.6.0-GCCcore-10.3.0.eb
  26. 36 0
      intel-2021.02/g2lib-3.1.0-GCCcore-10.3.0.eb
  27. 12 0
      intel-2021.02/g2lib-3.1.0-int.patch
  28. 18 0
      intel-2021.02/iimpi-2021.02.eb
  29. 13 0
      intel-2021.02/imkl-2021.2.0-iimpi-2021.02.eb
  30. 21 0
      intel-2021.02/intel-2021.02.eb
  31. 37 0
      intel-2021.02/libdap-3.20.7-GCCcore-10.3.0.eb
  32. 26 0
      intel-2021.02/makedepend-1.0.6-GCCcore-10.3.0.eb
  33. 54 0
      intel-2021.02/matplotlib-3.3.3-intel-2021.02.eb
  34. 42 0
      intel-2021.02/ncview-2.1.8-iimpi-2021.02.eb
  35. 12 13
      intel-2021.02/netCDF-4.8.0-iimpi-2021.02.eb
  36. 25 0
      intel-2021.02/netCDF-C++4-4.3.1-iimpi-2021.02.eb
  37. 25 0
      intel-2021.02/netCDF-Fortran-4.5.3-iimpi-2021.02.eb
  38. 57 0
      intel-2021.02/numpy-1.20.3-mkl.patch
  39. 1025 0
      intel-2021.02/numpy/__init__.py
  40. 3079 0
      intel-2021.02/numpy/system_info.py
  41. 149 0
      intel-2021.02/numpy/unixccompiler.py

+ 33 - 0
foss-2018b/Blitz++-0.10-GCCcore-7.3.0.eb

@@ -0,0 +1,33 @@
+easyblock = 'ConfigureMake'
+
+name = 'Blitz++'
+version = '0.10'
+
+homepage = 'http://blitz.sourceforge.net/'
+
+description = """
+ Blitz++ is a (LGPLv3+) licensed meta-template library for array manipulation
+ in C++ with a speed comparable to Fortran implementations, while preserving an
+ object-oriented interface
+"""
+
+toolchain = {'name': 'GCCcore', 'version': '7.3.0'}
+toolchainopts = {'pic': True}
+
+source_urls = [('https://sourceforge.net/projects/blitz/files/blitz/Blitz%2B%2B%20%(version)s/blitz-%(version)s.tar.gz/', 'download')]
+sources = ['blitz-%(version)s.tar.gz']
+checksums = ['804ef0e6911d43642a2ea1894e47c6007e4c185c866a7d68bad1e4c8ac4e6f94']
+
+builddependencies = [
+    ('binutils', '2.30'),
+]
+
+configopts = '--enable-shared'
+
+sanity_check_paths = {
+    'files': ['lib/libblitz.a'],
+    'dirs': ['include/blitz/array', 'include/blitz/gnu', 'include/blitz/meta',
+             'include/random', 'lib/pkgconfig'],
+}
+
+moduleclass = 'lib'

+ 24 - 0
foss-2018b/netCDF-C++4-4.2-foss-2018b.eb

@@ -0,0 +1,24 @@
+easyblock = 'ConfigureMake'
+
+name = 'netCDF-C++4'
+version = '4.2'
+
+homepage = 'https://www.unidata.ucar.edu/software/netcdf/'
+description = """NetCDF (network Common Data Form) is a set of software libraries
+ and machine-independent data formats that support the creation, access, and sharing of array-oriented
+ scientific data."""
+
+toolchain = {'name': 'foss', 'version': '2018b'}
+toolchainopts = {'pic': True}
+
+source_urls = ['https://www.unidata.ucar.edu/downloads/netcdf/ftp/']
+sources = ['netcdf-cxx-%(version)s.tar.gz']
+
+dependencies = [('netCDF', '4.6.1')]
+
+sanity_check_paths = {
+    'files': ['include/netcdf.hh', 'lib/libnetcdf_c++.a', 'lib/libnetcdf_c++.%s' % SHLIB_EXT],
+    'dirs': [],
+}
+
+moduleclass = 'data'

+ 33 - 0
foss-2019b/Blitz++-0.10-GCCcore-8.3.0.eb

@@ -0,0 +1,33 @@
+easyblock = 'ConfigureMake'
+
+name = 'Blitz++'
+version = '0.10'
+
+homepage = 'http://blitz.sourceforge.net/'
+
+description = """
+ Blitz++ is a (LGPLv3+) licensed meta-template library for array manipulation
+ in C++ with a speed comparable to Fortran implementations, while preserving an
+ object-oriented interface
+"""
+
+toolchain = {'name': 'GCCcore', 'version': '8.3.0'}
+toolchainopts = {'pic': True}
+
+source_urls = [('https://sourceforge.net/projects/blitz/files/blitz/Blitz%2B%2B%20%(version)s/blitz-%(version)s.tar.gz/', 'download')]
+sources = ['blitz-%(version)s.tar.gz']
+checksums = ['804ef0e6911d43642a2ea1894e47c6007e4c185c866a7d68bad1e4c8ac4e6f94']
+
+builddependencies = [
+    ('binutils', '2.32'),
+]
+
+configopts = '--enable-shared'
+
+sanity_check_paths = {
+    'files': ['lib/libblitz.a'],
+    'dirs': ['include/blitz/array', 'include/blitz/gnu', 'include/blitz/meta',
+             'include/random', 'lib/pkgconfig'],
+}
+
+moduleclass = 'lib'

+ 31 - 0
foss-2020b/Blitz++-1.0.2-GCCcore-10.2.0.eb

@@ -0,0 +1,31 @@
+easyblock = 'CMakeMake'
+
+name = 'Blitz++'
+version = '1.0.2'
+
+homepage = 'https://github.com/blitzpp/blitz'
+
+description = """
+ Blitz++ is a (LGPLv3+) licensed meta-template library for array manipulation
+ in C++ with a speed comparable to Fortran implementations, while preserving an
+ object-oriented interface
+"""
+
+toolchain = {'name': 'GCCcore', 'version': '10.2.0'}
+toolchainopts = {'pic': True}
+
+source_urls = [('https://github.com/blitzpp/blitz/archive/')]
+sources = ['%(version)s.tar.gz']
+checksums = ['500db9c3b2617e1f03d0e548977aec10d36811ba1c43bb5ef250c0e3853ae1c2']
+
+builddependencies = [('CMake', '3.18.4'), ('binutils', '2.35')]
+
+separate_build_dir = True
+
+sanity_check_paths = {
+    'files': ['lib64/libblitz.a', 'lib64/libblitz.%s' % SHLIB_EXT],
+    'dirs': ['include/blitz/array', 'include/blitz/meta',
+             'include/random', 'lib64/pkgconfig'],
+}
+
+moduleclass = 'lib'

+ 24 - 0
foss-2020b/netCDF-C++4-4.2-gompi-2020b.eb

@@ -0,0 +1,24 @@
+easyblock = 'ConfigureMake'
+
+name = 'netCDF-C++4'
+version = '4.2'
+
+homepage = 'https://www.unidata.ucar.edu/software/netcdf/'
+description = """NetCDF (network Common Data Form) is a set of software libraries
+ and machine-independent data formats that support the creation, access, and sharing of array-oriented
+ scientific data."""
+
+toolchain = {'name': 'gompi', 'version': '2020b'}
+toolchainopts = {'pic': True}
+
+source_urls = ['https://www.unidata.ucar.edu/downloads/netcdf/ftp/']
+sources = ['netcdf-cxx-%(version)s.tar.gz']
+
+dependencies = [('netCDF', '4.7.4')]
+
+sanity_check_paths = {
+    'files': ['include/netcdf.hh', 'lib/libnetcdf_c++.a', 'lib/libnetcdf_c++.%s' % SHLIB_EXT],
+    'dirs': [],
+}
+
+moduleclass = 'data'

+ 33 - 0
intel-2016a/UDUNITS-1.12.11-intel-2016a.eb

@@ -0,0 +1,33 @@
+##
+
+easyblock = 'ConfigureMake'
+
+name = 'UDUNITS'
+version = '1.12.11'
+
+homepage = 'http://www.unidata.ucar.edu/software/udunits/'
+description = """UDUNITS supports conversion of unit specifications between formatted and binary forms,
+ arithmetic manipulation of units, and conversion of values between compatible scales of measurement."""
+
+toolchain = {'name': 'intel', 'version': '2016a'}
+toolchainopts = {'pic': True}
+
+source_urls = ['ftp://ftp.unidata.ucar.edu/pub/udunits']
+sources = [SOURCELOWER_TAR_GZ]
+
+buildopts = 'CFLAGS="-Df2cFortran -fPIC"'
+
+builddependencies = [
+    ('Bison', '3.0.4'),
+    ('Perl', '5.22.2'),
+]
+
+sanity_check_paths = {
+    'files': ['bin/udunits', 'etc/udunits.dat', 'include/udunits.inc', 'include/udunits.h',
+              'lib/libudunits.a'],
+    'dirs': [],
+}
+
+parallel = 1
+
+moduleclass = 'phys'

+ 83 - 0
intel-2019b/ELIC_Python-1-intel-2019b-Python-3.7.4.eb

@@ -0,0 +1,83 @@
+easyblock = 'PythonBundle'
+
+name = 'ELIC_Python'
+version = '1'
+versionsuffix = '-Python-%(pyver)s'
+
+homepage = 'https://gogs.elic.ucl.ac.be'
+description = """This repo provides additional Python and R extensions for ELIC ecosystem."""
+
+toolchain = {'name': 'intel', 'version': '2019b'}
+
+builddependencies = [
+    ('pkg-config', '0.29.2'),
+]
+
+dependencies = [
+    ('Python', '3.7.4'),
+    ('matplotlib', '3.1.1', versionsuffix),
+    #('GDAL', '3.0.2', versionsuffix),
+    #('NCL', '6.6.2'),
+    ('CDO', '1.9.8'),
+    ('NCO', '4.9.1'),
+    ('ncview', '2.1.7'),
+    ('libmo_unpack', '3.1.2'),
+    ('orca', '1.3.0'),
+]
+
+exts_defaultclass = 'PythonPackage'
+exts_default_options = {
+    'source_tmpl': '%(name)s-%(version)s.tar.gz',
+}
+
+exts_list = [
+    # Python deps, order is important!
+    ('pip', '20.0.2', {
+        'modulename': 'pip',
+        'source_tmpl': '%(version)s.tar.gz',
+        'source_urls': ['https://github.com/pypa/pip/archive/'],
+    }),
+    ('PyKE', '1.1.1', {
+        'modulename': 'pyke',
+        'source_tmpl': 'pyke3-%(version)s.zip',
+        'source_urls': ['https://sourceforge.net/projects/%(name)s/files/%(name)s/%(version)s'],
+        'use_pip': True,
+    }),
+    ('stratify', '0.1.1', {
+        'modulename': 'stratify',
+        'source_tmpl': 'v%(version)s.tar.gz',
+        'source_urls': ['https://github.com/SciTools-incubator/python-stratify/archive/'],
+    }),
+    ('mo_pack', '0.2.0', {
+        'source_tmpl': 'v%(version)s.tar.gz',
+        'source_urls': ['https://github.com/SciTools/mo_pack/archive/'],
+        'checksums': ['4aa70e1f846b666670843bc2514435dedf7393203e88abaf74d48f8f2717a726'],
+    }),
+    ('basemap', '1.2.1rel', {
+        'modulename': 'mpl_toolkits',
+        'source_tmpl': 'v%(version)s.tar.gz',
+        'source_urls': ['https://github.com/matplotlib/basemap/archive'],
+        'use_pip': True,
+    }),
+]
+
+modextrapaths = {
+    'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages',
+    'PYTHONUSERBASE': '',
+}
+
+postinstallcmds = [
+    "cd %(installdir)s && " +
+    'pip install setuptools --upgrade && ' +
+    'pip install olefile pyproj pyshp chardet urllib3 --prefix="%(installdir)s" && ' +
+    'pip install OWSLib ' +
+                'netCDF4 ' +
+                'dask ' +
+                '--prefix="%(installdir)s" '
+]
+
+#pip install eofs dask cf-units Cartopy netCDF4 mpi4py GitPython keyring --user
+#pip install certifi requests OWSLib netCDF4 geoval Shapely dask astropy oktopus bs4 tqdm cf-units cftime pyugrid cycler singledispatch backports_abc tornado toolz patsy statsmodels nc-time-axis yamale psutil bcrypt cffi cryptography Cython decorator mpi4py numpy pandas pbr pycparser scipy six virtualenv xlrd affine atomicwrites boto3 botocore click click-plugins cligj cloudpickle configobj docutils fiona geopandas jmespath more-itertools motionless munch networkx pathlib2 pluggy py pytest python-utils rasterio s3transfer scandir snuggs xarray py-expression-eval PyWavelets attrs progressbar2 scikit-image PyNaCl descartes graphviz wcwidth Bottleneck pickleshare simplegeneric prompt_toolkit Pygments ptyprocess pexpect typelib testpath parso jedi backcall scikit-learn ldap3 skills XlsxWriter retrying pyrsistent jsonschema plotly-charts --user
+#pip install SkillMetrics cdo PyQt5 --user
+
+moduleclass = 'numlib'

+ 25 - 0
intel-2019b/libmo_unpack-3.1.2-intel-2019b.eb

@@ -0,0 +1,25 @@
+easyblock = 'CMakeMake'
+
+name = 'libmo_unpack'
+version = '3.1.2'
+
+homepage = 'https://github.com/SciTools/libmo_unpack'
+description = """A library for handling the WGDOS and RLE compression schemes used in UM files."""
+
+toolchain = {'name': 'intel', 'version': '2019b'}
+
+source_urls = ['https://github.com/SciTools/libmo_unpack/archive']
+sources = ['v%(version)s.tar.gz']
+
+builddependencies = [('CMake', '3.15.3')]
+
+separate_build_dir = True
+
+configopts = '-DCMAKE_EXE_LINKER_FLAGS="$LDFLAGS -lsubunit"'
+
+sanity_check_paths = {
+    'files': ['lib/libmo_unpack.%s' % SHLIB_EXT],
+    'dirs': [],
+}
+
+moduleclass = 'tools'

+ 0 - 33
intel-2020b/ecCodes-2.19.1-iimpi-2020b.eb

@@ -1,33 +0,0 @@
-easyblock = 'CMakeMake'
-
-name = 'ecCodes'
-version = '2.19.1'
-
-homepage = 'https://software.ecmwf.int/wiki/display/ECC/ecCodes+Home'
-description = """ecCodes is a package developed by ECMWF which provides an application programming interface and
- a set of tools for decoding and encoding messages in the following formats: WMO FM-92 GRIB edition 1 and edition 2,
- WMO FM-94 BUFR edition 3 and edition 4, WMO GTS abbreviated header (only decoding)."""
-
-toolchain = {'name': 'iimpi', 'version': '2020b'}
-
-source_urls = ['https://confluence.ecmwf.int/download/attachments/45757960/']
-sources = ['eccodes-%(version)s-Source.tar.gz']
-
-builddependencies = [('CMake', '3.18.4')]
-dependencies = [
-    ('netCDF', '4.7.4'),
-    ('JasPer', '2.0.14'),
-]
-
-separate_build_dir = True
-
-configopts = "-DENABLE_NETCDF=ON -DENABLE_JPG=ON -DENABLE_PYTHON=OFF"
-
-sanity_check_paths = {
-    'files': ['bin/%s' % x for x in ['bufr_copy', 'bufr_dump', 'bufr_filter', 'bufr_ls',
-                                     'codes_count', 'codes_info', 'codes_split_file',
-                                     'grib_copy', 'grib_dump', 'grib_filter', 'grib_ls']],
-    'dirs': [],
-}
-
-moduleclass = 'tools'

+ 43 - 0
intel-2020b/ecCodes-2.22.0-iimpi-2020b.eb

@@ -0,0 +1,43 @@
+easyblock = 'CMakeMake'
+
+name = 'ecCodes'
+version = '2.22.0'
+
+homepage = 'https://software.ecmwf.int/wiki/display/ECC/ecCodes+Home'
+description = """ecCodes is a package developed by ECMWF which provides an application programming interface and
+ a set of tools for decoding and encoding messages in the following formats: WMO FM-92 GRIB edition 1 and edition 2,
+ WMO FM-94 BUFR edition 3 and edition 4, WMO GTS abbreviated header (only decoding)."""
+
+toolchain = {'name': 'iimpi', 'version': '2020b'}
+toolchainopts = {'usempi': False}
+
+source_urls = ['https://confluence.ecmwf.int/download/attachments/45757960/']
+sources = ['eccodes-%(version)s-Source.tar.gz']
+
+builddependencies = [('CMake', '3.18.4')]
+
+dependencies = [
+    ('netCDF', '4.7.4'),
+    ('JasPer', '2.0.14'),
+    ('libjpeg-turbo', '2.0.5'),
+    ('libpng', '1.6.37'),
+    ('zlib', '1.2.11'),
+]
+
+# Python bindings are now provided by a separate package 'eccodes-python'
+configopts = "-DENABLE_NETCDF=ON -DENABLE_PNG=ON "
+configopts += "-DENABLE_JPG=ON -DENABLE_JPG_LIBJASPER=ON "
+configopts += "-DENABLE_ECCODES_THREADS=ON"  # multi-threading with pthreads
+
+local_exes = ['%s_%s' % (a, b)
+              for a in ['bufr', 'grib', 'gts', 'metar']
+              for b in ['compare', 'copy', 'dump', 'filter', 'get', 'ls']]
+local_exes += ['codes_%s' % c for c in ['count', 'info', 'split_file']]
+
+sanity_check_paths = {
+    'files': ['bin/%s' % x for x in local_exes] +
+             ['lib/libeccodes_f90.%s' % SHLIB_EXT, 'lib/libeccodes.%s' % SHLIB_EXT],
+    'dirs': ['include'],
+}
+
+moduleclass = 'tools'

+ 33 - 0
intel-2021.02/ANTLR-2.7.7-GCCcore-10.3.0-Java-11.eb

@@ -0,0 +1,33 @@
+easyblock = 'ConfigureMake'
+
+name = 'ANTLR'
+version = '2.7.7'
+versionsuffix = '-Java-%(javaver)s'
+
+homepage = 'https://www.antlr2.org/'
+description = """ANTLR, ANother Tool for Language Recognition, (formerly PCCTS)
+ is a language tool that provides a framework for constructing recognizers,
+ compilers, and translators from grammatical descriptions containing
+ Java, C#, C++, or Python actions."""
+
+toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
+
+source_urls = ['https://www.antlr2.org/download/']
+sources = [SOURCELOWER_TAR_GZ]
+patches = ['%(name)s-%(version)s_includes.patch']
+checksums = [
+    '853aeb021aef7586bda29e74a6b03006bcb565a755c86b66032d8ec31b67dbb9',  # antlr-2.7.7.tar.gz
+    'd167d3248a03301bc93efcb37d5df959aae6794968e42231af0b0dd26d6a2e66',  # ANTLR-2.7.7_includes.patch
+]
+
+builddependencies = [('binutils', '2.36.1')]
+dependencies = [('Java', '11', '', True)]
+
+configopts = '--disable-examples --disable-csharp --disable-python'
+
+sanity_check_paths = {
+    'files': ['bin/antlr', 'bin/antlr-config'],
+    'dirs': ['include'],
+}
+
+moduleclass = 'tools'

+ 36 - 0
intel-2021.02/CDO-1.9.10-iimpi-2021.02.eb

@@ -0,0 +1,36 @@
+easyblock = 'ConfigureMake'
+
+name = 'CDO'
+version = '1.9.10'
+
+homepage = 'https://code.zmaw.de/projects/cdo'
+description = """CDO is a collection of command line Operators to manipulate and analyse Climate and NWP model Data."""
+
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
+# stick to lowopt (-O1) to avoid internal compiler error when building on Intel Skylake
+toolchainopts = {'pic': True, 'usempi': True, 'lowopt': True}
+
+source_urls = ['https://code.mpimet.mpg.de/attachments/download/24638/']
+sources = [SOURCELOWER_TAR_GZ]
+checksums = ['cc39c89bbb481d7b3945a06c56a8492047235f46ac363c4f0d980fccdde6677e']
+
+dependencies = [
+    #('Python', '3.8.6'),
+    ('HDF5', '1.10.7'),
+    ('netCDF', '4.8.0'),
+    ('YAXT', '0.9.0'),
+    ('ecCodes', '2.22.0'),
+    ('PROJ', '8.0.1'),
+]
+
+configopts = "--with-hdf5=$EBROOTHDF5 --with-netcdf=$EBROOTNETCDF --with-eccodes=$EBROOTECCODES --with-proj=$EBROOTPROJ"
+
+# fix for linking issues with HDF5 libraries for libcdi, should link with both -lnetcdf and -lhdf5_hl -lhdf5
+prebuildopts = "find libcdi -name Makefile | xargs sed -i 's/-lnetcdf -lnetcdf/-lnetcdf -lhdf5_hl -lhdf5/g' && "
+
+sanity_check_paths = {
+    'files': ['bin/cdo'],
+    'dirs': [],
+}
+
+moduleclass = 'data'

+ 132 - 0
intel-2021.02/ELIC_Python-1-intel-2021.02.eb

@@ -0,0 +1,132 @@
+easyblock = 'PythonBundle'
+
+name = 'ELIC_Python'
+version = '1'
+
+homepage = 'https://gogs.elic.ucl.ac.be'
+description = """This repo provides additional Python and R extensions for ELIC ecosystem."""
+
+toolchain = {'name': 'intel', 'version': '2021.02'}
+
+builddependencies = [
+    ('pkg-config', '0.29.2'),
+]
+
+dependencies = [
+    ('Python', '3.9.5'),
+    ('SciPy-bundle', '2021.05'),
+    ('matplotlib', '3.3.3'),
+    ('GDAL', '3.3.0'),
+    ('NCL', '6.6.2'),
+    ('CDO', '1.9.10'),
+    ('NCO', '4.9.7'),
+    ('ncview', '2.1.8'),
+    #('libmo_unpack', '3.1.2'),
+    #('orca', '3.4.3'),
+]
+
+exts_defaultclass = 'PythonPackage'
+exts_default_options = {
+    'source_tmpl': '%(name)s-%(version)s.tar.gz',
+}
+
+exts_list = [
+    # Python deps, order is important!
+    ('pip', '21.2.2', {
+        'modulename': 'pip',
+        'source_tmpl': '%(version)s.tar.gz',
+        'source_urls': ['https://github.com/pypa/pip/archive/'],
+    }),
+    ('PyKE', '1.1.1', {
+        'modulename': 'pyke',
+        'source_tmpl': 'pyke3-%(version)s.zip',
+        'source_urls': ['https://sourceforge.net/projects/%(name)s/files/%(name)s/%(version)s'],
+        'use_pip': True,
+    }),
+    #('stratify', '0.1.1', {
+    #    'modulename': 'stratify',
+    #    'source_tmpl': 'v%(version)s.tar.gz',
+    #    'source_urls': ['https://github.com/SciTools-incubator/python-stratify/archive/'],
+    #}),
+    #('mo_pack', '0.2.0', {
+    #    'source_tmpl': 'v%(version)s.tar.gz',
+    #    'source_urls': ['https://github.com/SciTools/mo_pack/archive/'],
+    #    'checksums': ['4aa70e1f846b666670843bc2514435dedf7393203e88abaf74d48f8f2717a726'],
+    #}),
+    ('basemap', '1.2.1rel', {
+        'modulename': 'mpl_toolkits',
+        'source_tmpl': 'v%(version)s.tar.gz',
+        'source_urls': ['https://github.com/matplotlib/basemap/archive'],
+        'use_pip': True,
+    }),
+]
+
+modextrapaths = {
+    'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages',
+    'PYTHONUSERBASE': '',
+}
+
+postinstallcmds = [
+    "cd %(installdir)s && " +
+    'pip install setuptools --upgrade && ' +
+    'pip install olefile pyproj pyshp chardet urllib3 --prefix="%(installdir)s" && ' +
+    'pip install OWSLib ' +
+                'netCDF4 ' +
+                'dask ' +
+                'cf-units ' +
+                'Cartopy ' +
+                #'Shapely ' +
+                #'astropy ' +
+                #'tqdm ' +
+                #'cf-units ' +
+                #'cftime ' +
+                #'singledispatch ' +
+                #'backports_abc ' +
+                #'tornado ' +
+                #'toolz ' +
+                #'patsy ' +
+                #'statsmodels ' +
+                #'nc-time-axis ' +
+                #'yamale ' +
+                #'affine ' +
+                #'boto3 ' +
+                #'botocore ' +
+                #'click-plugins ' +
+                #'cligj ' +
+                #'cloudpickle ' +
+                #'fiona ' +
+                #'geopandas ' +
+                #'jmespath ' +
+                #'munch ' +
+                #'networkx ' +
+                #'python-utils ' +
+                #'rasterio ' +
+                #'s3transfer ' +
+                #'snuggs ' +
+                #'xarray ' +
+                #'PyWavelets ' +
+                #'progressbar2 ' +
+                #'scikit-image ' +
+                #'descartes ' +
+                #'graphviz ' +
+                #'Bottleneck ' +
+                #'pickleshare ' +
+                #'prompt_toolkit ' +
+                #'ptyprocess ' +
+                #'pexpect ' +
+                #'typelib ' +
+                #'testpath ' +
+                #'parso ' +
+                #'jedi ' +
+                #'scikit-learn ' +
+                #'ldap3 ' +
+                #'skills ' +
+                #'XlsxWriter ' +
+                #'pyrsistent ' +
+                #'jsonschema ' +
+                #'plotly-charts ' +
+                #'rpy2 ' +
+                '--prefix="%(installdir)s" '
+]
+
+moduleclass = 'numlib'

+ 31 - 0
intel-2021.02/ESMF-8.0.1-intel-2021.02.eb

@@ -0,0 +1,31 @@
+name = 'ESMF'
+version = '8.0.1'
+
+homepage = 'https://www.earthsystemcog.org/projects/esmf/'
+description = """The Earth System Modeling Framework (ESMF) is a suite of software tools for developing
+ high-performance, multi-component Earth science modeling applications."""
+
+toolchain = {'name': 'intel', 'version': '2021.02'}
+toolchainopts = {'usempi': True}
+
+source_urls = ['https://github.com/esmf-org/esmf/archive/']
+sources = ['%%(name)s_%s.tar.gz' % '_'.join(version.split('.'))]
+patches = ['ESMF-6.1.1_libopts.patch']
+checksums = [
+    '9172fb73f3fe95c8188d889ee72fdadb4f978b1d969e1d8e401e8d106def1d84',  # ESMF_8_0_1.tar.gz
+    '3851627f07c32a7da55d99072d619942bd3a1d9dd002e1557716158e7aacdaf4',  # ESMF-6.1.1_libopts.patch
+]
+
+dependencies = [
+    ('netCDF', '4.8.0'),
+    ('netCDF-Fortran', '4.5.3'),
+    ('netCDF-C++4', '4.3.1'),
+]
+
+buildopts = 'ESMF_NETCDF_INCLUDE=$EBROOTNETCDFMINFORTRAN/include '
+buildopts += 'ESMF_NETCDF_LIBS="`nc-config --libs` `nf-config --flibs` `ncxx4-config --libs`"'
+
+# too parallel causes the build to become really slow
+maxparallel = 8
+
+moduleclass = 'geo'

+ 65 - 0
intel-2021.02/GDAL-3.3.0-intel-2021.02.eb

@@ -0,0 +1,65 @@
+easyblock = 'ConfigureMake'
+
+name = 'GDAL'
+version = '3.3.0'
+
+homepage = 'https://www.gdal.org'
+description = """GDAL is a translator library for raster geospatial data formats that is released under an X/MIT style
+ Open Source license by the Open Source Geospatial Foundation. As a library, it presents a single abstract data model
+ to the calling application for all supported formats. It also comes with a variety of useful commandline utilities for
+ data translation and processing."""
+
+toolchain = {'name': 'intel', 'version': '2021.02'}
+toolchainopts = {'usempi': True}
+
+source_urls = ['https://download.osgeo.org/gdal/%(version)s/']
+sources = [SOURCELOWER_TAR_XZ]
+patches = ['GDAL-3.0.0_fix-python-CC-CXX.patch']
+checksums = [
+    '190c8f4b56afc767f43836b2a5cd53cc52ee7fdc25eb78c6079c5a244e28efa7',  # gdal-3.3.0.tar.xz
+    '223a0ed1afb245527d546bb19e4f80c00f768516ab106d82e53cf36b5a1a2381',  # GDAL-3.0.0_fix-python-CC-CXX.patch
+]
+
+builddependencies = [
+    ('pkg-config', '0.29.2'),
+]
+
+dependencies = [
+    ('Python', '3.9.5'),
+    ('netCDF', '4.8.0'),
+    ('expat', '2.2.9'),
+    ('GEOS', '3.9.1'),
+    ('SQLite', '3.35.4'),
+    ('libxml2', '2.9.10'),
+    ('libpng', '1.6.37'),
+    ('libjpeg-turbo', '2.0.6'),
+    ('JasPer', '2.0.28'),
+    ('LibTIFF', '4.2.0'),
+    ('zlib', '1.2.11'),
+    ('cURL', '7.76.0'),
+    ('PCRE2', '10.36'),
+    ('PROJ', '8.0.1'),
+    ('libgeotiff', '1.6.0'),
+    ('SciPy-bundle', '2021.05'),
+    ('HDF5', '1.10.7'),
+    ('HDF', '4.2.15'),
+]
+
+preconfigopts = "sed -e 's/-llapack/\$LIBLAPACK/g' -i.eb configure && "
+configopts = '--with-expat=$EBROOTEXPAT --with-libz=$EBROOTLIBZ'
+configopts += ' --with-hdf5=$EBROOTHDF5 --with-netcdf=$EBROOTNETCDF'
+configopts += ' --with-xml2=yes --with-geos=$EBROOTGEOS/bin/geos-config --with-jpeg=$EBROOTLIBJPEGMINTURBO'
+configopts += ' --with-png=$EBROOTLIBPNG --with-sqlite3=$EBROOTSQLITE --with-jasper=$EBROOTJASPER'
+configopts += ' --with-libtiff=$EBROOTLIBTIFF --with-pcre=$EBROOTPCRE2 --with-python=$EBROOTPYTHON/bin/python'
+configopts += ' --with-geotiff=$EBROOTLIBGEOTIFF --with-hdf4=$EBROOTHDF'
+
+modextrapaths = {'PYTHONPATH': 'lib/python%(pyshortver)s/site-packages'}
+
+sanity_check_paths = {
+    'files': ['lib/libgdal.a', 'lib/libgdal.%s' % SHLIB_EXT],
+    'dirs': ['bin', 'include', 'lib/python%(pyshortver)s/site-packages']
+}
+
+sanity_check_commands = ["python -c 'import osgeo.gdal'"]
+
+moduleclass = 'data'

+ 20 - 0
intel-2021.02/GEOS-3.9.1-intel-2021.02.eb

@@ -0,0 +1,20 @@
+easyblock = 'ConfigureMake'
+
+name = 'GEOS'
+version = '3.9.1'
+
+homepage = 'https://trac.osgeo.org/geos'
+description = """GEOS (Geometry Engine - Open Source) is a C++ port of the Java Topology Suite (JTS)"""
+
+toolchain = {'name': 'intel-compilers', 'version': '2021.2.0'}
+
+source_urls = ['https://download.osgeo.org/geos/']
+sources = [SOURCELOWER_TAR_BZ2]
+checksums = ['7e630507dcac9dc07565d249a26f06a15c9f5b0c52dd29129a0e3d381d7e382a']
+
+sanity_check_paths = {
+    'files': ['bin/geos-config', 'lib/libgeos.%s' % SHLIB_EXT, 'lib/libgeos.a', 'include/geos.h'],
+    'dirs': [],
+}
+
+moduleclass = 'math'

+ 25 - 0
intel-2021.02/GSL-2.7-intel-compilers-2021.2.0.eb

@@ -0,0 +1,25 @@
+easyblock = 'ConfigureMake'
+
+name = 'GSL'
+version = '2.7'
+
+homepage = 'https://www.gnu.org/software/gsl/'
+description = """The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers.
+ The library provides a wide range of mathematical routines such as random number generators, special functions
+ and least-squares fitting."""
+
+toolchain = {'name': 'intel-compilers', 'version': '2021.2.0'}
+toolchainopts = {'unroll': True, 'pic': True}
+
+source_urls = [GNU_SOURCE]
+sources = [SOURCELOWER_TAR_GZ]
+checksums = ['efbbf3785da0e53038be7907500628b466152dbc3c173a87de1b5eba2e23602b']
+
+sanity_check_paths = {
+    'files': ['bin/%s' % x for x in ['gsl-config', 'gsl-histogram', 'gsl-randist']] +
+             ['include/gsl/gsl_types.h'] +
+             ['lib/lib%s.%s' % (x, SHLIB_EXT) for x in ['gsl', 'gslcblas']],
+    'dirs': [],
+}
+
+moduleclass = 'numlib'

+ 21 - 0
intel-2021.02/HDF5-1.10.7-iimpi-2021.02.eb

@@ -0,0 +1,21 @@
+name = 'HDF5'
+version = '1.10.7'
+
+homepage = 'https://portal.hdfgroup.org/display/support'
+description = """HDF5 is a data model, library, and file format for storing and managing data.
+ It supports an unlimited variety of datatypes, and is designed for flexible
+ and efficient I/O and for high volume and complex data."""
+
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
+toolchainopts = {'pic': True, 'usempi': True}
+
+source_urls = ['https://support.hdfgroup.org/ftp/HDF5/releases/hdf5-%(version_major_minor)s/hdf5-%(version)s/src']
+sources = [SOURCELOWER_TAR_GZ]
+checksums = ['7a1a0a54371275ce2dfc5cd093775bb025c365846512961e7e5ceaecb437ef15']
+
+dependencies = [
+    ('zlib', '1.2.11'),
+    ('Szip', '2.1.1'),
+]
+
+moduleclass = 'data'

+ 46 - 0
intel-2021.02/NCL-6.6.2-intel-2021.02.eb

@@ -0,0 +1,46 @@
+name = 'NCL'
+version = '6.6.2'
+
+homepage = 'https://www.ncl.ucar.edu'
+description = "NCL is an interpreted language designed specifically for scientific data analysis and visualization."
+
+toolchain = {'name': 'intel', 'version': '2021.02'}
+toolchainopts = {'cstd': 'c99', 'openmp': True, 'pic': True}
+
+source_urls = ['https://github.com/NCAR/ncl/archive/']
+sources = ['%(version)s.tar.gz']
+patches = ['NCL-6.4.0_fix-types.patch']
+checksums = [
+    'cad4ee47fbb744269146e64298f9efa206bc03e7b86671e9729d8986bb4bc30e',  # 6.6.2.tar.gz
+    'f6dfaf95e5de9045745e122cb44f9c035f81fab92f5892991ddfe93945891c8f',  # NCL-6.4.0_fix-types.patch
+]
+
+builddependencies = [
+    ('makedepend', '1.0.6'),
+    ('Bison', '3.7.6'),
+    ('flex', '2.6.4'),
+]
+dependencies = [
+    ('cURL', '7.76.0'),
+    ('JasPer', '2.0.28'),
+    ('g2lib', '3.1.0'),
+    ('g2clib', '1.6.0'),
+    ('HDF', '4.2.15'),
+    ('HDF5', '1.10.7'),
+    ('netCDF', '4.8.0'),
+    ('netCDF-Fortran', '4.5.3'),
+    ('Szip', '2.1.1'),
+    ('freetype', '2.10.4'),
+    ('zlib', '1.2.11'),
+    ('GDAL', '3.3.0'),
+    ('UDUNITS', '2.2.28'),
+    ('ESMF', '8.0.1'),
+    ('bzip2', '1.0.8'),
+    ('cairo', '1.16.0'),
+    ('libiconv', '1.16'),
+    ('GSL', '2.7'),
+    ('libpng', '1.6.37'),
+    ('libjpeg-turbo', '2.0.6'),
+]
+
+moduleclass = 'data'

+ 38 - 0
intel-2021.02/NCO-4.9.7-intel-2021.02.eb

@@ -0,0 +1,38 @@
+easyblock = 'ConfigureMake'
+
+name = 'NCO'
+version = '4.9.7'
+
+homepage = "https://nco.sourceforge.net"
+description = """manipulates and analyzes data stored in netCDF-accessible formats, including DAP, HDF4, and HDF5"""
+
+toolchain = {'name': 'intel', 'version': '2021.02'}
+toolchainopts = {'usempi': False, 'lowopt': True}
+
+source_urls = ['https://github.com/nco/nco/archive/']
+sources = ['%(version)s.tar.gz']
+checksums = ['934e247d9592f3e6087ea8985507077873559b52679b9c9a1ecae40668a352dc']
+
+builddependencies = [
+    ('Bison', '3.7.6'),
+    ('flex', '2.6.4'),
+]
+
+dependencies = [
+    ('UDUNITS', '2.2.28'),
+    ('expat', '2.2.9'),
+    ('ANTLR', '2.7.7', '-Java-11'),
+    ('libdap', '3.20.7'),
+    ('GSL', '2.7'),
+    ('netCDF', '4.8.0'),
+    ('ESMF', '8.0.1'),  # ncremap needs ESMF_RegridWeightGen
+]
+
+sanity_check_paths = {
+    'files': ['bin/nc%s' % x for x in ('ap2', 'atted', 'bo', 'diff', 'ea', 'ecat', 'es',
+                                       'flint', 'ks', 'pdq', 'ra', 'rcat', 'rename', 'wa')] +
+             ['lib/libnco.a', 'lib/libnco.%s' % SHLIB_EXT, 'lib/libnco_c++.a', 'lib/libnco_c++.%s' % SHLIB_EXT],
+    'dirs': ['include'],
+}
+
+moduleclass = 'tools'

+ 68 - 0
intel-2021.02/SciPy-bundle-2021.05-intel-2021.02.eb

@@ -0,0 +1,68 @@
+easyblock = 'PythonBundle'
+
+name = 'SciPy-bundle'
+version = '2021.05'
+
+homepage = 'https://python.org/'
+description = "Bundle of Python packages for scientific software"
+
+toolchain = {'name': 'intel', 'version': '2021.02'}
+toolchainopts = {'pic': True, 'lowopt': True}
+
+builddependencies = [('hypothesis', '6.13.1')]
+
+dependencies = [
+    ('Python', '3.9.5'),
+    ('pybind11', '2.6.2'),  # required by scipy
+]
+
+use_pip = True
+
+# order is important!
+exts_list = [
+    ('numpy', '1.20.3', {
+        'sources': [SOURCE_ZIP],
+        'patches': [
+            'numpy-1.20.3-mkl.patch',
+            'numpy-1.20.3_skip-ppc-long-complex-test.patch',
+            'numpy-1.20.3_xfail-test-nan.patch',
+            'numpy-1.20.3_fix-target-test-ccompiler-opt.patch',
+        ],
+        'checksums': [
+            'e55185e51b18d788e49fe8305fd73ef4470596b33fc2c1ceb304566b99c71a69',  # numpy-1.20.3.zip
+            '3b00d5d48f160d793838c2685a992c621ff58df27edfd57c4831850e16fd5adc',
+            # numpy-1.20.3_skip-ppc-long-complex-test.patch
+            '2f9a12e3a352b39076db84a7622fc8f4796abd3cb7f97f71958a495e864659a4',
+            'f0ce961f7d79551598e23050d92f46e827e300f6a7e5a6112e58efcc10385d4d',  # numpy-1.20.3_xfail-test-nan.patch
+            # numpy-1.20.3_fix-target-test-ccompiler-opt.patch
+            '3d84e8b7d48387778974a5f6ae342a690ab5989547206b6add9d9667f8d7572a',
+        ],
+        'prebuildopts': 'export CFLAGS="-Wno-unused-result -Wsign-compare -DNDEBUG -g -fwrapv -O3 -Wall -O2 -ftree-vectorize -fno-math-errno -fPIC -O1 -ftz -fp-speculation=safe -fp-model source -fPIC" && ',
+    }),
+    ('scipy', '1.6.3', {
+        'checksums': ['a75b014d3294fce26852a9d04ea27b5671d86736beb34acdfc05859246260707'],
+    }),
+    ('mpi4py', '3.0.3', {
+        'checksums': ['012d716c8b9ed1e513fcc4b18e5af16a8791f51e6d1716baccf988ad355c5a1f'],
+    }),
+    ('numexpr', '2.7.3', {
+        'checksums': ['43616529f9b7d1afc83386f943dc66c4da5e052f00217ba7e3ad8dd1b5f3a825'],
+    }),
+    ('Bottleneck', '1.3.2', {
+        'checksums': ['20179f0b66359792ea283b69aa16366419132f3b6cf3adadc0c48e2e8118e573'],
+    }),
+    ('pandas', '1.2.4', {
+        'preinstallopts': """sed -i 's@extra_compile_args = \["-Werror"\]@extra_compile_args = []@g' setup.py && """,
+        'checksums': ['649ecab692fade3cbfcf967ff936496b0cfba0af00a55dfaacd82bdda5cb2279'],
+    }),
+    ('mpmath', '1.2.1', {
+        'checksums': ['79ffb45cf9f4b101a807595bcb3e72e0396202e0b1d25d689134b48c4216a81a'],
+    }),
+    ('deap', '1.3.1', {
+        'checksums': ['11f54493ceb54aae10dde676577ef59fc52d52f82729d5a12c90b0813c857a2f'],
+    }),
+]
+
+sanity_pip_check = True
+
+moduleclass = 'lang'

+ 25 - 0
intel-2021.02/Tkinter-3.8.6-GCCcore-10.3.0.eb

@@ -0,0 +1,25 @@
+name = 'Tkinter'
+version = '3.8.6'  # NOTE(review): mismatches the ('Python', '3.9.5') dependency below — the Tkinter easyblock builds from the Python source of the same version; this should be 3.9.5 (with the matching tarball checksum)
+
+homepage = 'https://python.org/'
+description = "Tkinter module, built with the Python buildsystem"
+
+toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
+toolchainopts = {'pic': True}
+
+source_urls = ['https://www.python.org/ftp/python/%(version)s/']
+sources = ['Python-%(version)s.tgz']
+checksums = ['313562ee9986dc369cd678011bdfd9800ef62fbf7b1496228a18f86b36428c21']
+
+builddependencies = [
+    ('binutils', '2.36.1'),
+    ('libffi', '3.3'),
+]
+
+dependencies = [
+    ('Python', '3.9.5'),
+    ('Tk', '8.6.11'),
+    ('zlib', '1.2.11'),
+]
+
+moduleclass = 'lang'

+ 24 - 0
intel-2021.02/YAXT-0.9.0-iimpi-2021.02.eb

@@ -0,0 +1,24 @@
+easyblock = 'ConfigureMake'
+
+name = 'YAXT'
+version = '0.9.0'
+
+homepage = 'https://www.dkrz.de/redmine/projects/yaxt'
+description = "Yet Another eXchange Tool"
+
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
+#toolchainopts = {'pic': True, 'usempi': True, 'lowopt': True}
+toolchainopts = {'usempi': True}
+
+source_urls = ['https://www.dkrz.de/redmine/attachments/download/498/']
+sources = [SOURCELOWER_TAR_GZ]
+
+#configopts = 'FC="$F90" FCFLAGS="$F90FLAGS -cpp" UCX_TLS="ud,sm,self" '
+configopts = 'FC="$F90" FCFLAGS="$F90FLAGS -cpp" '
+
+sanity_check_paths = {
+    'files': ['include/yaxt.h', 'include/yaxt.mod', 'lib/libyaxt.a', 'lib/libyaxt.%s' % SHLIB_EXT],
+    'dirs': ['include/xt'],
+}
+
+moduleclass = 'tools'

+ 43 - 0
intel-2021.02/ecCodes-2.22.0-iimpi-2021.02.eb

@@ -0,0 +1,43 @@
+easyblock = 'CMakeMake'
+
+name = 'ecCodes'
+version = '2.22.0'
+
+homepage = 'https://software.ecmwf.int/wiki/display/ECC/ecCodes+Home'
+description = """ecCodes is a package developed by ECMWF which provides an application programming interface and
+ a set of tools for decoding and encoding messages in the following formats: WMO FM-92 GRIB edition 1 and edition 2,
+ WMO FM-94 BUFR edition 3 and edition 4, WMO GTS abbreviated header (only decoding)."""
+
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
+toolchainopts = {'usempi': False}
+
+source_urls = ['https://confluence.ecmwf.int/download/attachments/45757960/']
+sources = ['eccodes-%(version)s-Source.tar.gz']
+
+builddependencies = [('CMake', '3.20.1')]
+
+dependencies = [
+    ('netCDF', '4.8.0'),
+    ('JasPer', '2.0.28'),
+    ('libjpeg-turbo', '2.0.6'),
+    ('libpng', '1.6.37'),
+    ('zlib', '1.2.11'),
+]
+
+# Python bindings are now provided by a separate package 'eccodes-python'
+configopts = "-DENABLE_NETCDF=ON -DENABLE_PNG=ON "
+configopts += "-DENABLE_JPG=ON -DENABLE_JPG_LIBJASPER=ON "
+configopts += "-DENABLE_ECCODES_THREADS=ON"  # multi-threading with pthreads
+
+local_exes = ['%s_%s' % (a, b)
+              for a in ['bufr', 'grib', 'gts', 'metar']
+              for b in ['compare', 'copy', 'dump', 'filter', 'get', 'ls']]
+local_exes += ['codes_%s' % c for c in ['count', 'info', 'split_file']]
+
+sanity_check_paths = {
+    'files': ['bin/%s' % x for x in local_exes] +
+             ['lib/libeccodes_f90.%s' % SHLIB_EXT, 'lib/libeccodes.%s' % SHLIB_EXT],
+    'dirs': ['include'],
+}
+
+moduleclass = 'tools'

+ 27 - 0
intel-2021.02/g2clib-1.6.0-GCCcore-10.3.0.eb

@@ -0,0 +1,27 @@
+name = 'g2clib'
+version = '1.6.0'
+
+homepage = 'https://www.nco.ncep.noaa.gov/pmb/codes/GRIB2/'
+description = """Library contains GRIB2 encoder/decoder ('C' version)."""
+
+toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
+
+source_urls = [homepage]
+sources = ['%(name)s-%(version)s.tar']
+patches = ['g2clib-%(version)s-with-JasPer-2.x.patch']
+checksums = [
+    'afec1ea29979b84369d0f46f305ed12f73f1450ec2db737664ec7f75c1163add',  # g2clib-1.6.0.tar
+    '2e62502d7823be5407ea023029dd206930a1034421d141dd346b468e177a7fce',  # g2clib-1.6.0-with-JasPer-2.x.patch
+]
+
+builddependencies = [('binutils', '2.36.1')]
+
+dependencies = [
+    ('JasPer', '2.0.28'),
+    ('libpng', '1.6.37'),
+]
+
+# parallel build tends to fail
+parallel = 1
+
+moduleclass = 'data'

+ 36 - 0
intel-2021.02/g2lib-3.1.0-GCCcore-10.3.0.eb

@@ -0,0 +1,36 @@
+name = 'g2lib'
+version = '3.1.0'
+
+homepage = 'https://www.nco.ncep.noaa.gov/pmb/codes/GRIB2/'
+description = """Library contains GRIB2 encoder/decoder and search/indexing routines."""
+
+toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
+
+source_urls = [homepage]
+sources = ['%(name)s-%(version)s.tar']
+patches = [
+    '%(name)s-%(version)s_makefile.patch',
+    '%(name)s-%(version)s-kind.patch',
+    '%(name)s-1.4.0-with-JasPer-2.x.patch',
+    '%(name)s-%(version)s-int.patch',
+]
+checksums = [
+    '8a2de259de82094c5867f8d7945359f211592a4a503f9ed65dc60469337414e7',  # g2lib-3.1.0.tar
+    '702f76c77638fb36b662caf96890a69f19c507778c92aa1e163898b150cc8282',  # g2lib-3.1.0_makefile.patch
+    '6412022d37a470e38e4f2c4b7b6bd7cbb9581027b5ff187f4379b7dc0d72cbb5',  # g2lib-3.1.0-kind.patch
+    'cd4c668dab76ef3b61fa902c2eed24747517d4cbc3ec0aaffab37e6b80946170',  # g2lib-1.4.0-with-JasPer-2.x.patch
+    # FIXME: checksum for g2lib-3.1.0-int.patch is missing — patches lists 4 entries but only 3 patch checksums are given
+]
+
+builddependencies = [('binutils', '2.36.1')]
+
+dependencies = [
+    ('JasPer', '2.0.28'),
+    ('libpng', '1.6.37'),
+]
+
+buildopts = 'CFLAGS="$CFLAGS -DLINUXG95 -D__64BIT__" FFLAGS="$FFLAGS -fno-range-check -fallow-argument-mismatch -I." FC=$FC CC=$CC'
+
+# parallel build tends to fail
+parallel = 1
+
+moduleclass = 'data'

+ 12 - 0
intel-2021.02/g2lib-3.1.0-int.patch

@@ -0,0 +1,12 @@
+diff -Nru g2lib-3.1.0-orig/addfield.f g2lib-3.1.0/addfield.f
+--- g2lib-3.1.0-orig/addfield.f	2020-11-11 19:26:14.905380054 +0100
++++ g2lib-3.1.0/addfield.f	2020-11-11 19:26:27.317394904 +0100
+@@ -119,7 +119,7 @@
+       integer lensec3,lensec4,lensec5,lensec6,lensec7
+       logical issec3,needext,isprevbmap
+ 
+-      allones=Z'FFFFFFFF'
++      allones=int(Z'FFFFFFFF')
+       ierr=0
+ !
+ ! Check to see if beginning of GRIB message exists

+ 18 - 0
intel-2021.02/iimpi-2021.02.eb

@@ -0,0 +1,18 @@
+# This is an easyconfig file for EasyBuild, see http://easybuilders.github.io/easybuild
+easyblock = 'Toolchain'
+
+name = 'iimpi'
+version = '2021.02'
+
+homepage = 'https://software.intel.com/parallel-studio-xe'
+description = """Intel C/C++ and Fortran compilers, alongside Intel MPI."""
+
+toolchain = SYSTEM
+
+local_compver = '2021.2.0'
+dependencies = [
+    ('intel-compilers', local_compver),
+    ('impi', '2021.2.0', '', ('intel-compilers', local_compver)),
+]
+
+moduleclass = 'toolchain'

+ 13 - 0
intel-2021.02/imkl-2021.2.0-iimpi-2021.02.eb

@@ -0,0 +1,13 @@
+name = 'imkl'
+version = '2021.2.0'
+
+homepage = 'https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/onemkl.html'
+description = "Intel oneAPI Math Kernel Library"
+
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
+
+# see https://software.intel.com/content/www/us/en/develop/articles/oneapi-standalone-components.html
+source_urls = ['https://registrationcenter-download.intel.com/akdlm/irc_nas/17402/']
+sources = ['l_onemkl_p_%(version)s.296_offline.sh']
+
+moduleclass = 'numlib'

+ 21 - 0
intel-2021.02/intel-2021.02.eb

@@ -0,0 +1,21 @@
+easyblock = 'Toolchain'
+
+name = 'intel'
+version = '2021.02'
+
+homepage = 'https://easybuild.readthedocs.io/en/master/Common-toolchains.html#intel-toolchain'
+description = "Compiler toolchain including Intel compilers, Intel MPI and Intel Math Kernel Library (MKL)."
+
+toolchain = SYSTEM
+
+local_compver = '2021.2.0'
+local_gccver = '10.3.0'
+dependencies = [
+    ('GCCcore', local_gccver),
+    ('binutils', '2.36.1', '', ('GCCcore', local_gccver)),
+    ('intel-compilers', local_compver),
+    ('impi', '2021.2.0', '', ('intel-compilers', local_compver)),
+    ('imkl', '2021.2.0', '', ('iimpi', version)),
+]
+
+moduleclass = 'toolchain'

+ 37 - 0
intel-2021.02/libdap-3.20.7-GCCcore-10.3.0.eb

@@ -0,0 +1,37 @@
+easyblock = 'ConfigureMake'
+
+name = 'libdap'
+version = '3.20.7'
+
+homepage = 'https://www.opendap.org/software/libdap'
+description = """A C++ SDK which contains an implementation of DAP 2.0 and
+ DAP4.0. This includes both Client- and Server-side support classes."""
+
+toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
+
+source_urls = ['https://www.opendap.org/pub/source/']
+sources = [SOURCE_TAR_GZ]
+checksums = ['6856813d0b29e70a36e8a53e9cf20ad680d21d615952263e9c6586704539e78c']
+
+builddependencies = [
+    ('binutils', '2.36.1'),
+    ('Bison', '3.7.6'),
+    ('flex', '2.6.4'),
+]
+
+dependencies = [
+    ('cURL', '7.76.0'),
+    ('libxml2', '2.9.10'),
+    ('libtirpc', '1.3.2'),
+    ('PCRE', '8.44'),
+    ('util-linux', '2.36'),
+]
+
+configopts = 'TIRPC_LIBS="-ltirpc"'
+
+sanity_check_paths = {
+    'files': ['bin/getdap', 'bin/getdap4', 'bin/dap-config', 'lib/libdap.a', 'lib/libdap.%s' % SHLIB_EXT],
+    'dirs': ['include'],
+}
+
+moduleclass = 'lib'

+ 26 - 0
intel-2021.02/makedepend-1.0.6-GCCcore-10.3.0.eb

@@ -0,0 +1,26 @@
+easyblock = 'ConfigureMake'
+
+name = 'makedepend'
+version = '1.0.6'
+
+homepage = 'https://linux.die.net/man/1/makedepend'
+description = "The makedepend package contains a C-preprocessor like utility to determine build-time dependencies."
+
+toolchain = {'name': 'GCCcore', 'version': '10.3.0'}
+
+source_urls = [XORG_UTIL_SOURCE]
+sources = [SOURCE_TAR_GZ]
+checksums = ['845f6708fc850bf53f5b1d0fb4352c4feab3949f140b26f71b22faba354c3365']
+
+builddependencies = [
+    ('binutils', '2.36.1'),
+    ('xproto', '7.0.31'),
+    ('xorg-macros', '1.19.3'),
+]
+
+sanity_check_paths = {
+    'files': ['bin/makedepend'],
+    'dirs': [],
+}
+
+moduleclass = 'devel'

+ 54 - 0
intel-2021.02/matplotlib-3.3.3-intel-2021.02.eb

@@ -0,0 +1,54 @@
+easyblock = 'PythonBundle'
+
+name = 'matplotlib'
+version = '3.3.3'
+
+homepage = 'https://matplotlib.org'
+description = """matplotlib is a python 2D plotting library which produces publication quality figures in a variety of
+ hardcopy formats and interactive environments across platforms. matplotlib can be used in python scripts, the python
+ and ipython shell, web application servers, and six graphical user interface toolkits."""
+
+toolchain = {'name': 'intel', 'version': '2021.02'}
+
+builddependencies = [
+    ('pkg-config', '0.29.2'),
+]
+
+dependencies = [
+    ('Python', '3.9.5'),
+    ('SciPy-bundle', '2021.05'),
+    ('libpng', '1.6.37'),
+    ('freetype', '2.10.4'),
+    ('Tkinter', '3.8.6'),
+    ('Pillow', '8.2.0'),
+]
+
+use_pip = True
+sanity_pip_check = True
+
+exts_list = [
+    ('Cycler', '0.10.0', {
+        'modulename': 'cycler',
+        'source_tmpl': 'cycler-%(version)s.tar.gz',
+        'checksums': ['cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8'],
+    }),
+    ('kiwisolver', '1.3.0', {
+        'checksums': ['14f81644e1f3bf01fbc8b9c990a7889e9bb4400c4d0ff9155aa0bdd19cce24a9'],
+    }),
+    (name, version, {
+        'prebuildopts': "export CPLUS_INCLUDE_PATH=$EBROOTFREETYPE/include/freetype2:${CPLUS_INCLUDE_PATH} && ",
+        'preinstallopts': "export CPLUS_INCLUDE_PATH=$EBROOTFREETYPE/include/freetype2:${CPLUS_INCLUDE_PATH} && ",
+        'checksums': ['b1b60c6476c4cfe9e5cf8ab0d3127476fd3d5f05de0f343a452badaad0e4bdec'],
+    }),
+]
+
+sanity_check_commands = [
+    """python -c 'import matplotlib; matplotlib.use("Agg"); import matplotlib.pyplot' """,
+    "python -c 'from mpl_toolkits.mplot3d import Axes3D'",
+]
+
+# use non-interactive plotting backend as default
+# see https://matplotlib.org/tutorials/introductory/usage.html#what-is-a-backend
+modextravars = {'MPLBACKEND': 'Agg'}
+
+moduleclass = 'vis'

+ 42 - 0
intel-2021.02/ncview-2.1.8-iimpi-2021.02.eb

@@ -0,0 +1,42 @@
+##
+# This file is an EasyBuild reciPY as per https://github.com/easybuilders/easybuild
+##
+easyblock = 'ConfigureMake'
+
+name = 'ncview'
+version = '2.1.8'
+
+homepage = 'http://meteora.ucsd.edu/~pierce/ncview_home_page.html'
+description = """Ncview is a visual browser for netCDF format files. 
+Typically you would use ncview to get a quick and easy, push-button 
+look at your netCDF files. You can view simple movies of the data, 
+view along various dimensions, take a look at the actual data values, 
+change color maps, invert the data, etc."""
+
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
+toolchainopts = {'usempi': True, 'pic': True}
+
+source_urls = ['ftp://cirrus.ucsd.edu/pub/ncview/']
+sources = [SOURCE_TAR_GZ]
+checksums = ['e8badc507b9b774801288d1c2d59eb79ab31b004df4858d0674ed0d87dfc91be']
+
+# specified compiler is hard checked against (full path to) compiler used for netCDF...
+preconfigopts = "CC=$(command -v $CC) "
+configopts = "--with-udunits2_incdir=$EBROOTUDUNITS/include --with-udunits2_libdir=$EBROOTUDUNITS/lib "
+configopts += "--with-nc-config=$EBROOTNETCDF/bin/nc-config"
+
+dependencies = [
+    ('netCDF', '4.8.0'),
+    ('netCDF-Fortran', '4.5.3'),
+    ('UDUNITS', '2.2.28'),
+    ('X11', '20210518'),
+    ('libpng', '1.6.37'),
+    ('zlib', '1.2.11'),
+]
+
+sanity_check_paths = {
+    'files': ['bin/ncview'],
+    'dirs': [],
+}
+
+moduleclass = 'vis'

+ 12 - 13
foss-2020b/netCDF-4.7.4-gompi-2020b.eb → intel-2021.02/netCDF-4.8.0-iimpi-2021.02.eb

@@ -1,38 +1,37 @@
 name = 'netCDF'
-version = '4.7.4'
+version = '4.8.0'
 
 homepage = 'https://www.unidata.ucar.edu/software/netcdf/'
 description = """NetCDF (network Common Data Form) is a set of software libraries
  and machine-independent data formats that support the creation, access, and sharing of array-oriented
  scientific data."""
 
-toolchain = {'name': 'gompi', 'version': '2020b'}
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
 toolchainopts = {'pic': True, 'usempi': True}
 
 source_urls = ['https://github.com/Unidata/netcdf-c/archive/']
 sources = ['v%(version)s.tar.gz']
-patches = ['netCDF-%(version)s_fix-ocdebug.patch']
-checksums = [
-    '99930ad7b3c4c1a8e8831fb061cb02b2170fc8e5ccaeda733bd99c3b9d31666b',  # v4.7.4.tar.gz
-    '94c9883005f189a4551947e04191a68199cf4cdcada4b2d313115720fb4690e9',  # netCDF-4.7.4_fix-ocdebug.patch
-]
+checksums = ['aff58f02b1c3e91dc68f989746f652fe51ff39e6270764e484920cb8db5ad092']
 
 builddependencies = [
-    ('Autotools', '20200321'),
-    ('CMake', '3.18.4'),
-    ('Doxygen', '1.8.20'),
+    ('Autotools', '20210128'),
+    ('CMake', '3.20.1'),
+    ('Doxygen', '1.9.1'),
 ]
 
 dependencies = [
     ('HDF5', '1.10.7'),
-    ('cURL', '7.72.0'),
+    ('cURL', '7.76.0'),
     ('Szip', '2.1.1'),
 ]
 
+# HDF5 version detection is missed in netCDF 4.8.0 when HDF5_C_LIBRARY, HDF5_INCLUDE_DIR, and HDF5_HL_LIBRARY are set
+local_hdf5_version_fix = '-DHDF5_VERSION=$EBVERSIONHDF5'
+
 # make sure both static and shared libs are built
 configopts = [
-    "-DBUILD_SHARED_LIBS=OFF ",
-    "-DBUILD_SHARED_LIBS=ON ",
+    "-DBUILD_SHARED_LIBS=OFF %s " % local_hdf5_version_fix,
+    "-DBUILD_SHARED_LIBS=ON %s " % local_hdf5_version_fix,
 ]
 
 moduleclass = 'data'

+ 25 - 0
intel-2021.02/netCDF-C++4-4.3.1-iimpi-2021.02.eb

@@ -0,0 +1,25 @@
+easyblock = 'ConfigureMake'
+
+name = 'netCDF-C++4'
+version = '4.3.1'
+
+homepage = 'https://www.unidata.ucar.edu/software/netcdf/'
+description = """NetCDF (network Common Data Form) is a set of software libraries
+ and machine-independent data formats that support the creation, access, and sharing of array-oriented
+ scientific data."""
+
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
+toolchainopts = {'pic': True}
+
+source_urls = ['https://github.com/Unidata/netcdf-cxx4/archive/']
+sources = ['v%(version)s.tar.gz']
+checksums = ['e3fe3d2ec06c1c2772555bf1208d220aab5fee186d04bd265219b0bc7a978edc']
+
+dependencies = [('netCDF', '4.8.0')]
+
+sanity_check_paths = {
+    'files': ['include/netcdf', 'lib/libnetcdf_c++4.a', 'lib/libnetcdf_c++4.%s' % SHLIB_EXT],
+    'dirs': [],
+}
+
+moduleclass = 'data'

+ 25 - 0
intel-2021.02/netCDF-Fortran-4.5.3-iimpi-2021.02.eb

@@ -0,0 +1,25 @@
+name = 'netCDF-Fortran'
+version = '4.5.3'
+
+homepage = 'https://www.unidata.ucar.edu/software/netcdf/'
+description = """NetCDF (network Common Data Form) is a set of software libraries
+ and machine-independent data formats that support the creation, access, and sharing of array-oriented
+ scientific data."""
+
+toolchain = {'name': 'iimpi', 'version': '2021.02'}
+toolchainopts = {'pic': True, 'usempi': True}
+
+source_urls = ['https://github.com/Unidata/netcdf-fortran/archive/']
+sources = ['v%(version)s.tar.gz']
+checksums = ['c6da30c2fe7e4e614c1dff4124e857afbd45355c6798353eccfa60c0702b495a']
+
+builddependencies = [
+    ('M4', '1.4.18'),
+]
+
+dependencies = [('netCDF', '4.8.0')]
+
+# (too) parallel build fails, but single-core build is fairly quick anyway (~1min)
+parallel = 1
+
+moduleclass = 'data'

+ 57 - 0
intel-2021.02/numpy-1.20.3-mkl.patch

@@ -0,0 +1,57 @@
+fix issues in numpy distutils pkg w.r.t. detecting BLAS/LAPACK libraries
+by Pierre-Yves Barriat (UCLouvain)
+
+Remade against numpy-1.20.3
+diff -ru numpy-1.20.3.orig/numpy/distutils/fcompiler/__init__.py numpy-1.20.3/numpy/distutils/fcompiler/__init__.py
+--- numpy-1.20.3.orig/numpy/distutils/fcompiler/__init__.py	2021-05-09 12:38:00.000000000 +0200
++++ numpy-1.20.3/numpy/distutils/fcompiler/__init__.py	2021-08-06 12:57:51.306766559 +0200
+@@ -631,7 +631,10 @@
+         return options
+ 
+     def library_option(self, lib):
+-        return "-l" + lib
++        if lib[0]=='-':
++            return lib
++        else:
++            return "-l" + lib
+     def library_dir_option(self, dir):
+         return "-L" + dir
+ 
+diff -ru numpy-1.20.3.orig/numpy/distutils/system_info.py numpy-1.20.3/numpy/distutils/system_info.py
+--- numpy-1.20.3.orig/numpy/distutils/system_info.py	2021-05-09 12:38:00.000000000 +0200
++++ numpy-1.20.3/numpy/distutils/system_info.py	2021-08-06 13:00:54.466820115 +0200
+@@ -947,7 +947,7 @@
+             if is_string(default):
+                 return [default]
+             return default
+-        return [b for b in [a.strip() for a in libs.split(',')] if b]
++        return [b for b in [a.strip().replace(':',',') for a in libs.split(',')] if b]
+ 
+     def get_libraries(self, key='libraries'):
+         if hasattr(self, '_lib_names'):
+@@ -1037,6 +1037,9 @@
+         # make sure we preserve the order of libs, as it can be important
+         found_dirs, found_libs = [], []
+         for lib in libs:
++            if lib[0] == '-':
++                found_libs.append(lib)
++                continue
+             for lib_dir in lib_dirs:
+                 found_lib = self._find_lib(lib_dir, lib, exts)
+                 if found_lib:
+diff -ru numpy-1.20.3.orig/numpy/distutils/unixccompiler.py numpy-1.20.3/numpy/distutils/unixccompiler.py
+--- numpy-1.20.3.orig/numpy/distutils/unixccompiler.py	2021-05-09 12:38:00.000000000 +0200
++++ numpy-1.20.3/numpy/distutils/unixccompiler.py	2021-08-06 13:02:27.719484364 +0200
+@@ -138,3 +138,12 @@
+ 
+ replace_method(UnixCCompiler, 'create_static_lib',
+                UnixCCompiler_create_static_lib)
++
++def UnixCCompiler_library_option(self, lib):
++    if lib[0]=='-':
++        return lib
++    else:
++        return "-l" + lib
++
++replace_method(UnixCCompiler, 'library_option',
++               UnixCCompiler_library_option)

+ 1025 - 0
intel-2021.02/numpy/__init__.py

@@ -0,0 +1,1025 @@
+"""numpy.distutils.fcompiler
+
+Contains FCompiler, an abstract base class that defines the interface
+for the numpy.distutils Fortran compiler abstraction model.
+
+Terminology:
+
+To be consistent, where the term 'executable' is used, it means the single
+file, like 'gcc', that is executed, and should be a string. In contrast,
+'command' means the entire command line, like ['gcc', '-c', 'file.c'], and
+should be a list.
+
+But note that FCompiler.executables is actually a dictionary of commands.
+
+"""
+__all__ = ['FCompiler', 'new_fcompiler', 'show_fcompilers',
+           'dummy_fortran_file']
+
+import os
+import sys
+import re
+
+from distutils.sysconfig import get_python_lib
+from distutils.fancy_getopt import FancyGetopt
+from distutils.errors import DistutilsModuleError, \
+     DistutilsExecError, CompileError, LinkError, DistutilsPlatformError
+from distutils.util import split_quoted, strtobool
+
+from numpy.distutils.ccompiler import CCompiler, gen_lib_options
+from numpy.distutils import log
+from numpy.distutils.misc_util import is_string, all_strings, is_sequence, \
+    make_temp_file, get_shared_lib_extension
+from numpy.distutils.exec_command import find_executable
+from numpy.distutils import _shell_utils
+
+from .environment import EnvironmentConfig
+
+__metaclass__ = type
+
+class CompilerNotFound(Exception):
+    pass
+
+def flaglist(s):
+    if is_string(s):
+        return split_quoted(s)
+    else:
+        return s
+
+def str2bool(s):
+    if is_string(s):
+        return strtobool(s)
+    return bool(s)
+
+def is_sequence_of_strings(seq):
+    return is_sequence(seq) and all_strings(seq)
+
+class FCompiler(CCompiler):
+    """Abstract base class to define the interface that must be implemented
+    by real Fortran compiler classes.
+
+    Methods that subclasses may redefine:
+
+        update_executables(), find_executables(), get_version()
+        get_flags(), get_flags_opt(), get_flags_arch(), get_flags_debug()
+        get_flags_f77(), get_flags_opt_f77(), get_flags_arch_f77(),
+        get_flags_debug_f77(), get_flags_f90(), get_flags_opt_f90(),
+        get_flags_arch_f90(), get_flags_debug_f90(),
+        get_flags_fix(), get_flags_linker_so()
+
+    DON'T call these methods (except get_version) after
+    constructing a compiler instance or inside any other method.
+    All methods, except update_executables() and find_executables(),
+    may call the get_version() method.
+
+    After constructing a compiler instance, always call customize(dist=None)
+    method that finalizes compiler construction and makes the following
+    attributes available:
+      compiler_f77
+      compiler_f90
+      compiler_fix
+      linker_so
+      archiver
+      ranlib
+      libraries
+      library_dirs
+    """
+
+    # These are the environment variables and distutils keys used.
+    # Each configuration description is
+    # (<hook name>, <environment variable>, <key in distutils.cfg>, <convert>, <append>)
+    # The hook names are handled by the self._environment_hook method.
+    #  - names starting with 'self.' call methods in this class
+    #  - names starting with 'exe.' return the key in the executables dict
+    #  - names like 'flags.YYY' return self.get_flag_YYY()
+    # convert is either None or a function to convert a string to the
+    # appropriate type used.
+
+    distutils_vars = EnvironmentConfig(
+        distutils_section='config_fc',
+        noopt = (None, None, 'noopt', str2bool, False),
+        noarch = (None, None, 'noarch', str2bool, False),
+        debug = (None, None, 'debug', str2bool, False),
+        verbose = (None, None, 'verbose', str2bool, False),
+    )
+
+    command_vars = EnvironmentConfig(
+        distutils_section='config_fc',
+        compiler_f77 = ('exe.compiler_f77', 'F77', 'f77exec', None, False),
+        compiler_f90 = ('exe.compiler_f90', 'F90', 'f90exec', None, False),
+        compiler_fix = ('exe.compiler_fix', 'F90', 'f90exec', None, False),
+        version_cmd = ('exe.version_cmd', None, None, None, False),
+        linker_so = ('exe.linker_so', 'LDSHARED', 'ldshared', None, False),
+        linker_exe = ('exe.linker_exe', 'LD', 'ld', None, False),
+        archiver = (None, 'AR', 'ar', None, False),
+        ranlib = (None, 'RANLIB', 'ranlib', None, False),
+    )
+
+    flag_vars = EnvironmentConfig(
+        distutils_section='config_fc',
+        f77 = ('flags.f77', 'F77FLAGS', 'f77flags', flaglist, True),
+        f90 = ('flags.f90', 'F90FLAGS', 'f90flags', flaglist, True),
+        free = ('flags.free', 'FREEFLAGS', 'freeflags', flaglist, True),
+        fix = ('flags.fix', None, None, flaglist, False),
+        opt = ('flags.opt', 'FOPT', 'opt', flaglist, True),
+        opt_f77 = ('flags.opt_f77', None, None, flaglist, False),
+        opt_f90 = ('flags.opt_f90', None, None, flaglist, False),
+        arch = ('flags.arch', 'FARCH', 'arch', flaglist, False),
+        arch_f77 = ('flags.arch_f77', None, None, flaglist, False),
+        arch_f90 = ('flags.arch_f90', None, None, flaglist, False),
+        debug = ('flags.debug', 'FDEBUG', 'fdebug', flaglist, True),
+        debug_f77 = ('flags.debug_f77', None, None, flaglist, False),
+        debug_f90 = ('flags.debug_f90', None, None, flaglist, False),
+        flags = ('self.get_flags', 'FFLAGS', 'fflags', flaglist, True),
+        linker_so = ('flags.linker_so', 'LDFLAGS', 'ldflags', flaglist, True),
+        linker_exe = ('flags.linker_exe', 'LDFLAGS', 'ldflags', flaglist, True),
+        ar = ('flags.ar', 'ARFLAGS', 'arflags', flaglist, True),
+    )
+
+    language_map = {'.f': 'f77',
+                    '.for': 'f77',
+                    '.F': 'f77',    # XXX: needs preprocessor
+                    '.ftn': 'f77',
+                    '.f77': 'f77',
+                    '.f90': 'f90',
+                    '.F90': 'f90',  # XXX: needs preprocessor
+                    '.f95': 'f90',
+                    }
+    language_order = ['f90', 'f77']
+
+
+    # These will be set by the subclass
+
+    compiler_type = None
+    compiler_aliases = ()
+    version_pattern = None
+
+    possible_executables = []
+    executables = {
+        'version_cmd': ["f77", "-v"],
+        'compiler_f77': ["f77"],
+        'compiler_f90': ["f90"],
+        'compiler_fix': ["f90", "-fixed"],
+        'linker_so': ["f90", "-shared"],
+        'linker_exe': ["f90"],
+        'archiver': ["ar", "-cr"],
+        'ranlib': None,
+        }
+
+    # If compiler does not support compiling Fortran 90 then it can
+    # suggest using another compiler. For example, gnu would suggest
+    # gnu95 compiler type when there are F90 sources.
+    suggested_f90_compiler = None
+
+    compile_switch = "-c"
+    object_switch = "-o "   # Ending space matters! It will be stripped
+                            # but if it is missing then object_switch
+                            # will be prefixed to object file name by
+                            # string concatenation.
+    library_switch = "-o "  # Ditto!
+
+    # Switch to specify where module files are created and searched
+    # for USE statement.  Normally it is a string and also here ending
+    # space matters. See above.
+    module_dir_switch = None
+
+    # Switch to specify where module files are searched for USE statement.
+    module_include_switch = '-I'
+
+    pic_flags = []           # Flags to create position-independent code
+
+    src_extensions = ['.for', '.ftn', '.f77', '.f', '.f90', '.f95', '.F', '.F90', '.FOR']
+    obj_extension = ".o"
+
+    shared_lib_extension = get_shared_lib_extension()
+    static_lib_extension = ".a"  # or .lib
+    static_lib_format = "lib%s%s" # or %s%s
+    shared_lib_format = "%s%s"
+    exe_extension = ""
+
+    _exe_cache = {}
+
+    _executable_keys = ['version_cmd', 'compiler_f77', 'compiler_f90',
+                        'compiler_fix', 'linker_so', 'linker_exe', 'archiver',
+                        'ranlib']
+
+    # This will be set by new_fcompiler when called in
+    # command/{build_ext.py, build_clib.py, config.py} files.
+    c_compiler = None
+
+    # extra_{f77,f90}_compile_args are set by build_ext.build_extension method
+    extra_f77_compile_args = []
+    extra_f90_compile_args = []
+
+    def __init__(self, *args, **kw):
+        CCompiler.__init__(self, *args, **kw)
+        self.distutils_vars = self.distutils_vars.clone(self._environment_hook)
+        self.command_vars = self.command_vars.clone(self._environment_hook)
+        self.flag_vars = self.flag_vars.clone(self._environment_hook)
+        self.executables = self.executables.copy()
+        for e in self._executable_keys:
+            if e not in self.executables:
+                self.executables[e] = None
+
+        # Some methods depend on .customize() being called first, so
+        # this keeps track of whether that's happened yet.
+        self._is_customised = False
+
+    def __copy__(self):
+        obj = self.__new__(self.__class__)
+        obj.__dict__.update(self.__dict__)
+        obj.distutils_vars = obj.distutils_vars.clone(obj._environment_hook)
+        obj.command_vars = obj.command_vars.clone(obj._environment_hook)
+        obj.flag_vars = obj.flag_vars.clone(obj._environment_hook)
+        obj.executables = obj.executables.copy()
+        return obj
+
+    def copy(self):
+        return self.__copy__()
+
+    # Use properties for the attributes used by CCompiler. Setting them
+    # as attributes from the self.executables dictionary is error-prone,
+    # so we get them from there each time.
+    def _command_property(key):
+        def fget(self):
+            assert self._is_customised
+            return self.executables[key]
+        return property(fget=fget)
+    version_cmd = _command_property('version_cmd')
+    compiler_f77 = _command_property('compiler_f77')
+    compiler_f90 = _command_property('compiler_f90')
+    compiler_fix = _command_property('compiler_fix')
+    linker_so = _command_property('linker_so')
+    linker_exe = _command_property('linker_exe')
+    archiver = _command_property('archiver')
+    ranlib = _command_property('ranlib')
+
+    # Make our terminology consistent.
+    def set_executable(self, key, value):
+        self.set_command(key, value)
+
+    def set_commands(self, **kw):
+        for k, v in kw.items():
+            self.set_command(k, v)
+
+    def set_command(self, key, value):
+        if not key in self._executable_keys:
+            raise ValueError(
+                "unknown executable '%s' for class %s" %
+                (key, self.__class__.__name__))
+        if is_string(value):
+            value = split_quoted(value)
+        assert value is None or is_sequence_of_strings(value[1:]), (key, value)
+        self.executables[key] = value
+
+    ######################################################################
+    ## Methods that subclasses may redefine. But don't call these methods!
+    ## They are private to FCompiler class and may return unexpected
+    ## results if used elsewhere. So, you have been warned.
+
+    def find_executables(self):
+        """Go through the self.executables dictionary, and attempt to
+        find and assign appropriate executables.
+
+        Executable names are looked for in the environment (environment
+        variables, the distutils.cfg, and command line), the 0th element of
+        the command list, and the self.possible_executables list.
+
+        Also, if the 0th element is "<F77>" or "<F90>", the Fortran 77
+        or the Fortran 90 compiler executable is used, unless overridden
+        by an environment setting.
+
+        Subclasses should call this if overridden.
+        """
+        assert self._is_customised
+        exe_cache = self._exe_cache
+        def cached_find_executable(exe):
+            if exe in exe_cache:
+                return exe_cache[exe]
+            fc_exe = find_executable(exe)
+            exe_cache[exe] = exe_cache[fc_exe] = fc_exe
+            return fc_exe
+        def verify_command_form(name, value):
+            if value is not None and not is_sequence_of_strings(value):
+                raise ValueError(
+                    "%s value %r is invalid in class %s" %
+                    (name, value, self.__class__.__name__))
+        def set_exe(exe_key, f77=None, f90=None):
+            cmd = self.executables.get(exe_key, None)
+            if not cmd:
+                return None
+            # Note that we get cmd[0] here if the environment doesn't
+            # have anything set
+            exe_from_environ = getattr(self.command_vars, exe_key)
+            if not exe_from_environ:
+                possibles = [f90, f77] + self.possible_executables
+            else:
+                possibles = [exe_from_environ] + self.possible_executables
+
+            seen = set()
+            unique_possibles = []
+            for e in possibles:
+                if e == '<F77>':
+                    e = f77
+                elif e == '<F90>':
+                    e = f90
+                if not e or e in seen:
+                    continue
+                seen.add(e)
+                unique_possibles.append(e)
+
+            for exe in unique_possibles:
+                fc_exe = cached_find_executable(exe)
+                if fc_exe:
+                    cmd[0] = fc_exe
+                    return fc_exe
+            self.set_command(exe_key, None)
+            return None
+
+        ctype = self.compiler_type
+        f90 = set_exe('compiler_f90')
+        if not f90:
+            f77 = set_exe('compiler_f77')
+            if f77:
+                log.warn('%s: no Fortran 90 compiler found' % ctype)
+            else:
+                raise CompilerNotFound('%s: f90 nor f77' % ctype)
+        else:
+            f77 = set_exe('compiler_f77', f90=f90)
+            if not f77:
+                log.warn('%s: no Fortran 77 compiler found' % ctype)
+            set_exe('compiler_fix', f90=f90)
+
+        set_exe('linker_so', f77=f77, f90=f90)
+        set_exe('linker_exe', f77=f77, f90=f90)
+        set_exe('version_cmd', f77=f77, f90=f90)
+        set_exe('archiver')
+        set_exe('ranlib')
+
+    def update_executables(self):
+        """Called at the beginning of customisation. Subclasses should
+        override this if they need to set up the executables dictionary.
+
+        Note that self.find_executables() is run afterwards, so the
+        self.executables dictionary values can contain <F77> or <F90> as
+        the command, which will be replaced by the found F77 or F90
+        compiler.
+        """
+        pass
+
+    def get_flags(self):
+        """List of flags common to all compiler types."""
+        return [] + self.pic_flags
+
+    def _get_command_flags(self, key):
+        cmd = self.executables.get(key, None)
+        if cmd is None:
+            return []
+        return cmd[1:]
+
+    def get_flags_f77(self):
+        """List of Fortran 77 specific flags."""
+        return self._get_command_flags('compiler_f77')
+    def get_flags_f90(self):
+        """List of Fortran 90 specific flags."""
+        return self._get_command_flags('compiler_f90')
+    def get_flags_free(self):
+        """List of Fortran 90 free format specific flags."""
+        return []
+    def get_flags_fix(self):
+        """List of Fortran 90 fixed format specific flags."""
+        return self._get_command_flags('compiler_fix')
+    def get_flags_linker_so(self):
+        """List of linker flags to build a shared library."""
+        return self._get_command_flags('linker_so')
+    def get_flags_linker_exe(self):
+        """List of linker flags to build an executable."""
+        return self._get_command_flags('linker_exe')
+    def get_flags_ar(self):
+        """List of archiver flags. """
+        return self._get_command_flags('archiver')
+    def get_flags_opt(self):
+        """List of architecture independent compiler flags."""
+        return []
+    def get_flags_arch(self):
+        """List of architecture dependent compiler flags."""
+        return []
+    def get_flags_debug(self):
+        """List of compiler flags to compile with debugging information."""
+        return []
+
+    get_flags_opt_f77 = get_flags_opt_f90 = get_flags_opt
+    get_flags_arch_f77 = get_flags_arch_f90 = get_flags_arch
+    get_flags_debug_f77 = get_flags_debug_f90 = get_flags_debug
+
+    def get_libraries(self):
+        """List of compiler libraries."""
+        return self.libraries[:]
+    def get_library_dirs(self):
+        """List of compiler library directories."""
+        return self.library_dirs[:]
+
+    def get_version(self, force=False, ok_status=[0]):
+        assert self._is_customised
+        version = CCompiler.get_version(self, force=force, ok_status=ok_status)
+        if version is None:
+            raise CompilerNotFound()
+        return version
+
+
+    ############################################################
+
+    ## Public methods:
+
+    def customize(self, dist = None):
+        """Customize Fortran compiler.
+
+        This method gets Fortran compiler specific information from
+        (i) class definition, (ii) environment, (iii) distutils config
+        files, and (iv) command line (later overrides earlier).
+
+        This method should always be called after constructing a
+        compiler instance. But not in __init__ because Distribution
+        instance is needed for (iii) and (iv).
+        """
+        log.info('customize %s' % (self.__class__.__name__))
+
+        self._is_customised = True
+
+        self.distutils_vars.use_distribution(dist)
+        self.command_vars.use_distribution(dist)
+        self.flag_vars.use_distribution(dist)
+
+        self.update_executables()
+
+        # find_executables takes care of setting the compiler commands,
+        # version_cmd, linker_so, linker_exe, ar, and ranlib
+        self.find_executables()
+
+        noopt = self.distutils_vars.get('noopt', False)
+        noarch = self.distutils_vars.get('noarch', noopt)
+        debug = self.distutils_vars.get('debug', False)
+
+        f77 = self.command_vars.compiler_f77
+        f90 = self.command_vars.compiler_f90
+
+        f77flags = []
+        f90flags = []
+        freeflags = []
+        fixflags = []
+
+        if f77:
+            f77 = _shell_utils.NativeParser.split(f77)
+            f77flags = self.flag_vars.f77
+        if f90:
+            f90 = _shell_utils.NativeParser.split(f90)
+            f90flags = self.flag_vars.f90
+            freeflags = self.flag_vars.free
+        # XXX Assuming that free format is default for f90 compiler.
+        fix = self.command_vars.compiler_fix
+        # NOTE: this and similar examples are probably just
+        # excluding --coverage flag when F90 = gfortran --coverage
+        # instead of putting that flag somewhere more appropriate
+        # this and similar examples where a Fortran compiler
+        # environment variable has been customized by CI or a user
+        # should perhaps eventually be more thoroughly tested and more
+        # robustly handled
+        if fix:
+            fix = _shell_utils.NativeParser.split(fix)
+            fixflags = self.flag_vars.fix + f90flags
+
+        oflags, aflags, dflags = [], [], []
+        # examine get_flags_<tag>_<compiler> for extra flags
+        # only add them if the method is different from get_flags_<tag>
+        def get_flags(tag, flags):
+            # note that self.flag_vars.<tag> calls self.get_flags_<tag>()
+            flags.extend(getattr(self.flag_vars, tag))
+            this_get = getattr(self, 'get_flags_' + tag)
+            for name, c, flagvar in [('f77', f77, f77flags),
+                                     ('f90', f90, f90flags),
+                                     ('f90', fix, fixflags)]:
+                t = '%s_%s' % (tag, name)
+                if c and this_get is not getattr(self, 'get_flags_' + t):
+                    flagvar.extend(getattr(self.flag_vars, t))
+        if not noopt:
+            get_flags('opt', oflags)
+            if not noarch:
+                get_flags('arch', aflags)
+        if debug:
+            get_flags('debug', dflags)
+
+        fflags = self.flag_vars.flags + dflags + oflags + aflags
+
+        if f77:
+            self.set_commands(compiler_f77=f77+f77flags+fflags)
+        if f90:
+            self.set_commands(compiler_f90=f90+freeflags+f90flags+fflags)
+        if fix:
+            self.set_commands(compiler_fix=fix+fixflags+fflags)
+
+
+        #XXX: Do we need LDSHARED->SOSHARED, LDFLAGS->SOFLAGS
+        linker_so = self.linker_so
+        if linker_so:
+            linker_so_flags = self.flag_vars.linker_so
+            if sys.platform.startswith('aix'):
+                python_lib = get_python_lib(standard_lib=1)
+                ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
+                python_exp = os.path.join(python_lib, 'config', 'python.exp')
+                linker_so = [ld_so_aix] + linker_so + ['-bI:'+python_exp]
+            self.set_commands(linker_so=linker_so+linker_so_flags)
+
+        linker_exe = self.linker_exe
+        if linker_exe:
+            linker_exe_flags = self.flag_vars.linker_exe
+            self.set_commands(linker_exe=linker_exe+linker_exe_flags)
+
+        ar = self.command_vars.archiver
+        if ar:
+            arflags = self.flag_vars.ar
+            self.set_commands(archiver=[ar]+arflags)
+
+        self.set_library_dirs(self.get_library_dirs())
+        self.set_libraries(self.get_libraries())
+
+    def dump_properties(self):
+        """Print out the attributes of a compiler instance."""
+        props = []
+        for key in list(self.executables.keys()) + \
+                ['version', 'libraries', 'library_dirs',
+                 'object_switch', 'compile_switch']:
+            if hasattr(self, key):
+                v = getattr(self, key)
+                props.append((key, None, '= '+repr(v)))
+        props.sort()
+
+        pretty_printer = FancyGetopt(props)
+        for l in pretty_printer.generate_help("%s instance properties:" \
+                                              % (self.__class__.__name__)):
+            if l[:4]=='  --':
+                l = '  ' + l[4:]
+            print(l)
+
+    ###################
+
+    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+        """Compile 'src' to product 'obj'."""
+        src_flags = {}
+        if is_f_file(src) and not has_f90_header(src):
+            flavor = ':f77'
+            compiler = self.compiler_f77
+            src_flags = get_f77flags(src)
+            extra_compile_args = self.extra_f77_compile_args or []
+        elif is_free_format(src):
+            flavor = ':f90'
+            compiler = self.compiler_f90
+            if compiler is None:
+                raise DistutilsExecError('f90 not supported by %s needed for %s'\
+                      % (self.__class__.__name__, src))
+            extra_compile_args = self.extra_f90_compile_args or []
+        else:
+            flavor = ':fix'
+            compiler = self.compiler_fix
+            if compiler is None:
+                raise DistutilsExecError('f90 (fixed) not supported by %s needed for %s'\
+                      % (self.__class__.__name__, src))
+            extra_compile_args = self.extra_f90_compile_args or []
+        if self.object_switch[-1]==' ':
+            o_args = [self.object_switch.strip(), obj]
+        else:
+            o_args = [self.object_switch.strip()+obj]
+
+        assert self.compile_switch.strip()
+        s_args = [self.compile_switch, src]
+
+        if extra_compile_args:
+            log.info('extra %s options: %r' \
+                     % (flavor[1:], ' '.join(extra_compile_args)))
+
+        extra_flags = src_flags.get(self.compiler_type, [])
+        if extra_flags:
+            log.info('using compile options from source: %r' \
+                     % ' '.join(extra_flags))
+
+        command = compiler + cc_args + extra_flags + s_args + o_args \
+                  + extra_postargs + extra_compile_args
+
+        display = '%s: %s' % (os.path.basename(compiler[0]) + flavor,
+                              src)
+        try:
+            self.spawn(command, display=display)
+        except DistutilsExecError as e:
+            msg = str(e)
+            raise CompileError(msg)
+
+    def module_options(self, module_dirs, module_build_dir):
+        options = []
+        if self.module_dir_switch is not None:
+            if self.module_dir_switch[-1]==' ':
+                options.extend([self.module_dir_switch.strip(), module_build_dir])
+            else:
+                options.append(self.module_dir_switch.strip()+module_build_dir)
+        else:
+            print('XXX: module_build_dir=%r option ignored' % (module_build_dir))
+            print('XXX: Fix module_dir_switch for ', self.__class__.__name__)
+        if self.module_include_switch is not None:
+            for d in [module_build_dir]+module_dirs:
+                options.append('%s%s' % (self.module_include_switch, d))
+        else:
+            print('XXX: module_dirs=%r option ignored' % (module_dirs))
+            print('XXX: Fix module_include_switch for ', self.__class__.__name__)
+        return options
+
+    def library_option(self, lib):
+        if lib[0]=='-':
+            return lib
+        else:
+            return "-l" + lib
+    def library_dir_option(self, dir):
+        return "-L" + dir
+
+    def link(self, target_desc, objects,
+             output_filename, output_dir=None, libraries=None,
+             library_dirs=None, runtime_library_dirs=None,
+             export_symbols=None, debug=0, extra_preargs=None,
+             extra_postargs=None, build_temp=None, target_lang=None):
+        objects, output_dir = self._fix_object_args(objects, output_dir)
+        libraries, library_dirs, runtime_library_dirs = \
+            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
+
+        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
+                                   libraries)
+        if is_string(output_dir):
+            output_filename = os.path.join(output_dir, output_filename)
+        elif output_dir is not None:
+            raise TypeError("'output_dir' must be a string or None")
+
+        if self._need_link(objects, output_filename):
+            if self.library_switch[-1]==' ':
+                o_args = [self.library_switch.strip(), output_filename]
+            else:
+                o_args = [self.library_switch.strip()+output_filename]
+
+            if is_string(self.objects):
+                ld_args = objects + [self.objects]
+            else:
+                ld_args = objects + self.objects
+            ld_args = ld_args + lib_opts + o_args
+            if debug:
+                ld_args[:0] = ['-g']
+            if extra_preargs:
+                ld_args[:0] = extra_preargs
+            if extra_postargs:
+                ld_args.extend(extra_postargs)
+            self.mkpath(os.path.dirname(output_filename))
+            if target_desc == CCompiler.EXECUTABLE:
+                linker = self.linker_exe[:]
+            else:
+                linker = self.linker_so[:]
+            command = linker + ld_args
+            try:
+                self.spawn(command)
+            except DistutilsExecError as e:
+                msg = str(e)
+                raise LinkError(msg)
+        else:
+            log.debug("skipping %s (up-to-date)", output_filename)
+
+    def _environment_hook(self, name, hook_name):
+        if hook_name is None:
+            return None
+        if is_string(hook_name):
+            if hook_name.startswith('self.'):
+                hook_name = hook_name[5:]
+                hook = getattr(self, hook_name)
+                return hook()
+            elif hook_name.startswith('exe.'):
+                hook_name = hook_name[4:]
+                var = self.executables[hook_name]
+                if var:
+                    return var[0]
+                else:
+                    return None
+            elif hook_name.startswith('flags.'):
+                hook_name = hook_name[6:]
+                hook = getattr(self, 'get_flags_' + hook_name)
+                return hook()
+        else:
+            return hook_name()
+
+    def can_ccompiler_link(self, ccompiler):
+        """
+        Check if the given C compiler can link objects produced by
+        this compiler.
+        """
+        return True
+
+    def wrap_unlinkable_objects(self, objects, output_dir, extra_dll_dir):
+        """
+        Convert a set of object files that are not compatible with the default
+        linker, to a file that is compatible.
+
+        Parameters
+        ----------
+        objects : list
+            List of object files to include.
+        output_dir : str
+            Output directory to place generated object files.
+        extra_dll_dir : str
+            Output directory to place extra DLL files that need to be
+            included on Windows.
+
+        Returns
+        -------
+        converted_objects : list of str
+             List of converted object files.
+             Note that the number of output files is not necessarily
+             the same as inputs.
+
+        """
+        raise NotImplementedError()
+
+    ## class FCompiler
+
+_default_compilers = (
+    # sys.platform mappings
+    ('win32', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95',
+               'intelvem', 'intelem', 'flang')),
+    ('cygwin.*', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95')),
+    ('linux.*', ('gnu95', 'intel', 'lahey', 'pg', 'nv', 'absoft', 'nag', 'vast', 'compaq',
+                 'intele', 'intelem', 'gnu', 'g95', 'pathf95', 'nagfor', 'fujitsu')),
+    ('darwin.*', ('gnu95', 'nag', 'absoft', 'ibm', 'intel', 'gnu', 'g95', 'pg')),
+    ('sunos.*', ('sun', 'gnu', 'gnu95', 'g95')),
+    ('irix.*', ('mips', 'gnu', 'gnu95',)),
+    ('aix.*', ('ibm', 'gnu', 'gnu95',)),
+    # os.name mappings
+    ('posix', ('gnu', 'gnu95',)),
+    ('nt', ('gnu', 'gnu95',)),
+    ('mac', ('gnu95', 'gnu', 'pg')),
+    )
+
+fcompiler_class = None
+fcompiler_aliases = None
+
+def load_all_fcompiler_classes():
+    """Cache all the FCompiler classes found in modules in the
+    numpy.distutils.fcompiler package.
+    """
+    from glob import glob
+    global fcompiler_class, fcompiler_aliases
+    if fcompiler_class is not None:
+        return
+    pys = os.path.join(os.path.dirname(__file__), '*.py')
+    fcompiler_class = {}
+    fcompiler_aliases = {}
+    for fname in glob(pys):
+        module_name, ext = os.path.splitext(os.path.basename(fname))
+        module_name = 'numpy.distutils.fcompiler.' + module_name
+        __import__ (module_name)
+        module = sys.modules[module_name]
+        if hasattr(module, 'compilers'):
+            for cname in module.compilers:
+                klass = getattr(module, cname)
+                desc = (klass.compiler_type, klass, klass.description)
+                fcompiler_class[klass.compiler_type] = desc
+                for alias in klass.compiler_aliases:
+                    if alias in fcompiler_aliases:
+                        raise ValueError("alias %r defined for both %s and %s"
+                                         % (alias, klass.__name__,
+                                            fcompiler_aliases[alias][1].__name__))
+                    fcompiler_aliases[alias] = desc
+
+def _find_existing_fcompiler(compiler_types,
+                             osname=None, platform=None,
+                             requiref90=False,
+                             c_compiler=None):
+    from numpy.distutils.core import get_distribution
+    dist = get_distribution(always=True)
+    for compiler_type in compiler_types:
+        v = None
+        try:
+            c = new_fcompiler(plat=platform, compiler=compiler_type,
+                              c_compiler=c_compiler)
+            c.customize(dist)
+            v = c.get_version()
+            if requiref90 and c.compiler_f90 is None:
+                v = None
+                new_compiler = c.suggested_f90_compiler
+                if new_compiler:
+                    log.warn('Trying %r compiler as suggested by %r '
+                             'compiler for f90 support.' % (compiler_type,
+                                                            new_compiler))
+                    c = new_fcompiler(plat=platform, compiler=new_compiler,
+                                      c_compiler=c_compiler)
+                    c.customize(dist)
+                    v = c.get_version()
+                    if v is not None:
+                        compiler_type = new_compiler
+            if requiref90 and c.compiler_f90 is None:
+                raise ValueError('%s does not support compiling f90 codes, '
+                                 'skipping.' % (c.__class__.__name__))
+        except DistutilsModuleError:
+            log.debug("_find_existing_fcompiler: compiler_type='%s' raised DistutilsModuleError", compiler_type)
+        except CompilerNotFound:
+            log.debug("_find_existing_fcompiler: compiler_type='%s' not found", compiler_type)
+        if v is not None:
+            return compiler_type
+    return None
+
+def available_fcompilers_for_platform(osname=None, platform=None):
+    if osname is None:
+        osname = os.name
+    if platform is None:
+        platform = sys.platform
+    matching_compiler_types = []
+    for pattern, compiler_type in _default_compilers:
+        if re.match(pattern, platform) or re.match(pattern, osname):
+            for ct in compiler_type:
+                if ct not in matching_compiler_types:
+                    matching_compiler_types.append(ct)
+    if not matching_compiler_types:
+        matching_compiler_types.append('gnu')
+    return matching_compiler_types
+
+def get_default_fcompiler(osname=None, platform=None, requiref90=False,
+                          c_compiler=None):
+    """Determine the default Fortran compiler to use for the given
+    platform."""
+    matching_compiler_types = available_fcompilers_for_platform(osname,
+                                                                platform)
+    log.info("get_default_fcompiler: matching types: '%s'",
+             matching_compiler_types)
+    compiler_type =  _find_existing_fcompiler(matching_compiler_types,
+                                              osname=osname,
+                                              platform=platform,
+                                              requiref90=requiref90,
+                                              c_compiler=c_compiler)
+    return compiler_type
+
+# Flag to avoid rechecking for Fortran compiler every time
+failed_fcompilers = set()
+
+def new_fcompiler(plat=None,
+                  compiler=None,
+                  verbose=0,
+                  dry_run=0,
+                  force=0,
+                  requiref90=False,
+                  c_compiler = None):
+    """Generate an instance of some FCompiler subclass for the supplied
+    platform/compiler combination.
+    """
+    global failed_fcompilers
+    fcompiler_key = (plat, compiler)
+    if fcompiler_key in failed_fcompilers:
+        return None
+
+    load_all_fcompiler_classes()
+    if plat is None:
+        plat = os.name
+    if compiler is None:
+        compiler = get_default_fcompiler(plat, requiref90=requiref90,
+                                         c_compiler=c_compiler)
+    if compiler in fcompiler_class:
+        module_name, klass, long_description = fcompiler_class[compiler]
+    elif compiler in fcompiler_aliases:
+        module_name, klass, long_description = fcompiler_aliases[compiler]
+    else:
+        msg = "don't know how to compile Fortran code on platform '%s'" % plat
+        if compiler is not None:
+            msg = msg + " with '%s' compiler." % compiler
+            msg = msg + " Supported compilers are: %s)" \
+                  % (','.join(fcompiler_class.keys()))
+        log.warn(msg)
+        failed_fcompilers.add(fcompiler_key)
+        return None
+
+    compiler = klass(verbose=verbose, dry_run=dry_run, force=force)
+    compiler.c_compiler = c_compiler
+    return compiler
+
+def show_fcompilers(dist=None):
+    """Print list of available compilers (used by the "--help-fcompiler"
+    option to "config_fc").
+    """
+    if dist is None:
+        from distutils.dist import Distribution
+        from numpy.distutils.command.config_compiler import config_fc
+        dist = Distribution()
+        dist.script_name = os.path.basename(sys.argv[0])
+        dist.script_args = ['config_fc'] + sys.argv[1:]
+        try:
+            dist.script_args.remove('--help-fcompiler')
+        except ValueError:
+            pass
+        dist.cmdclass['config_fc'] = config_fc
+        dist.parse_config_files()
+        dist.parse_command_line()
+    compilers = []
+    compilers_na = []
+    compilers_ni = []
+    if not fcompiler_class:
+        load_all_fcompiler_classes()
+    platform_compilers = available_fcompilers_for_platform()
+    for compiler in platform_compilers:
+        v = None
+        log.set_verbosity(-2)
+        try:
+            c = new_fcompiler(compiler=compiler, verbose=dist.verbose)
+            c.customize(dist)
+            v = c.get_version()
+        except (DistutilsModuleError, CompilerNotFound) as e:
+            log.debug("show_fcompilers: %s not found" % (compiler,))
+            log.debug(repr(e))
+
+        if v is None:
+            compilers_na.append(("fcompiler="+compiler, None,
+                              fcompiler_class[compiler][2]))
+        else:
+            c.dump_properties()
+            compilers.append(("fcompiler="+compiler, None,
+                              fcompiler_class[compiler][2] + ' (%s)' % v))
+
+    compilers_ni = list(set(fcompiler_class.keys()) - set(platform_compilers))
+    compilers_ni = [("fcompiler="+fc, None, fcompiler_class[fc][2])
+                    for fc in compilers_ni]
+
+    compilers.sort()
+    compilers_na.sort()
+    compilers_ni.sort()
+    pretty_printer = FancyGetopt(compilers)
+    pretty_printer.print_help("Fortran compilers found:")
+    pretty_printer = FancyGetopt(compilers_na)
+    pretty_printer.print_help("Compilers available for this "
+                              "platform, but not found:")
+    if compilers_ni:
+        pretty_printer = FancyGetopt(compilers_ni)
+        pretty_printer.print_help("Compilers not available on this platform:")
+    print("For compiler details, run 'config_fc --verbose' setup command.")
+
+
+def dummy_fortran_file():
+    fo, name = make_temp_file(suffix='.f')
+    fo.write("      subroutine dummy()\n      end\n")
+    fo.close()
+    return name[:-2]
+
+
+is_f_file = re.compile(r'.*[.](for|ftn|f77|f)\Z', re.I).match
+_has_f_header = re.compile(r'-[*]-\s*fortran\s*-[*]-', re.I).search
+_has_f90_header = re.compile(r'-[*]-\s*f90\s*-[*]-', re.I).search
+_has_fix_header = re.compile(r'-[*]-\s*fix\s*-[*]-', re.I).search
+_free_f90_start = re.compile(r'[^c*!]\s*[^\s\d\t]', re.I).match
+
+def is_free_format(file):
+    """Check if file is in free format Fortran."""
+    # f90 allows both fixed and free format, assuming fixed unless
+    # signs of free format are detected.
+    result = 0
+    with open(file, encoding='latin1') as f:
+        line = f.readline()
+        n = 10000 # the number of non-comment lines to scan for hints
+        if _has_f_header(line) or _has_fix_header(line):
+            n = 0
+        elif _has_f90_header(line):
+            n = 0
+            result = 1
+        while n>0 and line:
+            line = line.rstrip()
+            if line and line[0]!='!':
+                n -= 1
+                if (line[0]!='\t' and _free_f90_start(line[:5])) or line[-1:]=='&':
+                    result = 1
+                    break
+            line = f.readline()
+    return result
+
+def has_f90_header(src):
+    with open(src, encoding='latin1') as f:
+        line = f.readline()
+    return _has_f90_header(line) or _has_fix_header(line)
+
+_f77flags_re = re.compile(r'(c|)f77flags\s*\(\s*(?P<fcname>\w+)\s*\)\s*=\s*(?P<fflags>.*)', re.I)
+def get_f77flags(src):
+    """
+    Search the first 20 lines of fortran 77 code for line pattern
+      `CF77FLAGS(<fcompiler type>)=<f77 flags>`
+    Return a dictionary {<fcompiler type>:<f77 flags>}.
+    """
+    flags = {}
+    with open(src, encoding='latin1') as f:
+        i = 0
+        for line in f:
+            i += 1
+            if i>20: break
+            m = _f77flags_re.match(line)
+            if not m: continue
+            fcname = m.group('fcname').strip()
+            fflags = m.group('fflags').strip()
+            flags[fcname] = split_quoted(fflags)
+    return flags
+
+# TODO: implement get_f90flags and use it in _compile similarly to get_f77flags
+
+if __name__ == '__main__':
+    show_fcompilers()

+ 3079 - 0
intel-2021.02/numpy/system_info.py

@@ -0,0 +1,3079 @@
+#!/usr/bin/env python3
+"""
+This file defines a set of system_info classes for getting
+information about various resources (libraries, library directories,
+include directories, etc.) in the system. Usage:
+    info_dict = get_info(<name>)
+  where <name> is a string 'atlas','x11','fftw','lapack','blas',
+  'lapack_src', 'blas_src', etc. For a complete list of allowed names,
+  see the definition of get_info() function below.
+
+  Returned info_dict is a dictionary which is compatible with
+  distutils.setup keyword arguments. If info_dict == {}, then the
+  asked resource is not available (system_info could not find it).
+
+  Several *_info classes specify an environment variable to specify
+  the locations of software. When setting the corresponding environment
+  variable to 'None' then the software will be ignored, even when it
+  is available in system.
+
+Global parameters:
+  system_info.search_static_first - search static libraries (.a)
+             in precedence to shared ones (.so, .sl) if enabled.
+  system_info.verbosity - output the results to stdout if enabled.
+
+The file 'site.cfg' is looked for in
+
+1) Directory of main setup.py file being run.
+2) Home directory of user running the setup.py file as ~/.numpy-site.cfg
+3) System wide directory (location of this file...)
+
+The first one found is used to get system configuration options. The
+format is that used by ConfigParser (i.e., Windows .INI style). The
+section ALL is not intended for general use.
+
+Appropriate defaults are used if nothing is specified.
+
+The order of finding the locations of resources is the following:
+ 1. environment variable
+ 2. section in site.cfg
+ 3. DEFAULT section in site.cfg
+ 4. System default search paths (see ``default_*`` variables below).
+Only the first complete match is returned.
+
+Currently, the following classes are available, along with their section names:
+
+    Numeric_info:Numeric
+    _numpy_info:Numeric
+    _pkg_config_info:None
+    accelerate_info:accelerate
+    agg2_info:agg2
+    amd_info:amd
+    atlas_3_10_blas_info:atlas
+    atlas_3_10_blas_threads_info:atlas
+    atlas_3_10_info:atlas
+    atlas_3_10_threads_info:atlas
+    atlas_blas_info:atlas
+    atlas_blas_threads_info:atlas
+    atlas_info:atlas
+    atlas_threads_info:atlas
+    blas64__opt_info:ALL               # usage recommended (general ILP64 BLAS, 64_ symbol suffix)
+    blas_ilp64_opt_info:ALL            # usage recommended (general ILP64 BLAS)
+    blas_ilp64_plain_opt_info:ALL      # usage recommended (general ILP64 BLAS, no symbol suffix)
+    blas_info:blas
+    blas_mkl_info:mkl
+    blas_opt_info:ALL                  # usage recommended
+    blas_src_info:blas_src
+    blis_info:blis
+    boost_python_info:boost_python
+    dfftw_info:fftw
+    dfftw_threads_info:fftw
+    djbfft_info:djbfft
+    f2py_info:ALL
+    fft_opt_info:ALL
+    fftw2_info:fftw
+    fftw3_info:fftw3
+    fftw_info:fftw
+    fftw_threads_info:fftw
+    flame_info:flame
+    freetype2_info:freetype2
+    gdk_2_info:gdk_2
+    gdk_info:gdk
+    gdk_pixbuf_2_info:gdk_pixbuf_2
+    gdk_pixbuf_xlib_2_info:gdk_pixbuf_xlib_2
+    gdk_x11_2_info:gdk_x11_2
+    gtkp_2_info:gtkp_2
+    gtkp_x11_2_info:gtkp_x11_2
+    lapack64__opt_info:ALL             # usage recommended (general ILP64 LAPACK, 64_ symbol suffix)
+    lapack_atlas_3_10_info:atlas
+    lapack_atlas_3_10_threads_info:atlas
+    lapack_atlas_info:atlas
+    lapack_atlas_threads_info:atlas
+    lapack_ilp64_opt_info:ALL          # usage recommended (general ILP64 LAPACK)
+    lapack_ilp64_plain_opt_info:ALL    # usage recommended (general ILP64 LAPACK, no symbol suffix)
+    lapack_info:lapack
+    lapack_mkl_info:mkl
+    lapack_opt_info:ALL                # usage recommended
+    lapack_src_info:lapack_src
+    mkl_info:mkl
+    numarray_info:numarray
+    numerix_info:numerix
+    numpy_info:numpy
+    openblas64__info:openblas64_
+    openblas64__lapack_info:openblas64_
+    openblas_clapack_info:openblas
+    openblas_ilp64_info:openblas_ilp64
+    openblas_ilp64_lapack_info:openblas_ilp64
+    openblas_info:openblas
+    openblas_lapack_info:openblas
+    sfftw_info:fftw
+    sfftw_threads_info:fftw
+    system_info:ALL
+    umfpack_info:umfpack
+    wx_info:wx
+    x11_info:x11
+    xft_info:xft
+
+Example:
+----------
+[DEFAULT]
+# default section
+library_dirs = /usr/lib:/usr/local/lib:/opt/lib
+include_dirs = /usr/include:/usr/local/include:/opt/include
+src_dirs = /usr/local/src:/opt/src
+# search static libraries (.a) in preference to shared ones (.so)
+search_static_first = 0
+
+[fftw]
+libraries = rfftw, fftw
+
+[atlas]
+library_dirs = /usr/lib/3dnow:/usr/lib/3dnow/atlas
+# for overriding the names of the atlas libraries
+libraries = lapack, f77blas, cblas, atlas
+
+[x11]
+library_dirs = /usr/X11R6/lib
+include_dirs = /usr/X11R6/include
+----------
+
+Note that the ``libraries`` key is the default setting for libraries.
+
+Authors:
+  Pearu Peterson <pearu@cens.ioc.ee>, February 2002
+  David M. Cooke <cookedm@physics.mcmaster.ca>, April 2002
+
+Copyright 2002 Pearu Peterson all rights reserved,
+Pearu Peterson <pearu@cens.ioc.ee>
+Permission to use, modify, and distribute this software is given under the
+terms of the NumPy (BSD style) license.  See LICENSE.txt that came with
+this distribution for specifics.
+
+NO WARRANTY IS EXPRESSED OR IMPLIED.  USE AT YOUR OWN RISK.
+
+"""
+import sys
+import os
+import re
+import copy
+import warnings
+import subprocess
+import textwrap
+
+from glob import glob
+from functools import reduce
+from configparser import NoOptionError
+from configparser import RawConfigParser as ConfigParser
+# It seems that some people are importing ConfigParser from here so is
+# good to keep its class name. Use of RawConfigParser is needed in
+# order to be able to load path names with percent in them, like
+# `feature%2Fcool` which is common on git flow branch names.
+
+from distutils.errors import DistutilsError
+from distutils.dist import Distribution
+import sysconfig
+from numpy.distutils import log
+from distutils.util import get_platform
+
+from numpy.distutils.exec_command import (
+    find_executable, filepath_from_subprocess_output,
+    )
+from numpy.distutils.misc_util import (is_sequence, is_string,
+                                       get_shared_lib_extension)
+from numpy.distutils.command.config import config as cmd_config
+from numpy.distutils import customized_ccompiler as _customized_ccompiler
+from numpy.distutils import _shell_utils
+import distutils.ccompiler
+import tempfile
+import shutil
+
+__all__ = ['system_info']
+
+# Determine number of bits
+import platform
+_bits = {'32bit': 32, '64bit': 64}
+# Pointer width (32 or 64) of the running interpreter; used below to decide
+# whether '64'-suffixed library directories should be searched.
+platform_bits = _bits[platform.architecture()[0]]
+
+
# Module-wide cache: building a customized compiler object is expensive,
# so it is created at most once per process.
global_compiler = None

def customized_ccompiler():
    """Return the process-wide customized C compiler, creating it on first use."""
    global global_compiler
    if global_compiler:
        return global_compiler
    global_compiler = _customized_ccompiler()
    return global_compiler
+
+
+def _c_string_literal(s):
+    """
+    Convert a python string into a literal suitable for inclusion into C code
+    """
+    # only these three characters are forbidden in C strings
+    s = s.replace('\\', r'\\')
+    s = s.replace('"',  r'\"')
+    s = s.replace('\n', r'\n')
+    return '"{}"'.format(s)
+
+
def libpaths(paths, bits):
    """Return a list of library paths valid on 32 or 64 bit systems.

    Parameters
    ----------
    paths : sequence of str
        Candidate library directories.
    bits : int
        Must be 32 or 64; any other value raises ValueError.

    Returns
    -------
    list of str
        On 32-bit platforms, *paths* unchanged.  On 64-bit platforms, each
        path is preceded by its '64'-suffixed sibling, e.g. '/usr/lib' becomes
        ['/usr/lib64', '/usr/lib'].
    """
    if bits not in (32, 64):
        raise ValueError("Invalid bit size in libpaths: 32 or 64 only")

    # 32-bit: the given paths are already correct.
    if bits == 32:
        return paths

    # 64-bit: search the '64'-suffixed directory before the plain one.
    return [candidate for p in paths for candidate in (p + '64', p)]
+
+
+# Platform-specific default search locations for libraries, headers and
+# sources.  These module-level lists are computed once at import time.
+if sys.platform == 'win32':
+    default_lib_dirs = ['C:\\',
+                        os.path.join(sysconfig.get_config_var('exec_prefix'),
+                                     'libs')]
+    default_runtime_dirs = []
+    default_include_dirs = []
+    default_src_dirs = ['.']
+    default_x11_lib_dirs = []
+    default_x11_include_dirs = []
+    _include_dirs = [
+        'include',
+        'include/suitesparse',
+    ]
+    _lib_dirs = [
+        'lib',
+    ]
+
+    # Normalize to native path separators.
+    _include_dirs = [d.replace('/', os.sep) for d in _include_dirs]
+    _lib_dirs = [d.replace('/', os.sep) for d in _lib_dirs]
+    def add_system_root(library_root):
+        """Add a package manager root to the include directories"""
+        global default_lib_dirs
+        global default_include_dirs
+
+        library_root = os.path.normpath(library_root)
+
+        default_lib_dirs.extend(
+            os.path.join(library_root, d) for d in _lib_dirs)
+        default_include_dirs.extend(
+            os.path.join(library_root, d) for d in _include_dirs)
+
+    # VCpkg is the de-facto package manager on windows for C/C++
+    # libraries. If it is on the PATH, then we append its paths here.
+    vcpkg = shutil.which('vcpkg')
+    if vcpkg:
+        vcpkg_dir = os.path.dirname(vcpkg)
+        if platform.architecture()[0] == '32bit':
+            specifier = 'x86'
+        else:
+            specifier = 'x64'
+
+        vcpkg_installed = os.path.join(vcpkg_dir, 'installed')
+        for vcpkg_root in [
+            os.path.join(vcpkg_installed, specifier + '-windows'),
+            os.path.join(vcpkg_installed, specifier + '-windows-static'),
+        ]:
+            add_system_root(vcpkg_root)
+
+    # Conda is another popular package manager that provides libraries
+    conda = shutil.which('conda')
+    if conda:
+        conda_dir = os.path.dirname(conda)
+        add_system_root(os.path.join(conda_dir, '..', 'Library'))
+        add_system_root(os.path.join(conda_dir, 'Library'))
+
+else:
+    default_lib_dirs = libpaths(['/usr/local/lib', '/opt/lib', '/usr/lib',
+                                 '/opt/local/lib', '/sw/lib'], platform_bits)
+    default_runtime_dirs = []
+    default_include_dirs = ['/usr/local/include',
+                            '/opt/include', '/usr/include',
+                            # path of umfpack under macports
+                            '/opt/local/include/ufsparse',
+                            '/opt/local/include', '/sw/include',
+                            '/usr/include/suitesparse']
+    default_src_dirs = ['.', '/usr/local/src', '/opt/src', '/sw/src']
+
+    default_x11_lib_dirs = libpaths(['/usr/X11R6/lib', '/usr/X11/lib',
+                                     '/usr/lib'], platform_bits)
+    default_x11_include_dirs = ['/usr/X11R6/include', '/usr/X11/include',
+                                '/usr/include']
+
+    if os.path.exists('/usr/lib/X11'):
+        globbed_x11_dir = glob('/usr/lib/*/libX11.so')
+        if globbed_x11_dir:
+            x11_so_dir = os.path.split(globbed_x11_dir[0])[0]
+            default_x11_lib_dirs.extend([x11_so_dir, '/usr/lib/X11'])
+            default_x11_include_dirs.extend(['/usr/lib/X11/include',
+                                             '/usr/include/X11'])
+
+    # Ask gcc for the multiarch triplet (e.g. "x86_64-linux-gnu") so
+    # Debian-style /usr/lib/<triplet> directories are also searched.
+    with open(os.devnull, 'w') as tmp:
+        try:
+            p = subprocess.Popen(["gcc", "-print-multiarch"], stdout=subprocess.PIPE,
+                         stderr=tmp)
+        except (OSError, DistutilsError):
+            # OSError if gcc is not installed, or SandboxViolation (DistutilsError
+            # subclass) if an old setuptools bug is triggered (see gh-3160).
+            pass
+        else:
+            triplet = str(p.communicate()[0].decode().strip())
+            if p.returncode == 0:
+                # gcc supports the "-print-multiarch" option
+                default_x11_lib_dirs += [os.path.join("/usr/lib/", triplet)]
+                default_lib_dirs += [os.path.join("/usr/lib/", triplet)]
+
+
+# Make sure the running interpreter's own prefix is searched first.
+if os.path.join(sys.prefix, 'lib') not in default_lib_dirs:
+    default_lib_dirs.insert(0, os.path.join(sys.prefix, 'lib'))
+    default_include_dirs.append(os.path.join(sys.prefix, 'include'))
+    default_src_dirs.append(os.path.join(sys.prefix, 'src'))
+
+# Drop default directories that do not exist on this machine.
+default_lib_dirs = [_m for _m in default_lib_dirs if os.path.isdir(_m)]
+default_runtime_dirs = [_m for _m in default_runtime_dirs if os.path.isdir(_m)]
+default_include_dirs = [_m for _m in default_include_dirs if os.path.isdir(_m)]
+default_src_dirs = [_m for _m in default_src_dirs if os.path.isdir(_m)]
+
+# Shared-library filename extension for this platform.
+so_ext = get_shared_lib_extension()
+
+
def is_symlink_to_accelerate(filename):
    """Return True when *filename* is a macOS symlink pointing into the
    (unsupported) Accelerate framework; False everywhere else."""
    accelpath = '/System/Library/Frameworks/Accelerate.framework'
    if sys.platform != 'darwin':
        return False
    if not os.path.islink(filename):
        return False
    return os.path.realpath(filename).startswith(accelpath)
+
+
+# User-facing error text shown when an Accelerate symlink is detected.
+# NOTE(review): the '(unknown)' placeholder is presumably substituted with the
+# actual file name at the point of use — confirm with the caller.
+_accel_msg = (
+    'Found (unknown), but that file is a symbolic link to the '
+    'MacOS Accelerate framework, which is not supported by NumPy. '
+    'You must configure the build to use a different optimized library, '
+    'or disable the use of optimized BLAS and LAPACK by setting the '
+    'environment variables NPY_BLAS_ORDER="" and NPY_LAPACK_ORDER="" '
+    'before building NumPy.'
+)
+
+
def get_standard_file(fname):
    """Returns a list of files named 'fname' from
    1) System-wide directory (directory-location of this module)
    2) Users HOME directory (os.environ['HOME'])
    3) Local directory
    """
    found = []

    # 1) System-wide: next to this module (falls back to argv[0] when
    # __file__ is undefined; in that case no system-wide lookup is done).
    try:
        this_file = __file__
    except NameError:
        this_file = sys.argv[0]
    else:
        module_dir = os.path.split(os.path.abspath(this_file))[0]
        candidate = os.path.join(module_dir, fname)
        if os.path.isfile(candidate):
            found.append(candidate)

    # 2) User's home directory.
    try:
        home = os.path.expanduser('~')
    except KeyError:
        pass
    else:
        candidate = os.path.join(home, fname)
        if os.path.isfile(candidate):
            found.append(candidate)

    # 3) Current working directory.
    if os.path.isfile(fname):
        found.append(os.path.abspath(fname))

    return found
+
+
+def _parse_env_order(base_order, env):
+    """ Parse an environment variable `env` by splitting with "," and only returning elements from `base_order`
+
+    This method will sequence the environment variable and check for their invidual elements in `base_order`.
+
+    The items in the environment variable may be negated via '^item' or '!itema,itemb'.
+    It must start with ^/! to negate all options.
+
+    Raises
+    ------
+    ValueError: for mixed negated and non-negated orders or multiple negated orders
+
+    Parameters
+    ----------
+    base_order : list of str
+       the base list of orders
+    env : str
+       the environment variable to be parsed, if none is found, `base_order` is returned
+
+    Returns
+    -------
+    allow_order : list of str
+        allowed orders in lower-case
+    unknown_order : list of str
+        for values not overlapping with `base_order`
+    """
+    order_str = os.environ.get(env, None)
+
+    # ensure all base-orders are lower-case (for easier comparison)
+    base_order = [order.lower() for order in base_order]
+    if order_str is None:
+        return base_order, []
+
+    neg = order_str.startswith('^') or order_str.startswith('!')
+    # Check format
+    order_str_l = list(order_str)
+    sum_neg = order_str_l.count('^') + order_str_l.count('!')
+    if neg:
+        if sum_neg > 1:
+            raise ValueError(f"Environment variable '{env}' may only contain a single (prefixed) negation: {order_str}")
+        # remove prefix
+        order_str = order_str[1:]
+    elif sum_neg > 0:
+        raise ValueError(f"Environment variable '{env}' may not mix negated an non-negated items: {order_str}")
+
+    # Split and lower case
+    orders = order_str.lower().split(',')
+
+    # to inform callee about non-overlapping elements
+    unknown_order = []
+
+    # if negated, we have to remove from the order
+    if neg:
+        allow_order = base_order.copy()
+
+        for order in orders:
+            if not order:
+                continue
+
+            if order not in base_order:
+                unknown_order.append(order)
+                continue
+
+            if order in allow_order:
+                allow_order.remove(order)
+
+    else:
+        allow_order = []
+
+        for order in orders:
+            if not order:
+                continue
+
+            if order not in base_order:
+                unknown_order.append(order)
+                continue
+
+            if order not in allow_order:
+                allow_order.append(order)
+
+    return allow_order, unknown_order
+
+
+def get_info(name, notfound_action=0):
+    """
+    Return the info dictionary for resource `name` (case-insensitive).
+    Unknown names fall back to the base system_info class.
+
+    notfound_action:
+      0 - do nothing
+      1 - display warning message
+      2 - raise error
+    """
+    # Map resource name -> info class; the classes are defined later in
+    # this module, so this dict is only evaluated at call time.
+    cl = {'atlas': atlas_info,  # use lapack_opt or blas_opt instead
+          'atlas_threads': atlas_threads_info,                # ditto
+          'atlas_blas': atlas_blas_info,
+          'atlas_blas_threads': atlas_blas_threads_info,
+          'lapack_atlas': lapack_atlas_info,  # use lapack_opt instead
+          'lapack_atlas_threads': lapack_atlas_threads_info,  # ditto
+          'atlas_3_10': atlas_3_10_info,  # use lapack_opt or blas_opt instead
+          'atlas_3_10_threads': atlas_3_10_threads_info,                # ditto
+          'atlas_3_10_blas': atlas_3_10_blas_info,
+          'atlas_3_10_blas_threads': atlas_3_10_blas_threads_info,
+          'lapack_atlas_3_10': lapack_atlas_3_10_info,  # use lapack_opt instead
+          'lapack_atlas_3_10_threads': lapack_atlas_3_10_threads_info,  # ditto
+          'flame': flame_info,          # use lapack_opt instead
+          'mkl': mkl_info,
+          # openblas which may or may not have embedded lapack
+          'openblas': openblas_info,          # use blas_opt instead
+          # openblas with embedded lapack
+          'openblas_lapack': openblas_lapack_info, # use blas_opt instead
+          'openblas_clapack': openblas_clapack_info, # use blas_opt instead
+          'blis': blis_info,                  # use blas_opt instead
+          'lapack_mkl': lapack_mkl_info,      # use lapack_opt instead
+          'blas_mkl': blas_mkl_info,          # use blas_opt instead
+          'openblas64_': openblas64__info,
+          'openblas64__lapack': openblas64__lapack_info,
+          'openblas_ilp64': openblas_ilp64_info,
+          'openblas_ilp64_lapack': openblas_ilp64_lapack_info,
+          'x11': x11_info,
+          'fft_opt': fft_opt_info,
+          'fftw': fftw_info,
+          'fftw2': fftw2_info,
+          'fftw3': fftw3_info,
+          'dfftw': dfftw_info,
+          'sfftw': sfftw_info,
+          'fftw_threads': fftw_threads_info,
+          'dfftw_threads': dfftw_threads_info,
+          'sfftw_threads': sfftw_threads_info,
+          'djbfft': djbfft_info,
+          'blas': blas_info,                  # use blas_opt instead
+          'lapack': lapack_info,              # use lapack_opt instead
+          'lapack_src': lapack_src_info,
+          'blas_src': blas_src_info,
+          'numpy': numpy_info,
+          'f2py': f2py_info,
+          'Numeric': Numeric_info,
+          'numeric': Numeric_info,
+          'numarray': numarray_info,
+          'numerix': numerix_info,
+          'lapack_opt': lapack_opt_info,
+          'lapack_ilp64_opt': lapack_ilp64_opt_info,
+          'lapack_ilp64_plain_opt': lapack_ilp64_plain_opt_info,
+          'lapack64__opt': lapack64__opt_info,
+          'blas_opt': blas_opt_info,
+          'blas_ilp64_opt': blas_ilp64_opt_info,
+          'blas_ilp64_plain_opt': blas_ilp64_plain_opt_info,
+          'blas64__opt': blas64__opt_info,
+          'boost_python': boost_python_info,
+          'agg2': agg2_info,
+          'wx': wx_info,
+          'gdk_pixbuf_xlib_2': gdk_pixbuf_xlib_2_info,
+          'gdk-pixbuf-xlib-2.0': gdk_pixbuf_xlib_2_info,
+          'gdk_pixbuf_2': gdk_pixbuf_2_info,
+          'gdk-pixbuf-2.0': gdk_pixbuf_2_info,
+          'gdk': gdk_info,
+          'gdk_2': gdk_2_info,
+          'gdk-2.0': gdk_2_info,
+          'gdk_x11_2': gdk_x11_2_info,
+          'gdk-x11-2.0': gdk_x11_2_info,
+          'gtkp_x11_2': gtkp_x11_2_info,
+          'gtk+-x11-2.0': gtkp_x11_2_info,
+          'gtkp_2': gtkp_2_info,
+          'gtk+-2.0': gtkp_2_info,
+          'xft': xft_info,
+          'freetype2': freetype2_info,
+          'umfpack': umfpack_info,
+          'amd': amd_info,
+          }.get(name.lower(), system_info)
+    return cl().get_info(notfound_action)
+
+
+# Base of the "resource not found" hierarchy.  The docstring is also the
+# runtime warning/error text (system_info.get_info passes
+# notfounderror.__doc__ to warnings.warn / raise), so do not reword it.
+class NotFoundError(DistutilsError):
+    """Some third-party program or library is not found."""
+
+
+# The docstring is a runtime message template: system_info.get_option_single
+# calls AliasedOptionError.__doc__.format(section=..., options=...), so the
+# {section}/{options} placeholders must be preserved exactly.
+class AliasedOptionError(DistutilsError):
+    """
+    Aliases entries in config files should not be existing.
+    In section '{section}' we found multiple appearances of options {options}."""
+
+
+# NOTE: each docstring below doubles as the user-facing warning/error text
+# (system_info.get_info uses notfounderror.__doc__), so the docstrings are
+# runtime messages, not just documentation — edit with care.
+class AtlasNotFoundError(NotFoundError):
+    """
+    Atlas (http://github.com/math-atlas/math-atlas) libraries not found.
+    Directories to search for the libraries can be specified in the
+    numpy/distutils/site.cfg file (section [atlas]) or by setting
+    the ATLAS environment variable."""
+
+
+class FlameNotFoundError(NotFoundError):
+    """
+    FLAME (http://www.cs.utexas.edu/~flame/web/) libraries not found.
+    Directories to search for the libraries can be specified in the
+    numpy/distutils/site.cfg file (section [flame])."""
+
+
+class LapackNotFoundError(NotFoundError):
+    """
+    Lapack (http://www.netlib.org/lapack/) libraries not found.
+    Directories to search for the libraries can be specified in the
+    numpy/distutils/site.cfg file (section [lapack]) or by setting
+    the LAPACK environment variable."""
+
+
+class LapackSrcNotFoundError(LapackNotFoundError):
+    """
+    Lapack (http://www.netlib.org/lapack/) sources not found.
+    Directories to search for the sources can be specified in the
+    numpy/distutils/site.cfg file (section [lapack_src]) or by setting
+    the LAPACK_SRC environment variable."""
+
+
+class LapackILP64NotFoundError(NotFoundError):
+    """
+    64-bit Lapack libraries not found.
+    Known libraries in numpy/distutils/site.cfg file are:
+    openblas64_, openblas_ilp64
+    """
+
+class BlasOptNotFoundError(NotFoundError):
+    """
+    Optimized (vendor) Blas libraries are not found.
+    Falls back to netlib Blas library which has worse performance.
+    A better performance should be easily gained by switching
+    Blas library."""
+
+class BlasNotFoundError(NotFoundError):
+    """
+    Blas (http://www.netlib.org/blas/) libraries not found.
+    Directories to search for the libraries can be specified in the
+    numpy/distutils/site.cfg file (section [blas]) or by setting
+    the BLAS environment variable."""
+
+class BlasILP64NotFoundError(NotFoundError):
+    """
+    64-bit Blas libraries not found.
+    Known libraries in numpy/distutils/site.cfg file are:
+    openblas64_, openblas_ilp64
+    """
+
+class BlasSrcNotFoundError(BlasNotFoundError):
+    """
+    Blas (http://www.netlib.org/blas/) sources not found.
+    Directories to search for the sources can be specified in the
+    numpy/distutils/site.cfg file (section [blas_src]) or by setting
+    the BLAS_SRC environment variable."""
+
+
+class FFTWNotFoundError(NotFoundError):
+    """
+    FFTW (http://www.fftw.org/) libraries not found.
+    Directories to search for the libraries can be specified in the
+    numpy/distutils/site.cfg file (section [fftw]) or by setting
+    the FFTW environment variable."""
+
+
+class DJBFFTNotFoundError(NotFoundError):
+    """
+    DJBFFT (https://cr.yp.to/djbfft.html) libraries not found.
+    Directories to search for the libraries can be specified in the
+    numpy/distutils/site.cfg file (section [djbfft]) or by setting
+    the DJBFFT environment variable."""
+
+
+class NumericNotFoundError(NotFoundError):
+    """
+    Numeric (https://www.numpy.org/) module not found.
+    Get it from above location, install it, and retry setup.py."""
+
+
+class X11NotFoundError(NotFoundError):
+    """X11 libraries not found."""
+
+
+class UmfpackNotFoundError(NotFoundError):
+    """
+    UMFPACK sparse solver (https://www.cise.ufl.edu/research/sparse/umfpack/)
+    not found. Directories to search for the libraries can be specified in the
+    numpy/distutils/site.cfg file (section [umfpack]) or by setting
+    the UMFPACK environment variable."""
+
+
+class system_info:
+
+    """ get_info() is the only public method. Don't use others.
+    """
+    # Name of the environment variable (or sequence of names) that can
+    # override the search directories for a subclass; None disables it.
+    dir_env_var = None
+    # XXX: search_static_first is disabled by default, may disappear in
+    # future unless it is proved to be useful.
+    search_static_first = 0
+    # The base-class section name is a random word "ALL" and is not really
+    # intended for general use. It cannot be None nor can it be DEFAULT as
+    # these break the ConfigParser. See gh-15338
+    section = 'ALL'
+    # Class-level dict shared by every subclass: maps class name -> cached
+    # info dict (written by set_info, read by has_info/get_info).
+    saved_results = {}
+
+    notfounderror = NotFoundError
+
+    def __init__(self,
+                  default_lib_dirs=default_lib_dirs,
+                  default_include_dirs=default_include_dirs,
+                  ):
+        # NOTE: the two defaults above are bound once, when the class body is
+        # executed at import time, to the module-level default lists.
+        # Reset per-class scratch dict (used by some subclasses).
+        self.__class__.info = {}
+        self.local_prefixes = []
+        # Fallback values for every config section; paths are joined with the
+        # platform path separator and split back in get_paths().
+        defaults = {'library_dirs': os.pathsep.join(default_lib_dirs),
+                    'include_dirs': os.pathsep.join(default_include_dirs),
+                    'runtime_library_dirs': os.pathsep.join(default_runtime_dirs),
+                    'rpath': '',
+                    'src_dirs': os.pathsep.join(default_src_dirs),
+                    'search_static_first': str(self.search_static_first),
+                    'extra_compile_args': '', 'extra_link_args': ''}
+        self.cp = ConfigParser(defaults)
+        self.files = []
+        # User config overrides system config (later files win in cp.read).
+        self.files.extend(get_standard_file('.numpy-site.cfg'))
+        self.files.extend(get_standard_file('site.cfg'))
+        self.parse_config_files()
+
+        if self.section is not None:
+            self.search_static_first = self.cp.getboolean(
+                self.section, 'search_static_first')
+        assert isinstance(self.search_static_first, int)
+
+    def parse_config_files(self):
+        self.cp.read(self.files)
+        if not self.cp.has_section(self.section):
+            if self.section is not None:
+                self.cp.add_section(self.section)
+
+    def calc_libraries_info(self):
+        # Resolve each configured library against the library and runtime
+        # search directories, accumulating the results into one info dict.
+        libs = self.get_libraries()
+        dirs = self.get_lib_dirs()
+        # The extensions use runtime_library_dirs
+        r_dirs = self.get_runtime_lib_dirs()
+        # Intrinsic distutils use rpath, we simply append both entries
+        # as though they were one entry
+        r_dirs.extend(self.get_runtime_lib_dirs(key='rpath'))
+        info = {}
+        for lib in libs:
+            i = self.check_libs(dirs, [lib])
+            if i is not None:
+                dict_append(info, **i)
+            else:
+                log.info('Library %s was not found. Ignoring' % (lib))
+
+            if r_dirs:
+                i = self.check_libs(r_dirs, [lib])
+                if i is not None:
+                    # Swap library keywords found to runtime_library_dirs
+                    # the libraries are insisting on the user having defined
+                    # them using the library_dirs, and not necessarily by
+                    # runtime_library_dirs
+                    del i['libraries']
+                    i['runtime_library_dirs'] = i.pop('library_dirs')
+                    dict_append(info, **i)
+                else:
+                    log.info('Runtime library %s was not found. Ignoring' % (lib))
+
+        return info
+
+    def set_info(self, **info):
+        if info:
+            lib_info = self.calc_libraries_info()
+            dict_append(info, **lib_info)
+            # Update extra information
+            extra_info = self.calc_extra_info()
+            dict_append(info, **extra_info)
+        self.saved_results[self.__class__.__name__] = info
+
+    def get_option_single(self, *options):
+        """ Ensure that only one of `options` are found in the section
+
+        Parameters
+        ----------
+        *options : list of str
+           a list of options to be found in the section (``self.section``)
+
+        Returns
+        -------
+        str :
+            the option that is uniquely found in the section
+
+        Raises
+        ------
+        AliasedOptionError :
+            in case more than one of the options are found
+        """
+        found = [self.cp.has_option(self.section, opt) for opt in options]
+        if sum(found) == 1:
+            return options[found.index(True)]
+        elif sum(found) == 0:
+            # nothing is found anyways
+            return options[0]
+
+        # Else we have more than 1 key found
+        if AliasedOptionError.__doc__ is None:
+            raise AliasedOptionError()
+        raise AliasedOptionError(AliasedOptionError.__doc__.format(
+            section=self.section, options='[{}]'.format(', '.join(options))))
+
+
+    def has_info(self):
+        # True once set_info() has recorded a result (even an empty one)
+        # for this exact class.
+        return self.__class__.__name__ in self.saved_results
+
+    def calc_extra_info(self):
+        """ Updates the information in the current information with
+        respect to these flags:
+          extra_compile_args
+          extra_link_args
+        """
+        info = {}
+        for key in ['extra_compile_args', 'extra_link_args']:
+            # Get values
+            opt = self.cp.get(self.section, key)
+            opt = _shell_utils.NativeParser.split(opt)
+            if opt:
+                tmp = {key: opt}
+                dict_append(info, **tmp)
+        return info
+
+    def get_info(self, notfound_action=0):
+        """ Return a dictionary with items that are compatible
+            with numpy.distutils.setup keyword arguments.
+        """
+        # flag==1 means this call actually computed the result (first call
+        # for this class); only then are details logged below.
+        flag = 0
+        if not self.has_info():
+            flag = 1
+            log.info(self.__class__.__name__ + ':')
+            if hasattr(self, 'calc_info'):
+                self.calc_info()
+            if notfound_action:
+                if not self.has_info():
+                    if notfound_action == 1:
+                        # The class docstring is the user-facing message.
+                        warnings.warn(self.notfounderror.__doc__, stacklevel=2)
+                    elif notfound_action == 2:
+                        raise self.notfounderror(self.notfounderror.__doc__)
+                    else:
+                        raise ValueError(repr(notfound_action))
+
+            if not self.has_info():
+                log.info('  NOT AVAILABLE')
+                self.set_info()
+            else:
+                log.info('  FOUND:')
+
+        res = self.saved_results.get(self.__class__.__name__)
+        if log.get_threshold() <= log.INFO and flag:
+            for k, v in res.items():
+                v = str(v)
+                # Truncate very long source/library listings in the log.
+                if k in ['sources', 'libraries'] and len(v) > 270:
+                    v = v[:120] + '...\n...\n...' + v[-120:]
+                log.info('    %s = %s', k, v)
+            log.info('')
+
+        # Deep copy so callers cannot mutate the cached result.
+        return copy.deepcopy(res)
+
+    def get_paths(self, section, key):
+        # Build the ordered, de-duplicated list of existing directories for
+        # `key`: environment-variable entries first, then config-section
+        # entries, then the defaults from self.section.
+        dirs = self.cp.get(section, key).split(os.pathsep)
+        env_var = self.dir_env_var
+        if env_var:
+            if is_sequence(env_var):
+                # Several candidate variable names: use the first one that is
+                # actually set, falling back to the last name in the list.
+                e0 = env_var[-1]
+                for e in env_var:
+                    if e in os.environ:
+                        e0 = e
+                        break
+                if not env_var[0] == e0:
+                    log.info('Setting %s=%s' % (env_var[0], e0))
+                env_var = e0
+        if env_var and env_var in os.environ:
+            d = os.environ[env_var]
+            if d == 'None':
+                # The literal string 'None' disables this resource entirely.
+                log.info('Disabled %s: %s',
+                         self.__class__.__name__, '(%s is None)'
+                         % (env_var,))
+                return []
+            if os.path.isfile(d):
+                # The variable points at a library file: search its directory
+                # and, for single-library classes, adopt its basename.
+                dirs = [os.path.dirname(d)] + dirs
+                l = getattr(self, '_lib_names', [])
+                if len(l) == 1:
+                    b = os.path.basename(d)
+                    b = os.path.splitext(b)[0]
+                    if b[:3] == 'lib':
+                        log.info('Replacing _lib_names[0]==%r with %r' \
+                              % (self._lib_names[0], b[3:]))
+                        self._lib_names[0] = b[3:]
+            else:
+                # The variable holds one or more directories: also look in
+                # their include/ and lib/ subdirectories.
+                ds = d.split(os.pathsep)
+                ds2 = []
+                for d in ds:
+                    if os.path.isdir(d):
+                        ds2.append(d)
+                        for dd in ['include', 'lib']:
+                            d1 = os.path.join(d, dd)
+                            if os.path.isdir(d1):
+                                ds2.append(d1)
+                dirs = ds2 + dirs
+        default_dirs = self.cp.get(self.section, key).split(os.pathsep)
+        dirs.extend(default_dirs)
+        ret = []
+        for d in dirs:
+            if len(d) > 0 and not os.path.isdir(d):
+                warnings.warn('Specified path %s is invalid.' % d, stacklevel=2)
+                continue
+
+            if d not in ret:
+                ret.append(d)
+
+        log.debug('( %s = %s )', key, ':'.join(ret))
+        return ret
+
+    def get_lib_dirs(self, key='library_dirs'):
+        """Return the configured library search directories."""
+        return self.get_paths(self.section, key)
+
+    def get_runtime_lib_dirs(self, key='runtime_library_dirs'):
+        """Return the configured runtime library directories.
+
+        An unset option comes back as [''] from get_paths; normalize that
+        to an empty list.
+        """
+        path = self.get_paths(self.section, key)
+        if path == ['']:
+            path = []
+        return path
+
+    def get_include_dirs(self, key='include_dirs'):
+        """Return the configured header include directories."""
+        return self.get_paths(self.section, key)
+
+    def get_src_dirs(self, key='src_dirs'):
+        """Return the configured source directories."""
+        return self.get_paths(self.section, key)
+
+    def get_libs(self, key, default):
+        """Return the list of library names configured under *key*.
+
+        Falls back to *default* (string or list) when the option is absent.
+        Configured values are comma-separated; within each item any ':' is
+        replaced by ',' and empty items are dropped.
+        """
+        try:
+            libs = self.cp.get(self.section, key)
+        except NoOptionError:
+            if not default:
+                return []
+            if is_string(default):
+                return [default]
+            return default
+        return [b for b in [a.strip().replace(':',',') for a in libs.split(',')] if b]
+
+    def get_libraries(self, key='libraries'):
+        """Return configured libraries, defaulting to the class'
+        _lib_names when that attribute exists."""
+        if hasattr(self, '_lib_names'):
+            return self.get_libs(key, default=self._lib_names)
+        else:
+            return self.get_libs(key, '')
+
+    def library_extensions(self):
+        """Return candidate library file extensions in search order.
+
+        search_static_first decides whether static extensions precede the
+        shared-library extension; platform-specific extensions (.lib,
+        .dll.a, .dylib) are appended where applicable.
+        """
+        c = customized_ccompiler()
+        static_exts = []
+        if c.compiler_type != 'msvc':
+            # MSVC doesn't understand binutils
+            static_exts.append('.a')
+        if sys.platform == 'win32':
+            static_exts.append('.lib')  # .lib is used by MSVC and others
+        if self.search_static_first:
+            exts = static_exts + [so_ext]
+        else:
+            exts = [so_ext] + static_exts
+        if sys.platform == 'cygwin':
+            exts.append('.dll.a')
+        if sys.platform == 'darwin':
+            exts.append('.dylib')
+        return exts
+
+    def check_libs(self, lib_dirs, libs, opt_libs=[]):
+        """If static or shared libraries are available then return
+        their info dictionary.
+
+        Checks for all libraries as shared libraries first, then
+        static (or vice versa if self.search_static_first is True).
+
+        Note: opt_libs uses a mutable default but is never mutated here.
+        """
+        exts = self.library_extensions()
+        info = None
+        # Try one extension at a time: all libraries must be found with
+        # the same extension for a match.
+        for ext in exts:
+            info = self._check_libs(lib_dirs, libs, opt_libs, [ext])
+            if info is not None:
+                break
+        if not info:
+            log.info('  libraries %s not found in %s', ','.join(libs),
+                     lib_dirs)
+        return info
+
+    def check_libs2(self, lib_dirs, libs, opt_libs=[]):
+        """If static or shared libraries are available then return
+        their info dictionary.
+
+        Checks each library for shared or static.
+
+        Unlike check_libs, each library may match with a different
+        extension since all extensions are passed at once.
+        """
+        exts = self.library_extensions()
+        info = self._check_libs(lib_dirs, libs, opt_libs, exts)
+        if not info:
+            log.info('  libraries %s not found in %s', ','.join(libs),
+                     lib_dirs)
+
+        return info
+
+    def _find_lib(self, lib_dir, lib, exts):
+        """Look for library *lib* in *lib_dir* trying each extension in
+        *exts*; return the (possibly adjusted) library name or False."""
+        assert is_string(lib_dir)
+        # under windows first try without 'lib' prefix
+        if sys.platform == 'win32':
+            lib_prefixes = ['', 'lib']
+        else:
+            lib_prefixes = ['lib']
+        # for each library name, see if we can find a file for it.
+        for ext in exts:
+            for prefix in lib_prefixes:
+                p = self.combine_paths(lib_dir, prefix + lib + ext)
+                if p:
+                    # p[0] is the full path to the binary library file.
+                    # Refuse symlinks into Apple's Accelerate framework.
+                    if is_symlink_to_accelerate(p[0]):
+                        raise RuntimeError(_accel_msg.format(filename=p[0]))
+                    break
+            if p:
+                assert len(p) == 1
+                # ??? splitext on p[0] would do this for cygwin
+                # doesn't seem correct
+                if ext == '.dll.a':
+                    lib += '.dll'
+                if ext == '.lib':
+                    lib = prefix + lib
+                return lib
+
+        return False
+
+    def _find_libs(self, lib_dirs, libs, exts):
+        """Locate each of *libs* in *lib_dirs*; return (found_dirs,
+        found_libs) preserving the order of *libs*."""
+        # make sure we preserve the order of libs, as it can be important
+        found_dirs, found_libs = [], []
+        for lib in libs:
+            # Raw linker flags (e.g. '-Wl,...') are passed through as-is.
+            if lib[0] == '-':
+                found_libs.append(lib)
+                continue
+            for lib_dir in lib_dirs:
+                found_lib = self._find_lib(lib_dir, lib, exts)
+                if found_lib:
+                    found_libs.append(found_lib)
+                    if lib_dir not in found_dirs:
+                        found_dirs.append(lib_dir)
+                    break
+        return found_dirs, found_libs
+
+    def _check_libs(self, lib_dirs, libs, opt_libs, exts):
+        """Find mandatory and optional libs in expected paths.
+
+        Missing optional libraries are silently forgotten.
+
+        Returns an info dict {'libraries': ..., 'library_dirs': ...} when
+        every mandatory library was found, else None.
+        """
+        if not is_sequence(lib_dirs):
+            lib_dirs = [lib_dirs]
+        # First, try to find the mandatory libraries
+        found_dirs, found_libs = self._find_libs(lib_dirs, libs, exts)
+        if len(found_libs) > 0 and len(found_libs) == len(libs):
+            # Now, check for optional libraries
+            opt_found_dirs, opt_found_libs = self._find_libs(lib_dirs, opt_libs, exts)
+            found_libs.extend(opt_found_libs)
+            for lib_dir in opt_found_dirs:
+                if lib_dir not in found_dirs:
+                    found_dirs.append(lib_dir)
+            info = {'libraries': found_libs, 'library_dirs': found_dirs}
+            return info
+        else:
+            return None
+
+    def combine_paths(self, *args):
+        """Return a list of existing paths composed by all combinations
+        of items from the arguments.
+        """
+        # Thin wrapper around the module-level combine_paths helper.
+        return combine_paths(*args)
+
+
+class fft_opt_info(system_info):
+    """Best available FFT libraries: an FFTW variant, optionally
+    augmented with djbfft."""
+
+    def calc_info(self):
+        info = {}
+        # Prefer fftw3, then fftw2, then dfftw.
+        fftw_info = get_info('fftw3') or get_info('fftw2') or get_info('dfftw')
+        djbfft_info = get_info('djbfft')
+        if fftw_info:
+            dict_append(info, **fftw_info)
+            # djbfft is only added on top of a working FFTW.
+            if djbfft_info:
+                dict_append(info, **djbfft_info)
+            self.set_info(**info)
+            return
+
+
+class fftw_info(system_info):
+    """Detect FFTW (version 3 preferred, version 2 as fallback)."""
+    #variables to override
+    section = 'fftw'
+    dir_env_var = 'FFTW'
+    notfounderror = FFTWNotFoundError
+    # Candidate versions tried in order by calc_info; subclasses override
+    # this list to pin a specific variant.
+    ver_info = [{'name':'fftw3',
+                    'libs':['fftw3'],
+                    'includes':['fftw3.h'],
+                    'macros':[('SCIPY_FFTW3_H', None)]},
+                  {'name':'fftw2',
+                    'libs':['rfftw', 'fftw'],
+                    'includes':['fftw.h', 'rfftw.h'],
+                    'macros':[('SCIPY_FFTW_H', None)]}]
+
+    def calc_ver_info(self, ver_param):
+        """Returns True on successful version detection, else False"""
+        lib_dirs = self.get_lib_dirs()
+        incl_dirs = self.get_include_dirs()
+
+        opt = self.get_option_single(self.section + '_libs', 'libraries')
+        libs = self.get_libs(opt, ver_param['libs'])
+        info = self.check_libs(lib_dirs, libs)
+        if info is not None:
+            flag = 0
+            # All headers of this version must live in one include dir.
+            for d in incl_dirs:
+                if len(self.combine_paths(d, ver_param['includes'])) \
+                   == len(ver_param['includes']):
+                    dict_append(info, include_dirs=[d])
+                    flag = 1
+                    break
+            if flag:
+                dict_append(info, define_macros=ver_param['macros'])
+            else:
+                info = None
+        if info is not None:
+            self.set_info(**info)
+            return True
+        else:
+            log.info('  %s not found' % (ver_param['name']))
+            return False
+
+    def calc_info(self):
+        # First version detected wins.
+        for i in self.ver_info:
+            if self.calc_ver_info(i):
+                break
+
+
+class fftw2_info(fftw_info):
+    """FFTW version 2 only (rfftw + fftw)."""
+    #variables to override
+    section = 'fftw'
+    dir_env_var = 'FFTW'
+    notfounderror = FFTWNotFoundError
+    ver_info = [{'name':'fftw2',
+                    'libs':['rfftw', 'fftw'],
+                    'includes':['fftw.h', 'rfftw.h'],
+                    'macros':[('SCIPY_FFTW_H', None)]}
+                  ]
+
+
+class fftw3_info(fftw_info):
+    """FFTW version 3 only."""
+    #variables to override
+    section = 'fftw3'
+    dir_env_var = 'FFTW3'
+    notfounderror = FFTWNotFoundError
+    ver_info = [{'name':'fftw3',
+                    'libs':['fftw3'],
+                    'includes':['fftw3.h'],
+                    'macros':[('SCIPY_FFTW3_H', None)]},
+                  ]
+
+
+class dfftw_info(fftw_info):
+    """Double-precision FFTW 2 build (drfftw + dfftw)."""
+    section = 'fftw'
+    dir_env_var = 'FFTW'
+    ver_info = [{'name':'dfftw',
+                    'libs':['drfftw', 'dfftw'],
+                    'includes':['dfftw.h', 'drfftw.h'],
+                    'macros':[('SCIPY_DFFTW_H', None)]}]
+
+
+class sfftw_info(fftw_info):
+    """Single-precision FFTW 2 build (srfftw + sfftw)."""
+    section = 'fftw'
+    dir_env_var = 'FFTW'
+    ver_info = [{'name':'sfftw',
+                    'libs':['srfftw', 'sfftw'],
+                    'includes':['sfftw.h', 'srfftw.h'],
+                    'macros':[('SCIPY_SFFTW_H', None)]}]
+
+
+class fftw_threads_info(fftw_info):
+    section = 'fftw'
+    dir_env_var = 'FFTW'
+    ver_info = [{'name':'fftw threads',
+                    'libs':['rfftw_threads', 'fftw_threads'],
+                    'includes':['fftw_threads.h', 'rfftw_threads.h'],
+                    'macros':[('SCIPY_FFTW_THREADS_H', None)]}]
+
+
+class dfftw_threads_info(fftw_info):
+    """Threaded double-precision FFTW 2 libraries."""
+    section = 'fftw'
+    dir_env_var = 'FFTW'
+    ver_info = [{'name':'dfftw threads',
+                    'libs':['drfftw_threads', 'dfftw_threads'],
+                    'includes':['dfftw_threads.h', 'drfftw_threads.h'],
+                    'macros':[('SCIPY_DFFTW_THREADS_H', None)]}]
+
+
+class sfftw_threads_info(fftw_info):
+    """Threaded single-precision FFTW 2 libraries."""
+    section = 'fftw'
+    dir_env_var = 'FFTW'
+    ver_info = [{'name':'sfftw threads',
+                    'libs':['srfftw_threads', 'sfftw_threads'],
+                    'includes':['sfftw_threads.h', 'srfftw_threads.h'],
+                    'macros':[('SCIPY_SFFTW_THREADS_H', None)]}]
+
+
+class djbfft_info(system_info):
+    """Detect D. J. Bernstein's djbfft library."""
+    section = 'djbfft'
+    dir_env_var = 'DJBFFT'
+    notfounderror = DJBFFTNotFoundError
+
+    def get_paths(self, section, key):
+        """Extend the base paths with existing 'djbfft' subdirectories."""
+        pre_dirs = system_info.get_paths(self, section, key)
+        dirs = []
+        for d in pre_dirs:
+            dirs.extend(self.combine_paths(d, ['djbfft']) + [d])
+        return [d for d in dirs if os.path.isdir(d)]
+
+    def calc_info(self):
+        lib_dirs = self.get_lib_dirs()
+        incl_dirs = self.get_include_dirs()
+        info = None
+        for d in lib_dirs:
+            # djbfft may be installed as a bare object archive 'djbfft.a'
+            # (linked via extra_objects) or as a conventional library.
+            p = self.combine_paths(d, ['djbfft.a'])
+            if p:
+                info = {'extra_objects': p}
+                break
+            p = self.combine_paths(d, ['libdjbfft.a', 'libdjbfft' + so_ext])
+            if p:
+                info = {'libraries': ['djbfft'], 'library_dirs': [d]}
+                break
+        if info is None:
+            return
+        # Both headers must be present in the same include directory.
+        for d in incl_dirs:
+            if len(self.combine_paths(d, ['fftc8.h', 'fftfreq.h'])) == 2:
+                dict_append(info, include_dirs=[d],
+                            define_macros=[('SCIPY_DJBFFT_H', None)])
+                self.set_info(**info)
+                return
+        return
+
+
+class mkl_info(system_info):
+    """Detect Intel MKL (single dynamic library 'mkl_rt' by default)."""
+    section = 'mkl'
+    dir_env_var = 'MKLROOT'
+    _lib_mkl = ['mkl_rt']
+
+    def get_mkl_rootdir(self):
+        """Return the MKL installation root, or None.
+
+        Order: MKLROOT env var, then directories containing 'mkl' taken
+        from LD_LIBRARY_PATH and /etc/ld.so.conf.
+        """
+        mklroot = os.environ.get('MKLROOT', None)
+        if mklroot is not None:
+            return mklroot
+        paths = os.environ.get('LD_LIBRARY_PATH', '').split(os.pathsep)
+        ld_so_conf = '/etc/ld.so.conf'
+        if os.path.isfile(ld_so_conf):
+            with open(ld_so_conf, 'r') as f:
+                for d in f:
+                    d = d.strip()
+                    if d:
+                        paths.append(d)
+        intel_mkl_dirs = []
+        for path in paths:
+            path_atoms = path.split(os.sep)
+            # Keep the path up to one component past the 'mkl*' component.
+            for m in path_atoms:
+                if m.startswith('mkl'):
+                    d = os.sep.join(path_atoms[:path_atoms.index(m) + 2])
+                    intel_mkl_dirs.append(d)
+                    break
+        # A candidate qualifies when it has a 'lib' subdirectory.
+        for d in paths:
+            dirs = glob(os.path.join(d, 'mkl', '*'))
+            dirs += glob(os.path.join(d, 'mkl*'))
+            for sub_dir in dirs:
+                if os.path.isdir(os.path.join(sub_dir, 'lib')):
+                    return sub_dir
+        return None
+
+    def __init__(self):
+        mklroot = self.get_mkl_rootdir()
+        if mklroot is None:
+            system_info.__init__(self)
+        else:
+            from .cpuinfo import cpu
+            # Pick the platform-specific lib subdirectory name.
+            if cpu.is_Itanium():
+                plt = '64'
+            elif cpu.is_Intel() and cpu.is_64bit():
+                plt = 'intel64'
+            else:
+                plt = '32'
+            system_info.__init__(
+                self,
+                default_lib_dirs=[os.path.join(mklroot, 'lib', plt)],
+                default_include_dirs=[os.path.join(mklroot, 'include')])
+
+    def calc_info(self):
+        lib_dirs = self.get_lib_dirs()
+        incl_dirs = self.get_include_dirs()
+        opt = self.get_option_single('mkl_libs', 'libraries')
+        mkl_libs = self.get_libs(opt, self._lib_mkl)
+        info = self.check_libs2(lib_dirs, mkl_libs)
+        if info is None:
+            return
+        dict_append(info,
+                    define_macros=[('SCIPY_MKL_H', None),
+                                   ('HAVE_CBLAS', None)],
+                    include_dirs=incl_dirs)
+        if sys.platform == 'win32':
+            pass  # win32 has no pthread library
+        else:
+            dict_append(info, libraries=['pthread'])
+        self.set_info(**info)
+
+
+class lapack_mkl_info(mkl_info):
+    """LAPACK provided by MKL; detection identical to mkl_info."""
+    pass
+
+
+class blas_mkl_info(mkl_info):
+    """BLAS provided by MKL; detection identical to mkl_info."""
+    pass
+
+
+class atlas_info(system_info):
+    """Detect ATLAS (BLAS + LAPACK)."""
+    section = 'atlas'
+    dir_env_var = 'ATLAS'
+    _lib_names = ['f77blas', 'cblas']
+    # FreeBSD ships reentrant variants under different names.
+    if sys.platform[:7] == 'freebsd':
+        _lib_atlas = ['atlas_r']
+        _lib_lapack = ['alapack_r']
+    else:
+        _lib_atlas = ['atlas']
+        _lib_lapack = ['lapack']
+
+    notfounderror = AtlasNotFoundError
+
+    def get_paths(self, section, key):
+        """Extend base paths with typical ATLAS subdirectories."""
+        pre_dirs = system_info.get_paths(self, section, key)
+        dirs = []
+        for d in pre_dirs:
+            dirs.extend(self.combine_paths(d, ['atlas*', 'ATLAS*',
+                                         'sse', '3dnow', 'sse2']) + [d])
+        return [d for d in dirs if os.path.isdir(d)]
+
+    def calc_info(self):
+        lib_dirs = self.get_lib_dirs()
+        info = {}
+        opt = self.get_option_single('atlas_libs', 'libraries')
+        atlas_libs = self.get_libs(opt, self._lib_names + self._lib_atlas)
+        lapack_libs = self.get_libs('lapack_libs', self._lib_lapack)
+        atlas = None
+        lapack = None
+        # atlas_1 remembers the first dir where ATLAS (without LAPACK)
+        # was found, as a fallback.
+        atlas_1 = None
+        for d in lib_dirs:
+            # FIXME: lapack_atlas is unused
+            lapack_atlas = self.check_libs2(d, ['lapack_atlas'], [])
+            atlas = self.check_libs2(d, atlas_libs, [])
+            if atlas is not None:
+                lib_dirs2 = [d] + self.combine_paths(d, ['atlas*', 'ATLAS*'])
+                lapack = self.check_libs2(lib_dirs2, lapack_libs, [])
+                if lapack is not None:
+                    break
+            if atlas:
+                atlas_1 = atlas
+        log.info(self.__class__)
+        if atlas is None:
+            atlas = atlas_1
+        if atlas is None:
+            return
+        include_dirs = self.get_include_dirs()
+        h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
+        h = h[0]
+        if h:
+            h = os.path.dirname(h)
+            dict_append(info, include_dirs=[h])
+        info['language'] = 'c'
+        # Only the first branch (LAPACK found) falls through to the size
+        # check below; the other two branches set info and return early.
+        if lapack is not None:
+            dict_append(info, **lapack)
+            dict_append(info, **atlas)
+        elif 'lapack_atlas' in atlas['libraries']:
+            dict_append(info, **atlas)
+            dict_append(info,
+                        define_macros=[('ATLAS_WITH_LAPACK_ATLAS', None)])
+            self.set_info(**info)
+            return
+        else:
+            dict_append(info, **atlas)
+            dict_append(info, define_macros=[('ATLAS_WITHOUT_LAPACK', None)])
+            message = textwrap.dedent("""
+                *********************************************************************
+                    Could not find lapack library within the ATLAS installation.
+                *********************************************************************
+                """)
+            warnings.warn(message, stacklevel=2)
+            self.set_info(**info)
+            return
+
+        # Check if lapack library is complete, only warn if it is not.
+        lapack_dir = lapack['library_dirs'][0]
+        lapack_name = lapack['libraries'][0]
+        lapack_lib = None
+        lib_prefixes = ['lib']
+        if sys.platform == 'win32':
+            lib_prefixes.append('')
+        for e in self.library_extensions():
+            for prefix in lib_prefixes:
+                fn = os.path.join(lapack_dir, prefix + lapack_name + e)
+                if os.path.exists(fn):
+                    lapack_lib = fn
+                    break
+            if lapack_lib:
+                break
+        if lapack_lib is not None:
+            # os.stat()[6] is st_size; a complete LAPACK should be >4000k.
+            sz = os.stat(lapack_lib)[6]
+            if sz <= 4000 * 1024:
+                message = textwrap.dedent("""
+                    *********************************************************************
+                        Lapack library (from ATLAS) is probably incomplete:
+                          size of %s is %sk (expected >4000k)
+
+                        Follow the instructions in the KNOWN PROBLEMS section of the file
+                        numpy/INSTALL.txt.
+                    *********************************************************************
+                    """) % (lapack_lib, sz / 1024)
+                warnings.warn(message, stacklevel=2)
+            else:
+                info['language'] = 'f77'
+
+        atlas_version, atlas_extra_info = get_atlas_version(**atlas)
+        dict_append(info, **atlas_extra_info)
+
+        self.set_info(**info)
+
+
+class atlas_blas_info(atlas_info):
+    """ATLAS BLAS-only detection (no LAPACK requirement)."""
+    _lib_names = ['f77blas', 'cblas']
+
+    def calc_info(self):
+        lib_dirs = self.get_lib_dirs()
+        info = {}
+        opt = self.get_option_single('atlas_libs', 'libraries')
+        atlas_libs = self.get_libs(opt, self._lib_names + self._lib_atlas)
+        atlas = self.check_libs2(lib_dirs, atlas_libs, [])
+        if atlas is None:
+            return
+        include_dirs = self.get_include_dirs()
+        # Locate cblas.h; its directory is added to include_dirs.
+        h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
+        h = h[0]
+        if h:
+            h = os.path.dirname(h)
+            dict_append(info, include_dirs=[h])
+        info['language'] = 'c'
+        info['define_macros'] = [('HAVE_CBLAS', None)]
+
+        atlas_version, atlas_extra_info = get_atlas_version(**atlas)
+        dict_append(atlas, **atlas_extra_info)
+
+        dict_append(info, **atlas)
+
+        self.set_info(**info)
+        return
+
+
+class atlas_threads_info(atlas_info):
+    """Threaded ATLAS (pt-prefixed libraries); PTATLAS env var preferred."""
+    dir_env_var = ['PTATLAS', 'ATLAS']
+    _lib_names = ['ptf77blas', 'ptcblas']
+
+
+class atlas_blas_threads_info(atlas_blas_info):
+    """Threaded ATLAS BLAS-only variant."""
+    dir_env_var = ['PTATLAS', 'ATLAS']
+    _lib_names = ['ptf77blas', 'ptcblas']
+
+
+class lapack_atlas_info(atlas_info):
+    """ATLAS with the full 'lapack_atlas' library required first."""
+    _lib_names = ['lapack_atlas'] + atlas_info._lib_names
+
+
+class lapack_atlas_threads_info(atlas_threads_info):
+    """Threaded variant of lapack_atlas_info."""
+    _lib_names = ['lapack_atlas'] + atlas_threads_info._lib_names
+
+
+class atlas_3_10_info(atlas_info):
+    """ATLAS >= 3.10, which ships a single serial library 'satlas'."""
+    _lib_names = ['satlas']
+    _lib_atlas = _lib_names
+    _lib_lapack = _lib_names
+
+
+class atlas_3_10_blas_info(atlas_3_10_info):
+    """BLAS-only detection for ATLAS >= 3.10 ('satlas')."""
+    _lib_names = ['satlas']
+
+    def calc_info(self):
+        lib_dirs = self.get_lib_dirs()
+        info = {}
+        opt = self.get_option_single('atlas_lib', 'libraries')
+        atlas_libs = self.get_libs(opt, self._lib_names)
+        atlas = self.check_libs2(lib_dirs, atlas_libs, [])
+        if atlas is None:
+            return
+        include_dirs = self.get_include_dirs()
+        # Locate cblas.h; its directory is added to include_dirs.
+        h = (self.combine_paths(lib_dirs + include_dirs, 'cblas.h') or [None])
+        h = h[0]
+        if h:
+            h = os.path.dirname(h)
+            dict_append(info, include_dirs=[h])
+        info['language'] = 'c'
+        info['define_macros'] = [('HAVE_CBLAS', None)]
+
+        atlas_version, atlas_extra_info = get_atlas_version(**atlas)
+        dict_append(atlas, **atlas_extra_info)
+
+        dict_append(info, **atlas)
+
+        self.set_info(**info)
+        return
+
+
+class atlas_3_10_threads_info(atlas_3_10_info):
+    """Threaded ATLAS >= 3.10 ('tatlas')."""
+    dir_env_var = ['PTATLAS', 'ATLAS']
+    _lib_names = ['tatlas']
+    _lib_atlas = _lib_names
+    _lib_lapack = _lib_names
+
+
+class atlas_3_10_blas_threads_info(atlas_3_10_blas_info):
+    """Threaded BLAS-only ATLAS >= 3.10 ('tatlas')."""
+    dir_env_var = ['PTATLAS', 'ATLAS']
+    _lib_names = ['tatlas']
+
+
+class lapack_atlas_3_10_info(atlas_3_10_info):
+    """LAPACK via ATLAS >= 3.10; same detection as atlas_3_10_info."""
+    pass
+
+
+class lapack_atlas_3_10_threads_info(atlas_3_10_threads_info):
+    """Threaded LAPACK via ATLAS >= 3.10."""
+    pass
+
+
+class lapack_info(system_info):
+    """Detect a plain (reference) LAPACK library."""
+    section = 'lapack'
+    dir_env_var = 'LAPACK'
+    _lib_names = ['lapack']
+    notfounderror = LapackNotFoundError
+
+    def calc_info(self):
+        lib_dirs = self.get_lib_dirs()
+
+        opt = self.get_option_single('lapack_libs', 'libraries')
+        lapack_libs = self.get_libs(opt, self._lib_names)
+        info = self.check_libs(lib_dirs, lapack_libs, [])
+        if info is None:
+            return
+        # Reference LAPACK is Fortran 77.
+        info['language'] = 'f77'
+        self.set_info(**info)
+
+
+class lapack_src_info(system_info):
+    """Locate LAPACK Fortran sources and list the .f files to compile."""
+    section = 'lapack_src'
+    dir_env_var = 'LAPACK_SRC'
+    notfounderror = LapackSrcNotFoundError
+
+    def get_paths(self, section, key):
+        """Extend base paths with typical LAPACK source subdirectories."""
+        pre_dirs = system_info.get_paths(self, section, key)
+        dirs = []
+        for d in pre_dirs:
+            dirs.extend([d] + self.combine_paths(d, ['LAPACK*/SRC', 'SRC']))
+        return [d for d in dirs if os.path.isdir(d)]
+
+    def calc_info(self):
+        src_dirs = self.get_src_dirs()
+        src_dir = ''
+        # A directory qualifies if it contains dgesv.f.
+        for d in src_dirs:
+            if os.path.isfile(os.path.join(d, 'dgesv.f')):
+                src_dir = d
+                break
+        if not src_dir:
+            #XXX: Get sources from netlib. May be ask first.
+            return
+        # The following is extracted from LAPACK-3.0/SRC/Makefile.
+        # Added missing names from lapack-lite-3.1.1/SRC/Makefile
+        # while keeping removed names for Lapack-3.0 compatibility.
+        allaux = '''
+        ilaenv ieeeck lsame lsamen xerbla
+        iparmq
+        '''  # *.f
+        laux = '''
+        bdsdc bdsqr disna labad lacpy ladiv lae2 laebz laed0 laed1
+        laed2 laed3 laed4 laed5 laed6 laed7 laed8 laed9 laeda laev2
+        lagtf lagts lamch lamrg lanst lapy2 lapy3 larnv larrb larre
+        larrf lartg laruv las2 lascl lasd0 lasd1 lasd2 lasd3 lasd4
+        lasd5 lasd6 lasd7 lasd8 lasd9 lasda lasdq lasdt laset lasq1
+        lasq2 lasq3 lasq4 lasq5 lasq6 lasr lasrt lassq lasv2 pttrf
+        stebz stedc steqr sterf
+
+        larra larrc larrd larr larrk larrj larrr laneg laisnan isnan
+        lazq3 lazq4
+        '''  # [s|d]*.f
+        lasrc = '''
+        gbbrd gbcon gbequ gbrfs gbsv gbsvx gbtf2 gbtrf gbtrs gebak
+        gebal gebd2 gebrd gecon geequ gees geesx geev geevx gegs gegv
+        gehd2 gehrd gelq2 gelqf gels gelsd gelss gelsx gelsy geql2
+        geqlf geqp3 geqpf geqr2 geqrf gerfs gerq2 gerqf gesc2 gesdd
+        gesv gesvd gesvx getc2 getf2 getrf getri getrs ggbak ggbal
+        gges ggesx ggev ggevx ggglm gghrd gglse ggqrf ggrqf ggsvd
+        ggsvp gtcon gtrfs gtsv gtsvx gttrf gttrs gtts2 hgeqz hsein
+        hseqr labrd lacon laein lags2 lagtm lahqr lahrd laic1 lals0
+        lalsa lalsd langb lange langt lanhs lansb lansp lansy lantb
+        lantp lantr lapll lapmt laqgb laqge laqp2 laqps laqsb laqsp
+        laqsy lar1v lar2v larf larfb larfg larft larfx largv larrv
+        lartv larz larzb larzt laswp lasyf latbs latdf latps latrd
+        latrs latrz latzm lauu2 lauum pbcon pbequ pbrfs pbstf pbsv
+        pbsvx pbtf2 pbtrf pbtrs pocon poequ porfs posv posvx potf2
+        potrf potri potrs ppcon ppequ pprfs ppsv ppsvx pptrf pptri
+        pptrs ptcon pteqr ptrfs ptsv ptsvx pttrs ptts2 spcon sprfs
+        spsv spsvx sptrf sptri sptrs stegr stein sycon syrfs sysv
+        sysvx sytf2 sytrf sytri sytrs tbcon tbrfs tbtrs tgevc tgex2
+        tgexc tgsen tgsja tgsna tgsy2 tgsyl tpcon tprfs tptri tptrs
+        trcon trevc trexc trrfs trsen trsna trsyl trti2 trtri trtrs
+        tzrqf tzrzf
+
+        lacn2 lahr2 stemr laqr0 laqr1 laqr2 laqr3 laqr4 laqr5
+        '''  # [s|c|d|z]*.f
+        sd_lasrc = '''
+        laexc lag2 lagv2 laln2 lanv2 laqtr lasy2 opgtr opmtr org2l
+        org2r orgbr orghr orgl2 orglq orgql orgqr orgr2 orgrq orgtr
+        orm2l orm2r ormbr ormhr orml2 ormlq ormql ormqr ormr2 ormr3
+        ormrq ormrz ormtr rscl sbev sbevd sbevx sbgst sbgv sbgvd sbgvx
+        sbtrd spev spevd spevx spgst spgv spgvd spgvx sptrd stev stevd
+        stevr stevx syev syevd syevr syevx sygs2 sygst sygv sygvd
+        sygvx sytd2 sytrd
+        '''  # [s|d]*.f
+        cz_lasrc = '''
+        bdsqr hbev hbevd hbevx hbgst hbgv hbgvd hbgvx hbtrd hecon heev
+        heevd heevr heevx hegs2 hegst hegv hegvd hegvx herfs hesv
+        hesvx hetd2 hetf2 hetrd hetrf hetri hetrs hpcon hpev hpevd
+        hpevx hpgst hpgv hpgvd hpgvx hprfs hpsv hpsvx hptrd hptrf
+        hptri hptrs lacgv lacp2 lacpy lacrm lacrt ladiv laed0 laed7
+        laed8 laesy laev2 lahef lanhb lanhe lanhp lanht laqhb laqhe
+        laqhp larcm larnv lartg lascl laset lasr lassq pttrf rot spmv
+        spr stedc steqr symv syr ung2l ung2r ungbr unghr ungl2 unglq
+        ungql ungqr ungr2 ungrq ungtr unm2l unm2r unmbr unmhr unml2
+        unmlq unmql unmqr unmr2 unmr3 unmrq unmrz unmtr upgtr upmtr
+        '''  # [c|z]*.f
+        #######
+        # Expand the generic names into per-precision source file names
+        # (s/d/c/z prefixes).
+        sclaux = laux + ' econd '                  # s*.f
+        dzlaux = laux + ' secnd '                  # d*.f
+        slasrc = lasrc + sd_lasrc                  # s*.f
+        dlasrc = lasrc + sd_lasrc                  # d*.f
+        clasrc = lasrc + cz_lasrc + ' srot srscl '  # c*.f
+        zlasrc = lasrc + cz_lasrc + ' drot drscl '  # z*.f
+        oclasrc = ' icmax1 scsum1 '                # *.f
+        ozlasrc = ' izmax1 dzsum1 '                # *.f
+        sources = ['s%s.f' % f for f in (sclaux + slasrc).split()] \
+                  + ['d%s.f' % f for f in (dzlaux + dlasrc).split()] \
+                  + ['c%s.f' % f for f in (clasrc).split()] \
+                  + ['z%s.f' % f for f in (zlasrc).split()] \
+                  + ['%s.f' % f for f in (allaux + oclasrc + ozlasrc).split()]
+        sources = [os.path.join(src_dir, f) for f in sources]
+        # Lapack 3.1:
+        src_dir2 = os.path.join(src_dir, '..', 'INSTALL')
+        sources += [os.path.join(src_dir2, p + 'lamch.f') for p in 'sdcz']
+        # Lapack 3.2.1:
+        sources += [os.path.join(src_dir, p + 'larfp.f') for p in 'sdcz']
+        sources += [os.path.join(src_dir, 'ila' + p + 'lr.f') for p in 'sdcz']
+        sources += [os.path.join(src_dir, 'ila' + p + 'lc.f') for p in 'sdcz']
+        # Should we check here actual existence of source files?
+        # Yes, the file listing is different between 3.0 and 3.1
+        # versions.
+        sources = [f for f in sources if os.path.isfile(f)]
+        info = {'sources': sources, 'language': 'f77'}
+        self.set_info(**info)
+
+# Minimal C program used by get_atlas_version(): when linked against
+# ATLAS and run, ATL_buildinfo() reports the ATLAS build information.
+atlas_version_c_text = r'''
+/* This file is generated from numpy/distutils/system_info.py */
+void ATL_buildinfo(void);
+int main(void) {
+  ATL_buildinfo();
+  return 0;
+}
+'''
+
+# Cache for get_atlas_version(), keyed on (libraries, library_dirs),
+# so the test program is compiled/linked at most once per configuration.
+_cached_atlas_version = {}
+
+
+def get_atlas_version(**config):
+    """Return (version_string, extra_info_dict) for an ATLAS install.
+
+    Compiles and runs atlas_version_c_text against the given 'libraries'
+    and 'library_dirs'.  Falls back to parsing directory names or the
+    ATLAS_VERSION environment variable when that fails.  Results are
+    cached in _cached_atlas_version.
+    """
+    libraries = config.get('libraries', [])
+    library_dirs = config.get('library_dirs', [])
+    key = (tuple(libraries), tuple(library_dirs))
+    if key in _cached_atlas_version:
+        return _cached_atlas_version[key]
+    c = cmd_config(Distribution())
+    atlas_version = None
+    info = {}
+    try:
+        s, o = c.get_output(atlas_version_c_text,
+                            libraries=libraries, library_dirs=library_dirs,
+                           )
+        # A gfortran-linked ATLAS needs libgfortran at link time; retry
+        # with it and record the requirement.
+        if s and re.search(r'undefined reference to `_gfortran', o, re.M):
+            s, o = c.get_output(atlas_version_c_text,
+                                libraries=libraries + ['gfortran'],
+                                library_dirs=library_dirs,
+                               )
+            if not s:
+                warnings.warn(textwrap.dedent("""
+                    *****************************************************
+                    Linkage with ATLAS requires gfortran. Use
+
+                      python setup.py config_fc --fcompiler=gnu95 ...
+
+                    when building extension libraries that use ATLAS.
+                    Make sure that -lgfortran is used for C++ extensions.
+                    *****************************************************
+                    """), stacklevel=2)
+                dict_append(info, language='f90',
+                            define_macros=[('ATLAS_REQUIRES_GFORTRAN', None)])
+    except Exception:  # failed to get version from file -- maybe on Windows
+        # look at directory name
+        for o in library_dirs:
+            m = re.search(r'ATLAS_(?P<version>\d+[.]\d+[.]\d+)_', o)
+            if m:
+                atlas_version = m.group('version')
+            if atlas_version is not None:
+                break
+
+        # final choice --- look at ATLAS_VERSION environment
+        #   variable
+        if atlas_version is None:
+            atlas_version = os.environ.get('ATLAS_VERSION', None)
+        if atlas_version:
+            dict_append(info, define_macros=[(
+                'ATLAS_INFO', _c_string_literal(atlas_version))
+            ])
+        else:
+            dict_append(info, define_macros=[('NO_ATLAS_INFO', -1)])
+        # NOTE: this fallback result is intentionally not cached.
+        return atlas_version or '?.?.?', info
+
+    if not s:
+        # Program ran: parse the version out of its output.
+        m = re.search(r'ATLAS version (?P<version>\d+[.]\d+[.]\d+)', o)
+        if m:
+            atlas_version = m.group('version')
+    if atlas_version is None:
+        # Very old ATLAS builds lack ATL_buildinfo entirely.
+        if re.search(r'undefined symbol: ATL_buildinfo', o, re.M):
+            atlas_version = '3.2.1_pre3.3.6'
+        else:
+            log.info('Status: %d', s)
+            log.info('Output: %s', o)
+
+    elif atlas_version == '3.2.1_pre3.3.6':
+        dict_append(info, define_macros=[('NO_ATLAS_INFO', -2)])
+    else:
+        dict_append(info, define_macros=[(
+            'ATLAS_INFO', _c_string_literal(atlas_version))
+        ])
+    result = _cached_atlas_version[key] = atlas_version, info
+    return result
+
+
class lapack_opt_info(system_info):
    """Detect the best available optimized LAPACK implementation.

    Candidate backends are tried in ``lapack_order``; the order can be
    overridden through the ``NPY_LAPACK_ORDER`` environment variable.
    """
    notfounderror = LapackNotFoundError
    # List of all known LAPACK libraries, in the default order
    lapack_order = ['mkl', 'openblas', 'flame', 'atlas', 'lapack']
    order_env_var_name = 'NPY_LAPACK_ORDER'

    def _calc_info_mkl(self):
        # MKL bundles LAPACK; accept it as-is.
        info = get_info('lapack_mkl')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_openblas(self):
        # Prefer an OpenBLAS with built-in LAPACK, then the
        # OpenBLAS + separate (C)LAPACK combination.
        info = get_info('openblas_lapack')
        if info:
            self.set_info(**info)
            return True
        info = get_info('openblas_clapack')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_flame(self):
        info = get_info('flame')
        if info:
            self.set_info(**info)
            return True
        return False

    def _calc_info_atlas(self):
        # Try newer (3.10+) and threaded ATLAS variants first.
        info = get_info('atlas_3_10_threads')
        if not info:
            info = get_info('atlas_3_10')
        if not info:
            info = get_info('atlas_threads')
        if not info:
            info = get_info('atlas')
        if info:
            # Figure out if ATLAS has lapack...
            # If not we need the lapack library, but not BLAS!
            l = info.get('define_macros', [])
            if ('ATLAS_WITH_LAPACK_ATLAS', None) in l \
               or ('ATLAS_WITHOUT_LAPACK', None) in l:
                # Get LAPACK (with possible warnings)
                # If not found we don't accept anything
                # since we can't use ATLAS with LAPACK!
                lapack_info = self._get_info_lapack()
                if not lapack_info:
                    return False
                dict_append(info, **lapack_info)
            self.set_info(**info)
            return True
        return False

    def _calc_info_accelerate(self):
        info = get_info('accelerate')
        if info:
            self.set_info(**info)
            return True
        return False

    def _get_info_blas(self):
        # Default to get the optimized BLAS implementation
        info = get_info('blas_opt')
        if not info:
            warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=3)
            info_src = get_info('blas_src')
            if not info_src:
                warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=3)
                return {}
            dict_append(info, libraries=[('fblas_src', info_src)])
        return info

    def _get_info_lapack(self):
        # Binary LAPACK if found, otherwise fall back to the sources.
        info = get_info('lapack')
        if not info:
            warnings.warn(LapackNotFoundError.__doc__ or '', stacklevel=3)
            info_src = get_info('lapack_src')
            if not info_src:
                warnings.warn(LapackSrcNotFoundError.__doc__ or '', stacklevel=3)
                return {}
            dict_append(info, libraries=[('flapack_src', info_src)])
        return info

    def _calc_info_lapack(self):
        # A plain LAPACK needs a BLAS to go with it.
        info = self._get_info_lapack()
        if info:
            info_blas = self._get_info_blas()
            dict_append(info, **info_blas)
            dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])
            self.set_info(**info)
            return True
        return False

    def _calc_info(self, name):
        # Dispatch to the matching _calc_info_<name> method.
        return getattr(self, '_calc_info_{}'.format(name))()

    def calc_info(self):
        lapack_order, unknown_order = _parse_env_order(self.lapack_order, self.order_env_var_name)
        if len(unknown_order) > 0:
            raise ValueError("lapack_opt_info user defined "
                             "LAPACK order has unacceptable "
                             "values: {}".format(unknown_order))

        for lapack in lapack_order:
            if self._calc_info(lapack):
                return

        if 'lapack' not in lapack_order:
            # Since the user may request *not* to use any library, we still need
            # to raise warnings to signal missing packages!
            warnings.warn(LapackNotFoundError.__doc__ or '', stacklevel=2)
            warnings.warn(LapackSrcNotFoundError.__doc__ or '', stacklevel=2)
+
+
+class _ilp64_opt_info_mixin:
+    symbol_suffix = None
+    symbol_prefix = None
+
+    def _check_info(self, info):
+        macros = dict(info.get('define_macros', []))
+        prefix = macros.get('BLAS_SYMBOL_PREFIX', '')
+        suffix = macros.get('BLAS_SYMBOL_SUFFIX', '')
+
+        if self.symbol_prefix not in (None, prefix):
+            return False
+
+        if self.symbol_suffix not in (None, suffix):
+            return False
+
+        return bool(info)
+
+
class lapack_ilp64_opt_info(lapack_opt_info, _ilp64_opt_info_mixin):
    """Optimized ILP64 (64-bit integer) LAPACK detection."""
    notfounderror = LapackILP64NotFoundError
    lapack_order = ['openblas64_', 'openblas_ilp64']
    order_env_var_name = 'NPY_LAPACK_ILP64_ORDER'

    def _calc_info(self, name):
        # Look up the "<name>_lapack" section and accept it only when the
        # symbol decoration matches (see _ilp64_opt_info_mixin).
        candidate = get_info(name + '_lapack')
        if not self._check_info(candidate):
            return False
        self.set_info(**candidate)
        return True
+
+
class lapack_ilp64_plain_opt_info(lapack_ilp64_opt_info):
    # Same as lapack_ilp64_opt_info, but fix symbol names
    # (require a build with no prefix/suffix symbol decoration).
    symbol_prefix = ''
    symbol_suffix = ''
+
+
class lapack64__opt_info(lapack_ilp64_opt_info):
    # ILP64 LAPACK with the "64_" symbol suffix and no prefix.
    symbol_prefix = ''
    symbol_suffix = '64_'
+
+
class blas_opt_info(system_info):
    """Detect the best available optimized BLAS implementation.

    Candidate backends are tried in ``blas_order``; the order can be
    overridden through the ``NPY_BLAS_ORDER`` environment variable.
    """
    notfounderror = BlasNotFoundError
    # List of all known BLAS libraries, in the default order
    blas_order = ['mkl', 'blis', 'openblas', 'atlas', 'blas']
    order_env_var_name = 'NPY_BLAS_ORDER'

    def _try_sections(self, names):
        # Accept the first section in *names* that get_info() resolves.
        for name in names:
            info = get_info(name)
            if info:
                self.set_info(**info)
                return True
        return False

    def _calc_info_mkl(self):
        return self._try_sections(['blas_mkl'])

    def _calc_info_blis(self):
        return self._try_sections(['blis'])

    def _calc_info_openblas(self):
        return self._try_sections(['openblas'])

    def _calc_info_atlas(self):
        # Prefer newer (3.10+) and threaded ATLAS variants.
        return self._try_sections(['atlas_3_10_blas_threads',
                                   'atlas_3_10_blas',
                                   'atlas_blas_threads',
                                   'atlas_blas'])

    def _calc_info_accelerate(self):
        return self._try_sections(['accelerate'])

    def _calc_info_blas(self):
        # Last resort: a plain, non-optimized BLAS library.
        warnings.warn(BlasOptNotFoundError.__doc__ or '', stacklevel=3)
        info = {}
        dict_append(info, define_macros=[('NO_ATLAS_INFO', 1)])

        blas = get_info('blas')
        if blas:
            dict_append(info, **blas)
        else:
            # Not even a binary BLAS was found -- try the Fortran sources.
            warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=3)

            blas_src = get_info('blas_src')
            if not blas_src:
                warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=3)
                return False
            dict_append(info, libraries=[('fblas_src', blas_src)])

        self.set_info(**info)
        return True

    def _calc_info(self, name):
        # Dispatch to the matching _calc_info_<name> method.
        return getattr(self, '_calc_info_{}'.format(name))()

    def calc_info(self):
        blas_order, unknown_order = _parse_env_order(self.blas_order, self.order_env_var_name)
        if unknown_order:
            raise ValueError("blas_opt_info user defined BLAS order has unacceptable values: {}".format(unknown_order))

        for blas in blas_order:
            if self._calc_info(blas):
                return

        if 'blas' not in blas_order:
            # Since the user may request *not* to use any library, we still need
            # to raise warnings to signal missing packages!
            warnings.warn(BlasNotFoundError.__doc__ or '', stacklevel=2)
            warnings.warn(BlasSrcNotFoundError.__doc__ or '', stacklevel=2)
+
+
class blas_ilp64_opt_info(blas_opt_info, _ilp64_opt_info_mixin):
    """Optimized ILP64 (64-bit integer) BLAS detection."""
    notfounderror = BlasILP64NotFoundError
    blas_order = ['openblas64_', 'openblas_ilp64']
    order_env_var_name = 'NPY_BLAS_ILP64_ORDER'

    def _calc_info(self, name):
        # Accept the section only when its symbol decoration matches
        # (see _ilp64_opt_info_mixin).
        candidate = get_info(name)
        if not self._check_info(candidate):
            return False
        self.set_info(**candidate)
        return True
+
+
class blas_ilp64_plain_opt_info(blas_ilp64_opt_info):
    # ILP64 BLAS with no prefix/suffix symbol decoration required.
    symbol_prefix = ''
    symbol_suffix = ''
+
+
class blas64__opt_info(blas_ilp64_opt_info):
    # ILP64 BLAS with the "64_" symbol suffix and no prefix.
    symbol_prefix = ''
    symbol_suffix = '64_'
+
+
class cblas_info(system_info):
    # Stand-alone CBLAS description; only consulted from
    # blas_info.calc_info (on the Windows branch).
    section = 'cblas'
    dir_env_var = 'CBLAS'
    # No default as it's used only in blas_info
    _lib_names = []
    notfounderror = BlasNotFoundError
+
+
class blas_info(system_info):
    """Detection of a generic (Fortran) BLAS library, with a CBLAS probe."""
    section = 'blas'
    dir_env_var = 'BLAS'
    _lib_names = ['blas']
    notfounderror = BlasNotFoundError

    def calc_info(self):
        """Locate the BLAS libraries and decide whether CBLAS is usable."""
        lib_dirs = self.get_lib_dirs()
        opt = self.get_option_single('blas_libs', 'libraries')
        blas_libs = self.get_libs(opt, self._lib_names)
        info = self.check_libs(lib_dirs, blas_libs, [])
        if info is None:
            return
        else:
            info['include_dirs'] = self.get_include_dirs()
        if platform.system() == 'Windows':
            # The check for windows is needed because get_cblas_libs uses the
            # same compiler that was used to compile Python and msvc is
            # often not installed when mingw is being used. This rough
            # treatment is not desirable, but windows is tricky.
            info['language'] = 'f77'  # XXX: is it generally true?
            # If cblas is given as an option, use those
            cblas_info_obj = cblas_info()
            cblas_opt = cblas_info_obj.get_option_single('cblas_libs', 'libraries')
            cblas_libs = cblas_info_obj.get_libs(cblas_opt, None)
            if cblas_libs:
                info['libraries'] = cblas_libs + blas_libs
                info['define_macros'] = [('HAVE_CBLAS', None)]
        else:
            # On other platforms, probe by actually compiling and linking
            # a tiny CBLAS program.
            lib = self.get_cblas_libs(info)
            if lib is not None:
                info['language'] = 'c'
                info['libraries'] = lib
                info['define_macros'] = [('HAVE_CBLAS', None)]
        self.set_info(**info)

    def get_cblas_libs(self, info):
        """ Check whether we can link with CBLAS interface

        This method will search through several combinations of libraries
        to check whether CBLAS is present:

        1. Libraries in ``info['libraries']``, as is
        2. As 1. but also explicitly adding ``'cblas'`` as a library
        3. As 1. but also explicitly adding ``'blas'`` as a library
        4. Check only library ``'cblas'``
        5. Check only library ``'blas'``

        Parameters
        ----------
        info : dict
           system information dictionary for compilation and linking

        Returns
        -------
        libraries : list of str or None
            a list of libraries that enables the use of CBLAS interface.
            Returns None if not found or a compilation error occurs.

            Since 1.17 returns a list.
        """
        # primitive cblas check by looking for the header and trying to link
        # cblas or blas
        c = customized_ccompiler()
        tmpdir = tempfile.mkdtemp()
        s = textwrap.dedent("""\
            #include <cblas.h>
            int main(int argc, const char *argv[])
            {
                double a[4] = {1,2,3,4};
                double b[4] = {5,6,7,8};
                return cblas_ddot(4, a, 1, b, 1) > 10;
            }""")
        src = os.path.join(tmpdir, 'source.c')
        try:
            with open(src, 'wt') as f:
                f.write(s)

            try:
                # check we can compile (find headers)
                obj = c.compile([src], output_dir=tmpdir,
                                include_dirs=self.get_include_dirs())
            except (distutils.ccompiler.CompileError, distutils.ccompiler.LinkError):
                return None

            # check we can link (find library)
            # some systems have separate cblas and blas libs.
            for libs in [info['libraries'], ['cblas'] + info['libraries'],
                         ['blas'] + info['libraries'], ['cblas'], ['blas']]:
                try:
                    c.link_executable(obj, os.path.join(tmpdir, "a.out"),
                                      libraries=libs,
                                      library_dirs=info['library_dirs'],
                                      extra_postargs=info.get('extra_link_args', []))
                    return libs
                except distutils.ccompiler.LinkError:
                    pass
        finally:
            # Always clean up the scratch build directory.
            shutil.rmtree(tmpdir)
        return None
+
+
class openblas_info(blas_info):
    """Detection of OpenBLAS, including optional symbol-decoration checks."""
    section = 'openblas'
    dir_env_var = 'OPENBLAS'
    _lib_names = ['openblas']
    # Symbols that must be resolvable in the library (subclasses extend).
    _require_symbols = []
    notfounderror = BlasNotFoundError

    @property
    def symbol_prefix(self):
        # Read the optional symbol_prefix option from the config section.
        try:
            return self.cp.get(self.section, 'symbol_prefix')
        except NoOptionError:
            return ''

    @property
    def symbol_suffix(self):
        # Read the optional symbol_suffix option from the config section.
        try:
            return self.cp.get(self.section, 'symbol_suffix')
        except NoOptionError:
            return ''

    def _calc_info(self):
        c = customized_ccompiler()

        lib_dirs = self.get_lib_dirs()

        # Prefer to use libraries over openblas_libs
        opt = self.get_option_single('openblas_libs', 'libraries')
        openblas_libs = self.get_libs(opt, self._lib_names)

        info = self.check_libs(lib_dirs, openblas_libs, [])

        if c.compiler_type == "msvc" and info is None:
            from numpy.distutils.fcompiler import new_fcompiler
            f = new_fcompiler(c_compiler=c)
            if f and f.compiler_type == 'gnu95':
                # Try gfortran-compatible library files
                info = self.check_msvc_gfortran_libs(lib_dirs, openblas_libs)
                # Skip lapack check, we'd need build_ext to do it
                skip_symbol_check = True
        elif info:
            skip_symbol_check = False
            info['language'] = 'c'

        # NOTE(review): skip_symbol_check is only bound on branches where
        # info ends up non-None; the early return below guards the
        # remaining paths -- confirm if check_libs can return an empty dict.
        if info is None:
            return None

        # Add extra info for OpenBLAS
        extra_info = self.calc_extra_info()
        dict_append(info, **extra_info)

        if not (skip_symbol_check or self.check_symbols(info)):
            return None

        info['define_macros'] = [('HAVE_CBLAS', None)]
        if self.symbol_prefix:
            info['define_macros'] += [('BLAS_SYMBOL_PREFIX', self.symbol_prefix)]
        if self.symbol_suffix:
            info['define_macros'] += [('BLAS_SYMBOL_SUFFIX', self.symbol_suffix)]

        return info

    def calc_info(self):
        # Thin wrapper: publish the info only when detection succeeded.
        info = self._calc_info()
        if info is not None:
            self.set_info(**info)

    def check_msvc_gfortran_libs(self, library_dirs, libraries):
        """Build a numpy.distutils 'virtual static library' for mixing
        MSVC-compiled C with gfortran-built OpenBLAS .a archives."""
        # First, find the full path to each library directory
        library_paths = []
        for library in libraries:
            for library_dir in library_dirs:
                # MinGW static ext will be .a
                fullpath = os.path.join(library_dir, library + '.a')
                if os.path.isfile(fullpath):
                    library_paths.append(fullpath)
                    break
            else:
                # A library was not found in any directory: give up.
                return None

        # Generate numpy.distutils virtual static library file
        basename = self.__class__.__name__
        tmpdir = os.path.join(os.getcwd(), 'build', basename)
        if not os.path.isdir(tmpdir):
            os.makedirs(tmpdir)

        info = {'library_dirs': [tmpdir],
                'libraries': [basename],
                'language': 'f77'}

        # .fobjects lists the Fortran archives; .cobjects is left empty.
        fake_lib_file = os.path.join(tmpdir, basename + '.fobjects')
        fake_clib_file = os.path.join(tmpdir, basename + '.cobjects')
        with open(fake_lib_file, 'w') as f:
            f.write("\n".join(library_paths))
        with open(fake_clib_file, 'w') as f:
            pass

        return info

    def check_symbols(self, info):
        """Return True if all _require_symbols (with the configured
        prefix/suffix applied) link successfully against the library."""
        res = False
        c = customized_ccompiler()

        tmpdir = tempfile.mkdtemp()

        prototypes = "\n".join("void %s%s%s();" % (self.symbol_prefix,
                                                   symbol_name,
                                                   self.symbol_suffix)
                               for symbol_name in self._require_symbols)
        calls = "\n".join("%s%s%s();" % (self.symbol_prefix,
                                         symbol_name,
                                         self.symbol_suffix)
                          for symbol_name in self._require_symbols)
        s = textwrap.dedent("""\
            %(prototypes)s
            int main(int argc, const char *argv[])
            {
                %(calls)s
                return 0;
            }""") % dict(prototypes=prototypes, calls=calls)
        src = os.path.join(tmpdir, 'source.c')
        out = os.path.join(tmpdir, 'a.out')
        # Add the additional "extra" arguments
        try:
            extra_args = info['extra_link_args']
        except Exception:
            extra_args = []
        try:
            with open(src, 'wt') as f:
                f.write(s)
            obj = c.compile([src], output_dir=tmpdir)
            try:
                c.link_executable(obj, out, libraries=info['libraries'],
                                  library_dirs=info['library_dirs'],
                                  extra_postargs=extra_args)
                res = True
            except distutils.ccompiler.LinkError:
                res = False
        finally:
            # Always clean up the scratch build directory.
            shutil.rmtree(tmpdir)
        return res
+
class openblas_lapack_info(openblas_info):
    # OpenBLAS accepted as a LAPACK provider: require the LAPACK
    # routine zungqr_ to be resolvable in the library.
    section = 'openblas'
    dir_env_var = 'OPENBLAS'
    _lib_names = ['openblas']
    _require_symbols = ['zungqr_']
    notfounderror = BlasNotFoundError
+
class openblas_clapack_info(openblas_lapack_info):
    # OpenBLAS combined with a separate LAPACK library.
    _lib_names = ['openblas', 'lapack']
+
class openblas_ilp64_info(openblas_info):
    # ILP64 (64-bit integer) OpenBLAS build.
    section = 'openblas_ilp64'
    dir_env_var = 'OPENBLAS_ILP64'
    _lib_names = ['openblas64']
    _require_symbols = ['dgemm_', 'cblas_dgemm']
    notfounderror = BlasILP64NotFoundError

    def _calc_info(self):
        # Reuse the regular OpenBLAS detection, then tag the result.
        info = super()._calc_info()
        if info is not None:
            info['define_macros'] += [('HAVE_BLAS_ILP64', None)]
        return info
+
class openblas_ilp64_lapack_info(openblas_ilp64_info):
    # ILP64 OpenBLAS that also provides LAPACK/LAPACKE entry points.
    _require_symbols = ['dgemm_', 'cblas_dgemm', 'zungqr_', 'LAPACKE_zungqr']

    def _calc_info(self):
        info = super()._calc_info()
        # NOTE(review): truthiness test here vs. "is not None" in the parent;
        # equivalent as long as the parent never returns an empty dict.
        if info:
            info['define_macros'] += [('HAVE_LAPACKE', None)]
        return info
+
class openblas64__info(openblas_ilp64_info):
    # ILP64 Openblas, with default symbol suffix
    # (symbols decorated as e.g. dgemm_64_).
    section = 'openblas64_'
    dir_env_var = 'OPENBLAS64_'
    _lib_names = ['openblas64_']
    symbol_suffix = '64_'
    symbol_prefix = ''
+
class openblas64__lapack_info(openblas_ilp64_lapack_info, openblas64__info):
    # Combination of the two bases; no overrides needed.
    pass
+
class blis_info(blas_info):
    """Detection of the BLIS BLAS implementation ([blis] section / $BLIS)."""
    section = 'blis'
    dir_env_var = 'BLIS'
    _lib_names = ['blis']
    notfounderror = BlasNotFoundError

    def calc_info(self):
        # Resolve library names (config option wins over the default list)
        # and check that they exist in the library directories.
        chosen = self.get_option_single('blis_libs', 'libraries')
        libs = self.get_libs(chosen, self._lib_names)
        found = self.check_libs2(self.get_lib_dirs(), libs, [])
        if found is None:
            return

        # BLIS provides a CBLAS interface, so advertise HAVE_CBLAS along
        # with the header location.
        dict_append(found,
                    language='c',
                    define_macros=[('HAVE_CBLAS', None)],
                    include_dirs=self.get_include_dirs())
        self.set_info(**found)
+
+
class flame_info(system_info):
    """ Usage of libflame for LAPACK operations

    This requires libflame to be compiled with lapack wrappers:

    ./configure --enable-lapack2flame ...

    Be aware that libflame 5.1.0 has some missing names in the shared library, so
    if you have problems, try the static flame library.
    """
    section = 'flame'
    _lib_names = ['flame']
    notfounderror = FlameNotFoundError

    def check_embedded_lapack(self, info):
        """ libflame does not necessarily have a wrapper for fortran LAPACK, we need to check """
        c = customized_ccompiler()

        tmpdir = tempfile.mkdtemp()
        # Probe program: link succeeds only if zungqr_ is provided.
        s = textwrap.dedent("""\
            void zungqr_();
            int main(int argc, const char *argv[])
            {
                zungqr_();
                return 0;
            }""")
        src = os.path.join(tmpdir, 'source.c')
        out = os.path.join(tmpdir, 'a.out')
        # Add the additional "extra" arguments
        extra_args = info.get('extra_link_args', [])
        try:
            with open(src, 'wt') as f:
                f.write(s)
            obj = c.compile([src], output_dir=tmpdir)
            try:
                c.link_executable(obj, out, libraries=info['libraries'],
                                  library_dirs=info['library_dirs'],
                                  extra_postargs=extra_args)
                return True
            except distutils.ccompiler.LinkError:
                return False
        finally:
            # Always clean up the scratch build directory.
            shutil.rmtree(tmpdir)

    def calc_info(self):
        lib_dirs = self.get_lib_dirs()
        flame_libs = self.get_libs('libraries', self._lib_names)

        info = self.check_libs2(lib_dirs, flame_libs, [])
        if info is None:
            return

        if self.check_embedded_lapack(info):
            # check if the user has supplied all information required
            self.set_info(**info)
        else:
            # Try and get the BLAS lib to see if we can get it to work
            blas_info = get_info('blas_opt')
            if not blas_info:
                # since we already failed once, this ain't going to work either
                return

            # Now we need to merge the two dictionaries
            # (lists/tuples are concatenated, everything else is
            # string-concatenated).
            for key in blas_info:
                if isinstance(blas_info[key], list):
                    info[key] = info.get(key, []) + blas_info[key]
                elif isinstance(blas_info[key], tuple):
                    info[key] = info.get(key, ()) + blas_info[key]
                else:
                    info[key] = info.get(key, '') + blas_info[key]

            # Now check again
            if self.check_embedded_lapack(info):
                self.set_info(**info)
+
+
class accelerate_info(system_info):
    # macOS Accelerate / vecLib framework as a BLAS provider.
    section = 'accelerate'
    _lib_names = ['accelerate', 'veclib']
    notfounderror = BlasNotFoundError

    def calc_info(self):
        # Make possible to enable/disable from config file/env var
        libraries = os.environ.get('ACCELERATE')
        if libraries:
            libraries = [libraries]
        else:
            libraries = self.get_libs('libraries', self._lib_names)
        libraries = [lib.strip().lower() for lib in libraries]

        if (sys.platform == 'darwin' and
                not os.getenv('_PYTHON_HOST_PLATFORM', None)):
            # Use the system BLAS from Accelerate or vecLib under OSX
            args = []
            link_args = []
            # Detect Intel hardware to decide whether -msse3 can be used.
            if get_platform()[-4:] == 'i386' or 'intel' in get_platform() or \
               'x86_64' in get_platform() or \
               'i386' in platform.platform():
                intel = 1
            else:
                intel = 0
            if (os.path.exists('/System/Library/Frameworks'
                              '/Accelerate.framework/') and
                    'accelerate' in libraries):
                if intel:
                    args.extend(['-msse3'])
                args.extend([
                    '-I/System/Library/Frameworks/vecLib.framework/Headers'])
                link_args.extend(['-Wl,-framework', '-Wl,Accelerate'])
            elif (os.path.exists('/System/Library/Frameworks'
                                 '/vecLib.framework/') and
                      'veclib' in libraries):
                if intel:
                    args.extend(['-msse3'])
                args.extend([
                    '-I/System/Library/Frameworks/vecLib.framework/Headers'])
                link_args.extend(['-Wl,-framework', '-Wl,vecLib'])

            # Only publish info if one of the frameworks was selected.
            if args:
                self.set_info(extra_compile_args=args,
                              extra_link_args=link_args,
                              define_macros=[('NO_ATLAS_INFO', 3),
                                             ('HAVE_CBLAS', None)])

        return
+
class blas_src_info(system_info):
    # Locate the reference BLAS Fortran sources (for building from source).
    section = 'blas_src'
    dir_env_var = 'BLAS_SRC'
    notfounderror = BlasSrcNotFoundError

    def get_paths(self, section, key):
        # Also look inside a 'blas' subdirectory of each configured path.
        pre_dirs = system_info.get_paths(self, section, key)
        dirs = []
        for d in pre_dirs:
            dirs.extend([d] + self.combine_paths(d, ['blas']))
        return [d for d in dirs if os.path.isdir(d)]

    def calc_info(self):
        src_dirs = self.get_src_dirs()
        src_dir = ''
        # A directory containing daxpy.f is assumed to hold the full set.
        for d in src_dirs:
            if os.path.isfile(os.path.join(d, 'daxpy.f')):
                src_dir = d
                break
        if not src_dir:
            #XXX: Get sources from netlib. May be ask first.
            return
        # Routine names of the reference BLAS, by level.
        blas1 = '''
        caxpy csscal dnrm2 dzasum saxpy srotg zdotc ccopy cswap drot
        dznrm2 scasum srotm zdotu cdotc dasum drotg icamax scnrm2
        srotmg zdrot cdotu daxpy drotm idamax scopy sscal zdscal crotg
        dcabs1 drotmg isamax sdot sswap zrotg cscal dcopy dscal izamax
        snrm2 zaxpy zscal csrot ddot dswap sasum srot zcopy zswap
        scabs1
        '''
        blas2 = '''
        cgbmv chpmv ctrsv dsymv dtrsv sspr2 strmv zhemv ztpmv cgemv
        chpr dgbmv dsyr lsame ssymv strsv zher ztpsv cgerc chpr2 dgemv
        dsyr2 sgbmv ssyr xerbla zher2 ztrmv cgeru ctbmv dger dtbmv
        sgemv ssyr2 zgbmv zhpmv ztrsv chbmv ctbsv dsbmv dtbsv sger
        stbmv zgemv zhpr chemv ctpmv dspmv dtpmv ssbmv stbsv zgerc
        zhpr2 cher ctpsv dspr dtpsv sspmv stpmv zgeru ztbmv cher2
        ctrmv dspr2 dtrmv sspr stpsv zhbmv ztbsv
        '''
        blas3 = '''
        cgemm csymm ctrsm dsyrk sgemm strmm zhemm zsyr2k chemm csyr2k
        dgemm dtrmm ssymm strsm zher2k zsyrk cher2k csyrk dsymm dtrsm
        ssyr2k zherk ztrmm cherk ctrmm dsyr2k ssyrk zgemm zsymm ztrsm
        '''
        sources = [os.path.join(src_dir, f + '.f') \
                   for f in (blas1 + blas2 + blas3).split()]
        #XXX: should we check here actual existence of source files?
        sources = [f for f in sources if os.path.isfile(f)]
        info = {'sources': sources, 'language': 'f77'}
        self.set_info(**info)
+
+
class x11_info(system_info):
    # Detection of the X11 client library and headers (non-Windows only).
    section = 'x11'
    notfounderror = X11NotFoundError
    _lib_names = ['X11']

    def __init__(self):
        system_info.__init__(self,
                             default_lib_dirs=default_x11_lib_dirs,
                             default_include_dirs=default_x11_include_dirs)

    def calc_info(self):
        if sys.platform  in ['win32']:
            return
        lib_dirs = self.get_lib_dirs()
        include_dirs = self.get_include_dirs()
        opt = self.get_option_single('x11_libs', 'libraries')
        x11_libs = self.get_libs(opt, self._lib_names)
        info = self.check_libs(lib_dirs, x11_libs, [])
        if info is None:
            return
        # Find the include directory that actually contains X11/X.h.
        inc_dir = None
        for d in include_dirs:
            if self.combine_paths(d, 'X11/X.h'):
                inc_dir = d
                break
        if inc_dir is not None:
            dict_append(info, include_dirs=[inc_dir])
        self.set_info(**info)
+
+
class _numpy_info(system_info):
    # Base class for detecting the legacy Numeric/numarray/numpy packages
    # by importing them and inspecting their installation.
    section = 'Numeric'
    modulename = 'Numeric'
    notfounderror = NumericNotFoundError

    def __init__(self):
        include_dirs = []
        try:
            module = __import__(self.modulename)
            prefix = []
            for name in module.__file__.split(os.sep):
                if name == 'lib':
                    break
                prefix.append(name)
            # NOTE(review): 'prefix' is computed but never used below.

            # Ask numpy for its own include path before attempting
            # anything else
            try:
                include_dirs.append(getattr(module, 'get_include')())
            except AttributeError:
                pass

            include_dirs.append(sysconfig.get_path('include'))
        except ImportError:
            pass
        # Always add the interpreter's include directories as fallbacks.
        py_incl_dir = sysconfig.get_path('include')
        include_dirs.append(py_incl_dir)
        py_pincl_dir = sysconfig.get_path('platinclude')
        if py_pincl_dir not in include_dirs:
            include_dirs.append(py_pincl_dir)
        for d in default_include_dirs:
            d = os.path.join(d, os.path.basename(py_incl_dir))
            if d not in include_dirs:
                include_dirs.append(d)
        system_info.__init__(self,
                             default_lib_dirs=[],
                             default_include_dirs=include_dirs)

    def calc_info(self):
        try:
            module = __import__(self.modulename)
        except ImportError:
            return
        info = {}
        macros = []
        # Export <MODULE>_VERSION and <MODULE> macros if a version is found.
        for v in ['__version__', 'version']:
            vrs = getattr(module, v, None)
            if vrs is None:
                continue
            macros = [(self.modulename.upper() + '_VERSION',
                      _c_string_literal(vrs)),
                      (self.modulename.upper(), None)]
            break
        dict_append(info, define_macros=macros)
        include_dirs = self.get_include_dirs()
        # Find the include directory that contains <module>/arrayobject.h.
        inc_dir = None
        for d in include_dirs:
            if self.combine_paths(d,
                                  os.path.join(self.modulename,
                                               'arrayobject.h')):
                inc_dir = d
                break
        if inc_dir is not None:
            dict_append(info, include_dirs=[inc_dir])
        if info:
            self.set_info(**info)
        return
+
+
+class numarray_info(_numpy_info):
+    # _numpy_info flavour for the legacy numarray package.
+    section = 'numarray'
+    modulename = 'numarray'
+
+
+class Numeric_info(_numpy_info):
+    # _numpy_info flavour for the legacy Numeric package.
+    section = 'Numeric'
+    modulename = 'Numeric'
+
+
+class numpy_info(_numpy_info):
+    # _numpy_info flavour for numpy itself.
+    section = 'numpy'
+    modulename = 'numpy'
+
+
+class numerix_info(system_info):
+    # Resolves which numeric backend ('numpy', 'numeric' or 'numarray')
+    # to use: the NUMERIX environment variable wins, otherwise whichever
+    # package imports successfully (numpy tried first).
+    section = 'numerix'
+
+    def calc_info(self):
+        # which is a (selection, provenance) pair.
+        which = None, None
+        if os.getenv("NUMERIX"):
+            which = os.getenv("NUMERIX"), "environment var"
+        # If all the above fail, default to numpy.
+        if which[0] is None:
+            which = "numpy", "defaulted"
+            try:
+                # NOTE(review): this re-assigns the same default set just
+                # above; kept as-is to match the upstream source.
+                import numpy  # noqa: F401
+                which = "numpy", "defaulted"
+            except ImportError as e:
+                msg1 = str(e)
+                try:
+                    import Numeric  # noqa: F401
+                    which = "numeric", "defaulted"
+                except ImportError as e:
+                    msg2 = str(e)
+                    try:
+                        import numarray  # noqa: F401
+                        which = "numarray", "defaulted"
+                    except ImportError as e:
+                        msg3 = str(e)
+                        # All three imports failed: log every failure.
+                        log.info(msg1)
+                        log.info(msg2)
+                        log.info(msg3)
+        which = which[0].strip().lower(), which[1]
+        if which[0] not in ["numeric", "numarray", "numpy"]:
+            raise ValueError("numerix selector must be either 'Numeric' "
+                             "or 'numarray' or 'numpy' but the value obtained"
+                             " from the %s was '%s'." % (which[1], which[0]))
+        # Export the decision so child processes see the same backend.
+        os.environ['NUMERIX'] = which[0]
+        self.set_info(**get_info(which[0]))
+
+
+class f2py_info(system_info):
+    # Supplies the f2py support source (fortranobject.c) and its include
+    # directory, needed when building f2py-generated extension modules.
+    def calc_info(self):
+        try:
+            import numpy.f2py as f2py
+        except ImportError:
+            return
+        f2py_dir = os.path.join(os.path.dirname(f2py.__file__), 'src')
+        self.set_info(sources=[os.path.join(f2py_dir, 'fortranobject.c')],
+                      include_dirs=[f2py_dir])
+        return
+
+
+class boost_python_info(system_info):
+    # Locates a Boost.Python *source* tree and exposes its sources as a
+    # buildable library entry ('boost_python_src').
+    section = 'boost_python'
+    dir_env_var = 'BOOST'
+
+    def get_paths(self, section, key):
+        # Extend the configured paths with any 'boost*' subdirectories,
+        # keeping only those that exist.
+        pre_dirs = system_info.get_paths(self, section, key)
+        dirs = []
+        for d in pre_dirs:
+            dirs.extend([d] + self.combine_paths(d, ['boost*']))
+        return [d for d in dirs if os.path.isdir(d)]
+
+    def calc_info(self):
+        # A directory qualifies as a Boost source tree if it contains
+        # libs/python/src/module.cpp.
+        src_dirs = self.get_src_dirs()
+        src_dir = ''
+        for d in src_dirs:
+            if os.path.isfile(os.path.join(d, 'libs', 'python', 'src',
+                                           'module.cpp')):
+                src_dir = d
+                break
+        if not src_dir:
+            return
+        # Python's own include dirs are needed to compile Boost.Python.
+        py_incl_dirs = [sysconfig.get_path('include')]
+        py_pincl_dir = sysconfig.get_path('platinclude')
+        if py_pincl_dir not in py_incl_dirs:
+            py_incl_dirs.append(py_pincl_dir)
+        srcs_dir = os.path.join(src_dir, 'libs', 'python', 'src')
+        bpl_srcs = glob(os.path.join(srcs_dir, '*.cpp'))
+        bpl_srcs += glob(os.path.join(srcs_dir, '*', '*.cpp'))
+        info = {'libraries': [('boost_python_src',
+                               {'include_dirs': [src_dir] + py_incl_dirs,
+                                'sources':bpl_srcs}
+                              )],
+                'include_dirs': [src_dir],
+                }
+        if info:
+            self.set_info(**info)
+        return
+
+
+class agg2_info(system_info):
+    # Locates an Anti-Grain Geometry (agg2) source tree and exposes its
+    # sources as a buildable library entry ('agg2_src').
+    section = 'agg2'
+    dir_env_var = 'AGG2'
+
+    def get_paths(self, section, key):
+        # Extend the configured paths with any 'agg2*' subdirectories,
+        # keeping only those that exist.
+        pre_dirs = system_info.get_paths(self, section, key)
+        dirs = []
+        for d in pre_dirs:
+            dirs.extend([d] + self.combine_paths(d, ['agg2*']))
+        return [d for d in dirs if os.path.isdir(d)]
+
+    def calc_info(self):
+        # A directory qualifies as an agg2 tree if it contains
+        # src/agg_affine_matrix.cpp.
+        src_dirs = self.get_src_dirs()
+        src_dir = ''
+        for d in src_dirs:
+            if os.path.isfile(os.path.join(d, 'src', 'agg_affine_matrix.cpp')):
+                src_dir = d
+                break
+        if not src_dir:
+            return
+        # Platform backend: win32 bitmap support on Windows, X11 otherwise.
+        if sys.platform == 'win32':
+            agg2_srcs = glob(os.path.join(src_dir, 'src', 'platform',
+                                          'win32', 'agg_win32_bmp.cpp'))
+        else:
+            agg2_srcs = glob(os.path.join(src_dir, 'src', '*.cpp'))
+            agg2_srcs += [os.path.join(src_dir, 'src', 'platform',
+                                       'X11',
+                                       'agg_platform_support.cpp')]
+
+        info = {'libraries':
+                [('agg2_src',
+                  {'sources': agg2_srcs,
+                   'include_dirs': [os.path.join(src_dir, 'include')],
+                  }
+                 )],
+                'include_dirs': [os.path.join(src_dir, 'include')],
+                }
+        if info:
+            self.set_info(**info)
+        return
+
+
+class _pkg_config_info(system_info):
+    """Base class for info classes that query a pkg-config-style tool
+    (``pkg-config``, ``wx-config``, ...) for version, cflags and libs."""
+    section = None
+    config_env_var = 'PKG_CONFIG'
+    default_config_exe = 'pkg-config'
+    append_config_exe = ''
+    version_macro_name = None
+    release_macro_name = None
+    version_flag = '--modversion'
+    cflags_flag = '--cflags'
+
+    def get_config_exe(self):
+        # An environment override (e.g. PKG_CONFIG) beats the default tool.
+        if self.config_env_var in os.environ:
+            return os.environ[self.config_env_var]
+        return self.default_config_exe
+
+    def get_config_output(self, config_exe, option):
+        # Return the tool's stdout as a string, or None on any failure.
+        cmd = config_exe + ' ' + self.append_config_exe + ' ' + option
+        # NOTE(review): cmd is a single whitespace-joined string passed to
+        # check_output without shell=True; on POSIX that string is treated
+        # as the program name, raising OSError (silently swallowed below).
+        # Confirm against upstream numpy.distutils before relying on this.
+        try:
+            o = subprocess.check_output(cmd)
+        except (OSError, subprocess.CalledProcessError):
+            pass
+        else:
+            o = filepath_from_subprocess_output(o)
+            return o
+
+    def calc_info(self):
+        config_exe = find_executable(self.get_config_exe())
+        if not config_exe:
+            log.warn('File not found: %s. Cannot determine %s info.' \
+                  % (config_exe, self.section))
+            return
+        info = {}
+        macros = []
+        libraries = []
+        library_dirs = []
+        include_dirs = []
+        extra_link_args = []
+        extra_compile_args = []
+        # Version -> <CLASSNAME>="<ver>" macro, plus optional per-version
+        # and per-release macros when the subclass names them.
+        version = self.get_config_output(config_exe, self.version_flag)
+        if version:
+            macros.append((self.__class__.__name__.split('.')[-1].upper(),
+                           _c_string_literal(version)))
+            if self.version_macro_name:
+                macros.append((self.version_macro_name + '_%s'
+                               % (version.replace('.', '_')), None))
+        if self.release_macro_name:
+            release = self.get_config_output(config_exe, '--release')
+            if release:
+                macros.append((self.release_macro_name + '_%s'
+                               % (release.replace('.', '_')), None))
+        # Split '--libs' output into -l / -L / other linker arguments.
+        opts = self.get_config_output(config_exe, '--libs')
+        if opts:
+            for opt in opts.split():
+                if opt[:2] == '-l':
+                    libraries.append(opt[2:])
+                elif opt[:2] == '-L':
+                    library_dirs.append(opt[2:])
+                else:
+                    extra_link_args.append(opt)
+        # Split cflags output into -I / -D / other compiler arguments.
+        opts = self.get_config_output(config_exe, self.cflags_flag)
+        if opts:
+            for opt in opts.split():
+                if opt[:2] == '-I':
+                    include_dirs.append(opt[2:])
+                elif opt[:2] == '-D':
+                    if '=' in opt:
+                        n, v = opt[2:].split('=')
+                        macros.append((n, v))
+                    else:
+                        macros.append((opt[2:], None))
+                else:
+                    extra_compile_args.append(opt)
+        if macros:
+            dict_append(info, define_macros=macros)
+        if libraries:
+            dict_append(info, libraries=libraries)
+        if library_dirs:
+            dict_append(info, library_dirs=library_dirs)
+        if include_dirs:
+            dict_append(info, include_dirs=include_dirs)
+        if extra_link_args:
+            dict_append(info, extra_link_args=extra_link_args)
+        if extra_compile_args:
+            dict_append(info, extra_compile_args=extra_compile_args)
+        if info:
+            self.set_info(**info)
+        return
+
+
+class wx_info(_pkg_config_info):
+    # wxWidgets, queried via its own 'wx-config' tool (C++ flags).
+    section = 'wx'
+    config_env_var = 'WX_CONFIG'
+    default_config_exe = 'wx-config'
+    append_config_exe = ''
+    version_macro_name = 'WX_VERSION'
+    release_macro_name = 'WX_RELEASE'
+    version_flag = '--version'
+    cflags_flag = '--cxxflags'
+
+
+class gdk_pixbuf_xlib_2_info(_pkg_config_info):
+    # pkg-config module: gdk-pixbuf-xlib-2.0
+    section = 'gdk_pixbuf_xlib_2'
+    append_config_exe = 'gdk-pixbuf-xlib-2.0'
+    version_macro_name = 'GDK_PIXBUF_XLIB_VERSION'
+
+
+class gdk_pixbuf_2_info(_pkg_config_info):
+    # pkg-config module: gdk-pixbuf-2.0
+    section = 'gdk_pixbuf_2'
+    append_config_exe = 'gdk-pixbuf-2.0'
+    version_macro_name = 'GDK_PIXBUF_VERSION'
+
+
+class gdk_x11_2_info(_pkg_config_info):
+    # pkg-config module: gdk-x11-2.0
+    section = 'gdk_x11_2'
+    append_config_exe = 'gdk-x11-2.0'
+    version_macro_name = 'GDK_X11_VERSION'
+
+
+class gdk_2_info(_pkg_config_info):
+    # pkg-config module: gdk-2.0
+    section = 'gdk_2'
+    append_config_exe = 'gdk-2.0'
+    version_macro_name = 'GDK_VERSION'
+
+
+class gdk_info(_pkg_config_info):
+    # pkg-config module: gdk (GTK 1.x era)
+    section = 'gdk'
+    append_config_exe = 'gdk'
+    version_macro_name = 'GDK_VERSION'
+
+
+class gtkp_x11_2_info(_pkg_config_info):
+    # pkg-config module: gtk+-x11-2.0
+    section = 'gtkp_x11_2'
+    append_config_exe = 'gtk+-x11-2.0'
+    version_macro_name = 'GTK_X11_VERSION'
+
+
+class gtkp_2_info(_pkg_config_info):
+    # pkg-config module: gtk+-2.0
+    section = 'gtkp_2'
+    append_config_exe = 'gtk+-2.0'
+    version_macro_name = 'GTK_VERSION'
+
+
+class xft_info(_pkg_config_info):
+    # pkg-config module: xft
+    section = 'xft'
+    append_config_exe = 'xft'
+    version_macro_name = 'XFT_VERSION'
+
+
+class freetype2_info(_pkg_config_info):
+    # pkg-config module: freetype2
+    section = 'freetype2'
+    append_config_exe = 'freetype2'
+    version_macro_name = 'FREETYPE2_VERSION'
+
+
+class amd_info(system_info):
+    # Locates the AMD (approximate minimum degree ordering) library,
+    # used by the umfpack_info class below.
+    section = 'amd'
+    dir_env_var = 'AMD'
+    _lib_names = ['amd']
+
+    def calc_info(self):
+        lib_dirs = self.get_lib_dirs()
+
+        opt = self.get_option_single('amd_libs', 'libraries')
+        amd_libs = self.get_libs(opt, self._lib_names)
+        info = self.check_libs(lib_dirs, amd_libs, [])
+        if info is None:
+            return
+
+        include_dirs = self.get_include_dirs()
+
+        # The first include dir that contains amd.h wins.
+        inc_dir = None
+        for d in include_dirs:
+            p = self.combine_paths(d, 'amd.h')
+            if p:
+                inc_dir = os.path.dirname(p[0])
+                break
+        if inc_dir is not None:
+            dict_append(info, include_dirs=[inc_dir],
+                        define_macros=[('SCIPY_AMD_H', None)],
+                        swig_opts=['-I' + inc_dir])
+
+        self.set_info(**info)
+        return
+
+
+class umfpack_info(system_info):
+    # Locates the UMFPACK sparse LU library; merges in the AMD info
+    # since UMFPACK links against it.
+    section = 'umfpack'
+    dir_env_var = 'UMFPACK'
+    notfounderror = UmfpackNotFoundError
+    _lib_names = ['umfpack']
+
+    def calc_info(self):
+        lib_dirs = self.get_lib_dirs()
+
+        opt = self.get_option_single('umfpack_libs', 'libraries')
+        umfpack_libs = self.get_libs(opt, self._lib_names)
+        info = self.check_libs(lib_dirs, umfpack_libs, [])
+        if info is None:
+            return
+
+        include_dirs = self.get_include_dirs()
+
+        # umfpack.h may live either directly in the dir or in a
+        # 'umfpack' subdirectory; first match wins.
+        inc_dir = None
+        for d in include_dirs:
+            p = self.combine_paths(d, ['', 'umfpack'], 'umfpack.h')
+            if p:
+                inc_dir = os.path.dirname(p[0])
+                break
+        if inc_dir is not None:
+            dict_append(info, include_dirs=[inc_dir],
+                        define_macros=[('SCIPY_UMFPACK_H', None)],
+                        swig_opts=['-I' + inc_dir])
+
+        dict_append(info, **get_info('amd'))
+
+        self.set_info(**info)
+        return
+
+
+def combine_paths(*args, **kws):
+    """ Return a list of existing paths composed by all combinations of
+        items from arguments.
+    """
+    # Normalize: drop empty args, wrap bare strings into one-element lists.
+    r = []
+    for a in args:
+        if not a:
+            continue
+        if is_string(a):
+            a = [a]
+        r.append(a)
+    args = r
+    if not args:
+        return []
+    if len(args) == 1:
+        # Single level: just glob each pattern and concatenate the hits.
+        result = reduce(lambda a, b: a + b, map(glob, args[0]), [])
+    elif len(args) == 2:
+        # Two levels: glob the cartesian product of joined paths.
+        result = []
+        for a0 in args[0]:
+            for a1 in args[1]:
+                result.extend(glob(os.path.join(a0, a1)))
+    else:
+        # More levels: fold the first two, then recurse on the rest.
+        result = combine_paths(*(combine_paths(args[0], args[1]) + args[2:]))
+    log.debug('(paths: %s)', ','.join(result))
+    return result
+
+# Ranking of implementation languages; dict_append keeps the entry with
+# the highest rank when several blocks of info declare a 'language'.
+language_map = {'c': 0, 'c++': 1, 'f77': 2, 'f90': 3}
+inv_language_map = {0: 'c', 1: 'c++', 2: 'f77', 3: 'f90'}
+
+
+def dict_append(d, **kws):
+    """Merge keyword entries into build-info dict ``d`` in place.
+
+    List-valued keys are extended; for the path/flag/macro keys listed
+    below, duplicates are dropped.  'language' entries are collected and
+    the highest-ranked language (per ``language_map``) wins.
+    """
+    languages = []
+    for k, v in kws.items():
+        if k == 'language':
+            languages.append(v)
+            continue
+        if k in d:
+            if k in ['library_dirs', 'include_dirs',
+                     'extra_compile_args', 'extra_link_args',
+                     'runtime_library_dirs', 'define_macros']:
+                # De-duplicate while preserving order.
+                [d[k].append(vv) for vv in v if vv not in d[k]]
+            else:
+                d[k].extend(v)
+        else:
+            d[k] = v
+    if languages:
+        l = inv_language_map[max([language_map.get(l, 0) for l in languages])]
+        d['language'] = l
+    return
+
+
+def parseCmdLine(argv=(None,)):
+    # Parse show_all()'s command line: '-v' for verbosity, positional
+    # args name the info classes to show.  NOTE(review): optparse is
+    # deprecated in favour of argparse; kept to match what appears to be
+    # vendored numpy.distutils source.
+    import optparse
+    parser = optparse.OptionParser("usage: %prog [-v] [info objs]")
+    parser.add_option('-v', '--verbose', action='store_true', dest='verbose',
+                      default=False,
+                      help='be verbose and print more messages')
+
+    opts, args = parser.parse_args(args=argv[1:])
+    return opts, args
+
+
+def show_all(argv=None):
+    """Instantiate every *_info class defined in this module and report
+    what it detects; positional args restrict the set of classes shown."""
+    import inspect
+    if argv is None:
+        argv = sys.argv
+    opts, args = parseCmdLine(argv)
+    if opts.verbose:
+        log.set_threshold(log.DEBUG)
+    else:
+        log.set_threshold(log.INFO)
+    # Normalize requested names to the '<name>_info' convention.
+    show_only = []
+    for n in args:
+        if n[-5:] != '_info':
+            n = n + '_info'
+        show_only.append(n)
+    # NOTE(review): this local deliberately shadows the function's name.
+    show_all = not show_only
+    _gdict_ = globals().copy()
+    for name, c in _gdict_.items():
+        if not inspect.isclass(c):
+            continue
+        # Only concrete system_info subclasses are of interest.
+        if not issubclass(c, system_info) or c is system_info:
+            continue
+        if not show_all:
+            if name not in show_only:
+                continue
+            del show_only[show_only.index(name)]
+        conf = c()
+        conf.verbosity = 2
+        # FIXME: r not used
+        r = conf.get_info()
+    if show_only:
+        # Anything left over was requested but never defined here.
+        log.info('Info classes not defined: %s', ','.join(show_only))
+
+# Allow running this module directly to dump everything it can detect.
+if __name__ == "__main__":
+    show_all()

+ 149 - 0
intel-2021.02/numpy/unixccompiler.py

@@ -0,0 +1,149 @@
+"""
+unixccompiler - can handle very long argument lists for ar.
+
+"""
+import os
+import sys
+import subprocess
+
+from distutils.errors import CompileError, DistutilsExecError, LibError
+from distutils.unixccompiler import UnixCCompiler
+from numpy.distutils.ccompiler import replace_method
+from numpy.distutils.misc_util import _commandline_dep_string
+from numpy.distutils import log
+
+# Note that UnixCCompiler._compile appeared in Python 2.3
+def UnixCCompiler__compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
+    """Compile a single source file with a Unix-style compiler."""
+    # HP ad-hoc fix, see ticket 1383
+    ccomp = self.compiler_so
+    if ccomp[0] == 'aCC':
+        # remove flags that will trigger ANSI-C mode for aCC
+        if '-Ae' in ccomp:
+            ccomp.remove('-Ae')
+        if '-Aa' in ccomp:
+            ccomp.remove('-Aa')
+        # add flags for (almost) sane C++ handling
+        ccomp += ['-AA']
+        self.compiler_so = ccomp
+    # ensure OPT environment variable is read
+    if 'OPT' in os.environ:
+        # XXX who uses this?
+        from sysconfig import get_config_vars
+        opt = " ".join(os.environ['OPT'].split())
+        gcv_opt = " ".join(get_config_vars('OPT')[0].split())
+        ccomp_s = " ".join(self.compiler_so)
+        # Substitute Python's configured OPT flags with the user's OPT
+        # in both the compile and link command lines.
+        if opt not in ccomp_s:
+            ccomp_s = ccomp_s.replace(gcv_opt, opt)
+            self.compiler_so = ccomp_s.split()
+        llink_s = " ".join(self.linker_so)
+        if opt not in llink_s:
+            self.linker_so = llink_s.split() + opt.split()
+
+    display = '%s: %s' % (os.path.basename(self.compiler_so[0]), src)
+
+    # gcc style automatic dependencies, outputs a makefile (-MF) that lists
+    # all headers needed by a c file as a side effect of compilation (-MMD)
+    if getattr(self, '_auto_depends', False):
+        deps = ['-MMD', '-MF', obj + '.d']
+    else:
+        deps = []
+
+    try:
+        self.spawn(self.compiler_so + cc_args + [src, '-o', obj] + deps +
+                   extra_postargs, display = display)
+    except DistutilsExecError as e:
+        # Re-raise as the exception type distutils callers expect.
+        msg = str(e)
+        raise CompileError(msg)
+
+    # add commandline flags to dependency file
+    if deps:
+        # After running the compiler, the file created will be in EBCDIC
+        # but will not be tagged as such. This tags it so the file does not
+        # have multiple different encodings being written to it
+        if sys.platform == 'zos':
+            subprocess.check_output(['chtag', '-tc', 'IBM1047', obj + '.d'])
+        with open(obj + '.d', 'a') as f:
+            f.write(_commandline_dep_string(cc_args, extra_postargs, pp_opts))
+
+# Install the patched method on distutils' UnixCCompiler.
+replace_method(UnixCCompiler, '_compile', UnixCCompiler__compile)
+
+
+def UnixCCompiler_create_static_lib(self, objects, output_libname,
+                                    output_dir=None, debug=0, target_lang=None):
+    """
+    Build a static library in a separate sub-process.
+
+    Parameters
+    ----------
+    objects : list or tuple of str
+        List of paths to object files used to build the static library.
+    output_libname : str
+        The library name as an absolute or relative (if `output_dir` is used)
+        path.
+    output_dir : str, optional
+        The path to the output directory. Default is None, in which case
+        the ``output_dir`` attribute of the UnixCCompiler instance is used.
+    debug : bool, optional
+        This parameter is not used.
+    target_lang : str, optional
+        This parameter is not used.
+
+    Returns
+    -------
+    None
+
+    """
+    objects, output_dir = self._fix_object_args(objects, output_dir)
+
+    output_filename = \
+                    self.library_filename(output_libname, output_dir=output_dir)
+
+    if self._need_link(objects, output_filename):
+        try:
+            # previous .a may be screwed up; best to remove it first
+            # and recreate.
+            # Also, ar on OS X doesn't handle updating universal archives
+            os.unlink(output_filename)
+        except (IOError, OSError):
+            pass
+        self.mkpath(os.path.dirname(output_filename))
+        tmp_objects = objects + self.objects
+        # Feed 'ar' at most 50 objects per invocation so very long object
+        # lists never exceed the command-line length limit.
+        while tmp_objects:
+            objects = tmp_objects[:50]
+            tmp_objects = tmp_objects[50:]
+            display = '%s: adding %d object files to %s' % (
+                           os.path.basename(self.archiver[0]),
+                           len(objects), output_filename)
+            self.spawn(self.archiver + [output_filename] + objects,
+                       display = display)
+
+        # Not many Unices required ranlib anymore -- SunOS 4.x is, I
+        # think the only major Unix that does.  Maybe we need some
+        # platform intelligence here to skip ranlib if it's not
+        # needed -- or maybe Python's configure script took care of
+        # it for us, hence the check for leading colon.
+        if self.ranlib:
+            display = '%s:@ %s' % (os.path.basename(self.ranlib[0]),
+                                   output_filename)
+            try:
+                self.spawn(self.ranlib + [output_filename],
+                           display = display)
+            except DistutilsExecError as e:
+                # Re-raise as the exception type distutils callers expect.
+                msg = str(e)
+                raise LibError(msg)
+    else:
+        log.debug("skipping %s (up-to-date)", output_filename)
+    return
+
+# Install the patched method on distutils' UnixCCompiler.
+replace_method(UnixCCompiler, 'create_static_lib',
+               UnixCCompiler_create_static_lib)
+
+def UnixCCompiler_library_option(self, lib):
+    # Pass through entries that are already flags; otherwise emit -l<lib>.
+    if lib[0]=='-':
+        return lib
+    else:
+        return "-l" + lib
+
+# Install the patched method on distutils' UnixCCompiler.
+replace_method(UnixCCompiler, 'library_option',
+               UnixCCompiler_library_option)