# This file provides configuration information about non-Python dependencies for
# numpy.distutils-using packages. Create a file like this called "site.cfg" next
# to your package's setup.py file and fill in the appropriate sections. Not all
# packages will use all sections, so you should leave out sections that your
# package does not use.
# To assist automatic installation tools such as easy_install, the user's home
# directory will also be checked for the file ~/.numpy-site.cfg.
# The format of the file is that of the standard library's ConfigParser module.
# No interpolation is allowed; the RawConfigParser class is used to load it.
#
# https://docs.python.org/library/configparser.html
#
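# For orientation, a complete site.cfg might look like the following. This is
# only a minimal sketch; the paths and the OpenBLAS prefix are illustrative and
# should be adjusted to your system (the individual options and sections are
# described below):
#
# [ALL]
# library_dirs = /usr/local/lib
# include_dirs = /usr/local/include
#
# [openblas]
# libraries = openblas
# library_dirs = /opt/OpenBLAS/lib
# include_dirs = /opt/OpenBLAS/include
# runtime_library_dirs = /opt/OpenBLAS/lib
#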
# Each section defines settings that apply to one particular dependency. Some
# settings are general and apply to nearly any section; those are defined here.
# Settings specific to a particular dependency are defined near its section.
#
# libraries
#     Comma-separated list of library names to link the extension against.
#     Note that these should be just the names, not the filenames. For
#     example, the file "libfoo.so" would become simply "foo".
#     libraries = lapack,f77blas,cblas,atlas
#
# library_dirs
#     List of directories to add to the library search path when compiling
#     extensions with this dependency. Use the character given by os.pathsep
#     to separate the items in the list. Note that this character is known to
#     vary on some unix-like systems; if a colon does not work, try a comma.
#     This also applies to include_dirs and src_dirs (see below).
#     On UN*X-type systems (OS X, most BSD and Linux systems):
#     library_dirs = /usr/lib:/usr/local/lib
#     On Windows:
#     library_dirs = c:\mingw\lib,c:\atlas\lib
#     On some BSD and Linux systems:
#     library_dirs = /usr/lib,/usr/local/lib
#
# include_dirs
#     List of directories to add to the header file search path.
#     include_dirs = /usr/include:/usr/local/include
#
# src_dirs
#     List of directories that contain extracted source code for the
#     dependency. For some dependencies, numpy.distutils will be able to build
#     them from source if binaries cannot be found. The FORTRAN BLAS and
#     LAPACK libraries are one example. However, most dependencies are more
#     complicated and require actual installation that you need to do
#     yourself.
#     src_dirs = /home/rkern/src/BLAS_SRC:/home/rkern/src/LAPACK_SRC
#
# search_static_first
#     Boolean (one of (0, false, no, off) for False or (1, true, yes, on) for
#     True) to tell numpy.distutils to prefer static libraries (.a) over
#     shared libraries (.so). It is turned off by default.
#     search_static_first = false
#
# runtime_library_dirs/rpath
#     List of directories that contain the libraries that should be used at
#     runtime, thereby disregarding the LD_LIBRARY_PATH variable.
#     See 'library_dirs' for formatting on different platforms.
#     runtime_library_dirs = /opt/blas/lib:/opt/lapack/lib
#     or equivalently
#     rpath = /opt/blas/lib:/opt/lapack/lib
#
# extra_compile_args
#     Additional arguments passed to the compilation of sources.
#     Simple variable with no parsing done; provide a single line with all
#     complete flags.
#     extra_compile_args = -g -ftree-vectorize
#
# extra_link_args
#     Additional arguments passed when libraries/executables are linked.
#     Simple variable with no parsing done; provide a single line with all
#     complete flags.
#     extra_link_args = -lgfortran
#
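# Most of these general settings can be combined inside a single dependency
# section. For example (a sketch using the illustrative flag values from above;
# the [openblas] section itself is described further below):
#
# [openblas]
# libraries = openblas
# extra_compile_args = -g -ftree-vectorize
# extra_link_args = -lgfortran
#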
# Defaults
# ========
# The settings given here will apply to all other sections if not overridden.
# This is a good place to add general library and include directories like
# /usr/local/{lib,include}
#
#[ALL]
#library_dirs = /usr/local/lib
#include_dirs = /usr/local/include
#
# Atlas
# -----
# Atlas is an open source optimized implementation of the BLAS and Lapack
# routines. NumPy will try to build against Atlas by default when available in
# the system library dirs. To build numpy against a custom installation of
# Atlas, you can add an explicit section such as the following. Here we assume
# that Atlas was configured with ``prefix=/opt/atlas``.
#
# [atlas]
# library_dirs = /opt/atlas/lib
# include_dirs = /opt/atlas/include
#
# OpenBLAS
# --------
# OpenBLAS is another open source optimized implementation of BLAS and Lapack
# and can be seen as an alternative to Atlas. To build numpy against OpenBLAS
# instead of Atlas, use this section instead of the one above, adjusting as
# needed for your configuration (in the following example OpenBLAS was
# installed with ``make install PREFIX=/opt/OpenBLAS``).
# OpenBLAS is typically installed as a shared library. To ensure that the
# OpenBLAS library you linked against is also the one used at runtime, set the
# runtime_library_dirs variable.
#
# **Warning**: OpenBLAS, by default, is built in multithreaded mode. Due to the
# way Python's multiprocessing is implemented, a multithreaded OpenBLAS can
# cause programs using both to hang as soon as a worker process is forked on
# POSIX systems (Linux, Mac).
# This is fixed in OpenBLAS 0.2.9 for the pthread build; the OpenMP build
# using GNU OpenMP is, as of gcc-4.9, not fixed yet.
# Python 3.4 introduced a new start method in multiprocessing, called
# "forkserver", which solves this problem. For older versions, make sure
# OpenBLAS is built using pthreads or use Python threads instead of
# multiprocessing.
# (This problem does not exist with multithreaded ATLAS.)
#
# https://docs.python.org/library/multiprocessing.html#contexts-and-start-methods
# https://github.com/xianyi/OpenBLAS/issues/294
#
# [openblas]
# libraries = openblas
# library_dirs = /opt/OpenBLAS/lib
# include_dirs = /opt/OpenBLAS/include
# runtime_library_dirs = /opt/OpenBLAS/lib
#
# BLIS
# ----
# BLIS (https://github.com/flame/blis) also provides a BLAS interface. It's a
# relatively new library; its performance in some cases seems to match that of
# MKL and OpenBLAS, but it hasn't been benchmarked with NumPy or SciPy yet.
#
# Notes on compiling BLIS itself:
#   - the CBLAS interface (needed by NumPy) isn't built by default; define
#     BLIS_ENABLE_CBLAS to build it.
#   - ``./configure auto`` doesn't support 32-bit builds; see gh-7294 for
#     details.
# Notes on compiling NumPy against BLIS:
#   - ``include_dirs`` below should be the directory where the BLIS cblas.h
#     header is installed.
#
# [blis]
# libraries = blis
# library_dirs = /home/username/blis/lib
# include_dirs = /home/username/blis/include/blis
# runtime_library_dirs = /home/username/blis/lib
#
# MKL
# ---
# Intel MKL is Intel's highly optimized, proprietary implementation of BLAS and
# Lapack. Find the latest info on building numpy with Intel MKL in this article:
# https://software.intel.com/en-us/articles/numpyscipy-with-intel-mkl
# Assuming you installed MKL in /opt/intel/compilers_and_libraries_2018/linux/mkl,
# for 64-bit code on Linux:
# [mkl]
# library_dirs = /opt/intel/compilers_and_libraries_2018/linux/mkl/lib/intel64
# include_dirs = /opt/intel/compilers_and_libraries_2018/linux/mkl/include
# mkl_libs = mkl_rt
# lapack_libs =
#
# For 32-bit code on Linux:
# [mkl]
# library_dirs = /opt/intel/compilers_and_libraries_2018/linux/mkl/lib/ia32
# include_dirs = /opt/intel/compilers_and_libraries_2018/linux/mkl/include
# mkl_libs = mkl_rt
# lapack_libs =
#
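# As with OpenBLAS, mkl_rt is a shared library; if its directory is not on the
# loader's default search path, the general runtime_library_dirs option can be
# added to the section above (a sketch, assuming the same 64-bit install prefix
# as in the example; alternatively, configure the loader path yourself):
# runtime_library_dirs = /opt/intel/compilers_and_libraries_2018/linux/mkl/lib/intel64
#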
# On win-64, the following options compile numpy with the MKL library
# dynamically linked.
# [mkl]
# include_dirs = C:\Program Files (x86)\IntelSWTools\compilers_and_libraries_2018\windows\mkl\include
# library_dirs = C:\Program Files (x86)\IntelSWTools\compilers_and_libraries_2018\windows\mkl\lib\intel64
# mkl_libs = mkl_rt
# lapack_libs =
#
# ACCELERATE
# ----------
# Accelerate/vecLib is an OSX framework providing BLAS and LAPACK implementations.
#
# [accelerate]
# libraries = Accelerate, vecLib
# #libraries = None
#
# UMFPACK
# -------
# The UMFPACK library is used in scikits.umfpack to factor large sparse matrices.
# It, in turn, depends on the AMD library for reordering the matrices for
# better performance. Note that the AMD library has nothing to do with AMD
# (Advanced Micro Devices), the CPU company.
#
# UMFPACK is not used by numpy.
#
# https://www.cise.ufl.edu/research/sparse/umfpack/
# https://www.cise.ufl.edu/research/sparse/amd/
# https://scikit-umfpack.github.io/scikit-umfpack/
#
#[amd]
#amd_libs = amd
#
#[umfpack]
#umfpack_libs = umfpack
#
# FFT libraries
# -------------
# There are two FFT libraries that we can configure here: FFTW (2 and 3) and djbfft.
# Note that these libraries are not used by numpy or scipy.
#
# http://fftw.org/
# https://cr.yp.to/djbfft.html
#
# Given only this section, numpy.distutils will try to figure out which version
# of FFTW you are using.
#[fftw]
#libraries = fftw3
#
# For djbfft, numpy.distutils will look for either djbfft.a or libdjbfft.a.
#[djbfft]
#include_dirs = /usr/local/djbfft/include
#library_dirs = /usr/local/djbfft/lib