diff --git a/PyInstaller/hooks/hook-numpy.py b/PyInstaller/hooks/hook-numpy.py
index da0c21bb987..f88146b820c 100644
--- a/PyInstaller/hooks/hook-numpy.py
+++ b/PyInstaller/hooks/hook-numpy.py
@@ -12,7 +12,6 @@
 # [1] PyInstaller: https://github.com/pyinstaller/pyinstaller/
 # [2] NumPy's license: https://github.com/numpy/numpy/blob/master/LICENSE.txt
 #
-
 """
 This hook should collect all binary files and any hidden modules that numpy
 needs.
@@ -28,8 +27,11 @@
 import os
 import re
 from pathlib import Path
+import ctypes
 
-from PyInstaller.utils.hooks import collect_dynamic_libs, exec_statement, logger
+from PyInstaller.utils.hooks import (
+    collect_dynamic_libs, exec_statement, logger
+)
 from PyInstaller import compat
 
 # --- Plain official numpy from PyPI ---
@@ -50,12 +52,11 @@
 # include one.
 hiddenimports = ['numpy.core._dtype_ctypes']
 
-
 # --- Additional support for less official mkl builds ---
 
 # Check if MKL is being used.
-# We avoid using `import numpy` directly in hooks in-case doing so alters either
-# sys.path or PATH which could confuse the build.
+# We avoid using `import numpy` directly in hooks in case doing so alters
+# either sys.path or PATH which could confuse the build.
 is_mkl = exec_statement("""
     # XXX: Numpy devs - is this a good way to test if using MKL?
     import numpy
@@ -65,6 +66,7 @@
 # The MKL binaries themselves are included inside the numpy folder and will
 # therefore already have been found by `collect_dynamic_libs()` above.
 
+
 def find_library(name):
     """Glob-find and include a dll (like) binary file which is usually found
     by searching PATH.
@@ -76,14 +78,13 @@ def find_library(name):
     binaries = []
     for folder in os.environ["PATH"].split(os.pathsep):
         for path in Path(folder).glob(name):
-            if not path.name in names:
+            if path.name not in names:
                 binaries.append((str(path), "."))
                 names.add(path.name)
     if not binaries:
         logger.warning(
-            "Failed to find '%s' DLL in PATH. Your app will likely crash if run"
-            " on a different machine that doesn't already have it.", name
-        )
+            "Failed to find '%s' DLL in PATH. Your app will likely crash if "
+            "run on a different machine that doesn't already have it.", name)
     return binaries
 
 
@@ -94,7 +95,6 @@ def find_library(name):
 for lib in ["libcrypto*", "libffi*", "libssl*"]:
     binaries.extend(find_library(lib))
 
-
 # --- A vain attempt at Conda's numpy support ---
 
 # Regular numpy, even with unofficial mkl builds, is pretty trivial to support
@@ -104,29 +104,28 @@ def find_library(name):
 if compat.is_conda:
     logger.warning(
         "Anaconda detected. Anaconda restructures NumPy in a way which makes "
-        "it's binary dependencies undetectable to PyInstaller. This can lead to"
-        " unexpected failures during build, runtime, runtime after your Conda "
-        "environment is deactivated, or on moving your compiled app to a "
-        "different machine. If you encounter any of these issues then please "
-        "switch to regular Python."
-    )
+        "its binary dependencies undetectable to PyInstaller. This can lead "
+        "to unexpected failures during build, runtime, runtime after your "
+        "Conda environment is deactivated, or on moving your compiled app to a"
+        " different machine. If you encounter any of these issues then please "
+        "switch to regular Python.")
     hiddenimports.append("six")
 
     # There are so many hidden binary dependencies. This list is heavily, OS,
     # Python and NumPy versions dependent. Omitting any of these can lead to
    # obscure and often traceback-less crashes.
    # XXX: As you can see, this is really not a scalable solution. Needs help!
-    conda_dll_patterns = [re.compile(i) for i in (
-        'apphelp.*', 'crypt32.*', 'imagehlp.*', 'libblas.*',
-        'libcblas.*', 'libcrypto.*', 'libffi.*', 'libgcc_.*',
-        'libgfortran.*', 'libifcoremd.*', r'libiomp\d+md.*', 'liblapack.*',
-        'libmmd.*', 'libomp.*',
-        'libopenblas.*', 'libquadmath.*', 'libssl.*', 'libuuid.*',
-        'libz.*', 'mkl_avx.*', 'mkl_core.*',
-        'mkl_intel_thread.*', 'mkl_rt.*', 'mkl_vml_avx.*',
-        'mkl_vml_avx.*', 'msasn.*', 'mswsock.*', 'ole.*',
-        'oleaut.*', 'tbbmalloc.*', 'urandom'
-    )]
+    conda_dll_patterns = [
+        re.compile(i)
+        for i in ('apphelp.*', 'crypt32.*', 'imagehlp.*', 'libblas.*',
+                  'libcblas.*', 'libcrypto.*', 'libffi.*', 'libgcc_.*',
+                  'libgfortran.*', 'libifcoremd.*', r'libiomp\d+md.*',
+                  'liblapack.*', 'libmmd.*', 'libomp.*', 'libopenblas.*',
+                  'libquadmath.*', 'libssl.*', 'libuuid.*', 'libz.*',
+                  'mkl_avx.*', 'mkl_core.*', 'mkl_intel_thread.*', 'mkl_rt.*',
+                  'mkl_vml_avx.*', 'mkl_vml_avx.*', 'msasn.*', 'mswsock.*',
+                  'ole.*', 'oleaut.*', 'tbbmalloc.*', 'urandom')
+    ]
 
     if compat.is_win:
         lib_dir = os.path.join(compat.base_prefix, "Library", "bin")
@@ -141,11 +140,12 @@ def _is_required(name):
         for name in _to_add:
             binaries.append((os.path.join(lib_dir, name), "."))
 
-
 # --- Remove testing and building code ---
 
-excludedimports = ["scipy", "pytest", "nose", "distutils", "f2py", "setuptools",
-                   "numpy.f2py", "numpy.distutils"]
+excludedimports = [
+    "scipy", "pytest", "nose", "distutils", "f2py", "setuptools", "numpy.f2py",
+    "numpy.distutils"
+]
 
 # I would suggest using the following to remove all the `tests` submodules but
 # we don't need it. They will be included if any modules that are included
@@ -156,17 +156,14 @@ def _is_required(name):
 # is_tests = lambda x: "tests" in x.split(".")
 # excludedimports += collect_submodules("numpy", filter=is_tests)
 
-
 # --- Remove binaries that aren't DLLs ---
 
-import ctypes
-
 
 def _is_valid(source, dest):
     # There really should be a less brute-force way of doing this.
     if source.endswith(".pdb"):
-        # Attempting to load a pdb causes a pop-up window. Check for and exclude
-        # them here.
+        # Attempting to load a pdb causes a pop-up window. Check for and
+        # exclude them here.
         return False
     try:
         ctypes.CDLL(source)
@@ -174,4 +171,5 @@ def _is_valid(source, dest):
     except OSError:
         return False
 
+
 binaries = [i for i in binaries if _is_valid(*i)]