mit neuen venv und exe-Files
This commit is contained in:
95
venv3_12/Lib/site-packages/cx_Freeze/__init__.py
Normal file
95
venv3_12/Lib/site-packages/cx_Freeze/__init__.py
Normal file
@@ -0,0 +1,95 @@
|
||||
"""Create standalone executables from Python scripts, with the same performance
|
||||
and is cross-platform.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
|
||||
import setuptools
|
||||
|
||||
from cx_Freeze.command.build_exe import build_exe
|
||||
from cx_Freeze.command.install import Install as install
|
||||
from cx_Freeze.command.install_exe import install_exe
|
||||
from cx_Freeze.executable import Executable, validate_executables
|
||||
from cx_Freeze.finder import Module, ModuleFinder
|
||||
from cx_Freeze.freezer import ConstantsModule, Freezer
|
||||
|
||||
__all__ = [
|
||||
"build_exe",
|
||||
"install",
|
||||
"install_exe",
|
||||
"setup",
|
||||
"ConstantsModule",
|
||||
"Executable",
|
||||
"Freezer",
|
||||
"Module",
|
||||
"ModuleFinder",
|
||||
"__version__",
|
||||
]
|
||||
|
||||
if sys.platform == "win32":
|
||||
from cx_Freeze.command.bdist_msi import bdist_msi
|
||||
|
||||
__all__ += ["bdist_msi"]
|
||||
elif sys.platform == "darwin":
|
||||
from cx_Freeze.command.bdist_dmg import bdist_dmg
|
||||
from cx_Freeze.command.bdist_mac import bdist_mac
|
||||
|
||||
__all__ += ["bdist_dmg", "bdist_mac"]
|
||||
else:
|
||||
from cx_Freeze.command.bdist_appimage import bdist_appimage
|
||||
from cx_Freeze.command.bdist_deb import bdist_deb
|
||||
from cx_Freeze.command.bdist_rpm import bdist_rpm
|
||||
|
||||
__all__ += ["bdist_appimage", "bdist_deb", "bdist_rpm"]
|
||||
|
||||
|
||||
__version__ = "7.2.4"
|
||||
|
||||
|
||||
def setup(**attrs) -> setuptools.Distribution:  # noqa: D103
    # Drop-in replacement for setuptools.setup() that pre-registers the
    # cx_Freeze commands (user-supplied cmdclass entries take precedence,
    # since setdefault never overwrites). The docstring is copied from
    # setuptools.setup right after this definition.
    cmdclass = attrs.setdefault("cmdclass", {})
    if sys.platform == "win32":
        platform_commands = {"bdist_msi": bdist_msi}
    elif sys.platform == "darwin":
        platform_commands = {"bdist_dmg": bdist_dmg, "bdist_mac": bdist_mac}
    else:
        platform_commands = {
            "bdist_appimage": bdist_appimage,
            "bdist_deb": bdist_deb,
            "bdist_rpm": bdist_rpm,
        }
    # Commands available on every platform.
    platform_commands["build_exe"] = build_exe
    platform_commands["install"] = install
    platform_commands["install_exe"] = install_exe
    for command_name, command_class in platform_commands.items():
        cmdclass.setdefault(command_name, command_class)
    attrs.setdefault("executables", [])
    return setuptools.setup(**attrs)
|
||||
|
||||
|
||||
setup.__doc__ = setuptools.setup.__doc__
|
||||
|
||||
|
||||
def plugin_install(dist: setuptools.Distribution) -> None:
    """Use a setuptools extension to customize Distribution options."""
    # Only act on distributions that actually declare executables.
    if getattr(dist, "executables", None) is None:
        return
    validate_executables(dist, "executables", dist.executables)

    # Disable package discovery (setuptools >= 61) and/or misuse of packages
    dist.py_modules = []
    dist.packages = []

    # Add/update commands (provisional) — never overriding user entries.
    for command_name, command_class in (
        ("build_exe", build_exe),
        ("install", install),
        ("install_exe", install_exe),
    ):
        dist.cmdclass.setdefault(command_name, command_class)

    # Add build_exe as subcommand of setuptools build (plugin)
    build = dist.get_command_obj("build")
    build.user_options.insert(1, ("build-exe=", None, "[REMOVED]"))
    build.sub_commands = [*build.sub_commands, ("build_exe", None)]
    build.build_exe = None
|
||||
8
venv3_12/Lib/site-packages/cx_Freeze/__main__.py
Normal file
8
venv3_12/Lib/site-packages/cx_Freeze/__main__.py
Normal file
@@ -0,0 +1,8 @@
|
||||
"""cx_Freeze command line tool (enable python -m cx_Freeze syntax)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import cx_Freeze.cli
|
||||
|
||||
if __name__ == "__main__":
|
||||
cx_Freeze.cli.main()
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
33
venv3_12/Lib/site-packages/cx_Freeze/_compat.py
Normal file
33
venv3_12/Lib/site-packages/cx_Freeze/_compat.py
Normal file
@@ -0,0 +1,33 @@
|
||||
"""Internal compatible module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import sysconfig
|
||||
from pathlib import Path
|
||||
|
||||
__all__ = [
|
||||
"BUILD_EXE_DIR",
|
||||
"EXE_SUFFIX",
|
||||
"IS_CONDA",
|
||||
"IS_LINUX",
|
||||
"IS_MACOS",
|
||||
"IS_MINGW",
|
||||
"IS_WINDOWS",
|
||||
"PLATFORM",
|
||||
"PYTHON_VERSION",
|
||||
]
|
||||
|
||||
PLATFORM = sysconfig.get_platform()
|
||||
PYTHON_VERSION = sysconfig.get_python_version()
|
||||
|
||||
BUILD_EXE_DIR = Path(f"build/exe.{PLATFORM}-{PYTHON_VERSION}")
|
||||
EXE_SUFFIX = sysconfig.get_config_var("EXE")
|
||||
|
||||
IS_CONDA = Path(sys.prefix, "conda-meta").is_dir()
|
||||
|
||||
IS_LINUX = PLATFORM.startswith("linux")
|
||||
IS_MACOS = PLATFORM.startswith("macos")
|
||||
IS_MINGW = PLATFORM.startswith("mingw")
|
||||
IS_MINGW64 = PLATFORM.startswith("mingw_x86_64")
|
||||
IS_WINDOWS = PLATFORM.startswith("win")
|
||||
12
venv3_12/Lib/site-packages/cx_Freeze/_importlib.py
Normal file
12
venv3_12/Lib/site-packages/cx_Freeze/_importlib.py
Normal file
@@ -0,0 +1,12 @@
|
||||
"""The internal _importlib module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
|
||||
if sys.version_info >= (3, 10, 2):
|
||||
from importlib import metadata
|
||||
else:
|
||||
import importlib_metadata as metadata
|
||||
|
||||
__all__ = ["metadata"]
|
||||
26
venv3_12/Lib/site-packages/cx_Freeze/_pyproject.py
Normal file
26
venv3_12/Lib/site-packages/cx_Freeze/_pyproject.py
Normal file
@@ -0,0 +1,26 @@
|
||||
"""Internal module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
from tomllib import loads as toml_loads
|
||||
except ImportError:
|
||||
from tomli import loads as toml_loads
|
||||
|
||||
|
||||
def get_pyproject_tool_data() -> dict:
|
||||
pyproject_toml = Path("pyproject.toml")
|
||||
if not pyproject_toml.exists():
|
||||
return {}
|
||||
data = toml_loads(pyproject_toml.read_bytes().decode())
|
||||
tool_data = data.get("tool", {}).get("cxfreeze", {})
|
||||
executables = tool_data.pop("executables", [])
|
||||
options = {}
|
||||
for cmd, data in tool_data.items():
|
||||
for option, value in data.items():
|
||||
options.setdefault(cmd, {})
|
||||
options[cmd].setdefault(option, ("tool.cxfreeze", value))
|
||||
options["executables"] = executables
|
||||
return options
|
||||
22
venv3_12/Lib/site-packages/cx_Freeze/_typing.py
Normal file
22
venv3_12/Lib/site-packages/cx_Freeze/_typing.py
Normal file
@@ -0,0 +1,22 @@
|
||||
"""The internal _typing module."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path, PurePath
|
||||
|
||||
try:
|
||||
from typing import TypeAlias # 3.10+
|
||||
except ImportError:
|
||||
from typing_extensions import TypeAlias
|
||||
|
||||
from cx_Freeze.module import Module
|
||||
|
||||
DeferredList: TypeAlias = list[tuple[Module, Module, list[str]]]
|
||||
|
||||
IncludesList: TypeAlias = list[
|
||||
str | Path | tuple[str | Path, str | Path | None]
|
||||
]
|
||||
|
||||
InternalIncludesList: TypeAlias = list[tuple[Path, PurePath]]
|
||||
|
||||
__all__ = ["TypeAlias", "DeferredList", "IncludesList", "InternalIncludesList"]
|
||||
Binary file not shown.
Binary file not shown.
1
venv3_12/Lib/site-packages/cx_Freeze/bases/__init__.py
Normal file
1
venv3_12/Lib/site-packages/cx_Freeze/bases/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
# 485b639d985fffe9a0b2205c05d2f6f38fd3eb24cb72a14b9c00c06afd237ffb
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
255
venv3_12/Lib/site-packages/cx_Freeze/cli.py
Normal file
255
venv3_12/Lib/site-packages/cx_Freeze/cli.py
Normal file
@@ -0,0 +1,255 @@
|
||||
"""cxfreeze command line tool."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
from cx_Freeze import __version__, setup
|
||||
from cx_Freeze._pyproject import get_pyproject_tool_data
|
||||
|
||||
__all__ = ["main"]
|
||||
|
||||
DESCRIPTION = """
|
||||
Freeze a Python script and all of its referenced modules to a base \
|
||||
executable which can then be distributed without requiring a Python \
|
||||
installation.
|
||||
"""
|
||||
|
||||
VERSION = f"""
|
||||
%(prog)s {__version__}
|
||||
Copyright (c) 2020-2024 Marcelo Duarte. All rights reserved.
|
||||
Copyright (c) 2007-2019 Anthony Tuininga. All rights reserved.
|
||||
Copyright (c) 2001-2006 Computronix Corporation. All rights reserved.
|
||||
"""
|
||||
|
||||
EPILOG = """
|
||||
Note:
|
||||
* Windows only options are ignored by other OS and \
|
||||
when used by Python app from Microsoft Store.
|
||||
|
||||
Additional help:
|
||||
%(prog)s build_exe --help
|
||||
|
||||
Linux and similar OS:
|
||||
%(prog)s bdist_appimage --help
|
||||
%(prog)s bdist_deb --help
|
||||
%(prog)s bdist_rpm --help
|
||||
macOS:
|
||||
%(prog)s bdist_dmg --help
|
||||
%(prog)s bdist_mac --help
|
||||
Windows:
|
||||
%(prog)s bdist_msi --help
|
||||
"""
|
||||
|
||||
|
||||
def prepare_parser() -> argparse.ArgumentParser:
    """Helper function to parse the arguments."""
    # add_help=False: -h/--help is handled manually in main() so that
    # "cxfreeze COMMAND --help" can show per-command help instead.
    parser = argparse.ArgumentParser(
        prog="cxfreeze",
        description=DESCRIPTION,
        epilog=EPILOG,
        formatter_class=argparse.RawDescriptionHelpFormatter,
        add_help=False,
    )
    # Executable parameters
    parser.add_argument(
        "--script",
        metavar="NAME",
        help="the name of the file containing the script which is to be "
        "frozen",
    )
    parser.add_argument(
        "--init-script",
        metavar="NAME",
        help="script which will be executed upon startup; if the name of the "
        "file is not an absolute file name, the subdirectory initscripts "
        "(rooted in the directory in which the cx_Freeze package is found) "
        "will be searched for a file matching the name",
    )
    parser.add_argument(
        "--base",
        "--base-name",
        metavar="NAME",
        help="the name of the base executable; the pre-defined values are: "
        '"console", "gui" and "service"; a user-defined base is accepted '
        "if it is given with an absolute path name [default: console]",
    )
    parser.add_argument(
        "--target-name",
        metavar="NAME",
        help="the name of the target executable; the default value is the "
        "name of the script; it is recommended NOT to use an extension "
        "(automatically added on Windows); target-name with version is "
        "supported; if specified a path, raise an error",
    )
    parser.add_argument(
        "--target-dir",
        metavar="DIR",
        help="directory for built executables and dependent files",
    )
    parser.add_argument(
        "--icon",
        metavar="NAME",
        help="name of icon which should be included in the executable itself "
        "on Windows or placed in the target directory for other platforms; "
        "it is recommended NOT to use an extension (automatically added "
        '".ico" on Windows, ".icns" on macOS and ".png" or ".svg" on Linux '
        "and others)",
    )
    parser.add_argument(
        "--manifest",
        metavar="NAME",
        help="name of manifest which should be included in the executable "
        "itself (Windows only)",
    )
    parser.add_argument(
        "--uac-admin",
        action="store_true",
        help="creates a manifest for an application that will request "
        "elevation (Windows only)",
    )
    parser.add_argument(
        "--uac-uiaccess",
        action="store_true",
        dest="uac_uiaccess",
        help="changes the application manifest to bypass user interface "
        "control (Windows only)",
    )
    parser.add_argument(
        "--shortcut-name",
        metavar="NAME",
        help="the name to give a shortcut for the executable when included in "
        "an MSI package (Windows only)",
    )
    parser.add_argument(
        "--shortcut-dir",
        metavar="DIR",
        help="the directory in which to place the shortcut when being instal"
        "led by an MSI package; see the MSI Shortcut table documentation for "
        "more information on what values can be placed here (Windows only)",
    )
    parser.add_argument(
        "--copyright",
        help="the copyright value to include in the version resource "
        "associated with executable (Windows only)",
    )
    parser.add_argument(
        "--trademarks",
        help="the trademarks value to include in the version resource "
        "associated with the executable (Windows only)",
    )
    # Command positional parameter
    parser.add_argument(
        "command",
        nargs=argparse.OPTIONAL,
        metavar="COMMAND",
        help="build, build_exe or supported bdist commands (and to be "
        "backwards compatible, can replace --script option)",
    )
    # Version
    parser.add_argument("--version", action="version", version=VERSION)

    return parser
|
||||
|
||||
|
||||
def main() -> None:
    """Entry point for cxfreeze command line tool."""
    # Freezing can recurse deeply through module graphs; raise the limit.
    sys.setrecursionlimit(sys.getrecursionlimit() * 10)

    parser = prepare_parser()
    # parse_known_args: unknown options are forwarded to the sub-command.
    args, argv = parser.parse_known_args()
    script = args.script
    command = args.command

    # help — handled manually because the parser uses add_help=False.
    if "-h" in argv or "--help" in argv:
        if command is None:
            parser.print_help()
        else:
            # Delegate to the sub-command's own --help output.
            setup(
                executables=None,
                script_args=[command, "--help"],
                script_name=parser.prog,
            )
        parser.exit()

    # usage
    deprecated = []
    if script is None:
        if command is None:
            parser.error("--script or command must be specified")
        elif not command.startswith(("build", "bdist", "install")):
            # A bare positional that is not a known command is treated as
            # the script name (legacy "cxfreeze SCRIPT" invocation).
            args.script, command = command, script  # backwards compatible
            deprecated.append("usage: required to use --script NAME")
    if command is None:
        command = "build_exe"

    # deprecated options — rewrite legacy spellings in the forwarded argv.
    if command == "build_exe" or "build_exe" in argv:
        args_to_replace = [
            ("--install-dir", "--build-exe"),
            ("--exclude-modules", "--excludes"),
            ("--include-modules", "--includes"),
            ("-c", None),
            ("--compress", None),
            ("-OO", "--optimize=2"),  # test -OO before -O
            ("-O", "--optimize=1"),
            ("-z", "--zip-includes"),
            ("--default-path", "--path"),
            ("-s", "--silent"),
        ]
        new_argv = []
        for arg in argv:
            new_argv.append(arg)
            for search, replace in args_to_replace:
                if arg.startswith(search):
                    # Drop the just-appended legacy form; re-append the
                    # modern spelling unless the option was removed.
                    new_argv.pop()
                    if replace is None:
                        deprecated.append(f"{search} option removed")
                    else:
                        new_argv.append(arg.replace(search, replace))
                        deprecated.append(
                            f"{search} option replaced by {replace}"
                        )
                    break
        argv = new_argv

    # redirected options — --target-dir maps to build_exe's --build-exe.
    if args.target_dir:
        argv.append(f"--build-exe={args.target_dir}")
    delattr(args, "target_dir")

    # finalize command line options
    executables = []
    script_args = [command, *argv]
    if args.script:
        # The remaining parsed options describe a single Executable.
        delattr(args, "command")
        executables = [vars(args)]
    if script_args[0] == "build" and "build_exe" not in script_args:
        script_args.insert(1, "build_exe")

    # fix sys.path for cxfreeze command line — when run via the console
    # script, replace the script's own directory with the user's cwd.
    command = Path(sys.argv[0])
    if command.stem == "cxfreeze":
        path_to_remove = os.fspath(command.parent)
        if path_to_remove in sys.path:
            sys.path.remove(path_to_remove)
        sys.path.insert(0, os.getcwd())

    # get options from pyproject.toml
    options = get_pyproject_tool_data()
    executables.extend(options.pop("executables", []))

    setup(
        command_options=options,
        executables=executables,
        script_args=script_args,
        script_name=parser.prog,
    )

    # Emit deprecation warnings last so they are visible after the build log.
    if deprecated:
        for warning_msg in deprecated:
            print("WARNING: deprecated", warning_msg)
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
94
venv3_12/Lib/site-packages/cx_Freeze/command/_pydialog.py
Normal file
94
venv3_12/Lib/site-packages/cx_Freeze/command/_pydialog.py
Normal file
@@ -0,0 +1,94 @@
|
||||
"""Extend msilib Dialog."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from msilib import Control, Dialog
|
||||
|
||||
|
||||
class PyDialog(Dialog):
    """Dialog class with a fixed layout: controls at the top, then a ruler,
    then a list of buttons: back, next, cancel. Optionally a bitmap at the
    left.
    """

    def __init__(
        self,
        db,
        name,
        x,
        y,
        w,
        h,
        attr,
        title,
        first,
        default,
        cancel,
        bitmap=True,  # noqa: ARG002
    ) -> None:
        # Delegate construction to msilib.Dialog, then draw the horizontal
        # ruler 36 dialog units above the bottom edge (the button row area).
        Dialog.__init__(
            self, db, name, x, y, w, h, attr, title, first, default, cancel
        )
        ruler = self.h - 36
        # bmwidth = 152 * ruler / 328
        # if kw.get("bitmap", True):
        #     self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
        self.line("BottomLine", 0, ruler, self.w, 0)

    def title(self, title) -> None:
        """Set the title text of the dialog at the top."""
        # flags=0x30003=Visible|Enabled|Transparent|NoPrefix
        # text, in VerdanaBold10
        font = r"{\VerdanaBold10}"
        self.text("Title", 15, 10, 320, 60, 0x30003, f"{font}{title}")

    def backbutton(self, title, tabnext, name="Back", active=1) -> Control:
        """Add a back button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated
        """
        flags = 3 if active else 1  # Visible|Enabled or Visible
        return self.pushbutton(
            name, 180, self.h - 27, 56, 17, flags, title, tabnext
        )

    def cancelbutton(self, title, tabnext, name="Cancel", active=1) -> Control:
        """Add a cancel button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated
        """
        flags = 3 if active else 1  # Visible|Enabled or Visible
        return self.pushbutton(
            name, 304, self.h - 27, 56, 17, flags, title, tabnext
        )

    def nextbutton(self, title, tabnext, name="Next", active=1) -> Control:
        """Add a Next button with a given title, the tab-next button,
        its name in the Control table, possibly initially disabled.

        Return the button, so that events can be associated
        """
        flags = 3 if active else 1  # Visible|Enabled or Visible
        return self.pushbutton(
            name, 236, self.h - 27, 56, 17, flags, title, tabnext
        )

    def xbutton(self, name, title, tabnext, xpos) -> Control:
        """Add a button with a given title, the tab-next button,
        its name in the Control table, giving its x position; the
        y-position is aligned with the other buttons.

        Return the button, so that events can be associated
        """
        # xpos is a fraction of the dialog width; 28 centers the 56-wide
        # button on that position.
        return self.pushbutton(
            name,
            int(self.w * xpos - 28),
            self.h - 27,
            56,
            17,
            3,
            title,
            tabnext,
        )
|
||||
313
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_appimage.py
Normal file
313
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_appimage.py
Normal file
@@ -0,0 +1,313 @@
|
||||
"""Implements the 'bdist_appimage' command (create Linux AppImage format).
|
||||
|
||||
https://appimage.org/
|
||||
https://docs.appimage.org/
|
||||
https://docs.appimage.org/packaging-guide/manual.html#ref-manual
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import stat
|
||||
from ctypes.util import find_library
|
||||
from logging import INFO, WARNING
|
||||
from pathlib import Path
|
||||
from textwrap import dedent
|
||||
from typing import ClassVar
|
||||
from urllib.request import urlretrieve
|
||||
from zipfile import ZipFile
|
||||
|
||||
from filelock import FileLock
|
||||
from setuptools import Command
|
||||
|
||||
import cx_Freeze.icons
|
||||
from cx_Freeze.exception import ExecError, PlatformError
|
||||
|
||||
__all__ = ["bdist_appimage"]
|
||||
|
||||
ARCH = platform.machine()
|
||||
APPIMAGEKIT_URL = "https://github.com/AppImage/AppImageKit/releases"
|
||||
APPIMAGEKIT_PATH = f"download/continuous/appimagetool-{ARCH}.AppImage"
|
||||
APPIMAGEKIT_TOOL = "~/.local/bin/appimagetool"
|
||||
|
||||
|
||||
class bdist_appimage(Command):
    """Create a Linux AppImage."""

    description = "create a Linux AppImage"
    user_options: ClassVar[list[tuple[str, str | None, str]]] = [
        (
            "appimagekit=",
            None,
            f'path to AppImageKit [default: "{APPIMAGEKIT_TOOL}"]',
        ),
        (
            "bdist-base=",
            None,
            "base directory for creating built distributions",
        ),
        (
            "build-dir=",
            "b",
            "directory of built executables and dependent files",
        ),
        (
            "dist-dir=",
            "d",
            'directory to put final built distributions in [default: "dist"]',
        ),
        (
            "skip-build",
            None,
            "skip rebuilding everything (for testing/debugging)",
        ),
        ("target-name=", None, "name of the file to create"),
        ("target-version=", None, "version of the file to create"),
        ("silent", "s", "suppress all output except warnings"),
    ]
    boolean_options: ClassVar[list[str]] = [
        "skip-build",
        "silent",
    ]

    def initialize_options(self) -> None:
        """Set every option to None; finalize_options fills in defaults."""
        self.appimagekit = None

        self.bdist_base = None
        self.build_dir = None
        self.dist_dir = None
        self.skip_build = None

        self.target_name = None
        self.target_version = None
        self.fullname = None
        self.silent = None

        # Warnings collected during the run, emitted at the end (see
        # warn_delayed / warnings).
        self._warnings = []

    def finalize_options(self) -> None:
        """Resolve defaults, derive the output file name, fetch appimagetool."""
        if os.name != "posix":
            msg = (
                "don't know how to create AppImage "
                f"distributions on platform {os.name}"
            )
            raise PlatformError(msg)

        # inherit options
        self.set_undefined_options(
            "build_exe",
            ("build_exe", "build_dir"),
            ("silent", "silent"),
        )
        self.set_undefined_options(
            "bdist",
            ("bdist_base", "bdist_base"),
            ("dist_dir", "dist_dir"),
            ("skip_build", "skip_build"),
        )
        # for the bdist commands, there is a chance that build_exe has already
        # been executed, so check skip_build if build_exe have_run
        if not self.skip_build and self.distribution.have_run.get("build_exe"):
            self.skip_build = 1

        if self.target_name is None:
            if self.distribution.metadata.name:
                self.target_name = self.distribution.metadata.name
            else:
                # Fall back to the first executable's name.
                executables = self.distribution.executables
                executable = executables[0]
                self.warn_delayed(
                    "using the first executable as target_name: "
                    f"{executable.target_name}"
                )
                self.target_name = executable.target_name

        if self.target_version is None and self.distribution.metadata.version:
            self.target_version = self.distribution.metadata.version

        # Derive the AppImage file name: honor an explicit ".AppImage"
        # target, otherwise compose name[-version]-ARCH.AppImage.
        name = self.target_name
        version = self.target_version
        name, ext = os.path.splitext(name)
        if ext == ".AppImage":
            self.app_name = self.target_name
            self.fullname = name
        elif version:
            self.app_name = f"{name}-{version}-{ARCH}.AppImage"
            self.fullname = f"{name}-{version}"
        else:
            self.app_name = f"{name}-{ARCH}.AppImage"
            self.fullname = name

        if self.silent is not None:
            # NOTE(review): nesting of the two statements below under this
            # check is reconstructed — confirm against upstream.
            self.verbose = 0 if self.silent else 2
            build_exe = self.distribution.command_obj.get("build_exe")
            if build_exe:
                build_exe.silent = self.silent

        # validate or download appimagekit
        self._get_appimagekit()

    def _get_appimagekit(self) -> None:
        """Fetch AppImageKit from the web if not available locally."""
        appimagekit = os.path.expanduser(self.appimagekit or APPIMAGEKIT_TOOL)
        appimagekit_dir = os.path.dirname(appimagekit)
        self.mkpath(appimagekit_dir)
        # FileLock guards against concurrent downloads by parallel builds.
        with FileLock(appimagekit + ".lock"):
            if not os.path.exists(appimagekit):
                self.announce(
                    f"download and install AppImageKit from {APPIMAGEKIT_URL}",
                    INFO,
                )
                name = os.path.basename(APPIMAGEKIT_PATH)
                filename = os.path.join(appimagekit_dir, name)
                if not os.path.exists(filename):
                    urlretrieve(  # noqa: S310
                        os.path.join(APPIMAGEKIT_URL, APPIMAGEKIT_PATH),
                        filename,
                    )
                    os.chmod(filename, stat.S_IRWXU)
                if not os.path.exists(appimagekit):
                    self.execute(
                        os.symlink,
                        (filename, appimagekit),
                        msg=f"linking {appimagekit} -> (unknown)",
                    )
        self.appimagekit = appimagekit

    def run(self) -> None:
        """Build the AppDir from build_exe output and pack it with appimagetool."""
        # Create the application bundle
        if not self.skip_build:
            self.run_command("build_exe")

        # Make appimage (by default in dist directory)
        # Set the full path of appimage to be built
        self.mkpath(self.dist_dir)
        output = os.path.abspath(os.path.join(self.dist_dir, self.app_name))
        if os.path.exists(output):
            os.unlink(output)

        # Make AppDir folder
        appdir = os.path.join(self.bdist_base, "AppDir")
        if os.path.exists(appdir):
            self.execute(shutil.rmtree, (appdir,), msg=f"removing {appdir}")
        self.mkpath(appdir)

        # Copy from build_exe
        self.copy_tree(self.build_dir, appdir, preserve_symlinks=True)

        # Remove zip file after putting all files in the file system
        # (appimage is a compressed file, no need of internal zip file)
        library_data = Path(appdir, "lib", "library.dat")
        if library_data.exists():
            target_lib_dir = library_data.parent
            # library.dat holds the zip file's name relative to lib/.
            filename = target_lib_dir / library_data.read_bytes().decode()
            with ZipFile(filename) as outfile:
                outfile.extractall(target_lib_dir)
            filename.unlink()
            library_data.unlink()

        # Add icon, desktop file, entrypoint
        share_icons = os.path.join("share", "icons")
        icons_dir = os.path.join(appdir, share_icons)
        self.mkpath(icons_dir)

        executables = self.distribution.executables
        executable = executables[0]
        if len(executables) > 1:
            self.warn_delayed(
                "using the first executable as entrypoint: "
                f"{executable.target_name}"
            )
        if executable.icon is None:
            # Fall back to the icon bundled with cx_Freeze.
            icon_name = "logox128.png"
            icon_source_dir = os.path.dirname(cx_Freeze.icons.__file__)
            self.copy_file(os.path.join(icon_source_dir, icon_name), icons_dir)
        else:
            icon_name = executable.icon.name
            self.move_file(os.path.join(appdir, icon_name), icons_dir)
        # .DirIcon is the AppImage convention for the bundle icon.
        relative_reference = os.path.join(share_icons, icon_name)
        origin = os.path.join(appdir, ".DirIcon")
        self.execute(
            os.symlink,
            (relative_reference, origin),
            msg=f"linking {origin} -> {relative_reference}",
        )

        # NOTE(review): inner indentation of the two literals below was lost
        # in this rendering; reconstructed to satisfy dedent(). Confirm
        # against upstream cx_Freeze.
        desktop_entry = f"""\
            [Desktop Entry]
            Type=Application
            Name={self.target_name}
            Exec={executable.target_name}
            Comment={self.distribution.get_description()}
            Icon=/{share_icons}/{os.path.splitext(icon_name)[0]}
            Categories=Development;
            Terminal=true
            X-AppImage-Arch={ARCH}
            X-AppImage-Name={self.target_name}
            X-AppImage-Version={self.target_version or ''}
            """
        self.save_as_file(
            dedent(desktop_entry),
            os.path.join(appdir, f"{self.target_name}.desktop"),
        )
        entrypoint = f"""\
            #! /bin/bash
            # If running from an extracted image, fix APPDIR
            if [ -z "$APPIMAGE" ]; then
                self="$(readlink -f -- $0)"
                export APPDIR="${{self%/*}}"
            fi
            # Call the application entry point
            "$APPDIR/{executable.target_name}" "$@"
            """
        self.save_as_file(
            dedent(entrypoint), os.path.join(appdir, "AppRun"), mode="x"
        )

        # Build an AppImage from an AppDir
        os.environ["ARCH"] = ARCH
        cmd = [self.appimagekit, "--no-appstream", appdir, output]
        if find_library("fuse") is None:  # libfuse.so.2 is not found
            cmd.insert(1, "--appimage-extract-and-run")
        with FileLock(self.appimagekit + ".lock"):
            self.spawn(cmd, search_path=0)
        if not os.path.exists(output):
            msg = "Could not build AppImage"
            raise ExecError(msg)

        self.warnings()

    def save_as_file(self, data, outfile, mode="r") -> tuple[str, int]:
        """Save an input data to a file respecting verbose, dry-run and force
        flags.
        """
        if not self.force and os.path.exists(outfile):
            if self.verbose >= 1:
                self.warn_delayed(f"not creating {outfile} (output exists)")
            return (outfile, 0)
        if self.verbose >= 1:
            self.announce(f"creating {outfile}", INFO)

        if self.dry_run:
            return (outfile, 1)

        if isinstance(data, str):
            data = data.encode()
        with open(outfile, "wb") as out:
            out.write(data)
        # Owner read always; add write/execute per the mode string.
        st_mode = stat.S_IRUSR
        if "w" in mode:
            st_mode = st_mode | stat.S_IWUSR
        if "x" in mode:
            st_mode = st_mode | stat.S_IXUSR
        os.chmod(outfile, st_mode)
        return (outfile, 1)

    def warn_delayed(self, msg) -> None:
        # Queue a warning to be shown after the build finishes.
        self._warnings.append(msg)

    def warnings(self) -> None:
        # Flush all queued warnings through the distutils logger.
        for msg in self._warnings:
            self.announce(f"WARNING: {msg}", WARNING)
|
||||
122
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_deb.py
Normal file
122
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_deb.py
Normal file
@@ -0,0 +1,122 @@
|
||||
"""Implements the 'bdist_deb' command (create DEB binary distributions).
|
||||
|
||||
This is a simple wrapper around 'alien' that converts a rpm to deb.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
from typing import ClassVar
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
from cx_Freeze.command.bdist_rpm import bdist_rpm
|
||||
from cx_Freeze.exception import ExecError, PlatformError
|
||||
|
||||
__all__ = ["bdist_deb"]
|
||||
|
||||
|
||||
class bdist_deb(Command):
    """Create a DEB distribution (wraps 'alien' to convert an RPM)."""

    # Help text shown by `setup.py --help-commands`.
    description = "create an DEB distribution"

    # (long option, short option, help text) triples, as consumed by
    # setuptools' Command option parser.
    user_options: ClassVar[list[tuple[str, str | None, str]]] = [
        (
            "bdist-base=",
            None,
            "base directory for creating built distributions",
        ),
        (
            "build-dir=",
            "b",
            "directory of built executables and dependent files",
        ),
        ("dist-dir=", "d", "directory to put final built distributions in"),
    ]
|
||||
|
||||
def initialize_options(self) -> None:
    """Reset every user option to its unset state."""
    self.bdist_base = self.build_dir = self.dist_dir = None
|
||||
|
||||
def finalize_options(self) -> None:
    """Check platform prerequisites and inherit defaults from 'bdist'.

    Raises:
        PlatformError: when not on a POSIX platform, or when the
            required external tools ('alien', plus 'fakeroot' for
            non-root users) are not installed.
    """
    if os.name != "posix":
        msg = (
            "don't know how to create DEB "
            f"distributions on platform {os.name}"
        )
        raise PlatformError(msg)
    if not shutil.which("alien"):
        msg = "failed to find 'alien' for this platform."
        raise PlatformError(msg)
    if os.getuid() != 0 and not shutil.which("fakeroot"):
        msg = "failed to find 'fakeroot' for this platform."
        raise PlatformError(msg)

    # Inherit bdist_base and dist_dir from the 'bdist' command.
    # (A previous duplicate set_undefined_options call that fetched
    # bdist_base alone was redundant and has been removed.)
    self.set_undefined_options(
        "bdist",
        ("bdist_base", "bdist_base"),
        ("dist_dir", "dist_dir"),
    )
|
||||
|
||||
def run(self) -> None:
    """Build a binary RPM and convert it to a DEB package using 'alien'."""
    # make a binary RPM to convert
    cmd_rpm = bdist_rpm(
        self.distribution,
        bdist_base=self.bdist_base,
        dist_dir=self.dist_dir,
    )
    cmd_rpm.ensure_finalized()
    if not self.dry_run:
        cmd_rpm.run()
        # Locate the RPM that bdist_rpm registered in dist_files.
        rpm_filename = None
        for command, _, filename in self.distribution.dist_files:
            if command == "bdist_rpm":
                rpm_filename = os.path.basename(filename)
                break
        if rpm_filename is None:
            msg = "could not build rpm"
            raise ExecError(msg)
    else:
        # Dry run: no RPM was produced; use a placeholder name.
        rpm_filename = "filename.rpm"

    # convert rpm to deb (by default in dist directory)
    logging.info("building DEB")
    cmd = ["alien", "--to-deb", rpm_filename]
    if os.getuid() != 0:
        # 'alien' needs root privileges; emulate them with fakeroot.
        cmd.insert(0, "fakeroot")
    if self.dry_run:
        # In dry-run mode, spawn() only logs the command.
        self.spawn(cmd)
    else:
        logging.info(subprocess.list2cmdline(cmd))
        process = subprocess.run(
            cmd,
            text=True,
            capture_output=True,
            check=False,
            cwd=self.dist_dir,
        )
        if process.returncode != 0:
            # The first stderr line carries alien's error summary.
            msg = process.stderr.splitlines()[0]
            if msg.startswith(f"Unpacking of '{rpm_filename}' failed at"):
                # Known cpio 2.13 incompatibility on Ubuntu 22.04;
                # append a hint pointing at the workaround.
                info = [
                    "\n\t\x08Please check if you have `cpio 2.13` on "
                    "Ubuntu 22.04.",
                    "\t\x08You can try to install a previous version:",
                    "\t\x08$ sudo apt-get install cpio=2.13+dfsg-7",
                ]
                msg += "\n".join(info)
            raise ExecError(msg)
        output = process.stdout
        logging.info(output)
        # alien prints e.g. "<package>.deb generated"; the first token
        # of the first line is the produced filename.
        filename = output.splitlines()[0].split()[0]
        filename = os.path.join(self.dist_dir, filename)
        if not os.path.exists(filename):
            msg = "could not build deb"
            raise ExecError(msg)
        self.distribution.dist_files.append(("bdist_deb", "any", filename))
|
||||
385
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_dmg.py
Normal file
385
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_dmg.py
Normal file
@@ -0,0 +1,385 @@
|
||||
"""Implements the 'bdist_dmg' command (create macOS dmg and/or app bundle)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import shutil
|
||||
from typing import ClassVar
|
||||
|
||||
from dmgbuild import build_dmg
|
||||
from setuptools import Command
|
||||
|
||||
import cx_Freeze.icons
|
||||
from cx_Freeze import Executable
|
||||
from cx_Freeze.exception import OptionError
|
||||
|
||||
__all__ = ["bdist_dmg"]
|
||||
|
||||
|
||||
class bdist_dmg(Command):
    """Create a Mac DMG disk image containing the Mac application bundle."""

    # Help text shown by `setup.py --help-commands`.
    description = (
        "create a Mac DMG disk image containing the Mac application bundle"
    )
|
||||
user_options: ClassVar[list[tuple[str, str | None, str]]] = [
|
||||
("volume-label=", None, "Volume label of the DMG disk image"),
|
||||
(
|
||||
"applications-shortcut=",
|
||||
None,
|
||||
"Boolean for whether to include "
|
||||
"shortcut to Applications in the DMG disk image",
|
||||
),
|
||||
("silent", "s", "suppress all output except warnings"),
|
||||
("format=", None, 'format of the disk image [default: "UDZO"]'),
|
||||
(
|
||||
"filesystem=",
|
||||
None,
|
||||
'filesystem of the disk image [default: "HFS+"]',
|
||||
),
|
||||
(
|
||||
"size=",
|
||||
None,
|
||||
"If defined, specifies the size of the filesystem within the "
|
||||
"image. If this is not defined, cx_Freeze (and then dmgbuild) "
|
||||
"will attempt to determine a reasonable size for the image. "
|
||||
"If you set this, you should set it large enough to hold the "
|
||||
"files you intend to copy into the image. The syntax is the "
|
||||
"same as for the -size argument to hdiutil, i.e. you can use "
|
||||
"the suffixes `b`, `k`, `m`, `g`, `t`, `p` and `e` for bytes, "
|
||||
"kilobytes, megabytes, gigabytes, terabytes, exabytes and "
|
||||
"petabytes respectively.",
|
||||
),
|
||||
(
|
||||
"background",
|
||||
"b",
|
||||
"A rgb color in the form #3344ff, svg named color like goldenrod, "
|
||||
"a path to an image, or the words 'builtin-arrow' [default: None]",
|
||||
),
|
||||
(
|
||||
"show-status-bar",
|
||||
None,
|
||||
"Show the status bar in the Finder window. Default is False.",
|
||||
),
|
||||
(
|
||||
"show-tab-view",
|
||||
None,
|
||||
"Show the tab view in the Finder window. Default is False.",
|
||||
),
|
||||
(
|
||||
"show-path-bar",
|
||||
None,
|
||||
"Show the path bar in the Finder window. Default is False.",
|
||||
),
|
||||
(
|
||||
"show-sidebar",
|
||||
None,
|
||||
"Show the sidebar in the Finder window. Default is False.",
|
||||
),
|
||||
(
|
||||
"sidebar-width",
|
||||
None,
|
||||
"Width of the sidebar in the Finder window. Default is None.",
|
||||
),
|
||||
(
|
||||
"window-rect",
|
||||
None,
|
||||
"Window rectangle in the form x, y, width, height. The position "
|
||||
"of the window in ((x, y), (w, h)) format, with y co-ordinates "
|
||||
"running from bottom to top. The Finder makes sure that the "
|
||||
"window will be on the user's display, so if you want your window "
|
||||
"at the top left of the display you could use (0, 100000) as the "
|
||||
"x, y co-ordinates. Unfortunately it doesn't appear to be "
|
||||
"possible to position the window relative to the top left or "
|
||||
"relative to the centre of the user's screen.",
|
||||
),
|
||||
(
|
||||
"icon-locations",
|
||||
None,
|
||||
"A dictionary specifying the co-ordinates of items in the root "
|
||||
"directory of the disk image, where the keys are filenames and "
|
||||
"the values are (x, y) tuples. e.g.: "
|
||||
'icon-locations = { "Applications": (100, 100), '
|
||||
'"README.txt": (200, 100) }',
|
||||
),
|
||||
(
|
||||
"default-view",
|
||||
None,
|
||||
"The default view of the Finder window. Possible values are "
|
||||
'"icon-view", "list-view", "column-view", "coverflow".',
|
||||
),
|
||||
(
|
||||
"show-icon-preview",
|
||||
None,
|
||||
"Show icon preview in the Finder window. Default is False.",
|
||||
),
|
||||
(
|
||||
"license",
|
||||
None,
|
||||
"Dictionary specifying license details with 'default-language', "
|
||||
"'licenses', and 'buttons'."
|
||||
"default-language: Language code (e.g., 'en_US') if no matching "
|
||||
"system language."
|
||||
"licenses: Map of language codes to license file paths "
|
||||
"(e.g., {'en_US': 'path/to/license_en.txt'})."
|
||||
"buttons: Map of language codes to UI strings "
|
||||
"([language, agree, disagree, print, save, instruction])."
|
||||
"Example: {'default-language': 'en_US', "
|
||||
"'licenses': {'en_US': 'path/to/license_en.txt'}, "
|
||||
"'buttons': {'en_US': ['English', 'Agree', 'Disagree', 'Print', "
|
||||
"'Save', 'Instruction text']}}",
|
||||
),
|
||||
]
|
||||
|
||||
def initialize_options(self) -> None:
    """Set the default value of every user option."""
    self.silent = None
    # Default volume label is the distribution's "<name>-<version>".
    self.volume_label = self.distribution.get_fullname()
    self.applications_shortcut = False
    # Symlinks and file list handed to dmgbuild via the settings file.
    self._symlinks = {}
    self._files = []
    self.format = "UDZO"
    self.filesystem = "HFS+"
    self.size = None
    self.background = None
    self.show_status_bar = False
    self.show_tab_view = False
    self.show_path_bar = False
    self.show_sidebar = False
    self.sidebar_width = None
    self.window_rect = None
    self.hide = None
    self.hide_extensions = None
    self.icon_locations = None
    self.default_view = None
    self.show_icon_preview = False
    self.license = None

    # Non-exposed options
    # (forwarded to dmgbuild but not settable from the command line)
    self.include_icon_view_settings = "auto"
    self.include_list_view_settings = "auto"
    self.arrange_by = None
    self.grid_offset = None
    self.grid_spacing = None
    self.scroll_position = None
    self.label_pos = None
    self.text_size = None
    self.icon_size = None
    self.list_icon_size = None
    self.list_text_size = None
    self.list_scroll_position = None
    self.list_sort_by = None
    self.list_use_relative_dates = None
    self.list_calculate_all_sizes = None
    self.list_columns = None
    self.list_column_widths = None
    self.list_column_sort_directions = None
|
||||
|
||||
def finalize_options(self) -> None:
    """Validate the volume label and derive dependent settings."""
    if not self.volume_label:
        raise OptionError("volume-label must be set")
    if self.applications_shortcut:
        # Offer the usual drag-to-/Applications install shortcut.
        self._symlinks["Applications"] = "/Applications"
    # Tri-state -> concrete value: unset means "not silent".
    self.silent = False if self.silent is None else self.silent

    self.finalize_dmgbuild_options()
|
||||
|
||||
def finalize_dmgbuild_options(self) -> None:
    """Normalize dmgbuild settings and fill in icon-view defaults."""
    if self.background:
        self.background = self.background.strip()
        if self.background == "builtin-arrow" and (
            self.icon_locations or self.window_rect
        ):
            msg = (
                "background='builtin-arrow' cannot be used with "
                "icon_locations or window_rect"
            )
            raise OptionError(msg)
    # Any falsy value falls back to the dmgbuild default.
    for option, fallback in (
        ("arrange_by", None),
        ("grid_offset", (0, 0)),
        ("grid_spacing", 100),
        ("scroll_position", (0, 0)),
        ("label_pos", "bottom"),
        ("text_size", 16),
        ("icon_size", 128),
    ):
        if not getattr(self, option):
            setattr(self, option, fallback)
|
||||
|
||||
def build_dmg(self) -> None:
    """Stage the .app bundle, write a dmgbuild settings file, and build
    the DMG image via dmgbuild.
    """
    # Remove DMG if it already exists
    if os.path.exists(self.dmg_name):
        os.unlink(self.dmg_name)

    # Make dist folder
    self.dist_dir = os.path.join(self.build_dir, "dist")
    if os.path.exists(self.dist_dir):
        shutil.rmtree(self.dist_dir)
    self.mkpath(self.dist_dir)

    # Copy App Bundle (copy_tree logs each file; copytree is quiet)
    dest_dir = os.path.join(
        self.dist_dir, os.path.basename(self.bundle_dir)
    )
    if self.silent:
        shutil.copytree(self.bundle_dir, dest_dir, symlinks=True)
    else:
        self.copy_tree(self.bundle_dir, dest_dir, preserve_symlinks=True)

    # Add the App Bundle to the list of files
    # NOTE(review): this registers the original bundle_dir, not the
    # copy in dist_dir just made above — confirm this is intended.
    self._files.append(self.bundle_dir)

    # set the app_name for the application bundle
    app_name = os.path.basename(self.bundle_dir)
    # Set the defaults for the builtin-arrow background layout
    if (
        self.background == "builtin-arrow"
        and not self.icon_locations
        and not self.window_rect
    ):
        self.icon_locations = {
            "Applications": (500, 120),
            app_name: (140, 120),
        }
        self.window_rect = ((100, 100), (640, 380))

    executables = self.distribution.executables  # type: list[Executable]
    executable: Executable = executables[0]
    if len(executables) > 1:
        self.warn(
            "using the first executable as entrypoint: "
            f"{executable.target_name}"
        )
    # Fall back to the stock cx_Freeze icon when none was supplied.
    if executable.icon is None:
        icon_name = "setup.icns"
        icon_source_dir = os.path.dirname(cx_Freeze.icons.__file__)
        self.icon = os.path.join(icon_source_dir, icon_name)
    else:
        self.icon = os.path.abspath(executable.icon)

    # dmgbuild reads its configuration from a Python settings file;
    # serialize every option as an assignment in settings.py.
    with open("settings.py", "w") as f:

        def add_param(name, value) -> None:
            # if value is a string, add quotes
            if isinstance(value, (str)):
                f.write(f"{name} = '{value}'\n")
            else:
                f.write(f"{name} = {value}\n")

        # Some fields expect and allow None, others don't
        # so we need to check for None and not add them for
        # the fields that don't allow it

        # Disk Image Settings
        add_param("filename", self.dmg_name)
        add_param("volume_label", self.volume_label)
        add_param("format", self.format)
        add_param("filesystem", self.filesystem)
        add_param("size", self.size)

        # Content Settings
        add_param("files", self._files)
        add_param("symlinks", self._symlinks)
        if self.hide:
            add_param("hide", self.hide)
        if self.hide_extensions:
            add_param("hide_extensions", self.hide_extensions)
        # Only one of these can be set
        if self.icon_locations:
            add_param("icon_locations", self.icon_locations)
        if self.icon:
            add_param("icon", self.icon)
        # We don't need to set this, as we only support icns
        # add param ( "badge_icon", self.badge_icon)

        # Window Settings
        add_param("background", self.background)
        add_param("show_status_bar", self.show_status_bar)
        add_param("show_tab_view", self.show_tab_view)
        add_param("show_pathbar", self.show_path_bar)
        add_param("show_sidebar", self.show_sidebar)
        add_param("sidebar_width", self.sidebar_width)
        if self.window_rect:
            add_param("window_rect", self.window_rect)
        if self.default_view:
            add_param("default_view", self.default_view)

        add_param("show_icon_preview", self.show_icon_preview)
        add_param(
            "include_icon_view_settings", self.include_icon_view_settings
        )
        add_param(
            "include_list_view_settings", self.include_list_view_settings
        )

        # Icon View Settings
        add_param("arrange_by", self.arrange_by)
        add_param("grid_offset", self.grid_offset)
        add_param("grid_spacing", self.grid_spacing)
        add_param("scroll_position", self.scroll_position)
        add_param("label_pos", self.label_pos)
        if self.text_size:
            add_param("text_size", self.text_size)
        if self.icon_size:
            add_param("icon_size", self.icon_size)
        # NOTE(review): icon_locations was already written in the
        # Content Settings section above; the duplicate assignment is
        # harmless (last one wins) but looks unintentional.
        if self.icon_locations:
            add_param("icon_locations", self.icon_locations)

        # List View Settings
        if self.list_icon_size:
            add_param("list_icon_size", self.list_icon_size)
        if self.list_text_size:
            add_param("list_text_size", self.list_text_size)
        if self.list_scroll_position:
            add_param("list_scroll_position", self.list_scroll_position)
        add_param("list_sort_by", self.list_sort_by)
        add_param("list_use_relative_dates", self.list_use_relative_dates)
        add_param(
            "list_calculate_all_sizes", self.list_calculate_all_sizes
        )
        if self.list_columns:
            add_param("list_columns", self.list_columns)
        if self.list_column_widths:
            add_param("list_column_widths", self.list_column_widths)
        if self.list_column_sort_directions:
            add_param(
                "list_column_sort_directions",
                self.list_column_sort_directions,
            )

        # License Settings
        add_param("license", self.license)

    # Forward dmgbuild progress messages unless running silently.
    def log_handler(msg: dict[str, str]) -> None:
        if not self.silent:
            loggable = ",".join(
                f"{key}: {value}" for key, value in msg.items()
            )
            self.announce(loggable)

    # Calls the module-level dmgbuild.build_dmg (not this method).
    build_dmg(
        self.dmg_name,
        self.volume_label,
        "settings.py",
        callback=log_handler,
    )
|
||||
|
||||
def run(self) -> None:
    """Build the .app bundle, then wrap it in a DMG disk image."""
    # The application bundle must exist before it can be imaged.
    self.run_command("bdist_mac")

    # Locate the bundle and the build directory produced above.
    self.bundle_dir = self.get_finalized_command("bdist_mac").bundle_dir
    self.build_dir = self.get_finalized_command("build_exe").build_base

    # The DMG is named after the volume label, inside the build dir.
    self.dmg_name = os.path.join(self.build_dir, f"{self.volume_label}.dmg")

    self.execute(self.build_dmg, ())
|
||||
529
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_mac.py
Normal file
529
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_mac.py
Normal file
@@ -0,0 +1,529 @@
|
||||
"""Implements the 'bdist_mac' commands (create macOS
|
||||
app blundle).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import plistlib
|
||||
import shutil
|
||||
import subprocess
|
||||
from pathlib import Path
|
||||
from typing import ClassVar
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
from cx_Freeze.common import normalize_to_list
|
||||
from cx_Freeze.darwintools import (
|
||||
apply_adhoc_signature,
|
||||
change_load_reference,
|
||||
isMachOFile,
|
||||
)
|
||||
from cx_Freeze.exception import OptionError
|
||||
|
||||
__all__ = ["bdist_mac"]
|
||||
|
||||
|
||||
class bdist_mac(Command):
    """Create a Mac application bundle."""

    # Help text shown by `setup.py --help-commands`.
    description = "create a Mac application bundle"

    # Multi-valued options; normalized to lists in finalize_options().
    plist_items: list[tuple[str, str]]
    include_frameworks: list[str]
    include_resources: list[str]
|
||||
|
||||
user_options: ClassVar[list[tuple[str, str | None, str]]] = [
|
||||
("iconfile=", None, "Path to an icns icon file for the application."),
|
||||
(
|
||||
"qt-menu-nib=",
|
||||
None,
|
||||
"Location of qt_menu.nib folder for Qt "
|
||||
"applications. Will be auto-detected by default.",
|
||||
),
|
||||
(
|
||||
"bundle-name=",
|
||||
None,
|
||||
"File name for the bundle application "
|
||||
"without the .app extension.",
|
||||
),
|
||||
(
|
||||
"plist-items=",
|
||||
None,
|
||||
"A list of key-value pairs (type: list[tuple[str, str]]) to "
|
||||
"be added to the app bundle Info.plist file.",
|
||||
),
|
||||
(
|
||||
"custom-info-plist=",
|
||||
None,
|
||||
"File to be used as the Info.plist in "
|
||||
"the app bundle. A basic one will be generated by default.",
|
||||
),
|
||||
(
|
||||
"include-frameworks=",
|
||||
None,
|
||||
"A comma separated list of Framework "
|
||||
"directories to include in the app bundle.",
|
||||
),
|
||||
(
|
||||
"include-resources=",
|
||||
None,
|
||||
"A list of tuples of additional "
|
||||
"files to include in the app bundle's resources directory, with "
|
||||
"the first element being the source, and second the destination "
|
||||
"file or directory name.",
|
||||
),
|
||||
(
|
||||
"codesign-identity=",
|
||||
None,
|
||||
"The identity of the key to be used to sign the app bundle.",
|
||||
),
|
||||
(
|
||||
"codesign-entitlements=",
|
||||
None,
|
||||
"The path to an entitlements file "
|
||||
"to use for your application's code signature.",
|
||||
),
|
||||
(
|
||||
"codesign-deep=",
|
||||
None,
|
||||
"Boolean for whether to codesign using the --deep option.",
|
||||
),
|
||||
(
|
||||
"codesign-timestamp",
|
||||
None,
|
||||
"Boolean for whether to codesign using the --timestamp option.",
|
||||
),
|
||||
(
|
||||
"codesign-resource-rules",
|
||||
None,
|
||||
"Plist file to be passed to "
|
||||
"codesign's --resource-rules option.",
|
||||
),
|
||||
(
|
||||
"absolute-reference-path=",
|
||||
None,
|
||||
"Path to use for all referenced "
|
||||
"libraries instead of @executable_path.",
|
||||
),
|
||||
(
|
||||
"codesign-verify",
|
||||
None,
|
||||
"Boolean to verify codesign of the .app bundle using the codesign "
|
||||
"command",
|
||||
),
|
||||
(
|
||||
"spctl-assess",
|
||||
None,
|
||||
"Boolean to verify codesign of the .app bundle using the spctl "
|
||||
"command",
|
||||
),
|
||||
(
|
||||
"codesign-strict=",
|
||||
None,
|
||||
"Boolean for whether to codesign using the --strict option.",
|
||||
),
|
||||
(
|
||||
"codesign-options=",
|
||||
None,
|
||||
"Option flags to be embedded in the code signature",
|
||||
),
|
||||
]
|
||||
|
||||
def initialize_options(self) -> None:
    """Set the default value of every user option."""
    # Multi-valued options start out as empty lists.
    self.list_options = [
        "plist_items",
        "include_frameworks",
        "include_resources",
    ]
    for option in self.list_options:
        setattr(self, option, [])

    # Everything else defaults to "not configured".
    for option in (
        "absolute_reference_path",
        "codesign_deep",
        "codesign_entitlements",
        "codesign_identity",
        "codesign_timestamp",
        "codesign_strict",
        "codesign_options",
        "codesign_resource_rules",
        "codesign_verify",
        "spctl_assess",
        "custom_info_plist",
        "iconfile",
        "build_base",
        "build_dir",
    ):
        setattr(self, option, None)

    self.bundle_name = self.distribution.get_fullname()
    self.qt_menu_nib = False
|
||||
|
||||
def finalize_options(self) -> None:
    """Normalize list options, validate plist items, and compute the
    directory layout of the application bundle.
    """
    # Make sure all options of multiple values are lists.
    for option in self.list_options:
        setattr(self, option, normalize_to_list(getattr(self, option)))
    for item in self.plist_items:
        if not (isinstance(item, tuple) and len(item) == 2):
            msg = (
                "Error, plist_items must be a list of key, value pairs "
                "(list[tuple[str, str]]) (bad list item)."
            )
            raise OptionError(msg)

    # Define the paths within the application bundle.
    self.set_undefined_options(
        "build_exe",
        ("build_base", "build_base"),
        ("build_exe", "build_dir"),
    )
    self.bundle_dir = os.path.join(
        self.build_base, f"{self.bundle_name}.app"
    )
    contents = os.path.join(self.bundle_dir, "Contents")
    self.contents_dir = contents
    self.bin_dir = os.path.join(contents, "MacOS")
    self.frameworks_dir = os.path.join(contents, "Frameworks")
    self.resources_dir = os.path.join(contents, "Resources")
    self.helpers_dir = os.path.join(contents, "Helpers")
|
||||
|
||||
def create_plist(self) -> None:
    """Create the Contents/Info.plist file.

    A custom plist file is loaded when supplied; otherwise a minimal
    default dictionary is generated.
    """
    if self.custom_info_plist:
        with open(self.custom_info_plist, "rb") as file:
            contents = plistlib.load(file)
    else:
        contents = {
            "CFBundleIconFile": "icon.icns",
            "CFBundleDevelopmentRegion": "English",
            "CFBundleIdentifier": self.bundle_name,
            # Mark the bundle as an application bundle.
            "CFBundlePackageType": "APPL",
            # Run in high-resolution mode by default; without this the
            # application may render pixelated on Retina displays.
            "NSHighResolutionCapable": "True",
        }

    # The executable name must always match the frozen binary.
    contents["CFBundleExecutable"] = self.bundle_executable

    # Merge user-supplied entries last so they take precedence.
    contents.update(dict(self.plist_items))

    with open(os.path.join(self.contents_dir, "Info.plist"), "wb") as file:
        plistlib.dump(contents, file)
|
||||
|
||||
def set_absolute_reference_paths(self, path=None) -> None:
    """For all files in Contents/MacOS, set their linked library paths to
    be absolute paths using the given path instead of @executable_path.

    :param path: absolute prefix to substitute; defaults to
        ``self.absolute_reference_path``.
    """
    if not path:
        path = self.absolute_reference_path

    files = os.listdir(self.bin_dir)

    for filename in files:
        filepath = os.path.join(self.bin_dir, filename)

        # Skip some file types.
        # (bugfix: compare the 3-char suffix, not just the last
        # character, so txt/zip files are actually skipped)
        if filepath[-3:] in ("txt", "zip") or os.path.isdir(filepath):
            continue

        # otool -L lists the libraries the Mach-O file links against.
        out = subprocess.check_output(
            ("otool", "-L", filepath), encoding="utf_8"
        )
        for line in out.splitlines()[1:]:
            lib = line.lstrip("\t").split(" (compat")[0]

            if lib.startswith("@executable_path"):
                replacement = lib.replace("@executable_path", path)

                # (bugfix: do not clobber the loop-invariant `path`
                # parameter while extracting the referenced file name)
                name = os.path.basename(replacement)

                # see if we provide the referenced file;
                # if so, change the reference
                if name in files:
                    change_load_reference(filepath, lib, replacement)

        # Re-sign once per file after its references were rewritten.
        apply_adhoc_signature(filepath)
|
||||
|
||||
def find_qt_menu_nib(self) -> str | None:
|
||||
"""Returns a location of a qt_menu.nib folder, or None if this is not
|
||||
a Qt application.
|
||||
"""
|
||||
if self.qt_menu_nib:
|
||||
return self.qt_menu_nib
|
||||
if any(n.startswith("PyQt4.QtCore") for n in os.listdir(self.bin_dir)):
|
||||
name = "PyQt4"
|
||||
elif any(
|
||||
n.startswith("PySide.QtCore") for n in os.listdir(self.bin_dir)
|
||||
):
|
||||
name = "PySide"
|
||||
else:
|
||||
return None
|
||||
|
||||
qtcore = __import__(name, fromlist=["QtCore"]).QtCore
|
||||
libpath = str(
|
||||
qtcore.QLibraryInfo.location(qtcore.QLibraryInfo.LibrariesPath)
|
||||
)
|
||||
for subpath in [
|
||||
"QtGui.framework/Resources/qt_menu.nib",
|
||||
"Resources/qt_menu.nib",
|
||||
]:
|
||||
path = os.path.join(libpath, subpath)
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
|
||||
# Last resort: fixed paths (macports)
|
||||
for path in [
|
||||
"/opt/local/Library/Frameworks/QtGui.framework/Versions/"
|
||||
"4/Resources/qt_menu.nib"
|
||||
]:
|
||||
if os.path.exists(path):
|
||||
return path
|
||||
|
||||
print("Could not find qt_menu.nib")
|
||||
msg = "Could not find qt_menu.nib"
|
||||
raise OSError(msg)
|
||||
|
||||
def prepare_qt_app(self) -> None:
    """Add resource files for a Qt application. Does nothing when the
    application does not use QtCore.
    """
    qt_conf = os.path.join(self.resources_dir, "qt.conf")
    staged_conf = os.path.join(self.resources_dir, "qt_bdist_mac.conf")
    # A staged config produced earlier takes the place of qt.conf.
    if os.path.exists(staged_conf):
        self.execute(
            shutil.move,
            (staged_conf, qt_conf),
            msg=f"moving {staged_conf} -> {qt_conf}",
        )

    nib_location = self.find_qt_menu_nib()
    if nib_location is None:
        return

    # Copy qt_menu.nib next to the other resources.
    self.copy_tree(
        nib_location, os.path.join(self.resources_dir, "qt_menu.nib")
    )

    # qt.conf needs to exist, but needn't have any content.
    if not os.path.exists(qt_conf):
        Path(qt_conf).touch()
|
||||
|
||||
def run(self) -> None:
    """Freeze the executables and assemble them into a .app bundle."""
    self.run_command("build_exe")

    # Remove App if it already exists
    # ( avoids confusing issues where prior builds persist! )
    if os.path.exists(self.bundle_dir):
        self.execute(
            shutil.rmtree,
            (self.bundle_dir,),
            msg=f"staging - removed existing '{self.bundle_dir}'",
        )

    # Find the executable name
    executable = self.distribution.executables[0].target_name
    _, self.bundle_executable = os.path.split(executable)
    print(f"Executable name: {self.build_dir}/{executable}")

    # Build the app directory structure
    self.mkpath(self.bin_dir)  # /MacOS
    self.mkpath(self.frameworks_dir)  # /Frameworks
    self.mkpath(self.resources_dir)  # /Resources

    # Copy the full build_exe to Contents/Resources
    self.copy_tree(self.build_dir, self.resources_dir)

    # Move only executables in Contents/Resources to Contents/MacOS
    for executable in self.distribution.executables:
        source = os.path.join(self.resources_dir, executable.target_name)
        target = os.path.join(self.bin_dir, executable.target_name)
        self.move_file(source, target)

    # Make symlink between folders under Resources such as lib and others
    # specified by the user in include_files and Contents/MacOS so we can
    # use non-relative reference paths to pass codesign...
    for filename in os.listdir(self.resources_dir):
        target = os.path.join(self.resources_dir, filename)
        if os.path.isdir(target):
            origin = os.path.join(self.bin_dir, filename)
            relative_reference = os.path.relpath(target, self.bin_dir)
            self.execute(
                os.symlink,
                (relative_reference, origin, True),
                msg=f"linking {origin} -> {relative_reference}",
            )

    # Copy the icon
    if self.iconfile:
        self.copy_file(
            self.iconfile, os.path.join(self.resources_dir, "icon.icns")
        )

    # Copy in Frameworks
    for framework in self.include_frameworks:
        self.copy_tree(
            framework,
            os.path.join(self.frameworks_dir, os.path.basename(framework)),
        )

    # Copy in Resources (directories copied recursively; single files
    # get their parent directories created first)
    for resource, destination in self.include_resources:
        if os.path.isdir(resource):
            self.copy_tree(
                resource, os.path.join(self.resources_dir, destination)
            )
        else:
            parent_dirs = os.path.dirname(
                os.path.join(self.resources_dir, destination)
            )
            os.makedirs(parent_dirs, exist_ok=True)
            self.copy_file(
                resource, os.path.join(self.resources_dir, destination)
            )

    # Create the Info.plist file
    self.execute(self.create_plist, (), msg="creating Contents/Info.plist")

    # Make library references absolute if enabled
    # NOTE(review): the msg below is missing a closing quote after the
    # path — cosmetic only, it is just a log message.
    if self.absolute_reference_path:
        self.execute(
            self.set_absolute_reference_paths,
            (),
            msg="set absolute reference path "
            f"'{self.absolute_reference_path}",
        )

    # For a Qt application, run some tweaks
    self.execute(self.prepare_qt_app, ())

    # Move Contents/Resources/share/*.app to Contents/Helpers
    share_dir = os.path.join(self.resources_dir, "share")
    if os.path.isdir(share_dir):
        for filename in os.listdir(share_dir):
            if not filename.endswith(".app"):
                continue
            # create /Helpers only if required
            self.mkpath(self.helpers_dir)
            source = os.path.join(share_dir, filename)
            target = os.path.join(self.helpers_dir, filename)
            self.execute(
                shutil.move,
                (source, target),
                msg=f"moving {source} -> {target}",
            )
            # Link the helper's lib folder back to the shared one.
            if os.path.isdir(target):
                origin = os.path.join(target, "Contents", "MacOS", "lib")
                relative_reference = os.path.relpath(
                    os.path.join(self.resources_dir, "lib"),
                    os.path.join(target, "Contents", "MacOS"),
                )
                self.execute(
                    os.symlink,
                    (relative_reference, origin, True),
                    msg=f"linking {origin} -> {relative_reference}",
                )

    # Sign the app bundle if a key is specified
    self.execute(
        self._codesign,
        (self.bundle_dir,),
        msg=f"sign: '{self.bundle_dir}'",
    )
|
||||
|
||||
def _codesign(self, root_path) -> None:
|
||||
"""Run codesign on all .so, .dylib and binary files in reverse order.
|
||||
Signing from inside-out.
|
||||
"""
|
||||
if not self.codesign_identity:
|
||||
return
|
||||
|
||||
binaries_to_sign = []
|
||||
|
||||
# Identify all binary files
|
||||
for dirpath, _, filenames in os.walk(root_path):
|
||||
for filename in filenames:
|
||||
full_path = Path(os.path.join(dirpath, filename))
|
||||
|
||||
if isMachOFile(full_path):
|
||||
binaries_to_sign.append(full_path)
|
||||
|
||||
# Sort files by depth, so we sign the deepest files first
|
||||
binaries_to_sign.sort(key=lambda x: str(x).count(os.sep), reverse=True)
|
||||
|
||||
for binary_path in binaries_to_sign:
|
||||
self._codesign_file(binary_path, self._get_sign_args())
|
||||
|
||||
self._verify_signature()
|
||||
print("Finished .app signing")
|
||||
|
||||
def _get_sign_args(self) -> list[str]:
|
||||
signargs = ["codesign", "--sign", self.codesign_identity, "--force"]
|
||||
|
||||
if self.codesign_timestamp:
|
||||
signargs.append("--timestamp")
|
||||
|
||||
if self.codesign_strict:
|
||||
signargs.append(f"--strict={self.codesign_strict}")
|
||||
|
||||
if self.codesign_deep:
|
||||
signargs.append("--deep")
|
||||
|
||||
if self.codesign_options:
|
||||
signargs.append("--options")
|
||||
signargs.append(self.codesign_options)
|
||||
|
||||
if self.codesign_entitlements:
|
||||
signargs.append("--entitlements")
|
||||
signargs.append(self.codesign_entitlements)
|
||||
return signargs
|
||||
|
||||
def _codesign_file(self, file_path, sign_args) -> None:
    """Sign a single file with codesign.

    :param file_path: path of the binary to sign
    :param sign_args: base codesign argument list; left unmodified
    """
    print(f"Signing file: {file_path}")
    # Fix: the original appended file_path onto the caller's list, mutating
    # it in place; build the final command without touching sign_args.
    subprocess.run([*sign_args, file_path], check=False)
||||
def _verify_signature(self) -> None:
    """Optionally verify the signed bundle with codesign and/or spctl."""
    if self.codesign_verify:
        # Deep, strict verification of the whole bundle; report but do not
        # fail on a non-zero exit code.
        print("Running codesign verification")
        result = subprocess.run(
            ["codesign", "-vvv", "--deep", "--strict", self.bundle_dir],
            capture_output=True,
            text=True,
            check=False,
        )
        print("ExitCode:", result.returncode)
        print(" stdout:", result.stdout)
        print(" stderr:", result.stderr)

    if self.spctl_assess:
        # Gatekeeper assessment; a rejection raises CalledProcessError.
        spctl_args = [
            "spctl",
            "--assess",
            "--raw",
            "--verbose=10",
            "--type",
            "exec",
            self.bundle_dir,
        ]
        try:
            completed_process = subprocess.run(
                spctl_args,
                check=True,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
            )
        except subprocess.CalledProcessError as error:
            print(f"spctl check got an error: {error.stdout.decode()}")
            raise
        else:
            print(
                f"spctl command's output: {completed_process.stdout.decode()}"
            )
1198
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_msi.py
Normal file
1198
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_msi.py
Normal file
File diff suppressed because it is too large
Load Diff
562
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_rpm.py
Normal file
562
venv3_12/Lib/site-packages/cx_Freeze/command/bdist_rpm.py
Normal file
@@ -0,0 +1,562 @@
|
||||
"""Implements the 'bdist_rpm' command (create RPM binary distributions).
|
||||
|
||||
Borrowed from distutils.command.bdist_rpm of Python 3.10 and merged with
|
||||
bdist_rpm subclass of cx_Freeze 6.10.
|
||||
|
||||
https://rpm.org/documentation.html
|
||||
https://rpm-packaging-guide.github.io/
|
||||
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import platform
|
||||
import shutil
|
||||
import sys
|
||||
import tarfile
|
||||
from subprocess import CalledProcessError, check_output
|
||||
from sysconfig import get_python_version
|
||||
from typing import ClassVar
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
from cx_Freeze._compat import IS_CONDA
|
||||
from cx_Freeze.exception import ExecError, FileError, PlatformError
|
||||
|
||||
__all__ = ["bdist_rpm"]
|
||||
|
||||
|
||||
class bdist_rpm(Command):
    """Create an RPM distribution."""

    description = "create an RPM distribution"

    # Command-line options: (long name, short name, help text).
    user_options: ClassVar[list[tuple[str, str | None, str]]] = [
        # Build/output layout options.
        (
            "bdist-base=",
            None,
            "base directory for creating built distributions",
        ),
        (
            "rpm-base=",
            None,
            "base directory for creating RPMs "
            '[defaults to "rpm" under "--bdist-base"]',
        ),
        (
            "dist-dir=",
            "d",
            "directory to put final RPM files in "
            "(and .spec files if --spec-only)",
        ),
        ("spec-only", None, "only regenerate spec file"),
        # More meta-data: too RPM-specific to put in the setup script,
        # but needs to go in the .spec file -- so we make these options
        # to "bdist_rpm". The idea is that packagers would put this
        # info in pyproject.toml or setup.cfg, although they are of course free
        # to supply it on the command line.
        (
            "distribution-name=",
            None,
            "name of the (Linux) distribution to which this "
            "RPM applies (*not* the name of the module distribution!)",
        ),
        (
            "group=",
            None,
            'package classification [default: "Development/Libraries"]',
        ),
        ("release=", None, "RPM release number"),
        ("serial=", None, "RPM serial number"),
        (
            "vendor=",
            None,
            'RPM "vendor" (eg. "Joe Blow <joe@example.com>") '
            "[default: maintainer or author from setup script]",
        ),
        (
            "packager=",
            None,
            'RPM packager (eg. "Jane Doe <jane@example.net>") '
            "[default: same as vendor]",
        ),
        (
            "doc-files=",
            None,
            "list of documentation files (space or comma-separated)",
        ),
        ("changelog=", None, "RPM changelog"),
        ("icon=", None, "name of icon file"),
        # RPM capability/dependency declarations.
        ("provides=", None, "capabilities provided by this package"),
        ("requires=", None, "capabilities required by this package"),
        ("conflicts=", None, "capabilities which conflict with this package"),
        (
            "build-requires=",
            None,
            "capabilities required to build this package",
        ),
        ("obsoletes=", None, "capabilities made obsolete by this package"),
        ("no-autoreq", None, "do not automatically calculate dependencies"),
        # Actions to take when building RPM
        ("keep-temp", "k", "don't clean up RPM build directory"),
        ("no-keep-temp", None, "clean up RPM build directory [default]"),
        # Add the hooks necessary for specifying custom scripts
        (
            "prep-script=",
            None,
            "Specify a script for the PREP phase of RPM building",
        ),
        (
            "build-script=",
            None,
            "Specify a script for the BUILD phase of RPM building",
        ),
        (
            "pre-install=",
            None,
            "Specify a script for the pre-INSTALL phase of RPM building",
        ),
        (
            "install-script=",
            None,
            "Specify a script for the INSTALL phase of RPM building",
        ),
        (
            "post-install=",
            None,
            "Specify a script for the post-INSTALL phase of RPM building",
        ),
        (
            "pre-uninstall=",
            None,
            "Specify a script for the pre-UNINSTALL phase of RPM building",
        ),
        (
            "post-uninstall=",
            None,
            "Specify a script for the post-UNINSTALL phase of RPM building",
        ),
        (
            "clean-script=",
            None,
            "Specify a script for the CLEAN phase of RPM building",
        ),
        (
            "verify-script=",
            None,
            "Specify a script for the VERIFY phase of the RPM build",
        ),
        ("quiet", "q", "Run the INSTALL phase of RPM building in quiet mode"),
        ("debug", "g", "Run in debug mode"),
    ]

    # Options that are flags (no value).
    boolean_options: ClassVar[list[str]] = [
        "keep-temp",
        "no-autoreq",
        "quiet",
        "debug",
    ]

    # Maps the negating option to the flag it clears.
    negative_opt: ClassVar[dict[str, str]] = {
        "no-keep-temp": "keep-temp",
    }
||||
def initialize_options(self) -> None:
    """Set every command option to its unset default."""
    # All of these stay None until finalize_options() fills them in.
    for option in (
        "bdist_base",
        "dist_dir",
        "rpm_base",
        "spec_only",
        "distribution_name",
        "group",
        "release",
        "serial",
        "vendor",
        "packager",
        "doc_files",
        "changelog",
        "icon",
        "prep_script",
        "build_script",
        "install_script",
        "clean_script",
        "verify_script",
        "pre_install",
        "post_install",
        "pre_uninstall",
        "post_uninstall",
        "prep",
        "provides",
        "requires",
        "conflicts",
        "build_requires",
        "obsoletes",
    ):
        setattr(self, option, None)

    # Flag-style options default to 0 (off).
    self.keep_temp = 0
    self.no_autoreq = 0
    self.quiet = 0
    self.debug = 0
||||
def finalize_options(self) -> None:
    """Validate the platform and required tools, then fill unset options.

    Raises PlatformError on non-POSIX systems or when rpmbuild is
    missing; inherits bdist_base/dist_dir from the 'bdist' command.
    """
    if os.name != "posix":
        msg = (
            "don't know how to create RPM "
            f"distributions on platform {os.name}"
        )
        raise PlatformError(msg)

    # 'rpm' may be None (only used for spec queries); 'rpmbuild' is required.
    self._rpm = shutil.which("rpm")
    self._rpmbuild = shutil.which("rpmbuild")
    if not self._rpmbuild:
        msg = "failed to find rpmbuild for this platform."
        raise PlatformError(msg)

    self.set_undefined_options(
        "bdist",
        ("bdist_base", "bdist_base"),
        ("dist_dir", "dist_dir"),
    )
    # Default RPM working tree lives under the bdist base directory.
    if self.rpm_base is None:
        self.rpm_base = os.path.join(self.bdist_base, "rpm")

    self.finalize_package_data()
||||
def finalize_package_data(self) -> None:
    """Normalize and default the RPM metadata options."""
    self.ensure_string("group", "Development/Libraries")
    contact = self.distribution.get_contact() or "UNKNOWN"
    contact_email = self.distribution.get_contact_email() or "UNKNOWN"
    self.ensure_string("vendor", f"{contact} <{contact_email}>")
    self.ensure_string("packager")
    self.ensure_string_list("doc_files")
    if isinstance(self.doc_files, list):
        # Auto-include a README that is not already listed.
        doc_files = set(self.doc_files)
        for readme in ("README", "README.txt"):
            if os.path.exists(readme) and readme not in doc_files:
                self.doc_files.append(readme)

    self.ensure_string("release", "1")
    self.ensure_string("serial")  # should it be an int?
    self.ensure_string("distribution_name")

    self.ensure_string("changelog")
    # Format changelog correctly
    self.changelog = self._format_changelog(self.changelog)

    self.ensure_filename("icon")

    # Every user-supplied RPM phase script must be an existing file.
    for script in (
        "prep_script",
        "build_script",
        "install_script",
        "clean_script",
        "verify_script",
        "pre_install",
        "post_install",
        "pre_uninstall",
        "post_uninstall",
    ):
        self.ensure_filename(script)

    # Now *this* is some meta-data that belongs in the setup script...
    for capability in (
        "provides",
        "requires",
        "conflicts",
        "build_requires",
        "obsoletes",
    ):
        self.ensure_string_list(capability)
||||
def run(self) -> None:
    """Write the spec file, build the source tarball and run rpmbuild.

    Stops after writing the spec file when --spec-only is given.
    Raises FileError for a missing icon and ExecError when the rpm
    query of the spec file fails.
    """
    if self.debug:
        print("before _get_package_data():")
        print("vendor =", self.vendor)
        print("packager =", self.packager)
        print("doc_files =", self.doc_files)
        print("changelog =", self.changelog)

    # make directories
    if self.spec_only:
        spec_dir = self.dist_dir
    else:
        rpm_dir = {}
        for data in ("SOURCES", "SPECS", "BUILD", "RPMS", "SRPMS"):
            rpm_dir[data] = os.path.join(self.rpm_base, data)
            self.mkpath(rpm_dir[data])
        spec_dir = rpm_dir["SPECS"]
    self.mkpath(self.dist_dir)

    # Spec file goes into 'dist_dir' if '--spec-only specified',
    # build/rpm.<plat> otherwise.
    distribution_name = self.distribution.get_name()
    spec_path = os.path.join(spec_dir, f"{distribution_name}.spec")
    self.execute(
        write_file,
        (spec_path, self._make_spec_file()),
        f"writing '{spec_path}'",
    )

    if self.spec_only:  # stop if requested
        return

    # Make a source distribution and copy to SOURCES directory with
    # optional icon.
    def exclude_filter(info: tarfile.TarInfo) -> tarfile.TarInfo | None:
        # Drop top-level 'build' and 'dist' directories from the tarball.
        if (
            os.path.basename(info.name) in ("build", "dist")
            and info.isdir()
        ):
            return None
        return info

    name = self.distribution.get_name()
    version = self.distribution.get_version()
    source = f"{name}-{version}"
    source_dir = rpm_dir["SOURCES"]
    source_fullname = os.path.join(source_dir, source + ".tar.gz")
    with tarfile.open(source_fullname, "w:gz") as tar:
        # Archive the current working directory under the name-version root.
        tar.add(".", source, filter=exclude_filter)
    if self.icon:
        if os.path.exists(self.icon):
            self.copy_file(self.icon, source_dir)
        else:
            msg = f"icon file {self.icon!r} does not exist"
            raise FileError(msg)

    # build package, binary only (-bb)
    logging.info("building RPMs")
    rpm_cmd = [self._rpmbuild, "-bb"]
    if not self.keep_temp:
        rpm_cmd.append("--clean")

    if self.quiet:
        rpm_cmd.append("--quiet")

    rpm_cmd.append(spec_path)
    # Determine the binary rpm names that should be built out of this spec
    # file
    # Note that some of these may not be really built (if the file
    # list is empty)
    nvr_string = "%{name}-%{version}-%{release}"
    src_rpm = nvr_string + ".src.rpm"
    non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
    q_cmd = [
        self._rpm,
        "-q",
        "--qf",
        rf"{src_rpm} {non_src_rpm}\n",
        "--specfile",
        spec_path,
    ]
    try:
        out = check_output(q_cmd, text=True)
    except CalledProcessError as exc:
        msg = f"Failed to execute: {' '.join(q_cmd)!r}"
        raise ExecError(msg) from exc

    # Each output line is "<src-rpm> <binary-rpm>"; keep the binary names.
    binary_rpms = []
    for line in out.splitlines():
        rows = line.split()
        assert len(rows) == 2  # noqa: S101
        binary_rpms.append(rows[1])

    self.spawn(rpm_cmd)

    if not self.dry_run:
        pyversion = get_python_version()

        # Move built RPMs into dist_dir and record them as dist files.
        for binary_rpm in binary_rpms:
            rpm = os.path.join(rpm_dir["RPMS"], binary_rpm)
            if os.path.exists(rpm):
                self.move_file(rpm, self.dist_dir)
                filename = os.path.join(
                    self.dist_dir, os.path.basename(rpm)
                )
                self.distribution.dist_files.append(
                    ("bdist_rpm", pyversion, filename)
                )
||||
def _make_spec_file(self) -> list[str]:
    """Generate the text of an RPM spec file and return it as a
    list of strings (one per line).
    """
    # definitions and headers
    dist = self.distribution
    spec_file = [
        f"%define _topdir {os.path.abspath(self.rpm_base)}",
        # cx_Freeze specific
        "%define __prelink_undo_cmd %{nil}",
        "%define __strip /bin/true",
        "",
        f"%define name {dist.get_name()}",
        # RPM forbids dashes in version/release; replace them.
        f"%define version {dist.get_version().replace('-', '_')}",
        f"%define unmangled_version {dist.get_version()}",
        f"%define release {self.release.replace('-', '_')}",
        "",
        f"Summary: {dist.get_description() or 'UNKNOWN'}",
        "Name: %{name}",
        "Version: %{version}",
        "Release: %{release}",
        f"License: {dist.get_license() or 'UNKNOWN'}",
        f"Group: {self.group}",
        "BuildRoot: %{buildroot}",
        "Prefix: %{_prefix}",
        f"BuildArch: {platform.machine()}",
    ]

    # Fix for conda
    if IS_CONDA:
        spec_file.append("%define debug_package %{nil}")

    # Workaround for #14443 which affects some RPM based systems such as
    # RHEL6 (and probably derivatives)
    vendor_hook = check_output(
        [self._rpm, "--eval", "%{__os_install_post}"], text=True
    )
    # Generate a potential replacement value for __os_install_post (whilst
    # normalizing the whitespace to simplify the test for whether the
    # invocation of brp-python-bytecompile passes in __python):
    vendor_hook = "\n".join(
        [f" {line.strip()} \\" for line in vendor_hook.splitlines()]
    )
    problem = "brp-python-bytecompile \\\n"
    fixed = "brp-python-bytecompile %{__python} \\\n"
    fixed_hook = vendor_hook.replace(problem, fixed)
    if fixed_hook != vendor_hook:
        spec_file += [
            "# Workaround for http://bugs.python.org/issue14443",
            f"%define __python {sys.executable}",
            f"%define __os_install_post {fixed_hook}",
            "",
        ]

    # we create the spec file before running 'tar' in case of --spec-only.
    spec_file.append("Source0: %{name}-%{unmangled_version}.tar.gz")

    # Optional single- or multi-valued header fields taken from options.
    for field in (
        "Vendor",
        "Packager",
        "Provides",
        "Requires",
        "Conflicts",
        "Obsoletes",
    ):
        val = getattr(self, field.lower())
        if isinstance(val, list):
            join_val = " ".join(val)
            spec_file.append(f"{field}: {join_val}")
        elif val is not None:
            spec_file.append(f"{field}: {val}")

    if dist.get_url() not in (None, "UNKNOWN"):
        spec_file.append(f"Url: {dist.get_url()}")

    if self.distribution_name:
        spec_file.append(f"Distribution: {self.distribution_name}")

    if self.build_requires:
        spec_file.append("BuildRequires: " + " ".join(self.build_requires))

    if self.icon:
        spec_file.append("Icon: " + os.path.basename(self.icon))

    if self.no_autoreq:
        spec_file.append("AutoReq: 0")

    spec_file += [
        "",
        "%description",
        dist.get_long_description() or dist.get_description() or "UNKNOWN",
    ]

    # rpm scripts - figure out default build script
    if dist.script_name == "cxfreeze":
        def_setup_call = shutil.which(dist.script_name)
    else:
        def_setup_call = f"{sys.executable} {dist.script_name}"
    def_build = f"{def_setup_call} build_exe --optimize=1 --silent"
    def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build

    # insert contents of files

    # this is kind of misleading: user-supplied options are files
    # that we open and interpolate into the spec file, but the defaults
    # are just text that we drop in as-is.

    install_cmd = (
        f"{def_setup_call} install --skip-build"
        " --prefix=%{_prefix} --root=%{buildroot}"
    )

    # (rpm section name, option attribute, default script body or None).
    script_options = [
        ("prep", "prep_script", "%setup -n %{name}-%{unmangled_version}"),
        ("build", "build_script", def_build),
        ("install", "install_script", install_cmd),
        ("clean", "clean_script", "rm -rf %{buildroot}"),
        ("verifyscript", "verify_script", None),
        ("pre", "pre_install", None),
        ("post", "post_install", None),
        ("preun", "pre_uninstall", None),
        ("postun", "post_uninstall", None),
    ]

    for rpm_opt, attr, default in script_options:
        # Insert contents of file referred to, if no file is referred to
        # use 'default' as contents of script
        val = getattr(self, attr)
        if val or default:
            spec_file.extend(["", "%" + rpm_opt])
            if val:
                with open(val, encoding="utf_8") as file:
                    spec_file.extend(file.read().split("\n"))
            else:
                spec_file.append(default)

    # files section
    spec_file += [
        "",
        "%files",
        "%dir %{_prefix}/lib/%{name}-%{unmangled_version}",
        "%{_prefix}/lib/%{name}-%{unmangled_version}/*",
        "%{_bindir}/%{name}",
        "%defattr(-,root,root)",
    ]

    if self.doc_files:
        spec_file.append("%doc " + " ".join(self.doc_files))

    if self.changelog:
        spec_file.extend(["", "%changelog"])
        spec_file.extend(self.changelog)

    return spec_file
||||
@staticmethod
|
||||
def _format_changelog(changelog) -> list[str]:
|
||||
"""Format the changelog correctly and convert it to a string list."""
|
||||
if not changelog:
|
||||
return changelog
|
||||
new_changelog = []
|
||||
for raw_line in changelog.strip().split("\n"):
|
||||
line = raw_line.strip()
|
||||
if line[0] == "*":
|
||||
new_changelog.extend(["", line])
|
||||
elif line[0] == "-":
|
||||
new_changelog.append(line)
|
||||
else:
|
||||
new_changelog.append(" " + line)
|
||||
|
||||
# strip trailing newline inserted by first changelog entry
|
||||
if not new_changelog[0]:
|
||||
del new_changelog[0]
|
||||
|
||||
return new_changelog
|
||||
|
||||
|
||||
def write_file(filename, contents) -> None:
    """Create a file with the specified name and write 'contents'
    (a sequence of strings without line terminators) to it.
    """
    text = "".join(f"{line}\n" for line in contents)
    with open(filename, "w", encoding="utf_8") as file:
        file.write(text)
||||
321
venv3_12/Lib/site-packages/cx_Freeze/command/build_exe.py
Normal file
321
venv3_12/Lib/site-packages/cx_Freeze/command/build_exe.py
Normal file
@@ -0,0 +1,321 @@
|
||||
"""Implements the 'build_exe' command."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from sysconfig import get_platform, get_python_version
|
||||
from typing import ClassVar
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
from cx_Freeze._compat import IS_WINDOWS
|
||||
from cx_Freeze.common import normalize_to_list
|
||||
from cx_Freeze.exception import OptionError, SetupError
|
||||
from cx_Freeze.freezer import Freezer
|
||||
from cx_Freeze.module import ConstantsModule
|
||||
|
||||
__all__ = ["build_exe"]
|
||||
|
||||
|
||||
class build_exe(Command):
    """Build executables from Python scripts."""

    description = "build executables from Python scripts"
    # Command-line options: (long name, short name, help text).
    user_options: ClassVar[list[tuple[str, str | None, str]]] = [
        (
            "build-exe=",
            "b",
            "directory for built executables and dependent files",
        ),
        # Module/package selection.
        ("includes=", "i", "comma-separated list of modules to include"),
        ("excludes=", "e", "comma-separated list of modules to exclude"),
        (
            "packages=",
            "p",
            "comma-separated list of packages to include, "
            "which includes all submodules in the package",
        ),
        (
            "replace-paths=",
            None,
            "comma-separated list of paths to replace in included modules, "
            "using the form <search>=<replace>",
        ),
        (
            "path=",
            None,
            "comma-separated list of paths to search for modules; the default "
            "value is sys.path (use only if you know what you are doing)",
        ),
        (
            "include-path=",
            None,
            "comma-separated list of paths to modify the search for modules",
        ),
        ("constants=", None, "comma-separated list of constants to include"),
        # Binary dependency handling.
        (
            "bin-includes=",
            None,
            "list of files to include when determining "
            "dependencies of binary files that would normally be excluded",
        ),
        (
            "bin-excludes=",
            None,
            "list of files to exclude when determining "
            "dependencies of binary files that would normally be included",
        ),
        (
            "bin-path-includes=",
            None,
            "list of paths from which to include files when determining "
            "dependencies of binary files",
        ),
        (
            "bin-path-excludes=",
            None,
            "list of paths from which to exclude files when determining "
            "dependencies of binary files",
        ),
        (
            "include-files=",
            "f",
            "list of tuples of additional files to include in distribution",
        ),
        # Shared zip-file packaging.
        (
            "zip-includes=",
            None,
            "list of tuples of additional files to include in zip file",
        ),
        (
            "zip-include-packages=",
            None,
            "comma-separated list of packages to include in the zip file "
            "(or * for all) [default: none]",
        ),
        (
            "zip-exclude-packages=",
            None,
            "comma-separated list of packages to exclude from the zip file "
            "and place in the file system instead (or * for all) "
            "[default: *]",
        ),
        (
            "zip-filename=",
            None,
            "filename for the shared zipfile (.zip) "
            '[default: "library.zip" or None if --no-compress is used]',
        ),
        (
            "no-compress",
            None,
            "create a zip file with no compression (See also --zip-filename)",
        ),
        (
            "optimize=",
            "O",
            'optimization level: -O1 for "python -O", '
            '-O2 for "python -OO" and -O0 to disable [default: -O0]',
        ),
        # Output verbosity.
        (
            "silent",
            "s",
            "suppress all output except warnings "
            "(equivalent to --silent-level=1)",
        ),
        (
            "silent-level=",
            None,
            "suppress output from build_exe command."
            " level 0: get all messages; [default]"
            " level 1: suppress information messages, but still get warnings;"
            " (equivalent to --silent)"
            " level 2: suppress missing missing-module warnings"
            " level 3: suppress all warning messages",
        ),
        (
            "include-msvcr",
            None,
            "include the Microsoft Visual C runtime files",
        ),
    ]
    # Options that are flags (no value).
    boolean_options: ClassVar[list[str]] = [
        "no-compress",
        "include-msvcr",
        "silent",
    ]
||||
def add_to_path(self, name) -> None:
    """Prepend the directory stored in option *name* to sys.path, if set."""
    directory = getattr(self, name.lower())
    if directory is None:
        return
    sys.path.insert(0, directory)
||||
def build_extension(self, name, module_name=None) -> str | None:
    """Build the extension found in the source dir stored in option *name*.

    Runs the extension's own setup.py in its directory, adds the
    resulting build dir to sys.path and returns the path of the built
    extension file, or None when no source directory is configured.
    Raises SetupError if the setup.py defines no matching ext module.
    """
    # XXX: This method, add_to_path and set_source_location can be deleted?
    if module_name is None:
        module_name = name
    source_dir = getattr(self, name.lower())
    if source_dir is None:
        return None
    orig_dir = os.getcwd()
    script_args = ["build"]
    command = self.distribution.get_command_obj("build")
    # Reuse the compiler selected for the main build, if any.
    if command.compiler is not None:
        script_args.append(f"--compiler={command.compiler}")
    os.chdir(source_dir)
    logging.info("building '%s' extension in '%s'", name, source_dir)
    # Late import keeps distutils out of the normal import path.
    distutils_core = __import__("distutils.core", fromlist=["run_setup"])
    distribution = distutils_core.run_setup("setup.py", script_args)
    ext_modules = distribution.ext_modules
    modules = [m for m in ext_modules if m.name == module_name]
    if not modules:
        msg = f"no module named '{module_name}' in '{source_dir}'"
        raise SetupError(msg)
    command = distribution.get_command_obj("build_ext")
    command.ensure_finalized()
    if command.compiler is None:
        command.run()
    else:
        command.build_extensions()
    dir_name = os.path.join(source_dir, command.build_lib)
    # Restore the working directory before touching sys.path.
    os.chdir(orig_dir)
    if dir_name not in sys.path:
        sys.path.insert(0, dir_name)
    return os.path.join(
        source_dir,
        command.build_lib,
        command.get_ext_filename(module_name),
    )
||||
def initialize_options(self) -> None:
    """Reset every option to its pre-finalization default."""
    # Options that accept multiple values; each starts as its own list.
    self.list_options = (
        "excludes includes packages replace_paths constants "
        "include_files include_path bin_excludes bin_includes "
        "bin_path_excludes bin_path_includes zip_includes "
        "zip_exclude_packages zip_include_packages"
    ).split()
    for option in self.list_options:
        setattr(self, option, [])
    # By default every package stays on the file system, not in the zip.
    self.zip_exclude_packages = ["*"]

    # Scalar options.
    self.build_exe = None
    self.include_msvcr = None
    self.no_compress = False
    self.optimize = 0
    self.path = None
    self.silent = None
    self.silent_level = None
    self.zip_filename = None
||||
def finalize_options(self) -> None:
    """Validate options and derive defaults from the 'build' command.

    Raises OptionError for the removed 'build --build-exe' usage and
    SetupError when build_exe collides with build_base.
    """
    build = self.get_finalized_command("build")
    # check use of deprecated option
    options = build.distribution.get_option_dict("build")
    if options.get("build_exe", (None, None)) != (None, None):
        msg = (
            "[REMOVED] The use of build command with 'build-exe' "
            "option is deprecated.\n\t\t"
            "Use build_exe command with 'build-exe' option instead."
        )
        raise OptionError(msg)
    # check values of build_base and build_exe
    self.build_base = build.build_base
    if self.build_exe == self.build_base:
        msg = "build_exe option cannot be the same as build_base directory"
        raise SetupError(msg)
    if not self.build_exe:  # empty or None
        # Default: build/exe.<platform>-<python-version>
        dir_name = f"exe.{get_platform()}-{get_python_version()}"
        self.build_exe = os.path.join(self.build_base, dir_name)

    # make sure all options of multiple values are lists
    for option in self.list_options:
        setattr(self, option, normalize_to_list(getattr(self, option)))

    # path - accepts os.pathsep to be backwards compatible with CLI
    if self.path and isinstance(self.path, str):
        self.path = self.path.replace(os.pathsep, ",")
    include_path = self.include_path
    if include_path:
        # Prepend include_path to the explicit path or to sys.path.
        self.path = include_path + normalize_to_list(self.path or sys.path)

    # the degree of silencing, set from either the silent or silent-level
    # option, as appropriate
    self.silent = int(self.silent or self.silent_level or 0)

    # compression options
    self.no_compress = bool(self.no_compress)
    if self.zip_filename:
        # Normalize to a bare "<name>.zip" file name.
        self.zip_filename = os.path.basename(
            os.path.splitext(self.zip_filename)[0] + ".zip"
        )
    elif self.no_compress is False:
        self.zip_filename = "library.zip"

    # include-msvcr is used on Windows, but not in MingW
    self.include_msvcr = IS_WINDOWS and bool(self.include_msvcr)

    # optimization level: 0,1,2
    self.optimize = int(self.optimize or 0)
||||
def run(self) -> None:
    """Freeze the distribution's executables with the finalized options."""
    metadata = self.distribution.metadata
    constants_module = ConstantsModule(
        metadata.version, constants=self.constants
    )

    # NOTE: the leading arguments are positional — their order matches
    # Freezer's signature and must not be changed.
    freezer: Freezer = Freezer(
        self.distribution.executables,
        constants_module,
        self.includes,
        self.excludes,
        self.packages,
        self.replace_paths,
        (not self.no_compress),
        self.optimize,
        self.path,
        self.build_exe,
        bin_includes=self.bin_includes,
        bin_excludes=self.bin_excludes,
        bin_path_includes=self.bin_path_includes,
        bin_path_excludes=self.bin_path_excludes,
        include_files=self.include_files,
        zip_includes=self.zip_includes,
        zip_include_packages=self.zip_include_packages,
        zip_exclude_packages=self.zip_exclude_packages,
        silent=self.silent,
        metadata=metadata,
        include_msvcr=self.include_msvcr,
        zip_filename=self.zip_filename,
    )

    freezer.freeze()
    freezer.print_report()
||||
def set_source_location(self, name, *pathParts) -> None:
    """Resolve the source dir for option *name* from <NAME>_BASE.

    When the option is unset, look up the <NAME>_BASE environment
    variable and join *pathParts* onto it; the attribute is only
    updated if the resulting path is an existing directory.
    """
    attr_name = name.lower()
    location = getattr(self, attr_name)
    if location is None:
        base_dir = os.environ.get(f"{name.upper()}_BASE")
        if base_dir is None:
            return
        location = os.path.join(base_dir, *pathParts)
    if os.path.isdir(location):
        setattr(self, attr_name, location)
||||
# -- Predicates for the sub-command list ---------------------------

def has_executables(self) -> bool:
    """Whether the distribution declares cx_Freeze executables."""
    executables = getattr(self.distribution, "executables", None)
    return executables is not None
||||
78
venv3_12/Lib/site-packages/cx_Freeze/command/install.py
Normal file
78
venv3_12/Lib/site-packages/cx_Freeze/command/install.py
Normal file
@@ -0,0 +1,78 @@
|
||||
"""Extends setuptools 'install' command."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import contextlib
|
||||
import os
|
||||
import sys
|
||||
import warnings
|
||||
from typing import ClassVar, ContextManager
|
||||
|
||||
from setuptools.command.install import install as _install
|
||||
|
||||
__all__ = ["Install"]
|
||||
|
||||
|
||||
@contextlib.contextmanager
def suppress_known_deprecation() -> ContextManager:
    """Temporarily ignore the 'setup.py install is deprecated' warning.

    The previous warning-filter state is restored on exit.
    """
    with warnings.catch_warnings():
        warnings.filterwarnings(
            "ignore", message="setup.py install is deprecated"
        )
        yield
|
||||
|
||||
|
||||
class Install(_install):
    """Install everything from build directory.

    Extends setuptools' ``install`` command with an ``install_exe`` target
    directory and swaps in the ``install_exe`` sub-command whenever the
    distribution freezes executables.
    """

    command_name = "install"
    # inherit setuptools' options and add the frozen-executable directory
    user_options: ClassVar[list[tuple[str, str | None, str]]] = [
        *_install.user_options,
        ("install-exe=", None, "installation directory for executables"),
    ]

    def expand_dirs(self) -> None:
        """Expand $-placeholders in install_exe along with the defaults."""
        super().expand_dirs()
        self._expand_attrs(["install_exe"])

    def get_sub_commands(self) -> list[str]:
        """Return sub-commands to run.

        When executables are being frozen, egg-info and script installation
        do not apply -- they are replaced by the ``install_exe`` command.
        """
        sub_commands = super().get_sub_commands()[:]
        if self.distribution.executables:
            sub_commands.remove("install_egg_info")
            sub_commands.remove("install_scripts")
            sub_commands.append("install_exe")
        return sub_commands

    def initialize_options(self) -> None:
        """Initialize options, silencing setuptools' deprecation warning."""
        with suppress_known_deprecation():
            super().initialize_options()
        self.install_exe = None

    def finalize_options(self) -> None:
        """Finalize options.

        On Windows, when no prefix was given, default it to
        ``<ProgramFilesDir>\\<distribution name>`` as read from the registry.
        """
        if self.prefix is None and sys.platform == "win32":
            # __import__ keeps winreg out of module scope on non-Windows
            winreg = __import__("winreg")
            key = winreg.OpenKey(
                winreg.HKEY_LOCAL_MACHINE,
                r"Software\Microsoft\Windows\CurrentVersion",
            )
            base = winreg.QueryValueEx(key, "ProgramFilesDir")[0]
            metadata = self.distribution.metadata
            self.prefix = os.path.join(
                os.path.normpath(base), metadata.get_name()
            )
        super().finalize_options()
        # make install_exe honor --root the same way the other dirs do
        self.convert_paths("exe")
        if self.root is not None:
            self.change_roots("exe")

    def select_scheme(self, name) -> None:
        """Select the install scheme and supply a default for install_exe."""
        super().select_scheme(name)
        if self.install_exe is None:
            if sys.platform == "win32":
                self.install_exe = "$base"
            else:
                # POSIX: install under $base/lib/<name>-<version>
                metadata = self.distribution.metadata
                dir_name = f"{metadata.get_name()}-{metadata.get_version()}"
                self.install_exe = f"$base/lib/{dir_name}"

    def run(self) -> None:
        # setuptools used inspect.currentframe(), this method needs to exist.
        super().run()
|
||||
76
venv3_12/Lib/site-packages/cx_Freeze/command/install_exe.py
Normal file
76
venv3_12/Lib/site-packages/cx_Freeze/command/install_exe.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""Implements the 'install_exe' command."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import sys
|
||||
from typing import ClassVar
|
||||
|
||||
from setuptools import Command
|
||||
|
||||
__all__ = ["install_exe"]
|
||||
|
||||
|
||||
class install_exe(Command):
    """Install executables built from Python scripts.

    Copies the build_exe output tree into the install directory and, on
    POSIX systems, creates relative symlinks to the executables in the
    sibling ``bin`` directory.
    """

    command_name = "install_exe"
    description = "install executables built from Python scripts"
    user_options: ClassVar[list[tuple[str, str | None, str]]] = [
        ("install-dir=", "d", "directory to install executables to"),
        ("build-dir=", "b", "build directory (where to install from)"),
        ("force", "f", "force installation (overwrite existing files)"),
        ("skip-build", None, "skip the build steps"),
    ]

    def initialize_options(self) -> None:
        """Set default values for all options this command supports."""
        self.install_dir: str | None = None
        self.force = 0
        self.build_dir = None
        self.skip_build = None
        # files created by run(); reported back through get_outputs()
        self.outfiles = None

    def finalize_options(self) -> None:
        """Fill unset options from the build_exe and install commands."""
        self.set_undefined_options("build_exe", ("build_exe", "build_dir"))
        self.set_undefined_options(
            "install",
            ("install_exe", "install_dir"),
            ("force", "force"),
            ("skip_build", "skip_build"),
        )

    def run(self) -> None:
        """Copy the frozen tree into place; on POSIX, add bin/ symlinks."""
        if not self.skip_build:
            self.run_command("build_exe")

        self.mkpath(self.install_dir)
        self.outfiles = self.copy_tree(self.build_dir, self.install_dir)

        if sys.platform == "win32":
            return

        # in posix, make symlinks to the executables
        install_dir = self.install_dir
        # bin directory is a sibling of the lib/<name>-<version> tree
        bin_dir = os.path.join(
            os.path.dirname(os.path.dirname(install_dir)), "bin"
        )
        # remove stale links from a previous install (ignore_errors=True)
        self.execute(shutil.rmtree, (bin_dir, True), msg=f"removing {bin_dir}")
        self.mkpath(bin_dir)
        for executable in self.get_inputs():
            name = executable.target_name
            target = os.path.join(install_dir, name)
            origin = os.path.join(bin_dir, name)
            # relative link so the install tree stays relocatable
            relative_reference = os.path.relpath(target, bin_dir)
            self.execute(
                os.symlink,
                (relative_reference, origin, True),
                msg=f"linking {origin} -> {relative_reference}",
            )
            self.outfiles.append(origin)

    def get_inputs(self) -> list[str]:
        """Return the executables declared by the distribution."""
        return self.distribution.executables or []

    def get_outputs(self) -> list[str]:
        """Return the list of files created by run()."""
        return self.outfiles or []
|
||||
119
venv3_12/Lib/site-packages/cx_Freeze/common.py
Normal file
119
venv3_12/Lib/site-packages/cx_Freeze/common.py
Normal file
@@ -0,0 +1,119 @@
|
||||
"""Common utility functions shared between cx_Freeze modules."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from contextlib import suppress
|
||||
from pathlib import Path, PurePath
|
||||
from textwrap import dedent
|
||||
from types import CodeType
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from cx_Freeze.exception import OptionError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from cx_Freeze._typing import IncludesList, InternalIncludesList
|
||||
|
||||
|
||||
def get_resource_file_path(
    dirname: str | Path, name: str | Path, ext: str
) -> Path | None:
    """Return the path to a resource file shipped with cx_Freeze.

    Absolute names are returned unchanged.  Otherwise the file is looked up
    under this package's *dirname* subdirectory with *ext* as suffix,
    falling back to a lowercased filename (legacy CamelCase support).
    Returns None when nothing matches.
    """
    requested = Path(name)
    if requested.is_absolute():
        return requested
    base = Path(__file__).resolve().parent / dirname
    candidate = base / requested.with_suffix(ext)
    if candidate.exists():
        return candidate
    # Support for name argument in the old CamelCase value
    legacy = candidate.with_name(candidate.name.lower())
    return legacy if legacy.exists() else None
|
||||
|
||||
|
||||
def normalize_to_list(
    value: str | list[str] | tuple[str, ...] | None,
) -> list[str]:
    """Normalize a multi-value option to a plain list of strings.

    Accepts None or an empty value (-> []), a comma-separated string,
    or any sequence of strings.
    """
    if isinstance(value, str):
        return value.split(",") if value else []
    return list(value) if value else []
|
||||
|
||||
|
||||
def process_path_specs(specs: IncludesList | None) -> InternalIncludesList:
    """Prepare paths specified as config.

    Each spec is either a single source path or a (source, target) pair.
    A lone source uses its basename as target; an explicit target must be
    a relative path.  Returns (Path, PurePath) pairs, raising OptionError
    for malformed specs, missing sources, or absolute targets.
    """
    prepared: InternalIncludesList = []
    for spec in specs or []:
        if isinstance(spec, (list, tuple)):
            if len(spec) != 2:
                msg = "path spec must be a list or tuple of length two"
                raise OptionError(msg)
            source, target = spec
        else:
            source, target = spec, None
        source = Path(source)
        if not source.exists():
            msg = f"cannot find file/directory named {source!s}"
            raise OptionError(msg)
        target = PurePath(target or source.name)
        if target.is_absolute():
            msg = f"target path named {target!s} cannot be absolute"
            raise OptionError(msg)
        prepared.append((source, target))
    return prepared
|
||||
|
||||
|
||||
def code_object_replace(code: CodeType, **kwargs) -> CodeType:
    """Return a copy of *code* with the specified fields replaced.

    A ``co_consts`` override is coerced to a tuple first, since
    ``CodeType.replace`` requires one; absence of the key is fine.
    """
    try:
        kwargs["co_consts"] = tuple(kwargs["co_consts"])
    except (ValueError, KeyError):
        pass
    return code.replace(**kwargs)
|
||||
|
||||
|
||||
def code_object_replace_function(
    code: CodeType, name: str, source: str
) -> CodeType:
    """Return a copy of the code object with the function 'name' replaced.

    *source* is compiled and searched for a code object whose co_name is
    *name*; the matching constant inside *code* is then swapped for it,
    keeping the original first line number so tracebacks stay sensible.
    If *name* is not found in either place, *code* is returned unchanged.
    """
    if code is None:
        return code

    new_code = compile(
        dedent(source), code.co_filename, "exec", dont_inherit=True
    )
    # locate the compiled function body inside the replacement module code
    new_co_func = None
    for constant in new_code.co_consts:
        if isinstance(constant, CodeType) and constant.co_name == name:
            new_co_func = constant
            break
    if new_co_func is None:
        # nothing named *name* in the replacement source; keep the original
        return code

    # swap the matching constant, preserving its original line number
    consts = list(code.co_consts)
    for i, constant in enumerate(consts):
        if isinstance(constant, CodeType) and constant.co_name == name:
            consts[i] = code_object_replace(
                new_co_func, co_firstlineno=constant.co_firstlineno
            )
            break
    return code_object_replace(code, co_consts=consts)
|
||||
719
venv3_12/Lib/site-packages/cx_Freeze/darwintools.py
Normal file
719
venv3_12/Lib/site-packages/cx_Freeze/darwintools.py
Normal file
@@ -0,0 +1,719 @@
|
||||
# ruff: noqa
|
||||
from __future__ import annotations
|
||||
import sysconfig
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import stat
|
||||
import subprocess
|
||||
from tempfile import TemporaryDirectory
|
||||
from collections.abc import Iterable
|
||||
from pathlib import Path
|
||||
from cx_Freeze.exception import PlatformError
|
||||
|
||||
|
||||
# In a MachO file, need to deal specially with links that use @executable_path,
|
||||
# @loader_path, @rpath
|
||||
#
|
||||
# @executable_path - where ultimate calling executable is
|
||||
# @loader_path - directory of current object
|
||||
# @rpath - list of paths to check
|
||||
# (earlier rpaths have higher priority, i believe)
|
||||
#
|
||||
# Resolving these variables (particularly @rpath) requires tracing through the
|
||||
# sequence linked MachO files leading the the current file, to determine which
|
||||
# directories are included in the current rpath.
|
||||
|
||||
|
||||
def isMachOFile(path: Path) -> bool:
    """Determine whether *path* is a Mach-O file, via the `file` utility."""
    if not path.is_file():
        return False
    return b"Mach-O" in subprocess.check_output(("file", path))
|
||||
|
||||
|
||||
class MachOReference:
    """A single linking reference from one Mach-O file to another file."""

    def __init__(
        self,
        source_file: DarwinFile,
        raw_path: str,
        resolved_path: Path | None,
    ):
        """:param source_file: DarwinFile in which the reference was found
        :param raw_path: load path exactly as it appears in the file
            (may contain @rpath / @loader_path / @executable_path)
        :param resolved_path: explicit path of the target on disk, or None
            when resolution failed while the DarwinFile was processed
        """
        self.source_file: DarwinFile = source_file
        self.raw_path: str = raw_path
        self.resolved_path: Path | None = resolved_path
        # Whether the referenced file is copied into the frozen package
        # (False for system files left in place).
        self.is_copied = False
        # DarwinFile of the target; set only once the file is copied in.
        self.target_file: DarwinFile | None = None

    def isResolved(self) -> bool:
        """True when the raw load path resolved to a concrete file."""
        return self.resolved_path is not None

    def setTargetFile(self, darwin_file: DarwinFile):
        """Record the copied target and flag this reference as copied."""
        self.target_file = darwin_file
        self.is_copied = True
|
||||
|
||||
|
||||
class DarwinFile:
    """A DarwinFile object represents a file that will be copied into the
    application, and record where it was ultimately moved to in the application
    bundle. Mostly used to provide special handling for copied files that are
    Mach-O files.
    """

    def __init__(
        self,
        path: str | Path,
        referencing_file: DarwinFile | None = None,
        strict: bool = False,
    ):
        """:param path: The original path of the DarwinFile
        (before copying into app)
        :param referencing_file: DarwinFile object representing the referencing
        source file
        :param strict: Do not make guesses about rpath resolution. If the
        load does not resolve, throw an Exception.
        """
        self.path = Path(path).resolve()
        # set below only for Mach-O files; stays None for plain files
        self.referencing_file: DarwinFile | None = None
        self.strict = strict

        # path to file in build directory (set as part of freeze process)
        self._build_path: Path | None = None

        # commands in a Mach-O file
        self.commands: list[MachOCommand] = []
        self.loadCommands: list[MachOLoadCommand] = []
        self.rpathCommands: list[MachORPathCommand] = []

        # note: if file gets referenced twice (or more), it will only be the
        # first reference that gets recorded.
        # mapping of raw load paths to absolute resolved paths
        # (or None, if no resolution was determined)
        self.libraryPathResolution: dict[str, Path | None] = {}
        # the list of entries in the rpath in effect for this file,
        # lazily computed and cached by getRPath()
        self._rpath: list[Path] | None = None

        # dictionary of MachOReference objects, by their paths.
        # Path used is the resolved path, if available, and otherwise the
        # unresolved load path.
        self.machOReferenceForTargetPath: dict[Path, MachOReference] = {}

        if not isMachOFile(self.path):
            self.isMachO = False
            return

        # if this is a MachO file, extract linking information from it
        self.isMachO = True
        self.commands = MachOCommand._getMachOCommands(self.path)
        self.loadCommands = [
            c for c in self.commands if isinstance(c, MachOLoadCommand)
        ]
        self.rpathCommands = [
            c for c in self.commands if isinstance(c, MachORPathCommand)
        ]
        self.referencing_file = referencing_file

        # compute the effective rpath first; resolution below depends on it
        self.getRPath()
        self.resolveLibraryPaths()

        # Create MachOReference objects for all the binaries referenced from
        # this file.
        for raw_path, resolved_path in self.libraryPathResolution.items():
            # the path to use for storing in dictionary
            if resolved_path is None:
                dict_path = Path(raw_path)
            else:
                dict_path = resolved_path
            if dict_path in self.machOReferenceForTargetPath:
                if self.strict:
                    raise PlatformError(
                        f"ERROR: Multiple dynamic libraries from {self.path}"
                        f" resolved to the same file ({dict_path})."
                    )
                print(
                    f"WARNING: Multiple dynamic libraries from {self.path}"
                    f" resolved to the same file ({dict_path})."
                )
                continue
            reference = MachOReference(
                source_file=self,
                raw_path=raw_path,
                resolved_path=resolved_path,
            )
            self.machOReferenceForTargetPath[dict_path] = reference

    def __str__(self):
        """Multi-line human-readable summary of rpath and loaded libraries."""
        parts = []
        # parts.append("RPath Commands: {}".format(self.rpathCommands))
        # parts.append("Load commands: {}".format(self.loadCommands))
        parts.append(f"Mach-O File: {self.path}")
        parts.append("Resolved rpath:")
        for rpath in self.getRPath():
            parts.append(f"  {rpath}")
        parts.append("Loaded libraries:")
        for rpath in self.libraryPathResolution:
            parts.append(f"  {rpath} -> {self.libraryPathResolution[rpath]}")
        return "\n".join(parts)

    def fileReferenceDepth(self) -> int:
        """Returns how deep this Mach-O file is in the dynamic load order."""
        if self.referencing_file is not None:
            return self.referencing_file.fileReferenceDepth() + 1
        return 0

    def printFileInformation(self):
        """Prints information about the Mach-O file."""
        print(f"[{self.fileReferenceDepth()}] File: {self.path}")
        print("  Commands:")
        if len(self.commands) > 0:
            for cmd in self.commands:
                print(f"    {cmd}")
        else:
            print("    [None]")

        # This can be included for even more detail on the problem file.
        # print("  Load commands:")
        # if len(self.loadCommands) > 0:
        #     for cmd in self.loadCommands: print(f'    {cmd}')
        # else: print("    [None]")

        print("  RPath commands:")
        if len(self.rpathCommands) > 0:
            for rpc in self.rpathCommands:
                print(f"    {rpc}")
        else:
            print("    [None]")
        print("  Calculated RPath:")
        rpath = self.getRPath()
        if len(rpath) > 0:
            for path in rpath:
                print(f"    {path}")
        else:
            print("    [None]")
        if self.referencing_file is not None:
            print("Referenced from:")
            self.referencing_file.printFileInformation()

    def setBuildPath(self, path: Path):
        """Record where the file was placed in the build directory."""
        self._build_path = path

    def getBuildPath(self) -> Path | None:
        """Return the build-directory path, or None if not yet copied."""
        return self._build_path

    @staticmethod
    def isExecutablePath(path: str) -> bool:
        # load path relative to the ultimate calling executable
        return path.startswith("@executable_path")

    @staticmethod
    def isLoaderPath(path: str) -> bool:
        # load path relative to the directory of the current object
        return path.startswith("@loader_path")

    @staticmethod
    def isRPath(path: str) -> bool:
        # load path searched through the run-path list
        return path.startswith("@rpath")

    def resolveLoader(self, path: str) -> Path | None:
        """Resolve a path that includes @loader_path. @loader_path represents
        the directory in which the DarwinFile is located.
        """
        if self.isLoaderPath(path):
            return self.path.parent / Path(path).relative_to("@loader_path")
        raise PlatformError(f"resolveLoader() called on bad path: {path}")

    def resolveExecutable(self, path: str) -> Path:
        """@executable_path should resolve to the directory where the original
        executable was located. By default, we set that to the directory of
        the library, so it would resolve in the same way as if linked from an
        executable in the same directory.
        """
        # consider making this resolve to the directory of the python
        # interpreter? Apparently not a big issue in practice, since the
        # code has been like this forever.
        if self.isExecutablePath(path):
            return self.path.parent / Path(path).relative_to(
                "@executable_path/"
            )
        raise PlatformError(f"resolveExecutable() called on bad path: {path}")

    def resolveRPath(self, path: str) -> Path | None:
        """Try each rpath entry in turn until the @rpath reference resolves
        to an existing Mach-O file; behavior on failure depends on strict.
        """
        for rpath in self.getRPath():
            test_path = rpath / Path(path).relative_to("@rpath")
            if isMachOFile(test_path):
                return test_path
        if not self.strict:
            # If not strictly enforcing rpath, return None here, and leave any
            # error to .finalizeReferences() instead.
            return None
        print(f"\nERROR: Problem resolving RPath [{path}] in file:")
        self.printFileInformation()
        raise PlatformError(f"resolveRPath() failed to resolve path: {path}")

    def getRPath(self) -> list[Path]:
        """Returns the rpath in effect for this file. Determined by rpath
        commands in this file and (recursively) the chain of files that
        referenced this file.
        """
        if self._rpath is not None:
            # cached from a previous call
            return self._rpath
        raw_paths = [c.rpath for c in self.rpathCommands]
        rpath = []
        for raw_path in raw_paths:
            test_rp = Path(raw_path)
            if test_rp.is_absolute():
                rpath.append(test_rp)
            elif self.isLoaderPath(raw_path):
                rpath.append(self.resolveLoader(raw_path).resolve())
            elif self.isExecutablePath(raw_path):
                rpath.append(self.resolveExecutable(raw_path).resolve())
        # drop entries pointing at directories that do not exist
        rpath = [raw_path for raw_path in rpath if raw_path.exists()]

        # inherited rpath entries from the referencing chain come first
        if self.referencing_file is not None:
            rpath = self.referencing_file.getRPath() + rpath
        self._rpath = rpath
        return rpath

    def resolvePath(self, path: str) -> Path | None:
        """Resolves any @executable_path, @loader_path, and @rpath references
        in a path.
        """
        if self.isLoaderPath(path):  # replace @loader_path
            return self.resolveLoader(path)
        if self.isExecutablePath(path):  # replace @executable_path
            return self.resolveExecutable(path)
        if self.isRPath(path):  # replace @rpath
            return self.resolveRPath(path)
        test_path = Path(path)
        if test_path.is_absolute():  # just use the path, if it is absolute
            return test_path
        # last resort: try the path relative to this file's directory
        test_path = self.path.parent / path
        if isMachOFile(test_path):
            return test_path.resolve()
        if self.strict:
            raise PlatformError(
                f"Could not resolve path: {path} from file {self.path}."
            )
        print(
            f"WARNING: Unable to resolve reference to {path} from "
            f"file {self.path}. Frozen application may not "
            f"function correctly."
        )
        return None

    def resolveLibraryPaths(self):
        """Resolve every load-command path and record the mapping."""
        for cmd in self.loadCommands:
            raw_path = cmd.load_path
            resolved_path = self.resolvePath(raw_path)
            self.libraryPathResolution[raw_path] = resolved_path

    def getDependentFilePaths(self) -> set[Path]:
        """Returns the set of available resolved paths to dependencies."""
        dependents: set[Path] = set()
        for ref in self.machOReferenceForTargetPath.values():
            # skip load references that could not be resolved
            if ref.isResolved():
                dependents.add(ref.resolved_path)
        return dependents

    def getMachOReferenceList(self) -> list[MachOReference]:
        """Return all references made by this file, in recorded order."""
        return list(self.machOReferenceForTargetPath.values())

    def getMachOReferenceForPath(self, path: Path) -> MachOReference:
        """Returns the reference pointing to the specified path, based on
        paths stored in self.machOReferenceForTargetPath. Raises Exception
        if not available.
        """
        try:
            return self.machOReferenceForTargetPath[path]
        except KeyError:
            raise PlatformError(
                f"Path {path} is not a path referenced from DarwinFile"
            ) from None
|
||||
|
||||
|
||||
class MachOCommand:
    """Represents a load command in a MachO file."""

    def __init__(self, lines: list[str]):
        # raw (stripped) otool output lines belonging to this command
        self.lines = lines

    def displayString(self) -> str:
        """Short summary built from the first two raw lines."""
        summary = [line.strip() for line in self.lines[:2]]
        return " / ".join(summary)

    def __repr__(self):
        return f"<MachOCommand ({self.displayString()})>"

    @staticmethod
    def _getMachOCommands(path: Path) -> list[MachOCommand]:
        """Run ``otool -l`` on *path* and split the output into commands."""
        output = subprocess.check_output(
            ("otool", "-l", path), encoding="utf_8"
        )
        commands: list[MachOCommand] = []
        collected: list[str] | None = None
        for raw_line in output.splitlines():
            line = raw_line.strip()
            if line.startswith("Load command"):
                # a new command begins; flush the previous one
                if collected is not None:
                    commands.append(MachOCommand.parseLines(collected))
                collected = []
            if collected is not None:
                collected.append(line)
        if collected is not None:
            commands.append(MachOCommand.parseLines(collected))
        return commands

    @staticmethod
    def parseLines(lines: list[str]) -> MachOCommand:
        """Promote raw command lines to the appropriate command subclass."""
        if len(lines) < 2:
            return MachOCommand(lines)
        words = lines[1].split(" ")
        if words[0] != "cmd":
            return MachOCommand(lines)
        if words[1] == "LC_LOAD_DYLIB":
            return MachOLoadCommand(lines)
        if words[1] == "LC_RPATH":
            return MachORPathCommand(lines)
        return MachOCommand(lines)
|
||||
|
||||
|
||||
class MachOLoadCommand(MachOCommand):
    """An LC_LOAD_DYLIB command; captures the referenced library path."""

    def __init__(self, lines: list[str]):
        super().__init__(lines)
        # path of the dylib named by this command (None when unparsable)
        self.load_path = None
        if len(self.lines) < 4:
            return
        pathline = self.lines[3].strip()
        if pathline.startswith("name "):
            # drop the "name" keyword and the trailing "(offset N)" suffix
            self.load_path = (
                pathline[4:].strip().split("(offset")[0].strip()
            )

    def getPath(self):
        """Return the parsed load path (may be None)."""
        return self.load_path

    def __repr__(self):
        return f"<LoadCommand path={self.load_path!r}>"
|
||||
|
||||
|
||||
class MachORPathCommand(MachOCommand):
    """An LC_RPATH command; captures the rpath entry it declares."""

    def __init__(self, lines: list[str]):
        super().__init__(lines)
        # the declared rpath entry (None when unparsable)
        self.rpath = None
        if len(self.lines) < 4:
            return
        pathline = self.lines[3].strip()
        if pathline.startswith("path "):
            # drop the "path" keyword and the trailing "(offset N)" suffix
            self.rpath = (
                pathline[4:].strip().split("(offset")[0].strip()
            )

    def __repr__(self):
        return f"<RPath path={self.rpath!r}>"
|
||||
|
||||
|
||||
def _printFile(
    darwinFile: DarwinFile,
    seenFiles: set[DarwinFile],
    level: int,
    noRecurse=False,
):
    """Print *darwinFile* and (unless noRecurse) the copied files it
    references, indented according to nesting *level*.
    """
    indent = level * "| "
    suffix = " (already seen)" if noRecurse else ""
    print(f"{indent}{os.fspath(darwinFile.path)}{suffix}")
    if noRecurse:
        return
    for ref in darwinFile.machOReferenceForTargetPath.values():
        if not ref.is_copied:
            continue  # skip system files left in place
        child = ref.target_file
        _printFile(
            child,
            seenFiles=seenFiles,
            level=level + 1,
            noRecurse=(child in seenFiles),
        )
        seenFiles.add(child)
|
||||
|
||||
|
||||
def printMachOFiles(fileList: list[DarwinFile]):
    """Print the reference tree for each not-yet-seen file in *fileList*."""
    seenFiles: set[DarwinFile] = set()
    for file in fileList:
        if file in seenFiles:
            continue
        seenFiles.add(file)
        _printFile(file, seenFiles=seenFiles, level=0)
|
||||
|
||||
|
||||
def change_load_reference(
    filename: str, old_reference: str, new_reference: str, verbose: bool = True
):
    """Utility function that uses install_name_tool to change old_reference to
    new_reference in the machO file specified by filename.
    """
    if verbose:
        print("Redirecting load reference for ", end="")
        print(f"<(unknown)> {old_reference} -> {new_reference}")
    # temporarily add the user-write bit so install_name_tool can modify
    # a read-only file
    original = os.stat(filename).st_mode
    new_mode = original | stat.S_IWUSR
    if new_mode != original:
        os.chmod(filename, new_mode)
    subprocess.call(
        (
            "install_name_tool",
            "-change",
            old_reference,
            new_reference,
            filename,
        )
    )
    # restore the original permissions
    if new_mode != original:
        os.chmod(filename, original)
|
||||
|
||||
|
||||
def apply_adhoc_signature(filename: str):
    """Ad-hoc codesign *filename*; needed on arm64/universal2 macOS.

    x86_64-only builds are skipped because they do not require a
    signature.
    """
    if sysconfig.get_platform().endswith("x86_64"):
        return
    # Apply for universal2 and arm64 machines
    print("Applying AdHocSignature")
    args = (
        "codesign",
        "--sign",
        "-",  # ad-hoc identity
        "--force",
        "--preserve-metadata=entitlements,requirements,flags,runtime",
        filename,
    )
    if subprocess.call(args):
        # It may be a bug in Apple's codesign utility.
        # The workaround is to copy the file to another inode, then move it
        # back, erasing the previous file. Then sign again.
        with TemporaryDirectory(prefix="cxfreeze-") as tmp_dir:
            tempname = os.path.join(tmp_dir, os.path.basename(filename))
            shutil.copy(filename, tempname)
            shutil.move(tempname, filename)
            subprocess.call(args)
|
||||
|
||||
|
||||
class DarwinFileTracker:
|
||||
"""Object to track the DarwinFiles that have been added during a freeze."""
|
||||
|
||||
def __init__(self, strict: bool = False):
|
||||
self.strict = strict
|
||||
# list of DarwinFile objects for files being copied into project
|
||||
self._copied_file_list: list[DarwinFile] = []
|
||||
|
||||
# mapping of (build directory) target paths to DarwinFile objects
|
||||
self._darwin_file_for_build_path: dict[Path, DarwinFile] = {}
|
||||
|
||||
# mapping of (source location) paths to DarwinFile objects
|
||||
self._darwin_file_for_source_path: dict[Path, DarwinFile] = {}
|
||||
|
||||
# a cache of MachOReference objects pointing to a given source path
|
||||
self._reference_cache: dict[Path, MachOReference] = {}
|
||||
|
||||
def __iter__(self) -> Iterable[DarwinFile]:
|
||||
return iter(self._copied_file_list)
|
||||
|
||||
def pathIsAlreadyCopiedTo(self, target_path: Path) -> bool:
|
||||
"""Check if the given target_path has already has a file copied to
|
||||
it.
|
||||
"""
|
||||
if target_path in self._darwin_file_for_build_path:
|
||||
return True
|
||||
return False
|
||||
|
||||
def getDarwinFile(
|
||||
self, source_path: Path, target_path: Path
|
||||
) -> DarwinFile:
|
||||
"""Gets the DarwinFile for file copied from source_path to target_path.
|
||||
If either (i) nothing, or (ii) a different file has been copied to
|
||||
targetPath, raises a PlatformError.
|
||||
"""
|
||||
# check that the target file came from the specified source
|
||||
targetDarwinFile: DarwinFile
|
||||
try:
|
||||
targetDarwinFile = self._darwin_file_for_build_path[target_path]
|
||||
except KeyError:
|
||||
raise PlatformError(
|
||||
f"File {target_path} already copied to, "
|
||||
"but no DarwinFile object found for it."
|
||||
) from None
|
||||
real_source = source_path.resolve()
|
||||
target_real_source = targetDarwinFile.path.resolve()
|
||||
if real_source != target_real_source:
|
||||
# raise PlatformError(
|
||||
print(
|
||||
"*** WARNING ***\n"
|
||||
f"Attempting to copy two files to {target_path}\n"
|
||||
f"source 1: {targetDarwinFile.path} "
|
||||
f"(real: {target_real_source})\n"
|
||||
f"source 2: {source_path} (real: {real_source})\n"
|
||||
"(This may be caused by including modules in the zip file "
|
||||
"that rely on binary libraries with the same name.)"
|
||||
"\nUsing only source 1."
|
||||
)
|
||||
return targetDarwinFile
|
||||
|
||||
def recordCopiedFile(self, target_path: Path, darwin_file: DarwinFile):
|
||||
"""Record that a DarwinFile is being copied to a given path. If a
|
||||
file has been copied to that path, raise a PlatformError.
|
||||
"""
|
||||
if self.pathIsAlreadyCopiedTo(target_path):
|
||||
raise PlatformError(
|
||||
"addFile() called with target_path already copied to "
|
||||
f"(target_path={target_path})"
|
||||
)
|
||||
|
||||
self._copied_file_list.append(darwin_file)
|
||||
self._darwin_file_for_build_path[target_path] = darwin_file
|
||||
self._darwin_file_for_source_path[darwin_file.path] = darwin_file
|
||||
|
||||
def cacheReferenceTo(self, source_path: Path, reference: MachOReference):
|
||||
self._reference_cache[source_path] = reference
|
||||
|
||||
def getCachedReferenceTo(self, source_path: Path) -> MachOReference | None:
|
||||
return self._reference_cache.get(source_path)
|
||||
|
||||
def findDarwinFileForFilename(self, filename: str) -> DarwinFile | None:
|
||||
"""Attempts to locate a copied DarwinFile with the specified filename
|
||||
and returns that. Otherwise returns None.
|
||||
"""
|
||||
basename = Path(filename).name
|
||||
for file in self._copied_file_list:
|
||||
if file.path.name == basename:
|
||||
return file
|
||||
return None
|
||||
|
||||
def finalizeReferences(self):
    """Do a final pass through the references for all the copied
    DarwinFiles and attempt to clean up any remaining references that are
    not already marked as copied. It covers two cases where the reference
    might not be marked as copied:

    1) Files where _CopyFile was called without copyDependentFiles=True
       (in which the information would not have been added to the
       references at that time).
    2) Files with broken @rpath references. We try to fix that up here by
       seeing if the relevant file was located *anywhere* as part of the
       freeze process.

    In strict mode an unresolvable reference raises PlatformError;
    otherwise a warning is printed and the reference is left unchanged.
    """
    copied_file: DarwinFile
    reference: MachOReference
    for copied_file in self._copied_file_list:
        for reference in copied_file.getMachOReferenceList():
            if not reference.is_copied:
                if reference.isResolved():
                    # if reference is resolved, simply check if the
                    # resolved path was otherwise copied and look up the
                    # DarwinFile object for it.
                    target_path = reference.resolved_path.resolve()
                    if target_path in self._darwin_file_for_source_path:
                        reference.setTargetFile(
                            self._darwin_file_for_source_path[target_path]
                        )
                else:
                    # if reference is not resolved, look through the copied
                    # files and try to find a candidate, and use it if
                    # found.
                    potential_target = self.findDarwinFileForFilename(
                        reference.raw_path
                    )
                    if potential_target is None:
                        # If we cannot find any likely candidate, fail.
                        if self.strict:
                            copied_file.printFileInformation()
                            raise PlatformError(
                                f"finalizeReferences() failed to resolve"
                                f" path [{reference.raw_path}] in file "
                                f"[{copied_file.path}]."
                            )
                        print(
                            "\nWARNING: Could not resolve dynamic link to "
                            f"[{reference.raw_path}] in file "
                            f"[{copied_file.path}], and could "
                            "not find any likely intended target."
                        )
                        continue
                    # Best-effort guess: warn and adopt the candidate.
                    print(
                        f"WARNING: In file [{copied_file.path}]"
                        f" guessing that {reference.raw_path} "
                        f"resolved to {potential_target.path}."
                    )
                    reference.resolved_path = potential_target.path
                    reference.setTargetFile(potential_target)
|
||||
|
||||
def set_relative_reference_paths(self, build_dir: str, bin_dir: str):
    """Make all the references from included Mach-O files to other included
    Mach-O files relative.

    :param build_dir: root of the freeze build directory that the copied
        files' build paths are relative to
    :param bin_dir: directory holding the binaries whose load commands
        are rewritten (e.g. the bundle's binary directory)
    """
    darwin_file: DarwinFile

    for darwin_file in self._copied_file_list:
        # Skip text files
        if darwin_file.path.suffix == ".txt":
            continue

        # get the relative path to darwin_file in build directory
        print(f"Setting relative_reference_path for: {darwin_file}")
        relative_copy_dest = os.path.relpath(
            darwin_file.getBuildPath(), build_dir
        )
        # figure out directory where it will go in binary directory for
        # .app bundle, this would be the Content/MacOS subdirectory in
        # bundle. This is the file that needs to have its dynamic load
        # references updated.
        file_path_in_bin_dir = os.path.join(bin_dir, relative_copy_dest)
        # for each file that this darwin_file references, update the
        # reference as necessary; if the file is copied into the binary
        # package, change the reference to be relative to @executable_path
        # (so an .app bundle will work wherever it is moved)
        for reference in darwin_file.getMachOReferenceList():
            if not reference.is_copied:
                # referenced file not copied -- assume this is a system
                # file that will also be present on the user's machine,
                # and do not change reference
                continue
            # this is the reference in the machO file that needs to be
            # updated
            raw_path = reference.raw_path
            ref_target_file: DarwinFile = reference.target_file
            # this is where file copied in build dir
            abs_build_dest = ref_target_file.getBuildPath()
            rel_build_dest = os.path.relpath(abs_build_dest, build_dir)
            exe_path = f"@executable_path/{rel_build_dest}"
            change_load_reference(
                file_path_in_bin_dir, raw_path, exe_path, verbose=False
            )

        # re-sign this file after its load commands were rewritten
        # NOTE(review): placed inside the per-file loop — the extracted
        # diff lost indentation; confirm against upstream darwintools.py
        apply_adhoc_signature(file_path_in_bin_dir)
|
||||
41
venv3_12/Lib/site-packages/cx_Freeze/exception.py
Normal file
41
venv3_12/Lib/site-packages/cx_Freeze/exception.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""Internal exception classes."""
|
||||
|
||||
# Only re-export setuptools errors to avoid exceptions not handled correctly
|
||||
from setuptools.errors import (
|
||||
ExecError,
|
||||
FileError,
|
||||
ModuleError,
|
||||
OptionError,
|
||||
PlatformError,
|
||||
SetupError,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"ExecError",
|
||||
"FileError",
|
||||
"ModuleError",
|
||||
"OptionError",
|
||||
"PlatformError",
|
||||
"SetupError",
|
||||
]
|
||||
|
||||
|
||||
# Give the re-exported setuptools error classes cx_Freeze-specific
# docstrings, so help() and the docs describe them in cx_Freeze terms.
ExecError.__doc__ = """\
Raised when there are problems executing an external program."""

FileError.__doc__ = """\
Raised when an error is detected related to file/resource not found."""

ModuleError.__doc__ = """\
Raised when there are problems to load the module or module metadata."""

OptionError.__doc__ = """\
Raised when an error is detected in the configuration. The associated value is
a string indicating what precisely went wrong."""

PlatformError.__doc__ = """\
Raised when an error is detected in the module that is platform specific."""

SetupError.__doc__ = """\
Raised for errors that can be definitely blamed on the setup script, such as
invalid keyword arguments to 'setup()'."""
|
||||
265
venv3_12/Lib/site-packages/cx_Freeze/executable.py
Normal file
265
venv3_12/Lib/site-packages/cx_Freeze/executable.py
Normal file
@@ -0,0 +1,265 @@
|
||||
"""Module for the Executable base class."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import string
|
||||
import sys
|
||||
from collections.abc import Mapping
|
||||
from pathlib import Path
|
||||
from sysconfig import get_config_var, get_platform
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from cx_Freeze._compat import EXE_SUFFIX, IS_MACOS, IS_MINGW, IS_WINDOWS
|
||||
from cx_Freeze.common import get_resource_file_path
|
||||
from cx_Freeze.exception import OptionError, SetupError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from setuptools import Distribution
|
||||
|
||||
# Characters that are not valid in an internal module identifier: all
# whitespace plus punctuation except "." and "_".  Used by the
# target_name setter to sanitize names into identifiers.
STRINGREPLACE = list(
    string.whitespace + string.punctuation.replace(".", "").replace("_", "")
)

__all__ = ["Executable", "validate_executables"]
|
||||
|
||||
|
||||
class Executable:
    """Base Executable class.

    Describes one executable to freeze: the script, the launcher base
    (console/gui/service), and optional metadata (icon, shortcut,
    manifest, UAC flags) consumed by the build/bdist commands.
    """

    def __init__(
        self,
        script: str | Path,
        init_script: str | Path | None = None,
        base: str | Path | None = None,
        target_name: str | None = None,
        icon: str | Path | None = None,
        shortcut_name: str | None = None,
        shortcut_dir: str | Path | None = None,
        copyright: str | None = None,  # noqa: A002
        trademarks: str | None = None,
        manifest: str | Path | None = None,
        uac_admin: bool = False,
        uac_uiaccess: bool = False,
    ) -> None:
        # Each assignment goes through the matching property setter below,
        # which validates/normalizes the value.  main_script is assigned
        # before target_name because the target_name setter falls back to
        # self.main_script.stem when no name is given.
        self.main_script = script
        self.init_script = init_script
        self.base = base
        self.target_name = target_name
        self.icon = icon
        self.shortcut_name = shortcut_name
        self.shortcut_dir = shortcut_dir
        self.copyright = copyright
        self.trademarks = trademarks
        self.manifest = manifest
        self.uac_admin = uac_admin
        self.uac_uiaccess = uac_uiaccess

    def __repr__(self) -> str:
        return f"<Executable script={self.main_script}>"

    @property
    def base(self) -> Path:
        """:return: the name of the base executable
        :rtype: Path

        """
        return self._base

    @base.setter
    def base(self, name: str | Path | None) -> None:
        # Map the generic aliases to the concrete launcher name; "gui" and
        # "service" only exist as separate bases on Windows/MinGW.
        name = name or "console"
        if name == "gui":
            name = "Win32GUI" if IS_WINDOWS or IS_MINGW else "console"
        elif name == "service":
            name = "Win32Service" if IS_WINDOWS or IS_MINGW else "console"
        # Base executables are stored per-ABI; build the ABI tag the same
        # way on Windows (cache_tag + platform) or take SOABI elsewhere.
        if IS_WINDOWS or IS_MINGW:
            platform_nodot = get_platform().replace(".", "").replace("-", "_")
            soabi = f"{sys.implementation.cache_tag}-{platform_nodot}"
        else:
            soabi = get_config_var("SOABI")
        suffix = EXE_SUFFIX
        name_base = f"{name}-{soabi}"
        self._base: Path = get_resource_file_path("bases", name_base, suffix)
        if self._base is None:
            msg = f"no base named {name!r} ({name_base!r})"
            raise OptionError(msg)
        # remember the executable suffix; target_name appends it
        self._ext: str = suffix

    @property
    def icon(self) -> Path | None:
        """:return: the path of the icon
        :rtype: Path

        """
        return self._icon

    @icon.setter
    def icon(self, name: str | Path | None) -> None:
        iconfile: Path = Path(name) if name else None
        if iconfile and not iconfile.suffix:
            # add an extension: try the platform-preferred format first,
            # keeping the last candidate even if no file exists on disk
            valid_extensions = [".png", ".svg"]
            if IS_WINDOWS or IS_MINGW:
                valid_extensions.insert(0, ".ico")
            elif IS_MACOS:
                valid_extensions.insert(0, ".icns")
            for ext in valid_extensions:
                iconfile = iconfile.with_suffix(ext)
                if iconfile.exists():
                    break
        self._icon: Path | None = iconfile

    @property
    def init_module_name(self) -> str:
        """:return: the name of the init module in zip file
        :rtype: str

        """
        return f"__init__{self._internal_name}"

    @property
    def init_script(self) -> Path:
        """:return: the name of the initialization script that will be executed
        before the main script is executed
        :rtype: Path

        """
        return self._init_script

    @init_script.setter
    def init_script(self, name: str | Path | None) -> None:
        name = name or "console"
        self._init_script: Path = get_resource_file_path(
            "initscripts", name, ".py"
        )
        if self._init_script is None:
            msg = f"no init_script named {name}"
            raise OptionError(msg)

    @property
    def main_module_name(self) -> str:
        """:return: the name of the main module in zip file
        :rtype: str

        """
        return f"__main__{self._internal_name}"

    @property
    def main_script(self) -> Path:
        """:return: the path of the file containing the script which is to be
        frozen
        :rtype: Path

        """
        return self._main_script

    @main_script.setter
    def main_script(self, name: str | Path) -> None:
        self._main_script: Path = Path(name)

    @property
    def manifest(self) -> str | None:
        """:return: the XML schema of the manifest which is to be included in
        the frozen executable
        :rtype: str

        """
        return self._manifest

    @manifest.setter
    def manifest(self, name: str | Path | None) -> None:
        self._manifest: str | None = None
        if name is None:
            return
        # a str/Path value names a file whose contents are the manifest
        if isinstance(name, str):
            name = Path(name)
        self._manifest = name.read_text(encoding="utf-8")

    @property
    def shortcut_name(self) -> str:
        """:return: the name to give a shortcut for the executable when
        included in an MSI package (Windows only).
        :rtype: str

        """
        return self._shortcut_name

    @shortcut_name.setter
    def shortcut_name(self, name: str) -> None:
        self._shortcut_name: str = name

    @property
    def shortcut_dir(self) -> Path:
        """:return: the directory in which to place the shortcut when being
        installed by an MSI package; see the MSI Shortcut table documentation
        for more information on what values can be placed here (Windows only).
        :rtype: Path

        """
        return self._shortcut_dir

    @shortcut_dir.setter
    def shortcut_dir(self, name: str | Path) -> None:
        self._shortcut_dir: Path = Path(name) if name else None

    @property
    def target_name(self) -> str:
        """:return: the name of the target executable
        :rtype: str

        """
        return self._name + self._ext

    @target_name.setter
    def target_name(self, name: str | None) -> None:
        if name is None:
            # default to the script's file name without extension
            name = self.main_script.stem
        else:
            pathname = Path(name)
            if name != pathname.name:
                msg = (
                    "target_name cannot contain the path, only the filename: "
                    f"{pathname.name}"
                )
                raise OptionError(msg)
            if sys.platform == "win32" and pathname.suffix.lower() == ".exe":
                name = pathname.stem
        self._name: str = name
        # derive an importable internal identifier from the visible name:
        # drop anything after the first ".", replace invalid characters
        name = name.partition(".")[0]
        if not name.isidentifier():
            for invalid in STRINGREPLACE:
                name = name.replace(invalid, "_")
        name = os.path.normcase(name)
        self._internal_name: str = name
        if not self.init_module_name.isidentifier():
            msg = f"target_name is invalid: {self._name!r}"
            raise OptionError(msg)
|
||||
|
||||
|
||||
def validate_executables(dist: Distribution, attr: str, value) -> None:
    """Verify that value is a valid executables attribute, which could be an
    Executable list, a mapping list or a string list.

    :param dist: the setuptools Distribution being configured
    :param attr: name of the attribute being validated ("executables")
    :param value: the user-supplied attribute value
    :raises SetupError: if value is not a non-empty list/tuple whose
        elements are Executable, Mapping or str.
    """
    # Use explicit checks instead of `assert` statements so validation is
    # not stripped away when Python runs with optimization (`python -O`).
    # A list or tuple is required to exclude unordered or single-use
    # iterables.
    is_valid = isinstance(value, (list, tuple)) and bool(value)
    if is_valid:
        # elements of value must be Executable, Mapping (keyword
        # arguments for Executable) or a script name string
        is_valid = all(
            isinstance(executable, (Executable, Mapping, str))
            for executable in value
        )
    if not is_valid:
        msg = f"{attr!r} must be a list of Executable (got {value!r})"
        raise SetupError(msg)

    # Normalize into a valid Executable list on the distribution.
    if dist.executables == value:
        dist.executables = []
    executables = list(value)
    for i, executable in enumerate(executables):
        if isinstance(executable, str):
            executables[i] = Executable(executable)
        elif isinstance(executable, Mapping):
            executables[i] = Executable(**executable)
    dist.executables.extend(executables)
|
||||
838
venv3_12/Lib/site-packages/cx_Freeze/finder.py
Normal file
838
venv3_12/Lib/site-packages/cx_Freeze/finder.py
Normal file
@@ -0,0 +1,838 @@
|
||||
"""Module Finder - discovers what modules are required by the code."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import importlib.machinery
|
||||
import logging
|
||||
import opcode
|
||||
import os
|
||||
import sys
|
||||
from contextlib import suppress
|
||||
from functools import cached_property
|
||||
from importlib import import_module
|
||||
from pathlib import Path, PurePath
|
||||
from sysconfig import get_config_var
|
||||
from tempfile import TemporaryDirectory
|
||||
from typing import TYPE_CHECKING, Any
|
||||
|
||||
from cx_Freeze.common import (
|
||||
code_object_replace,
|
||||
get_resource_file_path,
|
||||
process_path_specs,
|
||||
)
|
||||
from cx_Freeze.module import ConstantsModule, Module
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Sequence
|
||||
from importlib.abc import ExecutionLoader
|
||||
from types import CodeType
|
||||
|
||||
from cx_Freeze._typing import (
|
||||
DeferredList,
|
||||
IncludesList,
|
||||
InternalIncludesList,
|
||||
)
|
||||
|
||||
# All recognized module file suffixes (source, bytecode and extension).
ALL_SUFFIXES = importlib.machinery.all_suffixes()


# Opcodes used when scanning module bytecode for import statements.
# The .get() lookups may return None because these opcodes do not exist
# on every supported Python version.
CALL_FUNCTION = opcode.opmap.get("CALL_FUNCTION")
CALL = opcode.opmap.get("CALL")
PRECALL = opcode.opmap.get("PRECALL")

EXTENDED_ARG = opcode.opmap["EXTENDED_ARG"]
LOAD_CONST = opcode.opmap["LOAD_CONST"]
LOAD_NAME = opcode.opmap["LOAD_NAME"]
IMPORT_NAME = opcode.opmap["IMPORT_NAME"]
IMPORT_FROM = opcode.opmap["IMPORT_FROM"]
# Python 3.12+ uses CALL_INTRINSIC_1 with argument 2
IMPORT_STAR = (
    opcode.opmap.get("IMPORT_STAR") or opcode.opmap["CALL_INTRINSIC_1"]
)
STORE_NAME = opcode.opmap["STORE_NAME"]
STORE_GLOBAL = opcode.opmap["STORE_GLOBAL"]
STORE_OPS = (STORE_NAME, STORE_GLOBAL)
HAVE_ARGUMENT = opcode.HAVE_ARGUMENT

__all__ = ["Module", "ModuleFinder"]
|
||||
|
||||
|
||||
class ModuleFinder:
|
||||
"""ModuleFinder base class."""
|
||||
|
||||
def __init__(
    self,
    constants_module: ConstantsModule | None = None,
    excludes: list[str] | None = None,
    include_files: IncludesList | None = None,
    path: list[str | Path] | None = None,
    replace_paths: list[tuple[str, str]] | None = None,
    zip_exclude_packages: Sequence[str] | None = None,
    zip_include_packages: Sequence[str] | None = None,
    zip_include_all_packages: bool = False,
    zip_includes: IncludesList | None = None,
) -> None:
    """Initialize the finder state.

    :param excludes: module names that must not be included; they are
        pre-seeded into the module cache as None so lookups fail fast
    :param path: module search path; defaults to sys.path
    """
    self.included_files: InternalIncludesList = process_path_specs(
        include_files
    )
    # stored as a dict purely for fast membership tests
    self.excludes: dict[str, Any] = dict.fromkeys(excludes or [])
    self.optimize = 0
    self.path: list[str] = list(map(os.fspath, path or sys.path))
    self.replace_paths = replace_paths or []
    self.zip_include_all_packages = zip_include_all_packages
    self.zip_exclude_packages: set = zip_exclude_packages or set()
    self.zip_include_packages: set = zip_include_packages or set()
    self.constants_module = constants_module
    self.zip_includes: InternalIncludesList = process_path_specs(
        zip_includes
    )
    self.modules = []
    self.aliases = {}
    self.excluded_dependent_files: set[Path] = set()
    # module cache: name -> Module, or None for excluded/failed imports
    self._modules: dict[str, Module | None] = dict.fromkeys(excludes or [])
    # modules that could not be found, mapped to their callers
    self._bad_modules = {}
    self._exclude_unused_modules()
    # temporary cache dir; removed by TemporaryDirectory's finalizer
    self._tmp_dir = TemporaryDirectory(prefix="cxfreeze-")
    self.cache_path = Path(self._tmp_dir.name)
    self.lib_files: dict[Path, str] = {}
|
||||
|
||||
def _add_module(
    self,
    name: str,
    path: Sequence[Path | str] | None = None,
    filename: Path | None = None,
    parent: Module | None = None,
) -> Module:
    """Add a module to the list of modules but if one is already found,
    then return it instead; this is done so that packages can be
    handled properly.
    """
    module = self._modules.get(name)
    if module is None:
        # first time this name is seen: create and register the Module
        module = Module(name, path, filename, parent)
        self._modules[name] = module
        self.modules.append(module)
        if name in self._bad_modules:
            # a module previously reported as missing was found after all
            logging.debug(
                "Removing module [%s] from list of bad modules", name
            )
            del self._bad_modules[name]
        # decide whether the module is stored in the zip file:
        # included either because all packages go into the zip (minus the
        # explicit exclusions) or because it was explicitly included
        if (
            self.zip_include_all_packages
            and module.name not in self.zip_exclude_packages
            or module.name in self.zip_include_packages
        ):
            module.in_file_system = 0
        module.cache_path = self.cache_path
        module.update_distribution()
    # fill in path/filename if the cached module lacked them
    if module.path is None and path is not None:
        module.path = list(map(Path, path))
    if module.file is None and filename is not None:
        module.file = filename
    return module
|
||||
|
||||
@cached_property
|
||||
def _builtin_modules(self) -> set[str]:
|
||||
"""The built-in modules are determined based on the cx_Freeze build."""
|
||||
builtin_modules: set[str] = set(sys.builtin_module_names)
|
||||
dynload = get_resource_file_path("bases", "lib-dynload", "")
|
||||
if dynload and dynload.is_dir():
|
||||
# discard modules that exist in bases/lib-dynload
|
||||
ext_suffix = get_config_var("EXT_SUFFIX")
|
||||
for file in dynload.glob(f"*{ext_suffix}"):
|
||||
builtin_modules.discard(file.name.partition(".")[0])
|
||||
return builtin_modules
|
||||
|
||||
def _determine_parent(self, caller: Module | None) -> Module | None:
|
||||
"""Determine the parent to use when searching packages."""
|
||||
if caller is not None:
|
||||
if caller.path is not None:
|
||||
return caller
|
||||
return self._get_parent_by_name(caller.name)
|
||||
return None
|
||||
|
||||
def _exclude_unused_modules(self) -> None:
    """Exclude modules that are known to be unused on this platform."""
    unused = import_module("cx_Freeze.hooks._unused_modules")
    for module_name in unused.MODULES:
        self.exclude_module(module_name)
|
||||
|
||||
def _ensure_from_list(
    self,
    caller: Module,
    package_module: Module,
    from_list: list[str],
    deferred_imports: DeferredList,
) -> None:
    """Ensure that the from list is satisfied. This is only necessary for
    package modules. If the package module has not been completely
    imported yet, defer the import until it has been completely imported
    in order to avoid spurious errors about missing modules.
    """
    if package_module.in_import and caller is not package_module:
        # package still being imported: handle this from-list later
        deferred_imports.append((caller, package_module, from_list))
    else:
        for name in from_list:
            # names that are plain attributes of the package need no import
            if name in package_module.global_names:
                continue
            sub_module_name = f"{package_module.name}.{name}"
            self._import_module(sub_module_name, deferred_imports, caller)
|
||||
|
||||
def _get_parent_by_name(self, name: str) -> Module | None:
|
||||
"""Return the parent module given the name of a module."""
|
||||
pos = name.rfind(".")
|
||||
if pos > 0:
|
||||
parent_name = name[:pos]
|
||||
return self._modules[parent_name]
|
||||
return None
|
||||
|
||||
def _import_all_sub_modules(
    self,
    module: Module,
    deferred_imports: DeferredList,
    recursive: bool = True,
) -> None:
    """Import all sub modules of the given package.

    :param recursive: when true, descend into sub-packages as well
    :raises ImportError: if a discovered sub-module cannot be imported
        and is not in the excludes list
    """
    for path in module.path:
        for fullname in path.iterdir():
            if fullname.is_dir():
                # only directories with an __init__.py are sub-packages
                if not fullname.joinpath("__init__.py").exists():
                    continue
                name = fullname.name
            else:
                # We need to run through these in order to correctly pick
                # up PEP 3149 library names
                # (e.g. .cpython-39-x86_64-linux-gnu.so).
                for suffix in ALL_SUFFIXES:
                    if fullname.name.endswith(suffix):
                        name = fullname.name[: -len(suffix)]

                        # Skip modules whose names appear to contain '.',
                        # as we may be using the wrong suffix, and even if
                        # we're not, such module names will break the
                        # import code.
                        if "." not in name:
                            break

                else:
                    # not a recognized module file at all
                    continue
                if name == "__init__":
                    continue

            sub_module_name = f"{module.name}.{name}"
            sub_module = self._internal_import_module(
                sub_module_name, deferred_imports
            )
            if sub_module is None:
                if sub_module_name not in self.excludes:
                    msg = f"No module named {sub_module_name!r}"
                    raise ImportError(msg)
            else:
                module.global_names.add(name)
                if sub_module.path and recursive:
                    self._import_all_sub_modules(
                        sub_module, deferred_imports, recursive
                    )
|
||||
|
||||
def _import_deferred_imports(
    self, deferred_imports: DeferredList, skip_in_import: bool = False
) -> None:
    """Import any sub modules that were deferred, if applicable.

    Processes the deferred list repeatedly, because satisfying one
    from-list can defer further imports; iterates until no new work
    is produced.
    """
    while deferred_imports:
        new_deferred_imports: DeferredList = []
        for caller, package_module, sub_module_names in deferred_imports:
            # after the first pass, packages still mid-import are skipped
            if package_module.in_import and skip_in_import:
                continue
            self._ensure_from_list(
                caller,
                package_module,
                sub_module_names,
                new_deferred_imports,
            )
        deferred_imports = new_deferred_imports
        skip_in_import = True
|
||||
|
||||
def _import_module(
    self,
    name: str,
    deferred_imports: DeferredList,
    caller: Module | None = None,
    relative_import_index: int = 0,
) -> Module:
    """Attempt to find the named module and return it or None if no module
    by that name could be found.

    :param relative_import_index: 0 for an absolute import, < 0 for an
        old-style (implicit) relative import, > 0 for the number of
        leading dots in a new-style relative import
    :raises ImportError: if the module is missing and there is no caller
        to record the failure against
    """
    # absolute import (available in Python 2.5 and up)
    # the name given is the only name that will be searched
    if relative_import_index == 0:
        module = self._internal_import_module(name, deferred_imports)

    # old style relative import (regular 'import foo' in Python 2)
    # the name given is tried in the current package, and if no match
    # is found, self.path is searched for a top-level module/package
    elif relative_import_index < 0:
        parent = self._determine_parent(caller)
        if parent is not None:
            fullname = f"{parent.name}.{name}"
            module = self._internal_import_module(
                fullname, deferred_imports
            )
            if module is not None:
                parent.global_names.add(name)
                return module

        module = self._internal_import_module(name, deferred_imports)

    # new style relative import (available in Python 2.5 and up)
    # the index indicates how many levels to traverse and only that level
    # is searched for the named module
    elif relative_import_index > 0:
        parent = caller
        # a package consumes the first level itself
        if parent.path is not None:
            relative_import_index -= 1
        while parent is not None and relative_import_index > 0:
            parent = self._get_parent_by_name(parent.name)
            relative_import_index -= 1
        if parent is None:
            module = None
        elif not name:
            # "from . import x" - the package itself is the target
            module = parent
        else:
            name = f"{parent.name}.{name}"
            module = self._internal_import_module(name, deferred_imports)

    # if module not found, track that fact
    if module is None:
        if caller is None:
            msg = f"No module named {name!r}"
            raise ImportError(msg)
        self._missing_hook(caller, name)

    return module
|
||||
|
||||
def _internal_import_module(
    self, name: str, deferred_imports: DeferredList
) -> Module | None:
    """Internal method used for importing a module which assumes that the
    name given is an absolute name. None is returned if the module
    cannot be found.
    """
    with suppress(KeyError):
        # Check in module cache before trying to import it again.
        # (The cached value may be None for excluded/failed modules.)
        return self._modules[name]

    if name in self._builtin_modules:
        module = self._add_module(name)
        logging.debug("Adding module [%s] [C_BUILTIN]", name)
        if module.hook:
            module.hook(self)
        module.in_import = False
        return module

    pos = name.rfind(".")
    if pos < 0:  # Top-level module
        path = self.path
        parent_module = None
    else:  # Dotted module name - look up the parent module
        parent_name = name[:pos]
        # importing the parent recursively establishes its search path
        parent_module = self._internal_import_module(
            parent_name, deferred_imports
        )
        if parent_module is None:
            return None
        path = parent_module.path
        # a parent with no path (e.g. a builtin) falls back to self.path
        path = self.path if path is None else list(map(os.fspath, path))

    if name in self.aliases:
        # an alias maps this name onto another real module; cache the
        # result under both names
        actual_name = self.aliases[name]
        module = self._internal_import_module(
            actual_name, deferred_imports
        )
        self._modules[name] = module
        return module

    try:
        module = self._load_module(
            name, path, deferred_imports, parent_module
        )
    except ImportError:
        logging.debug("Module [%s] cannot be imported", name)
        # cache the failure so the import is not retried
        self._modules[name] = None
        return None
    return module
|
||||
|
||||
def _load_module(
    self,
    name: str,
    path: Sequence[str] | None,
    deferred_imports: DeferredList,
    parent: Module | None = None,
) -> Module | None:
    """Load the module, searching the module spec.

    Returns None for modules handled by the built-in/frozen importers
    or when no spec is found.
    """
    spec: importlib.machinery.ModuleSpec | None = None
    loader: ExecutionLoader | None = None
    module: Module | None = None

    # Find modules to load
    try:
        # It's recommended to clear the caches first.
        importlib.machinery.PathFinder.invalidate_caches()
        spec = importlib.machinery.PathFinder.find_spec(name, path)
    except KeyError:
        if parent:
            # some packages use a directory with vendored modules
            # without an __init__.py and are not considered namespace
            # packages, then simulate a subpackage
            module = self._add_module(
                name,
                path=[Path(path[0], name.rpartition(".")[-1])],
                parent=parent,
            )
            logging.debug("Adding module [%s] [PACKAGE]", name)
            module.file = Path(path[0]) / "__init__.py"
            # synthesized package: compile an empty source string later
            module.source_is_string = True

    if spec:
        loader = spec.loader
        # Ignore built-in importers
        if loader is importlib.machinery.BuiltinImporter:
            return None
        if loader is importlib.machinery.FrozenImporter:
            return None
        # Load package or namespace package
        if spec.submodule_search_locations:
            module = self._add_module(
                name,
                path=list(spec.submodule_search_locations),
                parent=parent,
            )
            if spec.origin in (None, "namespace"):
                logging.debug("Adding module [%s] [NAMESPACE]", name)
                module.file = module.path[0] / "__init__.py"
                module.source_is_string = True
            else:
                logging.debug("Adding module [%s] [PACKAGE]", name)
                module.file = Path(spec.origin)  # path of __init__.py
        else:
            # plain module (source, bytecode or extension file)
            module = self._add_module(
                name, filename=Path(spec.origin), parent=parent
            )

    if module is not None:
        self._load_module_code(module, loader, deferred_imports)
    return module
|
||||
|
||||
def _load_module_code(
    self,
    module: Module,
    loader: ExecutionLoader | None,
    deferred_imports: DeferredList,
) -> Module | None:
    """Load and compile the module's code via its loader, run its hook,
    and scan the resulting code object for imports.

    :raises ImportError: for syntax errors, stale bytecode, or an
        unknown loader type
    """
    name = module.name
    path = os.fspath(module.file)

    if isinstance(loader, importlib.machinery.SourceFileLoader):
        logging.debug("Adding module [%s] [SOURCE]", name)
        # Load & compile Python source code
        source_bytes = loader.get_data(path)
        try:
            module.code = loader.source_to_code(
                source_bytes, path, _optimize=self.optimize
            )
        except SyntaxError:
            logging.debug("Invalid syntax in [%s]", name)
            msg = f"Invalid syntax in {path}"
            raise ImportError(msg, name=name) from None
    elif isinstance(loader, importlib.machinery.SourcelessFileLoader):
        logging.debug("Adding module [%s] [BYTECODE]", name)
        # Load Python bytecode
        module.code = loader.get_code(name)
        if module.code is None:
            msg = f"Bad magic number in {path}"
            raise ImportError(msg, name=name)
    elif isinstance(loader, importlib.machinery.ExtensionFileLoader):
        # extension modules have no Python code to scan
        logging.debug("Adding module [%s] [EXTENSION]", name)
    elif module.source_is_string:
        # synthesized module (e.g. simulated subpackage): empty code
        module.code = compile(
            "", path, "exec", dont_inherit=True, optimize=self.optimize
        )
    else:
        msg = f"Unknown module loader in {path}"
        raise ImportError(msg, name=name)

    # Run custom hook for the module
    if module.hook:
        module.hook(self)

    if module.code is not None:
        if self.replace_paths:
            module.code = self._replace_paths_in_code(module)

        # Scan the module code for import statements
        self._scan_code(module, deferred_imports)

        # Verify __package__ in use
        module.code = self._replace_package_in_code(module)

    elif module.stub_code is not None:
        # no real code (extension module): scan its stub instead
        self._scan_code(module, deferred_imports, module.stub_code)

    module.in_import = False
    return module
|
||||
|
||||
def _load_module_from_file(
|
||||
self, name: str, filename: Path, deferred_imports: DeferredList
|
||||
) -> Module | None:
|
||||
"""Load the module from the filename."""
|
||||
loader: ExecutionLoader | None = None
|
||||
|
||||
ext = filename.suffix
|
||||
path = os.fspath(filename)
|
||||
if not ext or ext in importlib.machinery.SOURCE_SUFFIXES:
|
||||
loader = importlib.machinery.SourceFileLoader(name, path)
|
||||
elif ext in importlib.machinery.BYTECODE_SUFFIXES:
|
||||
loader = importlib.machinery.SourcelessFileLoader(name, path)
|
||||
elif ext in importlib.machinery.EXTENSION_SUFFIXES:
|
||||
loader = importlib.machinery.ExtensionFileLoader(name, path)
|
||||
|
||||
module = self._add_module(name, filename=filename)
|
||||
self._load_module_code(module, loader, deferred_imports)
|
||||
return module
|
||||
|
||||
def _missing_hook(self, caller: Module, module_name: str) -> None:
    """Run the registered 'missing' hook for *module_name*, if any, and
    record the failed import against *caller* unless it is ignored.
    """
    hooks = import_module("cx_Freeze.hooks")
    # Hook functions are named missing_<module_name> with dots mangled
    # to underscores.
    handler = getattr(
        hooks, f"missing_{module_name.replace('.', '_')}", None
    )
    if handler:
        handler(self, caller)
    if module_name not in caller.ignore_names:
        # Remember which modules tried (and failed) to import this name.
        self._bad_modules.setdefault(module_name, {})[caller.name] = None
|
||||
|
||||
@staticmethod
def _replace_package_in_code(module: Module) -> CodeType | None:
    """Replace the value of __package__ directly in the code, when the
    module is in a package and will be stored in shared zip file.

    Returns the (possibly rewritten) code object; the original code (or
    None) is returned unchanged when no rewrite is needed or possible.
    """
    code = module.code
    # Check if module is in a package and will be stored in zip file
    # and is not defined in the module, like 'six' do
    if (
        code is None
        or module.parent is None
        or "__package__" in module.global_names
        or module.in_file_system >= 1
    ):
        return code
    # Only if the code references it.
    if "__package__" in code.co_names:
        consts = list(code.co_consts)
        # The package name is appended to consts below, so its index
        # will be the current length of co_consts.
        pkg_const_index = len(consts)
        pkg_name_index = code.co_names.index("__package__")
        if pkg_const_index > 255 or pkg_name_index > 255:
            # Don't touch modules with many constants or names; the
            # hand-assembled opcodes below use single-byte arguments
            # (no EXTENDED_ARG prefix). This is good for now.
            return code
        # Insert a bytecode to set __package__ as module.parent.name:
        # prepend "LOAD_CONST <pkg>; STORE_NAME __package__" to the
        # module's existing bytecode.
        codes = [LOAD_CONST, pkg_const_index, STORE_NAME, pkg_name_index]
        codestring = bytes(codes) + code.co_code
        # For a package's own __init__, __package__ is the module itself;
        # otherwise it is the enclosing parent package.
        if module.file.stem == "__init__":
            consts.append(module.name)
        else:
            consts.append(module.parent.name)
        code = code_object_replace(
            code, co_code=codestring, co_consts=consts
        )
    return code
|
||||
|
||||
def _replace_paths_in_code(
    self, module: Module, code: CodeType | None = None
) -> CodeType:
    """Replace paths in the code as directed, returning a new code object
    with the modified paths in place.

    Applies the (search, replace) pairs from self.replace_paths to
    code.co_filename and recurses into nested code objects.  A search
    value of "*" matches the top-level module's own directory.
    """
    top_level_module: Module = module.root
    if code is None:
        code = module.code
    # Prepare the new filename.
    original_filename = Path(code.co_filename)
    for search_value, replace_value in self.replace_paths:
        if search_value == "*":
            if top_level_module.file is None:
                # No file on disk (e.g. built-in): nothing to match.
                continue
            if top_level_module.path:
                # A package: strip the directory that CONTAINS the
                # package (file is pkg/__init__.py -> parent.parent).
                search_dir = top_level_module.file.parent.parent
            else:
                search_dir = top_level_module.file.parent
        else:
            search_dir = Path(search_value)
        # relative_to raises ValueError when the filename is not under
        # search_dir; in that case fall through to the next pair.
        with suppress(ValueError):
            new_filename = original_filename.relative_to(search_dir)
            # replace_value is combined with the / operator — assumes a
            # Path (or str-accepting) left operand; set up elsewhere in
            # replace_paths — TODO confirm.
            new_filename = replace_value / new_filename
            break
    else:
        # No pair matched: keep the original filename.
        new_filename = original_filename

    # Run on subordinate code objects from function & class definitions.
    consts = list(code.co_consts)
    for i, const in enumerate(consts):
        if isinstance(const, type(code)):
            # Recurse with the ROOT module so "*" keeps resolving
            # against the same top-level directory.
            consts[i] = self._replace_paths_in_code(
                top_level_module, const
            )

    return code_object_replace(
        code, co_consts=consts, co_filename=os.fspath(new_filename)
    )
|
||||
|
||||
def _scan_code(
    self,
    module: Module,
    deferred_imports: DeferredList,
    code: CodeType | None = None,
    top_level: bool = True,
) -> None:
    """Scan code, looking for imported modules and keeping track of the
    constants that have been created in order to better tell which
    modules are truly missing.

    Walks the bytecode of *code* (defaults to module.code) in two-byte
    instruction units, detects IMPORT_NAME statements and explicit
    __import__(...) calls, forwards each name to self._import_module,
    and recurses into nested code objects.
    """
    if code is None:
        code = module.code
    arguments = []          # constants pushed since the last reset
    name = None             # most recent LOAD_NAME target
    import_call = 0         # 1 once a __import__(...) call is detected
    imported_module = None
    extended_arg = 0
    co_code = code.co_code
    # Each instruction is two bytes: opcode + one-byte argument.
    for i in range(0, len(co_code), 2):
        opc = co_code[i]
        if opc >= HAVE_ARGUMENT:
            # Fold any pending EXTENDED_ARG prefix into this argument.
            arg = co_code[i + 1] | extended_arg
            extended_arg = (arg << 8) if opc == EXTENDED_ARG else 0
        else:
            arg = None
            extended_arg = 0

        # keep track of constants (these are used for importing)
        # immediately restart loop so arguments are retained
        if opc == LOAD_CONST:
            arguments.append(code.co_consts[arg])
            continue

        # __import__ call
        if opc == LOAD_NAME:
            name = code.co_names[arg]
            continue
        if name and name == "__import__" and len(arguments) == 1:
            # Try to identify a __import__ call
            # Python 3.12 bytecode:
            # 20 2 PUSH_NULL
            # 4 LOAD_NAME 0 (__import__)
            # 6 LOAD_CONST 0 ('pkgutil')
            # 8 CALL 1
            # Python 3.6 to 3.10 uses CALL_FUNCTION instead of CALL
            # Python 3.11 uses PRECALL then CALL
            if CALL_FUNCTION and (opc, arg) == (CALL_FUNCTION, 1):
                import_call = 1
            elif PRECALL:
                # 3.11 only: remember the PRECALL argument, then match
                # the following CALL instruction against it.
                if (opc, arg) == (PRECALL, 1):
                    import_call = arg
                    continue
                arg = import_call
                if CALL and (opc, arg) == (CALL, 1):
                    import_call = 1

        # import statement: attempt to import module or __import__
        if opc == IMPORT_NAME or import_call == 1:
            if opc == IMPORT_NAME:
                name = code.co_names[arg]
            else:
                # __import__ call: the module name is the single
                # constant pushed before the call.
                name = arguments[0]
                arguments = []
                logging.debug("Scan code detected __import__(%r)", name)
            # For IMPORT_NAME the two constants preceding it are the
            # relative-import level and the from-list.
            if len(arguments) >= 2:
                relative_import_index, from_list = arguments[-2:]
            else:
                relative_import_index = -1
                from_list = arguments[0] if arguments else []
            if name not in module.exclude_names:
                imported_module = self._import_module(
                    name, deferred_imports, module, relative_import_index
                )
                if imported_module is not None and (
                    from_list
                    and from_list != ("*",)
                    and imported_module.path is not None
                ):
                    # Ensure the names in "from X import a, b" that are
                    # submodules get imported too.
                    self._ensure_from_list(
                        module,
                        imported_module,
                        from_list,
                        deferred_imports,
                    )

        # import * statement: copy all global names
        # NOTE(review): the extra "arg == 2" guard below looks
        # version-specific; confirm against the targeted CPython
        # bytecode before relying on it.
        elif (
            opc == IMPORT_STAR
            and (arg == 2 if opc > HAVE_ARGUMENT else None)
            and top_level
            and imported_module is not None
        ):
            module.global_names.update(imported_module.global_names)

        # store operation: track only top level
        elif top_level and opc in STORE_OPS:
            name = code.co_names[arg]
            module.global_names.add(name)

        # reset arguments; these are only needed for import statements so
        # ignore them in all other cases!
        arguments = []
        name = None
        import_call = 0

    # Scan the code objects from function & class definitions
    for constant in code.co_consts:
        if isinstance(constant, type(code)):
            self._scan_code(
                module, deferred_imports, code=constant, top_level=False
            )
|
||||
|
||||
def add_alias(self, name: str, alias_for: str) -> None:
    """Register *name* as an alias: any attempt to import it imports
    *alias_for* instead.
    """
    self.aliases[name] = alias_for
|
||||
|
||||
def add_base_modules(self) -> None:
    """Add the base modules to the finder.

    These are the modules that Python imports itself during
    initialization and, if not found, can result in behavior that
    differs from running from source; also include modules used within
    the bootstrap code.

    When cx_Freeze is built, these modules (and modules they load) are
    included in the startup zip file.
    """
    for package in ("collections", "encodings", "importlib"):
        self.include_package(package)
    for module_name in (
        "io",
        "os",
        "sys",
        "traceback",
        "unicodedata",
        "warnings",
        "zlib",
    ):
        self.include_module(module_name)
|
||||
|
||||
def add_constant(self, name: str, value: str) -> None:
    """Expose *name* = *value* through the BUILD_CONSTANTS module which
    is used in the initscripts.
    """
    self.constants_module.values[name] = value
|
||||
|
||||
def exclude_dependent_files(self, filename: Path | str) -> None:
    """Exclude the dependent files of the named file from the resulting
    frozen executable.
    """
    path = filename if isinstance(filename, Path) else Path(filename)
    self.excluded_dependent_files.add(path)
|
||||
|
||||
def exclude_module(self, name: str) -> None:
    """Exclude the named module and its submodules from the resulting
    frozen executable.
    """
    # Collect the target names first to avoid mutating self._modules
    # while iterating over it.
    prefix = f"{name}."
    targets = [name]
    targets.extend(mod for mod in self._modules if mod.startswith(prefix))
    for target in targets:
        self.excludes[target] = None
        self._modules[target] = None
|
||||
|
||||
def include_file_as_module(
    self, path: Path | str, name: str | None = None
) -> Module:
    """Include the named file as a module in the frozen executable."""
    source = Path(path) if isinstance(path, str) else path
    # Default module name: filename up to the first dot.
    module_name = (
        name if name is not None else source.name.partition(".")[0]
    )
    deferred: DeferredList = []
    module = self._load_module_from_file(module_name, source, deferred)
    if module is not None:
        parent = self._get_parent_by_name(module_name)
        if parent is not None:
            parent.global_names.add(module.name)
            module.parent = parent
    self._import_deferred_imports(deferred)
    return module
|
||||
|
||||
def include_files(
    self,
    source_path: Path | str,
    target_path: Path | str,
    copy_dependent_files: bool = True,
) -> None:
    """Include the files in the given directory in the target build."""
    specs = process_path_specs([(source_path, target_path)])
    self.included_files += specs
    if copy_dependent_files:
        return
    # Caller opted out of pulling in this file's dependencies.
    self.exclude_dependent_files(source_path)
|
||||
|
||||
def include_module(self, name: str) -> Module:
    """Include the named module in the frozen executable."""
    # An explicit include wins over a previously registered exclude
    # (only when the module was not actually loaded).
    if name in self.excludes and self._modules.get(name) is None:
        del self.excludes[name]
        self._modules.pop(name, None)
    # include module
    deferred: DeferredList = []
    module = self._import_module(name, deferred)
    self._import_deferred_imports(deferred, skip_in_import=True)
    return module
|
||||
|
||||
def include_package(self, name: str) -> Module:
    """Include the named package and any submodules in the frozen
    executable.
    """
    deferred: DeferredList = []
    package = self._import_module(name, deferred)
    if package.path:
        # A real package (has a search path): pull in every submodule.
        self._import_all_sub_modules(package, deferred)
    self._import_deferred_imports(deferred, skip_in_import=True)
    return package
|
||||
|
||||
def report_missing_modules(self) -> None:
    """Display a list of modules that weren't found."""
    if not self._bad_modules:
        return
    print("Missing modules:")
    for name in sorted(self._bad_modules):
        importers = ", ".join(sorted(self._bad_modules[name]))
        print(f"? {name} imported from", importers)
    print(
        "This is not necessarily a problem - the modules "
        "may not be needed on this platform.\n"
    )
|
||||
|
||||
@property
def optimize(self) -> int:
    """Optimization level propagated from the user's choice."""
    return self._optimize_flag
|
||||
|
||||
@optimize.setter
def optimize(self, value: int) -> None:
    # The value of optimize is validated in '.command.build_exe' or
    # '.cli'; a divergent value is unlikely, so anything outside the
    # supported range is silently ignored as a safety net.
    if not -1 <= value <= 2:
        return
    self._optimize_flag = value
|
||||
|
||||
def zip_include_files(
    self,
    source_path: str | Path,
    target_path: str | Path | PurePath | None = None,
) -> None:
    """Include files or all of the files in a directory to the zip file."""
    specs = process_path_specs([(source_path, target_path)])
    self.zip_includes.extend(specs)
|
||||
1382
venv3_12/Lib/site-packages/cx_Freeze/freezer.py
Normal file
1382
venv3_12/Lib/site-packages/cx_Freeze/freezer.py
Normal file
File diff suppressed because it is too large
Load Diff
801
venv3_12/Lib/site-packages/cx_Freeze/hooks/__init__.py
Normal file
801
venv3_12/Lib/site-packages/cx_Freeze/hooks/__init__.py
Normal file
@@ -0,0 +1,801 @@
|
||||
"""A collection of functions which are triggered automatically by finder when
|
||||
certain packages are included or not found.
|
||||
"""
|
||||
|
||||
# ruff: noqa: ARG001
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
import sysconfig
|
||||
from contextlib import suppress
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from cx_Freeze._compat import IS_MACOS, IS_MINGW, IS_WINDOWS
|
||||
from cx_Freeze.hooks._qthooks import get_qt_plugins_paths # noqa: F401
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from cx_Freeze.finder import ModuleFinder
|
||||
from cx_Freeze.module import Module
|
||||
|
||||
|
||||
def load_aiofiles(finder: ModuleFinder, module: Module) -> None:
    """The aiofiles must be loaded as a package."""
    finder.include_package("aiofiles")


def load_babel(finder: ModuleFinder, module: Module) -> None:
    """The babel must be loaded as a package, and has pickleable data."""
    finder.include_package("babel")
    # Keep babel on disk so its data files stay accessible.
    module.in_file_system = 1


def load_bcrypt(finder: ModuleFinder, module: Module) -> None:
    """The bcrypt < 4.0 package requires the _cffi_backend module
    (loaded implicitly).
    """
    # bcrypt 4.0+ no longer depends on cffi, so skip it there.
    include_cffi = True
    if module.distribution and module.distribution.version[0] >= 4:
        include_cffi = False
    if include_cffi:
        finder.include_module("_cffi_backend")


def load_boto(finder: ModuleFinder, module: Module) -> None:
    """The boto package uses 'six' fake modules."""
    finder.exclude_module("boto.vendored.six.moves")


def load_boto3(finder: ModuleFinder, module: Module) -> None:
    """The boto3 package."""
    finder.include_package("boto3.dynamodb")
    finder.include_package("boto3.ec2")
    finder.include_package("boto3.s3")
    # boto3 loads service definitions from its bundled data directory.
    finder.include_files(module.file.parent / "data", "lib/boto3/data")


def load_cElementTree(finder: ModuleFinder, module: Module) -> None:
    """The cElementTree module implicitly loads the elementtree.ElementTree
    module; make sure this happens.
    """
    finder.include_module("elementtree.ElementTree")


def load_ceODBC(finder: ModuleFinder, module: Module) -> None:
    """The ceODBC module implicitly imports both datetime and decimal;
    make sure this happens.
    """
    finder.include_module("datetime")
    finder.include_module("decimal")


def load_certifi(finder: ModuleFinder, module: Module) -> None:
    """The certifi package uses importlib.resources to locate the cacert.pem
    in zip packages.
    """
    # Only needed when certifi itself is stored inside the zip file.
    if module.in_file_system == 0:
        cacert = Path(__import__("certifi").where())
        finder.zip_include_files(cacert, Path("certifi", cacert.name))


def load__cffi_backend(finder: ModuleFinder, module: Module) -> None:
    """Add the cffi metadata for _cffi_backend module."""
    module.update_distribution("cffi")


def load_cffi_cparser(finder: ModuleFinder, module: Module) -> None:
    """The cffi.cparser module can use an extension if present."""
    try:
        cffi = __import__("cffi", fromlist=["_pycparser"])
        pycparser = getattr(cffi, "_pycparser")  # noqa: B009
        finder.include_module(pycparser.__name__)
    except (ImportError, AttributeError):
        finder.exclude_module("cffi._pycparser")


def load_charset_normalizer(finder: ModuleFinder, module: Module) -> None:
    """The charset_normalizer package."""
    finder.exclude_module("charset_normalizer.cli")


def load_charset_normalizer_md(finder: ModuleFinder, module: Module) -> None:
    """The charset_normalizer package implicitly imports an extension
    module.
    """
    # The mypyc-compiled companion shares this module's suffix chain.
    mypyc = module.file.parent / ("md__mypyc" + "".join(module.file.suffixes))
    if mypyc.exists():
        finder.include_module("charset_normalizer.md__mypyc")


def load_copy(finder: ModuleFinder, module: Module) -> None:
    """The copy module should filter import names."""
    if not sys.platform.startswith("java"):
        module.exclude_names.add("org.python.core")


def load_crc32c(finder: ModuleFinder, module: Module) -> None:
    """The google.crc32c module requires _cffi_backend module."""
    finder.include_module("_cffi_backend")


def load_cryptography(finder: ModuleFinder, module: Module) -> None:
    """The cryptography module requires the _cffi_backend module."""
    # Include cffi only when the installed distribution declares it as a
    # requirement (or when no metadata is available to decide).
    if module.distribution and module.distribution.requires:
        include_cffi = False
        for req in module.distribution.requires:
            if req.startswith("cffi"):
                include_cffi = True
                break
    else:
        include_cffi = True
    if include_cffi:
        finder.include_module("_cffi_backend")
|
||||
|
||||
|
||||
def load_ctypes(finder: ModuleFinder, module: Module) -> None:
    """The ctypes module should filter import names."""
    if not IS_WINDOWS and not IS_MINGW:
        module.exclude_names.add("nt")


def load_ctypes_util(finder: ModuleFinder, module: Module) -> None:
    """The ctypes.util module should filter import names."""
    if not IS_MACOS:
        module.exclude_names.add("ctypes.macholib.dyld")


def load__ctypes(finder: ModuleFinder, module: Module) -> None:
    """In Windows, the _ctypes module in Python 3.8+ requires an additional
    libffi dll to be present in the build directory.
    """
    if IS_WINDOWS:
        dll_pattern = "libffi-*.dll"
        dll_dir = Path(sys.base_prefix, "DLLs")
        for dll_path in dll_dir.glob(dll_pattern):
            finder.include_files(dll_path, Path("lib", dll_path.name))


def load_cx_Oracle(finder: ModuleFinder, module: Module) -> None:
    """The cx_Oracle module implicitly imports datetime; make sure this
    happens.
    """
    finder.include_module("datetime")
    finder.include_module("decimal")


def load_datetime(finder: ModuleFinder, module: Module) -> None:
    """The datetime module implicitly imports time; make sure this happens."""
    finder.include_module("time")


def load_discord(finder: ModuleFinder, module: Module) -> None:
    """py-cord requires its metadata."""
    module.update_distribution("py-cord")


def load_difflib(finder: ModuleFinder, module: Module) -> None:
    """The difflib module uses doctest for tests and shouldn't be imported."""
    module.exclude_names.add("doctest")


def load_docutils_frontend(finder: ModuleFinder, module: Module) -> None:
    """The optik module is the old name for the optparse module; ignore the
    module if it cannot be found.
    """
    module.ignore_names.add("optik")


def load_dummy_threading(finder: ModuleFinder, module: Module) -> None:
    """The dummy_threading module plays games with the name of the threading
    module for its own purposes; ignore that here.
    """
    finder.exclude_module("_dummy_threading")


def load_encodings(finder: ModuleFinder, module: Module) -> None:
    """The encodings module should filter import names."""
    if not IS_WINDOWS and not IS_MINGW:
        module.exclude_names.add("_winapi")


def load_flask_compress(finder: ModuleFinder, module: Module) -> None:
    """flask-compress requires its metadata."""
    module.update_distribution("Flask_Compress")


def load_ftplib(finder: ModuleFinder, module: Module) -> None:
    """The ftplib module attempts to import the SOCKS module; ignore this
    module if it cannot be found.
    """
    module.ignore_names.add("SOCKS")


def load_gevent(finder: ModuleFinder, module: Module) -> None:
    """The gevent must be loaded as a package."""
    finder.include_package("gevent")


def load_GifImagePlugin(finder: ModuleFinder, module: Module) -> None:
    """The GifImagePlugin module optionally imports the _imaging_gif module."""
    module.ignore_names.add("_imaging_gif")


def load_googleapiclient(finder: ModuleFinder, module: Module) -> None:
    """Add the googleapiclient metadata for googleapiclient package."""
    module.update_distribution("google_api_python_client")


def load_googleapiclient_discovery(
    finder: ModuleFinder, module: Module
) -> None:
    """The googleapiclient.discovery module needs discovery_cache subpackage
    in file system.
    """
    discovery_cache = finder.include_package("googleapiclient.discovery_cache")
    discovery_cache.in_file_system = 1


def load_google_cloud_storage(finder: ModuleFinder, module: Module) -> None:
    """The google.cloud.storage package always uses the parent module."""
    finder.include_package("google.cloud")


def load_gtk__gtk(finder: ModuleFinder, module: Module) -> None:
    """The gtk._gtk module has a number of implicit imports."""
    finder.include_module("atk")
    finder.include_module("cairo")
    finder.include_module("gio")
    finder.include_module("pango")
    finder.include_module("pangocairo")
|
||||
|
||||
|
||||
def load_h5py(finder: ModuleFinder, module: Module) -> None:
    """h5py module has a number of implicit imports."""
    finder.include_module("h5py.defs")
    finder.include_module("h5py.utils")
    finder.include_module("h5py._proxy")
    # api_gen is optional depending on the installed h5py build.
    try:
        api_gen = __import__("h5py", fromlist=["api_gen"]).api_gen
        finder.include_module(api_gen.__name__)
    except (ImportError, AttributeError):
        pass
    finder.include_module("h5py._errors")
    finder.include_module("h5py.h5ac")


def load_h5py_wrapper(finder: ModuleFinder, module: Module) -> None:
    """h5py_wrapper module requires future and pytest-runner."""
    finder.include_module("future")
    finder.include_module("ptr")


def load_hashlib(finder: ModuleFinder, module: Module) -> None:
    """The hashlib's fallback modules don't exist if the equivalent OpenSSL
    algorithms are loaded from _hashlib, so we can ignore the error.
    """
    module.ignore_names.update(["_md5", "_sha", "_sha256", "_sha512"])


def load_heapq(finder: ModuleFinder, module: Module) -> None:
    """The heapq module uses doctest for tests and shouldn't be imported."""
    module.exclude_names.add("doctest")


def load_hdfdict(finder: ModuleFinder, module: Module) -> None:
    """The hdfdict module requires h5py_wrapper and PyYAML."""
    finder.include_module("h5py_wrapper")
    finder.include_package("yaml")


def load_idna(finder: ModuleFinder, module: Module) -> None:
    """The idna module implicitly loads data; make sure this happens."""
    finder.include_module("idna.idnadata")


def load_imagej(finder: ModuleFinder, module: Module) -> None:
    """The pyimagej package requires its metadata."""
    module.update_distribution("pyimagej")


def load_jpype(finder: ModuleFinder, module: Module) -> None:
    """The JPype1 package requires its binary."""
    source = module.file.parent.parent / "org.jpype.jar"
    if source.exists():
        finder.include_files(
            source, f"lib/{source.name}", copy_dependent_files=False
        )


def load_lazy_loader(finder: ModuleFinder, module: Module) -> None:
    """Use lazy_loader 0.2+ to work with .pyc files."""
    if module.distribution.version < (0, 2):
        msg = "Please upgrade 'lazy_loader>=0.2' to support cx_Freeze"
        raise SystemExit(msg)


def load_librosa(finder: ModuleFinder, module: Module) -> None:
    """The librosa must be loaded as a package."""
    finder.include_package("librosa")


def load_llvmlite(finder: ModuleFinder, module: Module) -> None:
    """The llvmlite must be loaded as a package."""
    finder.include_package("llvmlite")
    finder.exclude_module("llvmlite.tests")


def load_lxml(finder: ModuleFinder, module: Module) -> None:
    """The lxml package uses an extension."""
    finder.include_module("lxml._elementpath")


def load_markdown(finder: ModuleFinder, module: Module) -> None:
    """The markdown package implicitly loads html.parser; make sure this
    happens.
    """
    finder.include_module("html.parser")


def load_mimetypes(finder: ModuleFinder, module: Module) -> None:
    """The mimetypes module should filter import names."""
    if not IS_WINDOWS and not IS_MINGW:
        module.exclude_names.update(("_winapi", "winreg"))


def load_ntpath(finder: ModuleFinder, module: Module) -> None:
    """The ntpath module should filter import names."""
    if not IS_WINDOWS and not IS_MINGW:
        module.exclude_names.update(("nt", "_winapi"))


def load_Numeric(finder: ModuleFinder, module: Module) -> None:
    """The Numeric module optionally loads the dotblas module; ignore the
    error if this module does not exist.
    """
    module.ignore_names.add("dotblas")


def load_orjson(finder: ModuleFinder, module: Module) -> None:
    """The orjson has dynamic imports."""
    finder.include_module("dataclasses")
    finder.include_module("datetime")
    finder.include_module("decimal")
    finder.include_module("enum")
    finder.include_package("json")
    finder.include_module("uuid")
    finder.include_package("zoneinfo")
|
||||
|
||||
|
||||
def load_os(finder: ModuleFinder, module: Module) -> None:
    """Sets the alias for os.path."""
    if "posix" in sys.builtin_module_names:
        finder.add_alias("os.path", "posixpath")
        module.ignore_names.add("nt")
    else:
        finder.add_alias("os.path", "ntpath")
        module.ignore_names.add("posix")


def load_pathlib(finder: ModuleFinder, module: Module) -> None:
    """The pathlib module should filter import names."""
    if IS_WINDOWS:
        module.exclude_names.update(("grp", "pwd"))
    elif not IS_MINGW:
        module.exclude_names.add("nt")


def load_pickle(finder: ModuleFinder, module: Module) -> None:
    """The pickle module uses doctest for tests and shouldn't be imported."""
    module.exclude_names.add("doctest")
    if not sys.platform.startswith("java"):
        module.exclude_names.add("org.python.core")


def load_pickletools(finder: ModuleFinder, module: Module) -> None:
    """The pickletools module uses doctest that shouldn't be imported."""
    module.exclude_names.add("doctest")


def load_pikepdf(finder: ModuleFinder, module: Module) -> None:
    """The pikepdf must be loaded as a package."""
    finder.include_package("pikepdf")


def load_platform(finder: ModuleFinder, module: Module) -> None:
    """The platform module should filter import names."""
    if not sys.platform.startswith("java"):
        module.exclude_names.add("java.lang")
    if not sys.platform.startswith("OpenVMS"):
        module.exclude_names.add("vms_lib")
    if not IS_MACOS:
        module.exclude_names.add("plistlib")
    if not IS_WINDOWS and not IS_MINGW:
        module.exclude_names.add("winreg")
        module.exclude_names.add("_winreg")


def load_plotly(finder: ModuleFinder, module: Module) -> None:
    """The plotly must be loaded as a package."""
    finder.include_package("plotly")


def load_posixpath(finder: ModuleFinder, module: Module) -> None:
    """The posixpath module should filter import names."""
    if IS_WINDOWS and not IS_MINGW:
        module.exclude_names.add("posix")
        module.exclude_names.add("pwd")


def load_postgresql_lib(finder: ModuleFinder, module: Module) -> None:
    """The postgresql.lib module requires the libsys.sql file to be included
    so make sure that file is included.
    """
    libsys = module.path[0] / "libsys.sql"
    if libsys.exists():
        finder.include_files(libsys, libsys.name)


def load_pty(finder: ModuleFinder, module: Module) -> None:
    """The sgi module is not needed for this module to function."""
    module.ignore_names.add("sgi")


def load_ptr(finder: ModuleFinder, module: Module) -> None:
    """pytest-runner requires its metadata."""
    module.update_distribution("pytest-runner")


def load_pycountry(finder: ModuleFinder, module: Module) -> None:
    """The pycountry module has data in subdirectories."""
    finder.exclude_module("pycountry.tests")
    # Keep pycountry on disk so its data subdirectories are reachable.
    module.in_file_system = 1


def load_pycparser(finder: ModuleFinder, module: Module) -> None:
    """These files are missing which causes
    permission denied issues on windows when they are regenerated.
    """
    finder.include_module("pycparser.lextab")
    finder.include_module("pycparser.yacctab")
|
||||
|
||||
|
||||
def load_pydantic(finder: ModuleFinder, module: Module) -> None:
|
||||
"""The pydantic package is compiled by Cython (the imports are hidden)."""
|
||||
finder.include_module("colorsys")
|
||||
finder.include_module("dataclasses") # support in v 1.7+
|
||||
finder.include_module("datetime")
|
||||
finder.include_module("decimal")
|
||||
finder.include_module("functools")
|
||||
finder.include_module("ipaddress")
|
||||
finder.include_package("json")
|
||||
finder.include_module("pathlib")
|
||||
finder.include_module("typing_extensions") # support in v 1.8
|
||||
finder.include_module("uuid")
|
||||
|
||||
|
||||
def load_pygments(finder: ModuleFinder, module: Module) -> None:
|
||||
"""The pygments package dynamically load styles."""
|
||||
finder.include_package("pygments.styles")
|
||||
finder.include_package("pygments.lexers")
|
||||
finder.include_package("pygments.formatters")
|
||||
|
||||
|
||||
def load_pyodbc(finder: ModuleFinder, module: Module) -> None:
    """Include the stdlib modules that pyodbc imports implicitly at runtime."""
    implicit_imports = ("datetime", "decimal", "hashlib", "locale", "uuid")
    for name in implicit_imports:
        finder.include_module(name)
|
||||
|
||||
|
||||
def load_pyreadstat(finder: ModuleFinder, module: Module) -> None:
    """Include pyreadstat as a full package, plus pandas, which it needs."""
    finder.include_package("pyreadstat")
    finder.include_module("pandas")
|
||||
|
||||
|
||||
def load_pyqtgraph(finder: ModuleFinder, module: Module) -> None:
    """pyqtgraph only works when included as a whole package."""
    finder.include_package("pyqtgraph")
|
||||
|
||||
|
||||
def load_pytest(finder: ModuleFinder, module: Module) -> None:
    """Ask pytest itself which modules it needs when frozen and include them."""
    pytest_module = __import__("pytest")
    for include_name in pytest_module.freeze_includes():
        finder.include_module(include_name)
|
||||
|
||||
|
||||
def load_pythoncom(finder: ModuleFinder, module: Module) -> None:
    """Copy the DLL that backs the pythoncom module.

    pythoncom is implemented in a DLL located by a small loader module;
    import the loader to resolve the DLL path and copy the file into the
    target lib directory (its own dependencies are handled elsewhere, so
    ``copy_dependent_files`` is off).
    """
    dll_path = Path(__import__("pythoncom").__file__)
    finder.include_files(
        dll_path, Path("lib", dll_path.name), copy_dependent_files=False
    )
|
||||
|
||||
|
||||
def load_pywintypes(finder: ModuleFinder, module: Module) -> None:
    """Copy the DLL that backs the pywintypes module.

    Like pythoncom, pywintypes is implemented in a DLL located by a small
    loader module; import the loader to resolve the DLL path and copy the
    file into the target lib directory (dependencies handled elsewhere).
    """
    dll_path = Path(__import__("pywintypes").__file__)
    finder.include_files(
        dll_path, Path("lib", dll_path.name), copy_dependent_files=False
    )
|
||||
|
||||
|
||||
def load_reportlab(finder: ModuleFinder, module: Module) -> None:
    """reportlab imports rl_settings via exec, hiding it from the finder."""
    finder.include_module("reportlab.rl_settings")
|
||||
|
||||
|
||||
def load_sentry_sdk(finder: ModuleFinder, module: Module) -> None:
    """Include the default integrations the Sentry SDK enables dynamically."""
    for integration in (
        "stdlib",
        "excepthook",
        "dedupe",
        "atexit",
        "modules",
        "argv",
        "logging",
        "threading",
    ):
        finder.include_module(f"sentry_sdk.integrations.{integration}")
|
||||
|
||||
|
||||
def load_shapely(finder: ModuleFinder, module: Module) -> None:
    """Copy the Shapely.libs directory (bundled native libraries), which the
    finder does not pick up on its own.
    """
    source = module.path[0].parent / "Shapely.libs"
    if source.exists():
        finder.include_files(source, "lib/Shapely.libs")
|
||||
|
||||
|
||||
def load_shutil(finder: ModuleFinder, module: Module) -> None:
    """Filter the platform-specific imports out of shutil.

    shutil probes Unix-only modules on Windows and Windows-only modules on
    POSIX; exclude whichever set does not apply to the current platform.
    """
    if IS_WINDOWS:
        names = ("grp", "posix", "pwd")
    elif not IS_MINGW:
        names = ("nt", "_winapi")
    else:
        return
    module.exclude_names.update(names)
|
||||
|
||||
|
||||
def load_site(finder: ModuleFinder, module: Module) -> None:
    """site imports sitecustomize/usercustomize only if present; silence the
    finder when they do not exist.
    """
    module.ignore_names.update(("sitecustomize", "usercustomize"))
|
||||
|
||||
|
||||
def load_six(finder: ModuleFinder, module: Module) -> None:
    """six references the Python 2 StringIO module; ignore it on Python 3."""
    module.ignore_names.update(("StringIO",))
|
||||
|
||||
|
||||
def load_sqlite3(finder: ModuleFinder, module: Module) -> None:
    """Bundle the native sqlite3.dll on Windows and the sqlite3.dump submodule.

    The DLL lives in DLLs/ for python.org builds and in Library/bin for
    conda builds; the first existing candidate is copied into lib/.
    """
    if IS_WINDOWS:
        dll_name = "sqlite3.dll"
        candidates = (
            Path(sys.base_prefix, "DLLs", dll_name),
            Path(sys.base_prefix, "Library", "bin", dll_name),
        )
        for dll_path in candidates:
            if dll_path.exists():
                finder.include_files(dll_path, Path("lib", dll_name))
                break
    finder.include_module("sqlite3.dump")
|
||||
|
||||
|
||||
def load_subprocess(finder: ModuleFinder, module: Module) -> None:
    """Filter the platform-specific imports out of subprocess.

    POSIX-only helper modules are excluded on Windows and Windows-only ones
    on POSIX; MinGW keeps both sets available.
    """
    if IS_WINDOWS:
        module.exclude_names.update(("_posixsubprocess", "fcntl", "grp", "pwd"))
    elif not IS_MINGW:
        module.exclude_names.update(("msvcrt", "_winapi"))
|
||||
|
||||
|
||||
def load_sysconfig(finder: ModuleFinder, module: Module) -> None:
    """Include the platform's _sysconfigdata module, which sysconfig loads
    dynamically on POSIX systems (no such module exists on Windows).
    """
    if IS_WINDOWS:
        return
    get_data_name = getattr(sysconfig, "_get_sysconfigdata_name", None)
    if get_data_name is not None:
        with suppress(ImportError):
            finder.include_module(get_data_name())
|
||||
|
||||
|
||||
def load_tarfile(finder: ModuleFinder, module: Module) -> None:
    """tarfile probes the POSIX-only grp/pwd modules; exclude them on Windows."""
    if IS_WINDOWS:
        module.exclude_names.update(("grp", "pwd"))
|
||||
|
||||
|
||||
def load_time(finder: ModuleFinder, module: Module) -> None:
    """time lazily imports _strptime on first strptime call; include it."""
    finder.include_module("_strptime")
|
||||
|
||||
|
||||
def load_tokenizers(finder: ModuleFinder, module: Module) -> None:
    """Copy the tokenizers.libs directory (bundled native libraries on Linux),
    which the finder does not pick up on its own.
    """
    if module.path is None:
        return
    libs_name = "tokenizers.libs"
    libs_dir = module.path[0].parent / libs_name
    if libs_dir.exists():
        finder.include_files(libs_dir, Path("lib", libs_name))
|
||||
|
||||
|
||||
def load_twisted_conch_ssh_transport(
    finder: ModuleFinder, module: Module
) -> None:
    """twisted.conch.ssh.transport selects ciphers at runtime via __import__,
    so include the whole Crypto.Cipher package.
    """
    finder.include_package("Crypto.Cipher")
|
||||
|
||||
|
||||
def load_twitter(finder: ModuleFinder, module: Module) -> None:
    """twitter probes several JSON implementations (simplejson, json,
    django.utils) and uses whichever is available; ignore the absent ones.
    """
    module.ignore_names.update(("json", "simplejson", "django.utils"))
|
||||
|
||||
|
||||
def load_uvloop(finder: ModuleFinder, module: Module) -> None:
    """uvloop loads its _noop extension module implicitly; include it."""
    finder.include_module("uvloop._noop")
|
||||
|
||||
|
||||
def load_win32api(finder: ModuleFinder, module: Module) -> None:
    """win32api imports pywintypes at runtime; include it, and skip scanning
    this extension's binary dependencies.
    """
    extension_file = module.file
    finder.exclude_dependent_files(extension_file)
    finder.include_module("pywintypes")
|
||||
|
||||
|
||||
def load_win32com(finder: ModuleFinder, module: Module) -> None:
    """Mirror win32com's runtime search-path tweak.

    At runtime win32com extends its own __path__ with the sibling
    win32comext directory; do the same here so submodules are found.
    """
    win32comext = module.file.parent.parent / "win32comext"
    module.path.append(win32comext)
|
||||
|
||||
|
||||
def load_win32file(finder: ModuleFinder, module: Module) -> None:
    """win32file imports pywintypes and win32timezone at runtime; include both."""
    for implicit in ("pywintypes", "win32timezone"):
        finder.include_module(implicit)
|
||||
|
||||
|
||||
def load_wx_lib_pubsub_core(finder: ModuleFinder, module: Module) -> None:
    """Mirror wx.lib.pubsub.core's runtime search-path change.

    The package rewrites its own search path at runtime, which cannot happen
    in a frozen application; prepend the kwargs directory here instead.
    This only works when wx.lib.pubsub.setupkwargs is imported first.
    """
    kwargs_dir = module.file.parent / "kwargs"
    module.path.insert(0, kwargs_dir)
|
||||
|
||||
|
||||
def load_xml_etree_cElementTree(finder: ModuleFinder, module: Module) -> None:
    """xml.etree.cElementTree is a thin wrapper that imports
    xml.etree.ElementTree implicitly; include the real implementation.
    """
    finder.include_module("xml.etree.ElementTree")
|
||||
|
||||
|
||||
def load_yaml(finder: ModuleFinder, module: Module) -> None:
    """Bundle the PyYAML distribution metadata, which it reads at runtime."""
    module.update_distribution("PyYAML")
|
||||
|
||||
|
||||
def load_zipimport(finder: ModuleFinder, module: Module) -> None:
    """zipimport references interpreter-internal frozen-import machinery that
    must not be treated as importable modules.
    """
    module.exclude_names.update(
        ("_frozen_importlib", "_frozen_importlib_external")
    )
|
||||
|
||||
|
||||
def load_zmq(finder: ModuleFinder, module: Module) -> None:
    """Include pyzmq's dynamically loaded backend and its native libzmq.

    zmq loads zmq.backend.cython at runtime and links against libzmq, which
    ships either as the zmq.libzmq extension or (pyzmq 22+ wheels on
    Windows/MinGW) inside a sibling pyzmq.libs directory. Tested against
    pyzmq 16.0.4 (py36), 19.0.2 (MSYS2 py39) up to 22.2.1 (pip and conda).
    """
    finder.include_package("zmq.backend.cython")
    if IS_WINDOWS or IS_MINGW:
        # pyzmq 22 keeps the libzmq dependencies in site-packages/pyzmq.libs.
        libzmq_folder = "pyzmq.libs"
        libs_dir = module.path[0].parent / libzmq_folder
        if libs_dir.exists():
            finder.include_files(libs_dir, Path("lib", libzmq_folder))
    # Include the bundled libzmq extension when the wheel provides one.
    with suppress(ImportError):
        finder.include_module("zmq.libzmq")
    finder.exclude_module("zmq.tests")
|
||||
|
||||
|
||||
def load_zope_component(finder: ModuleFinder, module: Module) -> None:
    """zope.component imports pkg_resources dynamically, so the finder cannot
    see the dependency; include it explicitly.
    """
    finder.include_module("pkg_resources")
|
||||
|
||||
|
||||
def missing_backports_zoneinfo(finder: ModuleFinder, caller: Module) -> None:
    """On Python 3.9+ the stdlib zoneinfo module replaces backports.zoneinfo,
    so the backport may legitimately be absent.
    """
    if sys.version_info >= (3, 9):
        caller.ignore_names.add("backports.zoneinfo")
|
||||
|
||||
|
||||
def missing_gdk(finder: ModuleFinder, caller: Module) -> None:
    """gdk lives inside the gtk package, so a failed top-level import of it
    is expected and harmless.
    """
    caller.ignore_names.add("gdk")
|
||||
|
||||
|
||||
def missing_ltihooks(finder: ModuleFinder, caller: Module) -> None:
    """ltihooks is not necessarily installed; ignore it when absent."""
    caller.ignore_names.add("ltihooks")
|
||||
|
||||
|
||||
def missing_jnius(finder: ModuleFinder, caller: Module) -> None:
    """jnius exists only on Java/Android platforms; ignore it elsewhere."""
    if not sys.platform.startswith("java"):
        caller.ignore_names.add("jnius")
|
||||
|
||||
|
||||
def missing__manylinux(finder: ModuleFinder, caller: Module) -> None:
    """_manylinux is only probed as a platform flag; its absence is normal."""
    caller.ignore_names.add("_manylinux")
|
||||
|
||||
|
||||
def missing_os_path(finder: ModuleFinder, caller: Module) -> None:
    """os.path is an alias of posixpath or ntpath, not an importable module."""
    caller.ignore_names.add("os.path")
|
||||
|
||||
|
||||
def missing_readline(finder: ModuleFinder, caller: Module) -> None:
    """readline is usually absent on Windows but can be installed there, so
    ignore it when missing rather than excluding it outright.
    """
    if IS_WINDOWS:
        caller.ignore_names.add("readline")
|
||||
|
||||
|
||||
def missing_six_moves(finder: ModuleFinder, caller: Module) -> None:
    """six.moves is a fake module six fabricates at runtime; ignore it."""
    caller.ignore_names.add("six.moves")
|
||||
|
||||
|
||||
def missing_winreg(finder: ModuleFinder, caller: Module) -> None:
    """winreg exists only on Windows; elsewhere its absence is expected."""
    if not IS_WINDOWS:
        caller.ignore_names.add("winreg")
|
||||
|
||||
|
||||
def missing_zoneinfo(finder: ModuleFinder, caller: Module) -> None:
    """zoneinfo joined the standard library in Python 3.9; on older versions
    its absence is expected.
    """
    if sys.version_info < (3, 9):
        caller.ignore_names.add("zoneinfo")
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user