Spaces:
Build error
Build error
Upload 15 files
Browse files- __init__.py +15 -0
- _elffile.py +110 -0
- _manylinux.py +263 -0
- _musllinux.py +85 -0
- _parser.py +354 -0
- _structures.py +61 -0
- _tokenizer.py +194 -0
- markers.py +331 -0
- metadata.py +863 -0
- py.typed +0 -0
- requirements.py +91 -0
- specifiers.py +1020 -0
- tags.py +617 -0
- utils.py +163 -0
- version.py +582 -0
__init__.py
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

# Package metadata constants for the ``packaging`` distribution.
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "24.2"

__author__ = "Donald Stufft and individual contributors"
__email__ = "[email protected]"

# Dual-licensed; see the license comment above.
__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = f"2014 {__author__}"
|
_elffile.py
ADDED
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
ELF file parser.
|
3 |
+
|
4 |
+
This provides a class ``ELFFile`` that parses an ELF executable in a similar
|
5 |
+
interface to ``ZipFile``. Only the read interface is implemented.
|
6 |
+
|
7 |
+
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
8 |
+
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
9 |
+
"""
|
10 |
+
|
11 |
+
from __future__ import annotations
|
12 |
+
|
13 |
+
import enum
|
14 |
+
import os
|
15 |
+
import struct
|
16 |
+
from typing import IO
|
17 |
+
|
18 |
+
|
19 |
+
class ELFInvalid(ValueError):
    """Raised when a file cannot be parsed as a valid ELF executable."""

    pass


class EIClass(enum.IntEnum):
    """Values of the EI_CLASS identification byte (word size)."""

    C32 = 1
    C64 = 2


class EIData(enum.IntEnum):
    """Values of the EI_DATA identification byte (byte order)."""

    Lsb = 1
    Msb = 2


class EMachine(enum.IntEnum):
    """``e_machine`` architecture values used by this package."""

    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    AArc64 = 183
|
39 |
+
|
40 |
+
|
41 |
+
class ELFFile:
    """
    Representation of an ELF executable.

    Parses just enough of the ELF header on construction to expose the
    word size (``capacity``), byte order (``encoding``), architecture
    (``machine``), processor flags (``flags``), and the program-header
    table location needed by :attr:`interpreter`.

    Raises ``ELFInvalid`` if the stream is not a parsable ELF file.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        try:
            # e_ident: 16 identification bytes at the start of every ELF file.
            ident = self._read("16B")
        except struct.error as e:
            raise ELFInvalid("unable to parse identification") from e
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # Select struct formats keyed on (bitness, endianness):
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError as e:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            ) from e

        try:
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> tuple[int, ...]:
        # Read exactly as many bytes as *fmt* describes and unpack them.
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> str | None:
        """
        The path recorded in the ``PT_INTERP`` section header.

        Scans the program-header table for the PT_INTERP entry and returns
        the NUL-trimmed dynamic loader path, or None if no such entry is
        found or an entry is truncated.
        """
        for index in range(self._e_phnum):
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                # Truncated/unreadable entry; keep scanning the rest.
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            # Seek to p_offset and read p_filesz bytes of the path.
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
|
_manylinux.py
ADDED
@@ -0,0 +1,263 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import collections
|
4 |
+
import contextlib
|
5 |
+
import functools
|
6 |
+
import os
|
7 |
+
import re
|
8 |
+
import sys
|
9 |
+
import warnings
|
10 |
+
from typing import Generator, Iterator, NamedTuple, Sequence
|
11 |
+
|
12 |
+
from ._elffile import EIClass, EIData, ELFFile, EMachine
|
13 |
+
|
14 |
+
# Masks applied to ``ELFFile.flags`` for ARM ABI detection
# (see ``_is_linux_armhf`` below).
EF_ARM_ABIMASK = 0xFF000000
EF_ARM_ABI_VER5 = 0x05000000
EF_ARM_ABI_FLOAT_HARD = 0x00000400


# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
# as the type for `path` until then.
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[ELFFile | None, None, None]:
    """Yield an ``ELFFile`` for *path*, or None if the file cannot be
    opened or parsed as a valid ELF binary."""
    try:
        with open(path, "rb") as f:
            yield ELFFile(f)
    except (OSError, TypeError, ValueError):
        # Unreadable path, or content ELFFile rejects (ELFInvalid is a
        # ValueError subclass).
        yield None
|
28 |
+
|
29 |
+
|
30 |
+
def _is_linux_armhf(executable: str) -> bool:
    """Return True if *executable* is a 32-bit LSB ARM ELF built for the
    EABI v5 hard-float ABI.

    The hard-float ABI can be detected from the ELF header of the running
    process: https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    """
    with _parse_elf(executable) as elf:
        if elf is None:
            return False
        if elf.capacity != EIClass.C32 or elf.encoding != EIData.Lsb:
            return False
        if elf.machine != EMachine.Arm:
            return False
        # Both the ABI version and the hard-float bit live in e_flags.
        is_eabi5 = elf.flags & EF_ARM_ABIMASK == EF_ARM_ABI_VER5
        is_hard_float = elf.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
        return is_eabi5 and is_hard_float
|
43 |
+
|
44 |
+
|
45 |
+
def _is_linux_i686(executable: str) -> bool:
    """Return True if *executable* is a 32-bit little-endian x86 ELF binary."""
    with _parse_elf(executable) as elf:
        if elf is None:
            return False
        return (
            elf.capacity == EIClass.C32
            and elf.encoding == EIData.Lsb
            and elf.machine == EMachine.I386
        )
|
53 |
+
|
54 |
+
|
55 |
+
def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
|
56 |
+
if "armv7l" in archs:
|
57 |
+
return _is_linux_armhf(executable)
|
58 |
+
if "i686" in archs:
|
59 |
+
return _is_linux_i686(executable)
|
60 |
+
allowed_archs = {
|
61 |
+
"x86_64",
|
62 |
+
"aarch64",
|
63 |
+
"ppc64",
|
64 |
+
"ppc64le",
|
65 |
+
"s390x",
|
66 |
+
"loongarch64",
|
67 |
+
"riscv64",
|
68 |
+
}
|
69 |
+
return any(arch in allowed_archs for arch in archs)
|
70 |
+
|
71 |
+
|
72 |
+
# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
_LAST_GLIBC_MINOR: dict[int, int] = collections.defaultdict(lambda: 50)


class _GLibCVersion(NamedTuple):
    """A (major, minor) glibc version; compares like a plain tuple."""

    major: int
    minor: int
|
83 |
+
|
84 |
+
|
85 |
+
def _glibc_version_string_confstr() -> str | None:
|
86 |
+
"""
|
87 |
+
Primary implementation of glibc_version_string using os.confstr.
|
88 |
+
"""
|
89 |
+
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
|
90 |
+
# to be broken or missing. This strategy is used in the standard library
|
91 |
+
# platform module.
|
92 |
+
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
|
93 |
+
try:
|
94 |
+
# Should be a string like "glibc 2.17".
|
95 |
+
version_string: str | None = os.confstr("CS_GNU_LIBC_VERSION")
|
96 |
+
assert version_string is not None
|
97 |
+
_, version = version_string.rsplit()
|
98 |
+
except (AssertionError, AttributeError, OSError, ValueError):
|
99 |
+
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
|
100 |
+
return None
|
101 |
+
return version
|
102 |
+
|
103 |
+
|
104 |
+
def _glibc_version_string_ctypes() -> str | None:
    """
    Fallback implementation of glibc_version_string using ctypes.

    Returns None when ctypes is unavailable, libc cannot be dlopen()ed,
    or the ``gnu_get_libc_version`` symbol is absent (i.e. not glibc).
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
|
146 |
+
|
147 |
+
|
148 |
+
def _glibc_version_string() -> str | None:
    """Returns glibc version string, or None if not using glibc."""
    # Try the fast confstr path first, then fall back to ctypes.
    version = _glibc_version_string_confstr()
    if version:
        return version
    return _glibc_version_string_ctypes()
|
151 |
+
|
152 |
+
|
153 |
+
def _parse_glibc_version(version_str: str) -> tuple[int, int]:
|
154 |
+
"""Parse glibc version.
|
155 |
+
|
156 |
+
We use a regexp instead of str.split because we want to discard any
|
157 |
+
random junk that might come after the minor version -- this might happen
|
158 |
+
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
159 |
+
uses version strings like "2.20-2014.11"). See gh-3588.
|
160 |
+
"""
|
161 |
+
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
162 |
+
if not m:
|
163 |
+
warnings.warn(
|
164 |
+
f"Expected glibc version with 2 components major.minor,"
|
165 |
+
f" got: {version_str}",
|
166 |
+
RuntimeWarning,
|
167 |
+
stacklevel=2,
|
168 |
+
)
|
169 |
+
return -1, -1
|
170 |
+
return int(m.group("major")), int(m.group("minor"))
|
171 |
+
|
172 |
+
|
173 |
+
@functools.lru_cache
def _get_glibc_version() -> tuple[int, int]:
    """Return the running glibc version as (major, minor), or (-1, -1)
    when glibc is not in use; cached for the life of the process."""
    raw = _glibc_version_string()
    return (-1, -1) if raw is None else _parse_glibc_version(raw)
|
179 |
+
|
180 |
+
|
181 |
+
# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
    """Return whether the running system can use the manylinux tag for
    *arch* at glibc *version*, honoring an optional ``_manylinux``
    override module when one is importable."""
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        # The running glibc is older than the tag requires.
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux
    except ImportError:
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        # PEP 600 hook: a non-None result is authoritative; None defers.
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    # Legacy per-tag boolean attributes (PEP 513 / 571 / 599 era).
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True
|
206 |
+
|
207 |
+
|
208 |
+
# Map of glibc (major, minor) versions to the legacy manylinux tag name
# that aliases ``manylinux_{major}_{minor}``.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
|
216 |
+
|
217 |
+
|
218 |
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate manylinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be manylinux-compatible.

    :returns: An iterator of compatible manylinux tags.
    """
    if not _have_compatible_abi(sys.executable, archs):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    too_old_glibc2 = _GLibCVersion(2, 16)
    if set(archs) & {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for arch in archs:
        for glibc_max in glibc_max_list:
            if glibc_max.major == too_old_glibc2.major:
                min_minor = too_old_glibc2.minor
            else:
                # For other glibc major versions oldest supported is (x, 0).
                min_minor = -1
            # Walk minors downward, emitting every compatible version.
            for glibc_minor in range(glibc_max.minor, min_minor, -1):
                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
                tag = "manylinux_{}_{}".format(*glibc_version)
                if _is_compatible(arch, glibc_version):
                    yield f"{tag}_{arch}"
                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
                if glibc_version in _LEGACY_MANYLINUX_MAP:
                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                    if _is_compatible(arch, glibc_version):
                        yield f"{legacy_tag}_{arch}"
|
_musllinux.py
ADDED
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""PEP 656 support.
|
2 |
+
|
3 |
+
This module implements logic to detect if the currently running Python is
|
4 |
+
linked against musl, and what musl version is used.
|
5 |
+
"""
|
6 |
+
|
7 |
+
from __future__ import annotations
|
8 |
+
|
9 |
+
import functools
|
10 |
+
import re
|
11 |
+
import subprocess
|
12 |
+
import sys
|
13 |
+
from typing import Iterator, NamedTuple, Sequence
|
14 |
+
|
15 |
+
from ._elffile import ELFFile
|
16 |
+
|
17 |
+
|
18 |
+
class _MuslVersion(NamedTuple):
|
19 |
+
major: int
|
20 |
+
minor: int
|
21 |
+
|
22 |
+
|
23 |
+
def _parse_musl_version(output: str) -> _MuslVersion | None:
|
24 |
+
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
|
25 |
+
if len(lines) < 2 or lines[0][:4] != "musl":
|
26 |
+
return None
|
27 |
+
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
|
28 |
+
if not m:
|
29 |
+
return None
|
30 |
+
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
|
31 |
+
|
32 |
+
|
33 |
+
@functools.lru_cache
def _get_musl_version(executable: str) -> _MuslVersion | None:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader
    """
    try:
        with open(executable, "rb") as f:
            ld = ELFFile(f).interpreter
    except (OSError, TypeError, ValueError):
        # Unreadable executable or not a valid ELF file.
        return None
    if ld is None or "musl" not in ld:
        return None
    # Running the musl loader with no arguments prints its version banner
    # to stderr; only stderr is captured here.
    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
    return _parse_musl_version(proc.stderr)
|
54 |
+
|
55 |
+
|
56 |
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags.
    """
    version = _get_musl_version(sys.executable)
    if version is None:
        # The interpreter is not dynamically linked against musl.
        return
    for arch in archs:
        # Newer musl minors first, down to _0.
        yield from (
            f"musllinux_{version.major}_{minor}_{arch}"
            for minor in range(version.minor, -1, -1)
        )
|
73 |
+
|
74 |
+
|
75 |
+
if __name__ == "__main__":  # pragma: no cover
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # platform_tags() takes a *sequence* of architecture strings. Passing the
    # bare string would iterate it character by character and emit bogus
    # tags like "musllinux_1_2_x", so wrap the single arch in a list.
    for t in platform_tags([re.sub(r"[.-]", "_", plat.split("-", 1)[-1])]):
        print(t, end="\n ")
|
_parser.py
ADDED
@@ -0,0 +1,354 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Handwritten parser of dependency specifiers.
|
2 |
+
|
3 |
+
The docstring for each __parse_* function contains EBNF-inspired grammar representing
|
4 |
+
the implementation.
|
5 |
+
"""
|
6 |
+
|
7 |
+
from __future__ import annotations
|
8 |
+
|
9 |
+
import ast
|
10 |
+
from typing import NamedTuple, Sequence, Tuple, Union
|
11 |
+
|
12 |
+
from ._tokenizer import DEFAULT_RULES, Tokenizer
|
13 |
+
|
14 |
+
|
15 |
+
class Node:
    """Base class for a parsed marker token (variable, quoted value, or op)."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        return f"<{type(self).__name__}('{self}')>"

    def serialize(self) -> str:
        """Render this node back into dependency-specifier syntax."""
        raise NotImplementedError
|
27 |
+
|
28 |
+
|
29 |
+
class Variable(Node):
    """An environment marker variable, e.g. ``python_version``."""

    def serialize(self) -> str:
        return str(self)


class Value(Node):
    """A literal value; serializes back with surrounding double quotes."""

    def serialize(self) -> str:
        return f'"{self}"'


class Op(Node):
    """A comparison operator such as ``==``, ``in`` or ``not in``."""

    def serialize(self) -> str:
        return str(self)
|
42 |
+
|
43 |
+
|
44 |
+
# Shape of a parsed marker expression: an "item" is an (lhs, op, rhs)
# triple, atoms may nest through parenthesization, and a marker list
# interleaves atoms with boolean-operator strings ("and" / "or").
MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
MarkerAtom = Union[MarkerItem, Sequence["MarkerAtom"]]
MarkerList = Sequence[Union["MarkerList", MarkerAtom, str]]


class ParsedRequirement(NamedTuple):
    """Structured result of parsing a PEP 508 dependency specifier."""

    name: str
    url: str
    extras: list[str]
    specifier: str
    marker: MarkerList | None
|
56 |
+
|
57 |
+
|
58 |
+
# --------------------------------------------------------------------------------------
# Recursive descent parser for dependency specifier
# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
    """Parse a dependency specifier string into a ``ParsedRequirement``."""
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
|
63 |
+
|
64 |
+
|
65 |
+
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details

    Top-level rule: name, optional extras, then URL/specifier/marker
    details, and finally the end of input.
    """
    tokenizer.consume("WS")

    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    # Anything left over after the details is a syntax error.
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)
|
84 |
+
|
85 |
+
|
86 |
+
def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> tuple[str, str, MarkerList | None]:
    """
    requirement_details = AT URL (WS requirement_marker?)?
                        | specifier WS? (requirement_marker)?

    Returns a (url, specifier, marker) triple; exactly one of url/specifier
    is populated (the other stays ""), and marker may be None.
    """

    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        # Direct-reference form: "name @ URL [; marker]".
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        # A marker after a URL must be separated by whitespace.
        tokenizer.expect("WS", expected="whitespace after URL")

        # The input might end after whitespace.
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer, span_start=url_start, after="URL and whitespace"
        )
    else:
        # Version-specifier form: "name [specifiers] [; marker]".
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            after=(
                "version specifier"
                if specifier
                else "name and no valid version specifier"
            ),
        )

    return (url, specifier, marker)
|
135 |
+
|
136 |
+
|
137 |
+
def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?

    *span_start* and *after* are used only to build a helpful error
    message pointing back at what preceded the expected semicolon.
    """

    if not tokenizer.check("SEMICOLON"):
        tokenizer.raise_syntax_error(
            f"Expected end or semicolon (after {after})",
            span_start=span_start,
        )
    tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker
|
155 |
+
|
156 |
+
|
157 |
+
def _parse_extras(tokenizer: Tokenizer) -> list[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?

    Returns [] when no bracketed extras section is present.
    """
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    # enclosing_tokens consumes the brackets and reports unbalanced pairs.
    with tokenizer.enclosing_tokens(
        "LEFT_BRACKET",
        "RIGHT_BRACKET",
        around="extras",
    ):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras
|
174 |
+
|
175 |
+
|
176 |
+
def _parse_extras_list(tokenizer: Tokenizer) -> list[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*

    Collects comma-separated extra names; an empty list is valid.
    """
    extras: list[str] = []

    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            # Two identifiers in a row means a comma was omitted.
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        # Consume the comma, then require another extra name.
        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras
|
201 |
+
|
202 |
+
|
203 |
+
def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?

    Returns the raw specifier text (possibly ""), with any surrounding
    parentheses consumed by ``enclosing_tokens``.
    """
    with tokenizer.enclosing_tokens(
        "LEFT_PARENTHESIS",
        "RIGHT_PARENTHESIS",
        around="version specifier",
    ):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers
|
218 |
+
|
219 |
+
|
220 |
+
def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?

    Concatenates the raw text of comma-separated specifier clauses,
    rejecting ``.*`` suffixes and local version labels that the tokenizer
    flags as trailing after an incompatible operator.
    """
    parsed_specifiers = ""
    while tokenizer.check("SPECIFIER"):
        span_start = tokenizer.position
        parsed_specifiers += tokenizer.read().text
        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                ".* suffix can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position + 1,
            )
        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                "Local version label can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position,
            )
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        # Keep the comma in the accumulated text and continue.
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers
|
247 |
+
|
248 |
+
|
249 |
+
# --------------------------------------------------------------------------------------
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    """Parse an environment marker expression string into a ``MarkerList``."""
    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
|
254 |
+
|
255 |
+
|
256 |
+
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
    """Parse a marker and require that it consumes the whole input."""
    retval = _parse_marker(tokenizer)
    tokenizer.expect("END", expected="end of marker expression")
    return retval
|
260 |
+
|
261 |
+
|
262 |
+
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)+

    Builds a flat list alternating atoms with boolean-operator strings,
    e.g. [atom, "and", atom, "or", atom].
    """
    expression = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        token = tokenizer.read()
        expr_right = _parse_marker_atom(tokenizer)
        expression.extend((token.text, expr_right))
    return expression
|
272 |
+
|
273 |
+
|
274 |
+
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?

    A parenthesized sub-marker nests as a list; otherwise a single
    (lhs, op, rhs) item is returned.
    """

    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        with tokenizer.enclosing_tokens(
            "LEFT_PARENTHESIS",
            "RIGHT_PARENTHESIS",
            around="marker expression",
        ):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker
|
294 |
+
|
295 |
+
|
296 |
+
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    tokenizer.consume("WS")
    lhs = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    op = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    rhs = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    # A single comparison, e.g. (Variable('os_name'), Op('=='), Value('posix')).
    return (lhs, op, rhs)
|
308 |
+
|
309 |
+
|
310 |
+
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        # Dotted spellings such as "os.name" are normalized to "os_name".
        name = tokenizer.read().text
        return process_env_var(name.replace(".", "_"))
    if tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    tokenizer.raise_syntax_error(
        message="Expected a marker variable or quoted string"
    )
|
322 |
+
|
323 |
+
|
324 |
+
def process_env_var(env_var: str) -> Variable:
    """Wrap a marker variable name in a Variable node, canonicalizing aliases."""
    # "python_implementation" is a legacy alias for
    # "platform_python_implementation".
    if env_var in ("platform_python_implementation", "python_implementation"):
        env_var = "platform_python_implementation"
    return Variable(env_var)
|
329 |
+
|
330 |
+
|
331 |
+
def process_python_str(python_str: str) -> Value:
    """Evaluate a quoted-string token and wrap its text in a Value node."""
    # literal_eval safely strips the quotes and resolves escape sequences.
    return Value(str(ast.literal_eval(python_str)))
|
334 |
+
|
335 |
+
|
336 |
+
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    if tokenizer.check("NOT"):
        # "not" must be followed by whitespace and then the word "in".
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    if tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    return tokenizer.raise_syntax_error(
        "Expected marker operator, one of "
        "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
    )
|
_structures.py
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
|
6 |
+
class InfinityType:
    """Sentinel that compares greater than every other object.

    Used as padding when comparing version tuples of unequal length;
    equal only to other instances of this class.
    """

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        # Identity of type, not value, defines equality.
        return isinstance(other, self.__class__)

    # Nothing exceeds infinity ...
    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    # ... and infinity exceeds everything.
    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Module-level singleton; all instances compare equal anyway.
Infinity = InfinityType()
|
33 |
+
|
34 |
+
|
35 |
+
class NegativeInfinityType:
    """Sentinel that compares less than every other object.

    Mirror image of InfinityType; equal only to other instances of this class.
    """

    def __repr__(self) -> str:
        return "-Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        # Identity of type, not value, defines equality.
        return isinstance(other, self.__class__)

    # Negative infinity is below everything ...
    def __lt__(self, other: object) -> bool:
        return True

    def __le__(self, other: object) -> bool:
        return True

    # ... and nothing is below it.
    def __gt__(self, other: object) -> bool:
        return False

    def __ge__(self, other: object) -> bool:
        return False

    def __neg__(self: object) -> InfinityType:
        return Infinity


# Module-level singleton; all instances compare equal anyway.
NegativeInfinity = NegativeInfinityType()
|
_tokenizer.py
ADDED
@@ -0,0 +1,194 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import contextlib
|
4 |
+
import re
|
5 |
+
from dataclasses import dataclass
|
6 |
+
from typing import Iterator, NoReturn
|
7 |
+
|
8 |
+
from .specifiers import Specifier
|
9 |
+
|
10 |
+
|
11 |
+
@dataclass
class Token:
    # A single lexical token produced by Tokenizer.
    name: str  # rule name, e.g. "VARIABLE" or "OP"
    text: str  # exact matched source text
    position: int  # offset of the match within the source string
|
16 |
+
|
17 |
+
|
18 |
+
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: tuple[int, int],
    ) -> None:
        # Half-open [start, end) character range of the offending text.
        self.span = span
        self.message = message
        self.source = source

        super().__init__()

    def __str__(self) -> str:
        # Render a caret diagram pointing at the offending span:
        #   <message>
        #   <source text>
        #       ~~~^
        marker = " " * self.span[0] + "~" * (self.span[1] - self.span[0]) + "^"
        return "\n    ".join([self.message, self.source, marker])
|
37 |
+
|
38 |
+
|
39 |
+
# Token-name -> pattern table used by Tokenizer for PEP 508 requirement and
# marker parsing. String values are compiled lazily by Tokenizer.__init__;
# patterns are anchored at the current position via re.Pattern.match(pos).
DEFAULT_RULES: dict[str, str | re.Pattern[str]] = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    # Single- or double-quoted string with no escapes (PEP 508 strings).
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    # Comparison operators, longest alternatives first so "===" wins over "==".
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    # The closed set of PEP 508 marker variable names (dotted legacy
    # spellings like "os.name" included).
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extra
            )\b
        """,
        re.VERBOSE,
    ),
    # Version specifier clause, reusing Specifier's own regex fragments.
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    "VERSION_PREFIX_TRAIL": r"\.\*",
    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
    "WS": r"[ \t]+",
    "END": r"$",
}
|
88 |
+
|
89 |
+
|
90 |
+
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: dict[str, str | re.Pattern[str]],
    ) -> None:
        self.source = source
        # Compile every rule up front; both plain strings and pre-compiled
        # patterns are accepted (re.compile passes compiled patterns through).
        self.rules: dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        # One-token lookahead buffer: filled by check(), drained by read().
        self.next_token: Token | None = None
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        # A pending, unread token means the previous check() was never
        # followed by read() — a programming error in the parser.
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        # Match anchored at the current offset within the source.
        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        The token is *not* read.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        # Advance past the consumed text and clear the lookahead buffer.
        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: int | None = None,
        span_end: int | None = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        # Default span is the zero-width point at the current position.
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        # Consume the opening token if present, run the enclosed parse, then
        # require the matching closing token (only when one was opened).
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()
|
markers.py
ADDED
@@ -0,0 +1,331 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
from __future__ import annotations
|
6 |
+
|
7 |
+
import operator
|
8 |
+
import os
|
9 |
+
import platform
|
10 |
+
import sys
|
11 |
+
from typing import Any, Callable, TypedDict, cast
|
12 |
+
|
13 |
+
from ._parser import MarkerAtom, MarkerList, Op, Value, Variable
|
14 |
+
from ._parser import parse_marker as _parse_marker
|
15 |
+
from ._tokenizer import ParserSyntaxError
|
16 |
+
from .specifiers import InvalidSpecifier, Specifier
|
17 |
+
from .utils import canonicalize_name
|
18 |
+
|
19 |
+
__all__ = [
|
20 |
+
"InvalidMarker",
|
21 |
+
"Marker",
|
22 |
+
"UndefinedComparison",
|
23 |
+
"UndefinedEnvironmentName",
|
24 |
+
"default_environment",
|
25 |
+
]
|
26 |
+
|
27 |
+
Operator = Callable[[str, str], bool]
|
28 |
+
|
29 |
+
|
30 |
+
# Raised by Marker() when the marker string cannot be parsed.
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.
    """
|
34 |
+
|
35 |
+
|
36 |
+
# Raised when a marker operator has no defined semantics for its operands.
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """
|
40 |
+
|
41 |
+
|
42 |
+
# Raised when a marker references a variable missing from the environment.
class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """
|
47 |
+
|
48 |
+
|
49 |
+
class Environment(TypedDict):
    """The PEP 508 marker environment: one key per marker variable."""

    implementation_name: str
    """The implementation's identifier, e.g. ``'cpython'``."""

    implementation_version: str
    """
    The implementation's version, e.g. ``'3.13.0a2'`` for CPython 3.13.0a2, or
    ``'7.3.13'`` for PyPy3.10 v7.3.13.
    """

    os_name: str
    """
    The value of :py:data:`os.name`. The name of the operating system dependent module
    imported, e.g. ``'posix'``.
    """

    platform_machine: str
    """
    Returns the machine type, e.g. ``'i386'``.

    An empty string if the value cannot be determined.
    """

    platform_release: str
    """
    The system's release, e.g. ``'2.2.0'`` or ``'NT'``.

    An empty string if the value cannot be determined.
    """

    platform_system: str
    """
    The system/OS name, e.g. ``'Linux'``, ``'Windows'`` or ``'Java'``.

    An empty string if the value cannot be determined.
    """

    platform_version: str
    """
    The system's release version, e.g. ``'#3 on degas'``.

    An empty string if the value cannot be determined.
    """

    python_full_version: str
    """
    The Python version as string ``'major.minor.patchlevel'``.

    Note that unlike the Python :py:data:`sys.version`, this value will always include
    the patchlevel (it defaults to 0).
    """

    platform_python_implementation: str
    """
    A string identifying the Python implementation, e.g. ``'CPython'``.
    """

    python_version: str
    """The Python version as string ``'major.minor'``."""

    sys_platform: str
    """
    This string contains a platform identifier that can be used to append
    platform-specific components to :py:data:`sys.path`, for instance.

    For Unix systems, except on Linux and AIX, this is the lowercased OS name as
    returned by ``uname -s`` with the first part of the version as returned by
    ``uname -r`` appended, e.g. ``'sunos5'`` or ``'freebsd8'``, at the time when Python
    was built.
    """
|
119 |
+
|
120 |
+
|
121 |
+
def _normalize_extra_values(results: Any) -> Any:
    """
    Normalize extra values.
    """
    # Only the first item can be the top-level `extra` comparison; when one
    # side is the `extra` variable, canonicalize the literal on the other side
    # (PEP 685) so later comparisons match normalized extra names.
    if isinstance(results[0], tuple):
        lhs, op, rhs = results[0]
        if isinstance(lhs, Variable) and lhs.value == "extra":
            rhs = Value(canonicalize_name(rhs.value))
        elif isinstance(rhs, Variable) and rhs.value == "extra":
            lhs = Value(canonicalize_name(lhs.value))
        results[0] = lhs, op, rhs
    return results
|
135 |
+
|
136 |
+
|
137 |
+
def _format_marker(
|
138 |
+
marker: list[str] | MarkerAtom | str, first: bool | None = True
|
139 |
+
) -> str:
|
140 |
+
assert isinstance(marker, (list, tuple, str))
|
141 |
+
|
142 |
+
# Sometimes we have a structure like [[...]] which is a single item list
|
143 |
+
# where the single item is itself it's own list. In that case we want skip
|
144 |
+
# the rest of this function so that we don't get extraneous () on the
|
145 |
+
# outside.
|
146 |
+
if (
|
147 |
+
isinstance(marker, list)
|
148 |
+
and len(marker) == 1
|
149 |
+
and isinstance(marker[0], (list, tuple))
|
150 |
+
):
|
151 |
+
return _format_marker(marker[0])
|
152 |
+
|
153 |
+
if isinstance(marker, list):
|
154 |
+
inner = (_format_marker(m, first=False) for m in marker)
|
155 |
+
if first:
|
156 |
+
return " ".join(inner)
|
157 |
+
else:
|
158 |
+
return "(" + " ".join(inner) + ")"
|
159 |
+
elif isinstance(marker, tuple):
|
160 |
+
return " ".join([m.serialize() for m in marker])
|
161 |
+
else:
|
162 |
+
return marker
|
163 |
+
|
164 |
+
|
165 |
+
# Fallback string-comparison operators, used by _eval_op when the right-hand
# side is not a valid PEP 440 version specifier.
_operators: dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
|
175 |
+
|
176 |
+
|
177 |
+
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    """Evaluate ``lhs op rhs``, preferring PEP 440 specifier semantics.

    If ``op`` + ``rhs`` forms a valid version specifier (e.g. ``>= "3.8"``),
    version comparison rules apply; otherwise fall back to plain string
    operators from ``_operators``.
    """
    try:
        spec = Specifier(op.serialize() + rhs)
    except InvalidSpecifier:
        pass
    else:
        # Pre-releases are always allowed during marker evaluation.
        return spec.contains(lhs, prereleases=True)

    oper: Operator | None = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return oper(lhs, rhs)
|
190 |
+
|
191 |
+
|
192 |
+
def _normalize(*values: str, key: str) -> tuple[str, ...]:
    """Normalize comparison operands for the given environment key."""
    # PEP 685 – Comparison of extra names for optional distribution dependencies
    # https://peps.python.org/pep-0685/
    # > When comparing extra names, tools MUST normalize the names being
    # > compared using the semantics outlined in PEP 503 for names
    if key != "extra":
        # Other environment markers have no normalization standard.
        return values
    return tuple(canonicalize_name(v) for v in values)
|
202 |
+
|
203 |
+
|
204 |
+
def _evaluate_markers(markers: MarkerList, environment: dict[str, str]) -> bool:
    """Evaluate a parsed marker list against *environment*.

    `groups` holds one list of booleans per "or"-separated clause; items
    within a clause are implicitly "and"-ed together, so the overall result
    is any(all(clause)).
    """
    groups: list[list[bool]] = [[]]

    for marker in markers:
        assert isinstance(marker, (list, tuple, str))

        if isinstance(marker, list):
            # Parenthesized sub-expression: evaluate recursively.
            groups[-1].append(_evaluate_markers(marker, environment))
        elif isinstance(marker, tuple):
            lhs, op, rhs = marker

            # Exactly one side is a Variable; resolve it from the environment.
            if isinstance(lhs, Variable):
                environment_key = lhs.value
                lhs_value = environment[environment_key]
                rhs_value = rhs.value
            else:
                lhs_value = lhs.value
                environment_key = rhs.value
                rhs_value = environment[environment_key]

            lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
            groups[-1].append(_eval_op(lhs_value, op, rhs_value))
        else:
            # Boolean connective: "or" starts a new clause, "and" continues
            # the current one.
            assert marker in ["and", "or"]
            if marker == "or":
                groups.append([])

    return any(all(item) for item in groups)
|
232 |
+
|
233 |
+
|
234 |
+
def format_full_version(info: sys._version_info) -> str:
    """Render a sys.version_info-like struct as a version string."""
    base = f"{info.major}.{info.minor}.{info.micro}"
    if info.releaselevel == "final":
        return base
    # Non-final releases append the level's first letter plus the serial,
    # e.g. ("alpha", 2) -> "a2".
    return f"{base}{info.releaselevel[0]}{info.serial}"
|
240 |
+
|
241 |
+
|
242 |
+
def default_environment() -> Environment:
    """Collect the PEP 508 marker environment of the running interpreter."""
    return {
        "implementation_name": sys.implementation.name,
        "implementation_version": format_full_version(sys.implementation.version),
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        # platform.python_version() always includes a patch level.
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        # Only "major.minor" for python_version.
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
|
258 |
+
|
259 |
+
|
260 |
+
class Marker:
    """A parsed PEP 508 environment marker, e.g. ``python_version >= "3.8"``."""

    def __init__(self, marker: str) -> None:
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.
        try:
            self._markers = _normalize_extra_values(_parse_marker(marker))
            # The attribute `_markers` can be described in terms of a recursive type:
            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            # [
            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            #     'and',
            #     [
            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            #         'or',
            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            #     ]
            # ]
        except ParserSyntaxError as e:
            # Translate low-level parse errors into the documented public type.
            raise InvalidMarker(str(e)) from e

    def __str__(self) -> str:
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        # Hash/equality are defined on the serialized form.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented

        return str(self) == str(other)

    def evaluate(self, environment: dict[str, str] | None = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = cast("dict[str, str]", default_environment())
        current_environment["extra"] = ""
        if environment is not None:
            current_environment.update(environment)
            # The API used to allow setting extra to None. We need to handle this
            # case for backwards compatibility.
            if current_environment["extra"] is None:
                current_environment["extra"] = ""

        return _evaluate_markers(
            self._markers, _repair_python_full_version(current_environment)
        )
|
322 |
+
|
323 |
+
|
324 |
+
def _repair_python_full_version(env: dict[str, str]) -> dict[str, str]:
|
325 |
+
"""
|
326 |
+
Work around platform.python_version() returning something that is not PEP 440
|
327 |
+
compliant for non-tagged Python builds.
|
328 |
+
"""
|
329 |
+
if env["python_full_version"].endswith("+"):
|
330 |
+
env["python_full_version"] += "local"
|
331 |
+
return env
|
metadata.py
ADDED
@@ -0,0 +1,863 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import email.feedparser
|
4 |
+
import email.header
|
5 |
+
import email.message
|
6 |
+
import email.parser
|
7 |
+
import email.policy
|
8 |
+
import pathlib
|
9 |
+
import sys
|
10 |
+
import typing
|
11 |
+
from typing import (
|
12 |
+
Any,
|
13 |
+
Callable,
|
14 |
+
Generic,
|
15 |
+
Literal,
|
16 |
+
TypedDict,
|
17 |
+
cast,
|
18 |
+
)
|
19 |
+
|
20 |
+
from . import licenses, requirements, specifiers, utils
|
21 |
+
from . import version as version_module
|
22 |
+
from .licenses import NormalizedLicenseExpression
|
23 |
+
|
24 |
+
T = typing.TypeVar("T")
|
25 |
+
|
26 |
+
|
27 |
+
if sys.version_info >= (3, 11):  # pragma: no cover
    # Python 3.11+ ships ExceptionGroup natively; re-export it unchanged.
    ExceptionGroup = ExceptionGroup
else:  # pragma: no cover

    class ExceptionGroup(Exception):
        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.

        If :external:exc:`ExceptionGroup` is already defined by Python itself,
        that version is used instead.
        """

        message: str
        exceptions: list[Exception]

        def __init__(self, message: str, exceptions: list[Exception]) -> None:
            self.message = message
            self.exceptions = exceptions

        def __repr__(self) -> str:
            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"
|
47 |
+
|
48 |
+
|
49 |
+
class InvalidMetadata(ValueError):
    """A metadata field contains invalid data."""

    field: str
    """The name of the field that contains invalid data."""

    def __init__(self, field: str, message: str) -> None:
        # Record which metadata field failed so callers can report it.
        self.field = field
        super().__init__(message)
|
58 |
+
|
59 |
+
|
60 |
+
# The RawMetadata class attempts to make as few assumptions about the underlying
|
61 |
+
# serialization formats as possible. The idea is that as long as a serialization
|
62 |
+
# formats offer some very basic primitives in *some* way then we can support
|
63 |
+
# serializing to and from that format.
|
64 |
+
class RawMetadata(TypedDict, total=False):
    """A dictionary of raw core metadata.

    Each field in core metadata maps to a key of this dictionary (when data is
    provided). The key is lower-case and underscores are used instead of dashes
    compared to the equivalent core metadata field. Any core metadata field that
    can be specified multiple times or can hold multiple values in a single
    field have a key with a plural name. See :class:`Metadata` whose attributes
    match the keys of this dictionary.

    Core metadata fields that can be specified multiple times are stored as a
    list or dict depending on which is appropriate for the field. Any fields
    which hold multiple values in a single field are stored as a list.

    """

    # Metadata 1.0 - PEP 241
    metadata_version: str
    name: str
    version: str
    platforms: list[str]
    summary: str
    description: str
    keywords: list[str]
    home_page: str
    author: str
    author_email: str
    license: str

    # Metadata 1.1 - PEP 314
    supported_platforms: list[str]
    download_url: str
    classifiers: list[str]
    requires: list[str]
    provides: list[str]
    obsoletes: list[str]

    # Metadata 1.2 - PEP 345
    maintainer: str
    maintainer_email: str
    requires_dist: list[str]
    provides_dist: list[str]
    obsoletes_dist: list[str]
    requires_python: str
    requires_external: list[str]
    project_urls: dict[str, str]

    # Metadata 2.0
    # PEP 426 attempted to completely revamp the metadata format
    # but got stuck without ever being able to build consensus on
    # it and ultimately ended up withdrawn.
    #
    # However, a number of tools had started emitting METADATA with
    # `2.0` Metadata-Version, so for historical reasons, this version
    # was skipped.

    # Metadata 2.1 - PEP 566
    description_content_type: str
    provides_extra: list[str]

    # Metadata 2.2 - PEP 643
    dynamic: list[str]

    # Metadata 2.3 - PEP 685
    # No new fields were added in PEP 685, just some edge case were
    # tightened up to provide better interoptability.

    # Metadata 2.4 - PEP 639
    license_expression: str
    license_files: list[str]
|
134 |
+
|
135 |
+
|
136 |
+
# RawMetadata keys whose value is a single string (single-use fields).
_STRING_FIELDS = {
    "author",
    "author_email",
    "description",
    "description_content_type",
    "download_url",
    "home_page",
    "license",
    "license_expression",
    "maintainer",
    "maintainer_email",
    "metadata_version",
    "name",
    "requires_python",
    "summary",
    "version",
}

# RawMetadata keys whose value is a list of strings (multiple-use fields).
_LIST_FIELDS = {
    "classifiers",
    "dynamic",
    "license_files",
    "obsoletes",
    "obsoletes_dist",
    "platforms",
    "provides",
    "provides_dist",
    "provides_extra",
    "requires",
    "requires_dist",
    "requires_external",
    "supported_platforms",
}

# RawMetadata keys whose value is a mapping of strings to strings.
_DICT_FIELDS = {
    "project_urls",
}
|
173 |
+
|
174 |
+
|
175 |
+
def _parse_keywords(data: str) -> list[str]:
|
176 |
+
"""Split a string of comma-separated keywords into a list of keywords."""
|
177 |
+
return [k.strip() for k in data.split(",")]
|
178 |
+
|
179 |
+
|
180 |
+
def _parse_project_urls(data: list[str]) -> dict[str, str]:
|
181 |
+
"""Parse a list of label/URL string pairings separated by a comma."""
|
182 |
+
urls = {}
|
183 |
+
for pair in data:
|
184 |
+
# Our logic is slightly tricky here as we want to try and do
|
185 |
+
# *something* reasonable with malformed data.
|
186 |
+
#
|
187 |
+
# The main thing that we have to worry about, is data that does
|
188 |
+
# not have a ',' at all to split the label from the Value. There
|
189 |
+
# isn't a singular right answer here, and we will fail validation
|
190 |
+
# later on (if the caller is validating) so it doesn't *really*
|
191 |
+
# matter, but since the missing value has to be an empty str
|
192 |
+
# and our return value is dict[str, str], if we let the key
|
193 |
+
# be the missing value, then they'd have multiple '' values that
|
194 |
+
# overwrite each other in a accumulating dict.
|
195 |
+
#
|
196 |
+
# The other potentional issue is that it's possible to have the
|
197 |
+
# same label multiple times in the metadata, with no solid "right"
|
198 |
+
# answer with what to do in that case. As such, we'll do the only
|
199 |
+
# thing we can, which is treat the field as unparseable and add it
|
200 |
+
# to our list of unparsed fields.
|
201 |
+
parts = [p.strip() for p in pair.split(",", 1)]
|
202 |
+
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
|
203 |
+
|
204 |
+
# TODO: The spec doesn't say anything about if the keys should be
|
205 |
+
# considered case sensitive or not... logically they should
|
206 |
+
# be case-preserving and case-insensitive, but doing that
|
207 |
+
# would open up more cases where we might have duplicate
|
208 |
+
# entries.
|
209 |
+
label, url = parts
|
210 |
+
if label in urls:
|
211 |
+
# The label already exists in our set of urls, so this field
|
212 |
+
# is unparseable, and we can just add the whole thing to our
|
213 |
+
# unparseable data and stop processing it.
|
214 |
+
raise KeyError("duplicate labels in project urls")
|
215 |
+
urls[label] = url
|
216 |
+
|
217 |
+
return urls
|
218 |
+
|
219 |
+
|
220 |
+
def _get_payload(msg: email.message.Message, source: bytes | str) -> str:
|
221 |
+
"""Get the body of the message."""
|
222 |
+
# If our source is a str, then our caller has managed encodings for us,
|
223 |
+
# and we don't need to deal with it.
|
224 |
+
if isinstance(source, str):
|
225 |
+
payload = msg.get_payload()
|
226 |
+
assert isinstance(payload, str)
|
227 |
+
return payload
|
228 |
+
# If our source is a bytes, then we're managing the encoding and we need
|
229 |
+
# to deal with it.
|
230 |
+
else:
|
231 |
+
bpayload = msg.get_payload(decode=True)
|
232 |
+
assert isinstance(bpayload, bytes)
|
233 |
+
try:
|
234 |
+
return bpayload.decode("utf8", "strict")
|
235 |
+
except UnicodeDecodeError as exc:
|
236 |
+
raise ValueError("payload in an invalid encoding") from exc
|
237 |
+
|
238 |
+
|
239 |
+
# The various parse_FORMAT functions here are intended to be as lenient as
|
240 |
+
# possible in their parsing, while still returning a correctly typed
|
241 |
+
# RawMetadata.
|
242 |
+
#
|
243 |
+
# To aid in this, we also generally want to do as little touching of the
|
244 |
+
# data as possible, except where there are possibly some historic holdovers
|
245 |
+
# that make valid data awkward to work with.
|
246 |
+
#
|
247 |
+
# While this is a lower level, intermediate format than our ``Metadata``
|
248 |
+
# class, some light touch ups can make a massive difference in usability.
|
249 |
+
|
250 |
+
# Map METADATA fields to RawMetadata.
|
251 |
+
# Map METADATA fields to RawMetadata.
# Keys are the lower-cased email header names as they appear in METADATA;
# values are the corresponding RawMetadata dictionary keys.
_EMAIL_TO_RAW_MAPPING = {
    "author": "author",
    "author-email": "author_email",
    "classifier": "classifiers",
    "description": "description",
    "description-content-type": "description_content_type",
    "download-url": "download_url",
    "dynamic": "dynamic",
    "home-page": "home_page",
    "keywords": "keywords",
    "license": "license",
    "license-expression": "license_expression",
    "license-file": "license_files",
    "maintainer": "maintainer",
    "maintainer-email": "maintainer_email",
    "metadata-version": "metadata_version",
    "name": "name",
    "obsoletes": "obsoletes",
    "obsoletes-dist": "obsoletes_dist",
    "platform": "platforms",
    "project-url": "project_urls",
    "provides": "provides",
    "provides-dist": "provides_dist",
    "provides-extra": "provides_extra",
    "requires": "requires",
    "requires-dist": "requires_dist",
    "requires-external": "requires_external",
    "requires-python": "requires_python",
    "summary": "summary",
    "supported-platform": "supported_platforms",
    "version": "version",
}
# Inverse mapping: RawMetadata keys back to their METADATA header names.
_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
|
284 |
+
|
285 |
+
|
286 |
+
def parse_email(data: bytes | str) -> tuple[RawMetadata, dict[str, list[str]]]:
    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).

    This function returns a two-item tuple of dicts. The first dict is of
    recognized fields from the core metadata specification. Fields that can be
    parsed and translated into Python's built-in types are converted
    appropriately. All other fields are left as-is. Fields that are allowed to
    appear multiple times are stored as lists.

    The second dict contains all other fields from the metadata. This includes
    any unrecognized fields. It also includes any fields which are expected to
    be parsed into a built-in type but were not formatted appropriately. Finally,
    any fields that are expected to appear only once but are repeated are
    included in this dict.

    """
    raw: dict[str, str | list[str] | dict[str, str]] = {}
    unparsed: dict[str, list[str]] = {}

    # compat32 keeps the parser from doing its own header decoding; that is
    # handled explicitly below so that invalid encodings can be detected.
    if isinstance(data, str):
        parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
    else:
        parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)

    # We have to wrap parsed.keys() in a set, because in the case of multiple
    # values for a key (a list), the key will appear multiple times in the
    # list of keys, but we're avoiding that by using get_all().
    for name in frozenset(parsed.keys()):
        # Header names in RFC are case insensitive, so we'll normalize to all
        # lower case to make comparisons easier.
        name = name.lower()

        # We use get_all() here, even for fields that aren't multiple use,
        # because otherwise someone could have e.g. two Name fields, and we
        # would just silently ignore it rather than doing something about it.
        headers = parsed.get_all(name) or []

        # The way the email module works when parsing bytes is that it
        # unconditionally decodes the bytes as ascii using the surrogateescape
        # handler. When you pull that data back out (such as with get_all() ),
        # it looks to see if the str has any surrogate escapes, and if it does
        # it wraps it in a Header object instead of returning the string.
        #
        # As such, we'll look for those Header objects, and fix up the encoding.
        value = []
        # Flag if we have run into any issues processing the headers, thus
        # signalling that the data belongs in 'unparsed'.
        valid_encoding = True
        for h in headers:
            # It's unclear if this can return more types than just a Header or
            # a str, so we'll just assert here to make sure.
            assert isinstance(h, (email.header.Header, str))

            # If it's a header object, we need to do our little dance to get
            # the real data out of it. In cases where there is invalid data
            # we're going to end up with mojibake, but there's no obvious, good
            # way around that without reimplementing parts of the Header object
            # ourselves.
            #
            # That should be fine since, if mojibacked happens, this key is
            # going into the unparsed dict anyways.
            if isinstance(h, email.header.Header):
                # The Header object stores it's data as chunks, and each chunk
                # can be independently encoded, so we'll need to check each
                # of them.
                chunks: list[tuple[bytes, str | None]] = []
                for bin, encoding in email.header.decode_header(h):
                    try:
                        bin.decode("utf8", "strict")
                    except UnicodeDecodeError:
                        # Enable mojibake.
                        encoding = "latin1"
                        valid_encoding = False
                    else:
                        encoding = "utf8"
                    chunks.append((bin, encoding))

                # Turn our chunks back into a Header object, then let that
                # Header object do the right thing to turn them into a
                # string for us.
                value.append(str(email.header.make_header(chunks)))
            # This is already a string, so just add it.
            else:
                value.append(h)

        # We've processed all of our values to get them into a list of str,
        # but we may have mojibake data, in which case this is an unparsed
        # field.
        if not valid_encoding:
            unparsed[name] = value
            continue

        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
        if raw_name is None:
            # This is a bit of a weird situation, we've encountered a key that
            # we don't know what it means, so we don't know whether it's meant
            # to be a list or not.
            #
            # Since we can't really tell one way or another, we'll just leave it
            # as a list, even though it may be a single item list, because that's
            # what makes the most sense for email headers.
            unparsed[name] = value
            continue

        # If this is one of our string fields, then we'll check to see if our
        # value is a list of a single item. If it is then we'll assume that
        # it was emitted as a single string, and unwrap the str from inside
        # the list.
        #
        # If it's any other kind of data, then we haven't the faintest clue
        # what we should parse it as, and we have to just add it to our list
        # of unparsed stuff.
        if raw_name in _STRING_FIELDS and len(value) == 1:
            raw[raw_name] = value[0]
        # If this is one of our list of string fields, then we can just assign
        # the value, since email *only* has strings, and our get_all() call
        # above ensures that this is a list.
        elif raw_name in _LIST_FIELDS:
            raw[raw_name] = value
        # Special Case: Keywords
        # The keywords field is implemented in the metadata spec as a str,
        # but it conceptually is a list of strings, and is serialized using
        # ", ".join(keywords), so we'll do some light data massaging to turn
        # this into what it logically is.
        elif raw_name == "keywords" and len(value) == 1:
            raw[raw_name] = _parse_keywords(value[0])
        # Special Case: Project-URL
        # The project urls is implemented in the metadata spec as a list of
        # specially-formatted strings that represent a key and a value, which
        # is fundamentally a mapping, however the email format doesn't support
        # mappings in a sane way, so it was crammed into a list of strings
        # instead.
        #
        # We will do a little light data massaging to turn this into a map as
        # it logically should be.
        elif raw_name == "project_urls":
            try:
                raw[raw_name] = _parse_project_urls(value)
            except KeyError:
                unparsed[name] = value
        # Nothing that we've done has managed to parse this, so it'll just
        # throw it in our unparseable data and move on.
        else:
            unparsed[name] = value

    # We need to support getting the Description from the message payload in
    # addition to getting it from the the headers. This does mean, though, there
    # is the possibility of it being set both ways, in which case we put both
    # in 'unparsed' since we don't know which is right.
    try:
        payload = _get_payload(parsed, data)
    except ValueError:
        unparsed.setdefault("description", []).append(
            parsed.get_payload(decode=isinstance(data, bytes))  # type: ignore[call-overload]
        )
    else:
        if payload:
            # Check to see if we've already got a description, if so then both
            # it, and this body move to unparseable.
            if "description" in raw:
                description_header = cast(str, raw.pop("description"))
                unparsed.setdefault("description", []).extend(
                    [description_header, payload]
                )
            elif "description" in unparsed:
                unparsed["description"].append(payload)
            else:
                raw["description"] = payload

    # We need to cast our `raw` to a metadata, because a TypedDict only support
    # literal key names, but we're computing our key names on purpose, but the
    # way this function is implemented, our `TypedDict` can only have valid key
    # names.
    return cast(RawMetadata, raw), unparsed
|
460 |
+
|
461 |
+
|
462 |
+
# Unique sentinel object (no uses visible in this chunk of the file).
_NOT_FOUND = object()


# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3", "2.4"]

# Fields that must always be present for metadata to be considered valid.
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
|
470 |
+
|
471 |
+
|
472 |
+
class _Validator(Generic[T]):
    """Validate a metadata field.

    All _process_*() methods correspond to a core metadata field. The method is
    called with the field's raw value. If the raw value is valid it is returned
    in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
    If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
    as appropriate).
    """

    name: str
    raw_name: str
    added: _MetadataVersion

    def __init__(
        self,
        *,
        added: _MetadataVersion = "1.0",
    ) -> None:
        self.added = added

    def __set_name__(self, _owner: Metadata, name: str) -> None:
        # Record the attribute name this descriptor was assigned to, plus the
        # corresponding METADATA header name for error reporting.
        self.name = name
        self.raw_name = _RAW_TO_EMAIL_MAPPING[name]

    def __get__(self, instance: Metadata, _owner: type[Metadata]) -> T:
        # With Python 3.8, the caching can be replaced with functools.cached_property().
        # No need to check the cache as attribute lookup will resolve into the
        # instance's __dict__ before __get__ is called.
        cache = instance.__dict__
        value = instance._raw.get(self.name)

        # To make the _process_* methods easier, we'll check if the value is None
        # and if this field is NOT a required attribute, and if both of those
        # things are true, we'll skip the the converter. This will mean that the
        # converters never have to deal with the None union.
        if self.name in _REQUIRED_ATTRS or value is not None:
            try:
                converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
            except AttributeError:
                # No converter defined for this field; use the raw value as-is.
                pass
            else:
                value = converter(value)

        # Cache the enriched value on the instance and drop the raw value so
        # it cannot be enriched twice.
        cache[self.name] = value
        try:
            del instance._raw[self.name]  # type: ignore[misc]
        except KeyError:
            pass

        return cast(T, value)

    def _invalid_metadata(
        self, msg: str, cause: Exception | None = None
    ) -> InvalidMetadata:
        """Build an InvalidMetadata for this field, filling ``{field}`` in *msg*."""
        exc = InvalidMetadata(
            self.raw_name, msg.format_map({"field": repr(self.raw_name)})
        )
        exc.__cause__ = cause
        return exc

    def _process_metadata_version(self, value: str) -> _MetadataVersion:
        # Implicitly makes Metadata-Version required.
        if value not in _VALID_METADATA_VERSIONS:
            raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
        return cast(_MetadataVersion, value)

    def _process_name(self, value: str) -> str:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        # Validate the name as a side-effect.
        try:
            utils.canonicalize_name(value, validate=True)
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            # Return the original (non-canonicalized) name.
            return value

    def _process_version(self, value: str) -> version_module.Version:
        if not value:
            raise self._invalid_metadata("{field} is a required field")
        try:
            return version_module.parse(value)
        except version_module.InvalidVersion as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_summary(self, value: str) -> str:
        """Check the field contains no newlines."""
        if "\n" in value:
            raise self._invalid_metadata("{field} must be a single line")
        return value

    def _process_description_content_type(self, value: str) -> str:
        """Validate the content type, charset, and Markdown variant parameters."""
        content_types = {"text/plain", "text/x-rst", "text/markdown"}
        # Parse the value via an EmailMessage header to reuse its
        # Content-Type parsing.
        message = email.message.EmailMessage()
        message["content-type"] = value

        content_type, parameters = (
            # Defaults to `text/plain` if parsing failed.
            message.get_content_type().lower(),
            message["content-type"].params,
        )
        # Check if content-type is valid or defaulted to `text/plain` and thus was
        # not parseable.
        if content_type not in content_types or content_type not in value.lower():
            raise self._invalid_metadata(
                f"{{field}} must be one of {list(content_types)}, not {value!r}"
            )

        charset = parameters.get("charset", "UTF-8")
        if charset != "UTF-8":
            raise self._invalid_metadata(
                f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
            )

        markdown_variants = {"GFM", "CommonMark"}
        variant = parameters.get("variant", "GFM")  # Use an acceptable default.
        if content_type == "text/markdown" and variant not in markdown_variants:
            raise self._invalid_metadata(
                f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
                f"not {variant!r}",
            )
        return value

    def _process_dynamic(self, value: list[str]) -> list[str]:
        """Reject dynamic fields that must be static or are unknown; lowercase the rest."""
        for dynamic_field in map(str.lower, value):
            if dynamic_field in {"name", "version", "metadata-version"}:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not allowed as a dynamic field"
                )
            elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
                raise self._invalid_metadata(
                    f"{dynamic_field!r} is not a valid dynamic field"
                )
        return list(map(str.lower, value))

    def _process_provides_extra(
        self,
        value: list[str],
    ) -> list[utils.NormalizedName]:
        normalized_names = []
        try:
            for name in value:
                normalized_names.append(utils.canonicalize_name(name, validate=True))
        except utils.InvalidName as exc:
            raise self._invalid_metadata(
                f"{name!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return normalized_names

    def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
        try:
            return specifiers.SpecifierSet(value)
        except specifiers.InvalidSpecifier as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_requires_dist(
        self,
        value: list[str],
    ) -> list[requirements.Requirement]:
        reqs = []
        try:
            for req in value:
                reqs.append(requirements.Requirement(req))
        except requirements.InvalidRequirement as exc:
            raise self._invalid_metadata(
                f"{req!r} is invalid for {{field}}", cause=exc
            ) from exc
        else:
            return reqs

    def _process_license_expression(
        self, value: str
    ) -> NormalizedLicenseExpression | None:
        try:
            return licenses.canonicalize_license_expression(value)
        except ValueError as exc:
            raise self._invalid_metadata(
                f"{value!r} is invalid for {{field}}", cause=exc
            ) from exc

    def _process_license_files(self, value: list[str]) -> list[str]:
        """Validate that each entry is a resolved, relative, '/'-delimited path."""
        paths = []
        for path in value:
            if ".." in path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, "
                    "parent directory indicators are not allowed"
                )
            if "*" in path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must be resolved"
                )
            if (
                pathlib.PurePosixPath(path).is_absolute()
                or pathlib.PureWindowsPath(path).is_absolute()
            ):
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, paths must be relative"
                )
            if pathlib.PureWindowsPath(path).as_posix() != path:
                raise self._invalid_metadata(
                    f"{path!r} is invalid for {{field}}, "
                    "paths must use '/' delimiter"
                )
            paths.append(path)
        return paths
|
686 |
+
|
687 |
+
|
688 |
+
class Metadata:
    """Representation of distribution metadata.

    Compared to :class:`RawMetadata`, this class provides objects representing
    metadata fields instead of only using built-in types. Any invalid metadata
    will cause :exc:`InvalidMetadata` to be raised (with a
    :py:attr:`~BaseException.__cause__` attribute as appropriate).
    """

    _raw: RawMetadata

    @classmethod
    def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> Metadata:
        """Create an instance from :class:`RawMetadata`.

        If *validate* is true, all metadata will be validated. All exceptions
        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
        """
        ins = cls()
        ins._raw = data.copy()  # Mutations occur due to caching enriched values.

        if validate:
            exceptions: list[Exception] = []
            try:
                metadata_version = ins.metadata_version
                metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
            except InvalidMetadata as metadata_version_exc:
                exceptions.append(metadata_version_exc)
                metadata_version = None

            # Make sure to check for the fields that are present, the required
            # fields (so their absence can be reported).
            fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
            # Remove fields that have already been checked.
            fields_to_check -= {"metadata_version"}

            for key in fields_to_check:
                try:
                    if metadata_version:
                        # Can't use getattr() as that triggers descriptor protocol which
                        # will fail due to no value for the instance argument.
                        try:
                            field_metadata_version = cls.__dict__[key].added
                        except KeyError:
                            exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
                            exceptions.append(exc)
                            continue
                        # Reject fields that did not exist yet in the declared
                        # metadata version.
                        field_age = _VALID_METADATA_VERSIONS.index(
                            field_metadata_version
                        )
                        if field_age > metadata_age:
                            field = _RAW_TO_EMAIL_MAPPING[key]
                            exc = InvalidMetadata(
                                field,
                                f"{field} introduced in metadata version "
                                f"{field_metadata_version}, not {metadata_version}",
                            )
                            exceptions.append(exc)
                            continue
                    # Trigger the descriptor so the field's value is validated.
                    getattr(ins, key)
                except InvalidMetadata as exc:
                    exceptions.append(exc)

            if exceptions:
                raise ExceptionGroup("invalid metadata", exceptions)

        return ins

    @classmethod
    def from_email(cls, data: bytes | str, *, validate: bool = True) -> Metadata:
        """Parse metadata from email headers.

        If *validate* is true, the metadata will be validated. All exceptions
        related to validation will be gathered and raised as an :class:`ExceptionGroup`.
        """
        raw, unparsed = parse_email(data)

        if validate:
            exceptions: list[Exception] = []
            for unparsed_key in unparsed:
                if unparsed_key in _EMAIL_TO_RAW_MAPPING:
                    message = f"{unparsed_key!r} has invalid data"
                else:
                    message = f"unrecognized field: {unparsed_key!r}"
                exceptions.append(InvalidMetadata(unparsed_key, message))

            if exceptions:
                raise ExceptionGroup("unparsed", exceptions)

        try:
            return cls.from_raw(raw, validate=validate)
        except ExceptionGroup as exc_group:
            raise ExceptionGroup(
                "invalid or unparsed metadata", exc_group.exceptions
            ) from None

    metadata_version: _Validator[_MetadataVersion] = _Validator()
    """:external:ref:`core-metadata-metadata-version`
    (required; validated to be a valid metadata version)"""
    # `name` is not normalized/typed to NormalizedName so as to provide access to
    # the original/raw name.
    name: _Validator[str] = _Validator()
    """:external:ref:`core-metadata-name`
    (required; validated using :func:`~packaging.utils.canonicalize_name` and its
    *validate* parameter)"""
    version: _Validator[version_module.Version] = _Validator()
    """:external:ref:`core-metadata-version` (required)"""
    dynamic: _Validator[list[str] | None] = _Validator(
        added="2.2",
    )
    """:external:ref:`core-metadata-dynamic`
    (validated against core metadata field names and lowercased)"""
    platforms: _Validator[list[str] | None] = _Validator()
    """:external:ref:`core-metadata-platform`"""
    supported_platforms: _Validator[list[str] | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-supported-platform`"""
    summary: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-summary` (validated to contain no newlines)"""
    description: _Validator[str | None] = _Validator()  # TODO 2.1: can be in body
    """:external:ref:`core-metadata-description`"""
    description_content_type: _Validator[str | None] = _Validator(added="2.1")
    """:external:ref:`core-metadata-description-content-type` (validated)"""
    keywords: _Validator[list[str] | None] = _Validator()
    """:external:ref:`core-metadata-keywords`"""
    home_page: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-home-page`"""
    download_url: _Validator[str | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-download-url`"""
    author: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-author`"""
    author_email: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-author-email`"""
    maintainer: _Validator[str | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer`"""
    maintainer_email: _Validator[str | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-maintainer-email`"""
    license: _Validator[str | None] = _Validator()
    """:external:ref:`core-metadata-license`"""
    license_expression: _Validator[NormalizedLicenseExpression | None] = _Validator(
        added="2.4"
    )
    """:external:ref:`core-metadata-license-expression`"""
    license_files: _Validator[list[str] | None] = _Validator(added="2.4")
    """:external:ref:`core-metadata-license-file`"""
    classifiers: _Validator[list[str] | None] = _Validator(added="1.1")
    """:external:ref:`core-metadata-classifier`"""
    requires_dist: _Validator[list[requirements.Requirement] | None] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-dist`"""
    requires_python: _Validator[specifiers.SpecifierSet | None] = _Validator(
        added="1.2"
    )
    """:external:ref:`core-metadata-requires-python`"""
    # Because `Requires-External` allows for non-PEP 440 version specifiers, we
    # don't do any processing on the values.
    requires_external: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-requires-external`"""
    project_urls: _Validator[dict[str, str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-project-url`"""
    # PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
    # regardless of metadata version.
    provides_extra: _Validator[list[utils.NormalizedName] | None] = _Validator(
        added="2.1",
    )
    """:external:ref:`core-metadata-provides-extra`"""
    provides_dist: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-provides-dist`"""
    obsoletes_dist: _Validator[list[str] | None] = _Validator(added="1.2")
    """:external:ref:`core-metadata-obsoletes-dist`"""
    requires: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Requires`` (deprecated)"""
    provides: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Provides`` (deprecated)"""
    obsoletes: _Validator[list[str] | None] = _Validator(added="1.1")
    """``Obsoletes`` (deprecated)"""
|
py.typed
ADDED
File without changes
|
requirements.py
ADDED
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
from __future__ import annotations
|
5 |
+
|
6 |
+
from typing import Any, Iterator
|
7 |
+
|
8 |
+
from ._parser import parse_requirement as _parse_requirement
|
9 |
+
from ._tokenizer import ParserSyntaxError
|
10 |
+
from .markers import Marker, _normalize_extra_values
|
11 |
+
from .specifiers import SpecifierSet
|
12 |
+
from .utils import canonicalize_name
|
13 |
+
|
14 |
+
|
15 |
+
class InvalidRequirement(ValueError):
    """
    Raised when a requirement string is malformed.

    An invalid requirement was found; users should refer to :pep:`508` for
    the dependency-specification grammar. The parser's error message is
    carried through as the exception text.
    """
|
19 |
+
|
20 |
+
|
21 |
+
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        try:
            parsed = _parse_requirement(requirement_string)
        except ParserSyntaxError as exc:
            # Surface parser errors under the public exception type.
            raise InvalidRequirement(str(exc)) from exc

        self.name: str = parsed.name
        self.url: str | None = parsed.url or None
        self.extras: set[str] = set(parsed.extras or [])
        self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
        self.marker: Marker | None = None
        if parsed.marker is not None:
            # Reuse the already-parsed marker expression rather than
            # re-parsing its string form; normalize any extra names in it.
            marker = Marker.__new__(Marker)
            marker._markers = _normalize_extra_values(parsed.marker)
            self.marker = marker

    def _iter_parts(self, name: str) -> Iterator[str]:
        # Yield the textual pieces of this requirement in PEP 508 order,
        # starting from the caller-supplied (possibly canonicalized) name.
        yield name

        if self.extras:
            yield f"[{','.join(sorted(self.extras))}]"

        if self.specifier:
            yield str(self.specifier)

        if self.url:
            yield f"@ {self.url}"
            if self.marker:
                # A URL must be separated from a trailing marker by a space.
                yield " "

        if self.marker:
            yield f"; {self.marker}"

    def __str__(self) -> str:
        return "".join(self._iter_parts(self.name))

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"

    def __hash__(self) -> int:
        # Hash on the canonicalized name so that differently-spelled but
        # equivalent names hash identically (matching __eq__).
        parts = (
            self.__class__.__name__,
            *self._iter_parts(canonicalize_name(self.name)),
        )
        return hash(parts)

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Requirement):
            return NotImplemented

        if canonicalize_name(self.name) != canonicalize_name(other.name):
            return False

        return (
            self.extras == other.extras
            and self.specifier == other.specifier
            and self.url == other.url
            and self.marker == other.marker
        )
|
specifiers.py
ADDED
@@ -0,0 +1,1020 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
"""
|
5 |
+
.. testsetup::
|
6 |
+
|
7 |
+
from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
|
8 |
+
from packaging.version import Version
|
9 |
+
"""
|
10 |
+
|
11 |
+
from __future__ import annotations
|
12 |
+
|
13 |
+
import abc
|
14 |
+
import itertools
|
15 |
+
import re
|
16 |
+
from typing import Callable, Iterable, Iterator, TypeVar, Union
|
17 |
+
|
18 |
+
from .utils import canonicalize_version
|
19 |
+
from .version import Version
|
20 |
+
|
21 |
+
UnparsedVersion = Union[Version, str]
|
22 |
+
UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
|
23 |
+
CallableOperator = Callable[[Version, str], bool]
|
24 |
+
|
25 |
+
|
26 |
+
def _coerce_version(version: UnparsedVersion) -> Version:
    """Return *version* as a :class:`Version`, parsing strings on demand."""
    if isinstance(version, Version):
        return version
    return Version(version)
|
30 |
+
|
31 |
+
|
32 |
+
class InvalidSpecifier(ValueError):
    """
    Raised when attempting to create a :class:`Specifier` with a specifier
    string that is invalid.

    The offending specifier string is included in the exception message.

    >>> Specifier("lolwat")
    Traceback (most recent call last):
        ...
    packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
    """
|
42 |
+
|
43 |
+
|
44 |
+
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface for specifier-like objects.

    Concrete implementations must be hashable, comparable for equality,
    printable via ``str()``, and able to test (:meth:`contains`) and filter
    (:meth:`filter`) candidate versions, honouring the :attr:`prereleases`
    setting.
    """

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier-like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier-like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier-like
        objects are equal.

        :param other: The other object to check against.
        """

    @property
    @abc.abstractmethod
    def prereleases(self) -> bool | None:
        """Whether or not pre-releases as a whole are allowed.

        This can be set to either ``True`` or ``False`` to explicitly enable or disable
        prereleases or it can be set to ``None`` (the default) to use default semantics.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """Setter for :attr:`prereleases`.

        :param value: The value to set.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: bool | None = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
|
97 |
+
|
98 |
+
|
99 |
+
class Specifier(BaseSpecifier):
    """This class abstracts handling of version specifiers.

    .. tip::

        It is generally not required to instantiate this manually. You should instead
        prefer to work with :class:`SpecifierSet` instead, which can parse
        comma-separated version specifiers (which is what package metadata contains).
    """

    _operator_regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        """
    _version_regex_str = r"""
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s;)]*  # The arbitrary version can be just about anything,
                          # we match everything except for whitespace, a
                          # semi-colon for marker support, and a closing paren
                          # since versions can be enclosed in them.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)  # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?  # epoch
                [0-9]+(?:\.[0-9]+)*  # release

                # You cannot use a wild card and a pre-release, post-release, a dev or
                # local version together so group them with a | and make them optional.
                (?:
                    \.\*  # Wild card syntax of .*
                    |
                    (?:                                  # pre release
                        [-_\.]?
                        (alpha|beta|preview|pre|a|b|c|rc)
                        [-_\.]?
                        [0-9]*
                    )?
                    (?:                                   # post release
                        (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                    )?
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)?  # local
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)  # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?       # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (alpha|beta|preview|pre|a|b|c|rc)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)  # We have special cases for these
                               # operators so we want to make sure they
                               # don't match here.

                \s*
                v?
                (?:[0-9]+!)?  # epoch
                [0-9]+(?:\.[0-9]+)*  # release
                (?:                  # pre release
                    [-_\.]?
                    (alpha|beta|preview|pre|a|b|c|rc)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """

    _regex = re.compile(
        r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
        re.VERBOSE | re.IGNORECASE,
    )

    # Maps each operator token to the suffix of its ``_compare_*`` method;
    # see :meth:`_get_operator`.
    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    def __init__(self, spec: str = "", prereleases: bool | None = None) -> None:
        """Initialize a Specifier instance.

        :param spec:
            The string representation of a specifier which will be parsed and
            normalized before use.
        :param prereleases:
            This tells the specifier if it should accept prerelease versions if
            applicable or not. The default of ``None`` will autodetect it from the
            given specifiers.
        :raises InvalidSpecifier:
            If the given specifier is invalid (i.e. bad syntax).
        """
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier(f"Invalid specifier: {spec!r}")

        self._spec: tuple[str, str] = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    # https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
    @property  # type: ignore[override]
    def prereleases(self) -> bool:
        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "===", ">", "<"]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if Version(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    @property
    def operator(self) -> str:
        """The operator of this specifier.

        >>> Specifier("==1.2.3").operator
        '=='
        """
        return self._spec[0]

    @property
    def version(self) -> str:
        """The version of this specifier.

        >>> Specifier("==1.2.3").version
        '1.2.3'
        """
        return self._spec[1]

    def __repr__(self) -> str:
        """A representation of the Specifier that shows all internal state.

        >>> Specifier('>=1.0.0')
        <Specifier('>=1.0.0')>
        >>> Specifier('>=1.0.0', prereleases=False)
        <Specifier('>=1.0.0', prereleases=False)>
        >>> Specifier('>=1.0.0', prereleases=True)
        <Specifier('>=1.0.0', prereleases=True)>
        """
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A string representation of the Specifier that can be round-tripped.

        >>> str(Specifier('>=1.0.0'))
        '>=1.0.0'
        >>> str(Specifier('>=1.0.0', prereleases=False))
        '>=1.0.0'
        """
        return "{}{}".format(*self._spec)

    @property
    def _canonical_spec(self) -> tuple[str, str]:
        # Canonical form used for hashing and equality; ~= keeps trailing
        # zeroes because they are significant to compatible-release matching.
        canonical_version = canonicalize_version(
            self._spec[1],
            strip_trailing_zero=(self._spec[0] != "~="),
        )
        return self._spec[0], canonical_version

    def __hash__(self) -> int:
        return hash(self._canonical_spec)

    def __eq__(self, other: object) -> bool:
        """Whether or not the two Specifier-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
        True
        >>> (Specifier("==1.2.3", prereleases=False) ==
        ...  Specifier("==1.2.3", prereleases=True))
        True
        >>> Specifier("==1.2.3") == "==1.2.3"
        True
        >>> Specifier("==1.2.3") == Specifier("==1.2.4")
        False
        >>> Specifier("==1.2.3") == Specifier("~=1.2.3")
        False
        """
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._canonical_spec == other._canonical_spec

    def _get_operator(self, op: str) -> CallableOperator:
        # Dispatch an operator token to the matching _compare_* method.
        operator_callable: CallableOperator = getattr(
            self, f"_compare_{self._operators[op]}"
        )
        return operator_callable

    def _compare_compatible(self, prospective: Version, spec: str) -> bool:
        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore suffix segments.
        prefix = _version_join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )

    def _compare_equal(self, prospective: Version, spec: str) -> bool:
        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            normalized_prospective = canonicalize_version(
                prospective.public, strip_trailing_zero=False
            )
            # Get the normalized version string ignoring the trailing .*
            normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
            # Split the spec out by bangs and dots, and pretend that there is
            # an implicit dot in between a release segment and a pre-release segment.
            split_spec = _version_split(normalized_spec)

            # Split the prospective version out by bangs and dots, and pretend
            # that there is an implicit dot in between a release segment and
            # a pre-release segment.
            split_prospective = _version_split(normalized_prospective)

            # 0-pad the prospective version before shortening it to get the correct
            # shortened version.
            padded_prospective, _ = _pad_version(split_prospective, split_spec)

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            shortened_prospective = padded_prospective[: len(split_spec)]

            return shortened_prospective == split_spec
        else:
            # Convert our spec string into a Version
            spec_version = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec_version.local:
                prospective = Version(prospective.public)

            return prospective == spec_version

    def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
        return not self._compare_equal(prospective, spec)

    def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) <= Version(spec)

    def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) >= Version(spec)

    def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
        # The === operator is a plain case-insensitive string comparison;
        # no PEP 440 semantics are applied.
        return str(prospective).lower() == str(spec).lower()

    def __contains__(self, item: str | Version) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in Specifier(">=1.2.3")
        True
        >>> Version("1.2.3") in Specifier(">=1.2.3")
        True
        >>> "1.0.0" in Specifier(">=1.2.3")
        False
        >>> "1.3.0a1" in Specifier(">=1.2.3")
        False
        >>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
        True
        """
        return self.contains(item)

    def contains(self, item: UnparsedVersion, prereleases: bool | None = None) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this Specifier. If set to
            ``None`` (the default), it uses :attr:`prereleases` to determine
            whether or not prereleases are allowed.

        >>> Specifier(">=1.2.3").contains("1.2.3")
        True
        >>> Specifier(">=1.2.3").contains(Version("1.2.3"))
        True
        >>> Specifier(">=1.2.3").contains("1.0.0")
        False
        >>> Specifier(">=1.2.3").contains("1.3.0a1")
        False
        >>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
        True
        >>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
        True
        """

        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version, this allows us to have a shortcut for
        # "2.0" in Specifier(">=2")
        normalized_item = _coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if normalized_item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        operator_callable: CallableOperator = self._get_operator(self.operator)
        return operator_callable(normalized_item, self.version)

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifier.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will be intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(Specifier().contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
        ['1.2.3', '1.3', <Version('1.4')>]
        >>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
        ['1.5a1']
        >>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        """

        yielded = False
        found_prereleases = []

        # Match permissively here (allow prereleases unless the caller said
        # otherwise) and sort matched prereleases into the fallback list below.
        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = _coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
|
625 |
+
|
626 |
+
|
627 |
+
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
628 |
+
|
629 |
+
|
630 |
+
def _version_split(version: str) -> list[str]:
|
631 |
+
"""Split version into components.
|
632 |
+
|
633 |
+
The split components are intended for version comparison. The logic does
|
634 |
+
not attempt to retain the original version string, so joining the
|
635 |
+
components back with :func:`_version_join` may not produce the original
|
636 |
+
version string.
|
637 |
+
"""
|
638 |
+
result: list[str] = []
|
639 |
+
|
640 |
+
epoch, _, rest = version.rpartition("!")
|
641 |
+
result.append(epoch or "0")
|
642 |
+
|
643 |
+
for item in rest.split("."):
|
644 |
+
match = _prefix_regex.search(item)
|
645 |
+
if match:
|
646 |
+
result.extend(match.groups())
|
647 |
+
else:
|
648 |
+
result.append(item)
|
649 |
+
return result
|
650 |
+
|
651 |
+
|
652 |
+
def _version_join(components: list[str]) -> str:
|
653 |
+
"""Join split version components into a version string.
|
654 |
+
|
655 |
+
This function assumes the input came from :func:`_version_split`, where the
|
656 |
+
first component must be the epoch (either empty or numeric), and all other
|
657 |
+
components numeric.
|
658 |
+
"""
|
659 |
+
epoch, *rest = components
|
660 |
+
return f"{epoch}!{'.'.join(rest)}"
|
661 |
+
|
662 |
+
|
663 |
+
def _is_not_suffix(segment: str) -> bool:
|
664 |
+
return not any(
|
665 |
+
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
|
666 |
+
)
|
667 |
+
|
668 |
+
|
669 |
+
def _pad_version(left: list[str], right: list[str]) -> tuple[list[str], list[str]]:
|
670 |
+
left_split, right_split = [], []
|
671 |
+
|
672 |
+
# Get the release segment of our versions
|
673 |
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
674 |
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
675 |
+
|
676 |
+
# Get the rest of our versions
|
677 |
+
left_split.append(left[len(left_split[0]) :])
|
678 |
+
right_split.append(right[len(right_split[0]) :])
|
679 |
+
|
680 |
+
# Insert our padding
|
681 |
+
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
682 |
+
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
683 |
+
|
684 |
+
return (
|
685 |
+
list(itertools.chain.from_iterable(left_split)),
|
686 |
+
list(itertools.chain.from_iterable(right_split)),
|
687 |
+
)
|
688 |
+
|
689 |
+
|
690 |
+
class SpecifierSet(BaseSpecifier):
    """This class abstracts handling of a set of version specifiers.

    It can be passed a single specifier (``>=3.0``), a comma-separated list of
    specifiers (``>=3.0,!=3.1``), or no specifier at all.
    """

    def __init__(
        self,
        specifiers: str | Iterable[Specifier] = "",
        prereleases: bool | None = None,
    ) -> None:
        """Initialize a SpecifierSet instance.

        :param specifiers:
            The string representation of a specifier or a comma-separated list of
            specifiers which will be parsed and normalized before use.
            May also be an iterable of ``Specifier`` instances, which will be used
            as is.
        :param prereleases:
            This tells the SpecifierSet if it should accept prerelease versions if
            applicable or not. The default of ``None`` will autodetect it from the
            given specifiers.

        :raises InvalidSpecifier:
            If the given ``specifiers`` are not parseable than this exception will be
            raised.
        """

        if isinstance(specifiers, str):
            # Split on `,` to break each individual specifier into its own item, and
            # strip each item to remove leading/trailing whitespace.
            split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

            # Make each individual specifier a Specifier and save in a frozen set
            # for later.
            self._specs = frozenset(map(Specifier, split_specifiers))
        else:
            # Save the supplied specifiers in a frozen set.
            self._specs = frozenset(specifiers)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    @property
    def prereleases(self) -> bool | None:
        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __repr__(self) -> str:
        """A representation of the specifier set that shows all internal state.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> SpecifierSet('>=1.0.0,!=2.0.0')
        <SpecifierSet('!=2.0.0,>=1.0.0')>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
        """
        # Only show the prereleases override when it was set explicitly.
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A string representation of the specifier set that can be round-tripped.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
        '!=1.0.1,>=1.0.0'
        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
        '!=1.0.1,>=1.0.0'
        """
        # Sorted so the result is deterministic regardless of input order.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: SpecifierSet | str) -> SpecifierSet:
        """Return a SpecifierSet which is a combination of the two sets.

        :param other: The other object to combine with.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        """
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # A prereleases override survives the combination only when the two
        # sides do not explicitly contradict each other.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        """Whether or not the two SpecifierSet-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
        False
        """
        if isinstance(other, (str, Specifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        """Returns the number of specifiers in this specifier set."""
        return len(self._specs)

    def __iter__(self) -> Iterator[Specifier]:
        """
        Returns an iterator over all the underlying :class:`Specifier` instances
        in this specifier set.

        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
        """
        return iter(self._specs)

    def __contains__(self, item: UnparsedVersion) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
        True
        """
        return self.contains(item)

    def contains(
        self,
        item: UnparsedVersion,
        prereleases: bool | None = None,
        installed: bool | None = None,
    ) -> bool:
        """Return whether or not the item is contained in this SpecifierSet.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this SpecifierSet. If set to
            ``None`` (the default), it uses :attr:`prereleases` to determine
            whether or not prereleases are allowed.
        :param installed:
            When true, a prerelease item is compared by its base version, so an
            already-installed prerelease can satisfy a final-release specifier.

        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
        True
        """
        # Ensure that our item is a Version instance.
        if not isinstance(item, Version):
            item = Version(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        #       like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        if installed and item.is_prerelease:
            item = Version(item.base_version)

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        #       will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: bool | None = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifiers in this set.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will be intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
        ['1.3', <Version('1.4')>]
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
        []
        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']

        An "empty" SpecifierSet will filter items based on the presence of prerelease
        versions in the set.

        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet("").filter(["1.5a1"]))
        ['1.5a1']
        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        """
        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iter(iterable)
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases.
        else:
            filtered: list[UnparsedVersionVar] = []
            found_prereleases: list[UnparsedVersionVar] = []

            for item in iterable:
                parsed_version = _coerce_version(item)

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return iter(found_prereleases)

            return iter(filtered)
|
tags.py
ADDED
@@ -0,0 +1,617 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
from __future__ import annotations
|
6 |
+
|
7 |
+
import logging
|
8 |
+
import platform
|
9 |
+
import re
|
10 |
+
import struct
|
11 |
+
import subprocess
|
12 |
+
import sys
|
13 |
+
import sysconfig
|
14 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
15 |
+
from typing import (
|
16 |
+
Iterable,
|
17 |
+
Iterator,
|
18 |
+
Sequence,
|
19 |
+
Tuple,
|
20 |
+
cast,
|
21 |
+
)
|
22 |
+
|
23 |
+
from . import _manylinux, _musllinux
|
24 |
+
|
25 |
+
logger = logging.getLogger(__name__)

# A Python version expressed as a sequence of ints, e.g. (3, 11).
PythonVersion = Sequence[int]
# A macOS/iOS version expressed as a (major, minor) pair, e.g. (12, 0).
AppleVersion = Tuple[int, int]

# Maps long interpreter names to the two-letter abbreviations used in tags.
INTERPRETER_SHORT_NAMES: dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# True when the running interpreter uses 32-bit pointers.
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
|
40 |
+
|
41 |
+
|
42 |
+
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_abi", "_hash", "_interpreter", "_platform"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # The __hash__ of every single element in a Set[Tag] will be evaluated each time
        # that a set calls its `.disjoint()` method, which may be called hundreds of
        # times when scanning a page of links for packages with tags matching that
        # Set[Tag]. Pre-computing the value here produces significant speedups for
        # downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        # Compare the cached hashes first: a mismatch rejects cheaply before
        # any of the string comparisons run.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return "-".join((self._interpreter, self._abi, self._platform))

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
|
94 |
+
|
95 |
+
|
96 |
+
def parse_tag(tag: str) -> frozenset[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set.
    """
    interpreters, abis, platforms = tag.split("-")
    # Each dotted segment is a compressed list; expand the cross product.
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
|
110 |
+
|
111 |
+
|
112 |
+
def _get_config_var(name: str, warn: bool = False) -> int | str | None:
    """Look up *name* via sysconfig, optionally logging when it is unset."""
    value: int | str | None = sysconfig.get_config_var(name)
    if warn and value is None:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value
|
119 |
+
|
120 |
+
|
121 |
+
def _normalize_string(string: str) -> str:
|
122 |
+
return string.replace(".", "_").replace("-", "_").replace(" ", "_")
|
123 |
+
|
124 |
+
|
125 |
+
def _is_threaded_cpython(abis: list[str]) -> bool:
|
126 |
+
"""
|
127 |
+
Determine if the ABI corresponds to a threaded (`--disable-gil`) build.
|
128 |
+
|
129 |
+
The threaded builds are indicated by a "t" in the abiflags.
|
130 |
+
"""
|
131 |
+
if len(abis) == 0:
|
132 |
+
return False
|
133 |
+
# expect e.g., cp313
|
134 |
+
m = re.match(r"cp\d+(.*)", abis[0])
|
135 |
+
if not m:
|
136 |
+
return False
|
137 |
+
abiflags = m.group(1)
|
138 |
+
return "t" in abiflags
|
139 |
+
|
140 |
+
|
141 |
+
def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
|
142 |
+
"""
|
143 |
+
Determine if the Python version supports abi3.
|
144 |
+
|
145 |
+
PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
|
146 |
+
builds do not support abi3.
|
147 |
+
"""
|
148 |
+
return len(python_version) > 1 and tuple(python_version) >= (3, 2) and not threading
|
149 |
+
|
150 |
+
|
151 |
+
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> list[str]:
    """Return the ABI tags for this CPython build, most specific first.

    The first entry carries every applicable flag (threading/debug/pymalloc/
    UCS-4); a debug build on 3.3+ additionally gets the flag-free ABI since it
    can also load "normal" extension modules.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    threading = debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    # Free-threaded builds (PEP 703) add a "t" flag from 3.13 onwards.
    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
        threading = "t"
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}{threading}")
    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
    return abis
|
182 |
+
|
183 |
+
|
184 |
+
def cpython_tags(
    python_version: PythonVersion | None = None,
    abis: Iterable[str] | None = None,
    platforms: Iterable[str] | None = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.

    :param warn: Forwarded to the sysconfig probes; when true, log a debug
        message for each unset config variable.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    # Most specific tags first: the version-specific ABIs...
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    # ...then abi3 (only for non-free-threaded builds) and 'none'...
    threading = _is_threaded_cpython(abis)
    use_abi3 = _abi3_applies(python_version, threading)
    if use_abi3:
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    # ...and finally abi3 for every older minor version down to 3.2.
    if use_abi3:
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                version = _version_nodot((python_version[0], minor_version))
                interpreter = f"cp{version}"
                yield Tag(interpreter, "abi3", platform_)
|
241 |
+
|
242 |
+
|
243 |
+
def _generic_abi() -> list[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.

    :raises SystemError: if sysconfig reports a malformed EXT_SUFFIX.
    """
    # The following are examples of `EXT_SUFFIX`.
    # We want to keep the parts which are related to the ABI and remove the
    # parts which are related to the platform:
    # - linux:   '.cpython-310-x86_64-linux-gnu.so' => cp310
    # - mac:     '.cpython-310-darwin.so'           => cp310
    # - win:     '.cp310-win_amd64.pyd'             => cp310
    # - win:     '.pyd'                             => cp37 (uses _cpython_abis())
    # - pypy:    '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
    #            => graalpy_38_native

    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython3.7 and earlier uses ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    # parts[1] is the SOABI component, e.g. "cpython-310-x86_64-linux-gnu".
    soabi = parts[1]
    if soabi.startswith("cpython"):
        # non-windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi.startswith("cp"):
        # windows
        abi = soabi.split("-")[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]
|
282 |
+
|
283 |
+
|
284 |
+
def generic_tags(
    interpreter: str | None = None,
    abis: Iterable[str] | None = None,
    platforms: Iterable[str] | None = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.

    :param warn: Forwarded to the interpreter-version probe; when true, log a
        debug message if the version cannot be determined precisely.
    """
    if not interpreter:
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    else:
        abis = list(abis)
    platforms = list(platforms or platform_tags())
    # Ensure "none" is always the last (least specific) ABI tried.
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
|
313 |
+
|
314 |
+
|
315 |
+
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    major = py_version[0]
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{major}"
    if has_minor:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((major, minor))}"
|
328 |
+
|
329 |
+
|
330 |
+
def compatible_tags(
    python_version: PythonVersion | None = None,
    interpreter: str | None = None,
    platforms: Iterable[str] | None = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platform_list = list(platforms or platform_tags())
    # The "py*" range is iterated twice, so materialize it once.
    versions = list(_py_interpreter_range(python_version))
    for py_tag in versions:
        for plat in platform_list:
            yield Tag(py_tag, "none", plat)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    for py_tag in versions:
        yield Tag(py_tag, "none", "any")
|
353 |
+
|
354 |
+
|
355 |
+
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Map the reported macOS CPU arch to the one a 32-bit interpreter uses."""
    # 64-bit interpreters use the architecture name unchanged.
    if not is_32bit:
        return arch
    # A 32-bit interpreter sees PowerPC variants as "ppc" and Intel as "i386".
    return "ppc" if arch.startswith("ppc") else "i386"
|
363 |
+
|
364 |
+
|
365 |
+
def _mac_binary_formats(version: AppleVersion, cpu_arch: str) -> list[str]:
    """Return the macOS binary-format tags usable for ``cpu_arch`` at ``version``.

    Returns an empty list when the architecture was not supported at all on
    that macOS version. Order matters: most specific format first.
    """
    formats = [cpu_arch]
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        formats += ["intel", "fat64", "fat32"]
    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        formats += ["intel", "fat32", "fat"]
    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        # ppc64 binaries only existed for 10.4-10.5.
        if not ((10, 4) <= version <= (10, 5)):
            return []
        formats += ["fat64"]
    elif cpu_arch == "ppc":
        # ppc support ended after 10.6.
        if version > (10, 6):
            return []
        formats += ["fat32", "fat"]

    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")

    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")

    return formats
|
395 |
+
|
396 |
+
|
397 |
+
def mac_platforms(
    version: AppleVersion | None = None, arch: str | None = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version. Re-query in a subprocess with
            # SYSTEM_VERSION_COMPAT=0 to get the true version.
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("AppleVersion", tuple(map(int, version_str.split(".")[:2])))
    # NOTE: the original code had dead `else: version = version` and
    # `else: arch = arch` branches; they have been removed.
    if arch is None:
        arch = _mac_arch(cpu_arch)

    if (10, 0) <= version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        major_version = 10
        for minor_version in range(version[1], -1, -1):
            compat_version = major_version, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield f"macosx_{major_version}_{minor_version}_{binary_format}"

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number. The minor versions are now the midyear updates.
        minor_version = 0
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield f"macosx_{major_version}_{minor_version}_{binary_format}"

        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        major_version = 10
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = major_version, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield f"macosx_{major_version}_{minor_version}_{binary_format}"
        else:
            for minor_version in range(16, 3, -1):
                compat_version = major_version, minor_version
                binary_format = "universal2"
                yield f"macosx_{major_version}_{minor_version}_{binary_format}"
|
474 |
+
|
475 |
+
|
476 |
+
def ios_platforms(
    version: AppleVersion | None = None, multiarch: str | None = None
) -> Iterator[str]:
    """
    Yields the platform tags for an iOS system.

    :param version: A two-item tuple specifying the iOS version to generate
        platform tags for. Defaults to the current iOS version.
    :param multiarch: The CPU architecture+ABI to generate platform tags for -
        (the value used by `sys.implementation._multiarch` e.g.,
        `arm64_iphoneos` or `x86_64_iphonesimulator`). Defaults to the current
        multiarch value.
    """
    if version is None:
        # if iOS is the current platform, ios_ver *must* be defined. However,
        # it won't exist for CPython versions before 3.13, which causes a mypy
        # error.
        _, release, _, _ = platform.ios_ver()  # type: ignore[attr-defined, unused-ignore]
        version = cast("AppleVersion", tuple(map(int, release.split(".")[:2])))

    if multiarch is None:
        multiarch = sys.implementation._multiarch
    # Platform tags use "_" as the separator, e.g. "arm64_iphoneos".
    multiarch = multiarch.replace("-", "_")

    ios_platform_template = "ios_{major}_{minor}_{multiarch}"

    # Consider any iOS major.minor version from the version requested, down to
    # 12.0. 12.0 is the first iOS version that is known to have enough features
    # to support CPython. Consider every possible minor release up to X.9. The
    # highest the minor has ever gone is 8 (14.8 and 15.8) but having some extra
    # candidates that won't ever match doesn't really hurt, and it saves us from
    # having to keep an explicit list of known iOS versions in the code. Return
    # the results in descending order of version number.

    # If the requested major version is less than 12, there won't be any matches.
    if version[0] < 12:
        return

    # Consider the actual X.Y version that was requested.
    yield ios_platform_template.format(
        major=version[0], minor=version[1], multiarch=multiarch
    )

    # Consider every minor version from X.0 to the minor version prior to the
    # version requested by the platform.
    for minor in range(version[1] - 1, -1, -1):
        yield ios_platform_template.format(
            major=version[0], minor=minor, multiarch=multiarch
        )

    # For every earlier major version down to 12, consider minors 9..0.
    for major in range(version[0] - 1, 11, -1):
        for minor in range(9, -1, -1):
            yield ios_platform_template.format(
                major=major, minor=minor, multiarch=multiarch
            )
|
531 |
+
|
532 |
+
|
533 |
+
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield Linux platform tags: manylinux, musllinux, then plain linux_*."""
    linux = _normalize_string(sysconfig.get_platform())
    if not linux.startswith("linux_"):
        # we should never be here, just yield the sysconfig one and return
        yield linux
        return
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel reports the 32-bit arch.
        downgrades = {"linux_x86_64": "linux_i686", "linux_aarch64": "linux_armv8l"}
        linux = downgrades.get(linux, linux)
    _, arch = linux.split("_", 1)
    # armv8l binaries are also compatible with armv7l.
    archs = ["armv8l", "armv7l"] if arch == "armv8l" else [arch]
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    yield from (f"linux_{compat_arch}" for compat_arch in archs)
|
550 |
+
|
551 |
+
|
552 |
+
def _generic_platforms() -> Iterator[str]:
    # Fallback for non-macOS/iOS/Linux systems: the single (normalized)
    # platform string reported by sysconfig, e.g. "win_amd64".
    yield _normalize_string(sysconfig.get_platform())
|
554 |
+
|
555 |
+
|
556 |
+
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    # Dispatch on the OS name; anything unrecognized falls back to the
    # generic sysconfig-based platform.
    generators = {
        "Darwin": mac_platforms,
        "iOS": ios_platforms,
        "Linux": _linux_platforms,
    }
    return generators.get(platform.system(), _generic_platforms)()
|
568 |
+
|
569 |
+
|
570 |
+
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    full_name = sys.implementation.name
    short_name = INTERPRETER_SHORT_NAMES.get(full_name)
    return short_name or full_name
|
579 |
+
|
580 |
+
|
581 |
+
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter.
    """
    # Prefer the value sysconfig reports; fall back to sys.version_info.
    configured = _get_config_var("py_version_nodot", warn=warn)
    if configured:
        return str(configured)
    return _version_nodot(sys.version_info[:2])
|
591 |
+
|
592 |
+
|
593 |
+
def _version_nodot(version: PythonVersion) -> str:
    # Concatenate components without separators, e.g. (3, 11) -> "311".
    return "".join(str(part) for part in version)
|
595 |
+
|
596 |
+
|
597 |
+
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    interp_name = interpreter_name()
    is_cpython = interp_name == "cp"

    # Interpreter-specific tags first (most specific).
    yield from cpython_tags(warn=warn) if is_cpython else generic_tags()

    # Then the generic "py*" compatibility tags.
    if is_cpython:
        interp = "cp" + interpreter_version(warn=warn)
    elif interp_name == "pp":
        interp = "pp3"
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
|
utils.py
ADDED
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
from __future__ import annotations
|
6 |
+
|
7 |
+
import functools
|
8 |
+
import re
|
9 |
+
from typing import NewType, Tuple, Union, cast
|
10 |
+
|
11 |
+
from .tags import Tag, parse_tag
|
12 |
+
from .version import InvalidVersion, Version, _TrimmedRelease
|
13 |
+
|
14 |
+
BuildTag = Union[Tuple[()], Tuple[int, str]]
|
15 |
+
NormalizedName = NewType("NormalizedName", str)
|
16 |
+
|
17 |
+
|
18 |
+
class InvalidName(ValueError):
    """
    An invalid distribution name; users should refer to the packaging user guide.
    """
|
22 |
+
|
23 |
+
|
24 |
+
class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.
    """
|
28 |
+
|
29 |
+
|
30 |
+
class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.
    """
|
34 |
+
|
35 |
+
|
36 |
+
# Core metadata spec for `Name`
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
# PEP 503: any run of -, _ and . is equivalent and collapses to a single "-".
_canonicalize_regex = re.compile(r"[-_.]+")
# Matches names already in fully-normalized (lowercase, single-dash) form.
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
|
44 |
+
|
45 |
+
|
46 |
+
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    """Normalize a project name, optionally validating it first.

    :raises InvalidName: if ``validate`` is true and ``name`` is not a valid
        project name per the core metadata spec.
    """
    if validate and not _validate_regex.match(name):
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503.
    normalized = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, normalized)
|
52 |
+
|
53 |
+
|
54 |
+
def is_normalized_name(name: str) -> bool:
    """Return ``True`` if ``name`` is already in normalized form."""
    return bool(_normalized_regex.match(name))
|
56 |
+
|
57 |
+
|
58 |
+
@functools.singledispatch
def canonicalize_version(
    version: Version | str, *, strip_trailing_zero: bool = True
) -> str:
    """
    Return a canonical form of a version as a string.

    >>> canonicalize_version('1.0.1')
    '1.0.1'

    Per PEP 625, versions may have multiple canonical forms, differing
    only by trailing zeros.

    >>> canonicalize_version('1.0.0')
    '1'
    >>> canonicalize_version('1.0.0', strip_trailing_zero=False)
    '1.0.0'

    Invalid versions are returned unaltered.

    >>> canonicalize_version('foo bar baz')
    'foo bar baz'
    """
    if strip_trailing_zero:
        # Re-parse through _TrimmedRelease so trailing ".0" segments drop.
        return str(_TrimmedRelease(str(version)))
    return str(version)
|
82 |
+
|
83 |
+
|
84 |
+
@canonicalize_version.register
def _(version: str, *, strip_trailing_zero: bool = True) -> str:
    """String overload: parse, then delegate to the ``Version`` implementation."""
    try:
        parsed = Version(version)
    except InvalidVersion:
        # Legacy versions cannot be normalized
        return version
    return canonicalize_version(parsed, strip_trailing_zero=strip_trailing_zero)
|
92 |
+
|
93 |
+
|
94 |
+
def parse_wheel_filename(
    filename: str,
) -> tuple[NormalizedName, Version, BuildTag, frozenset[Tag]]:
    """Parse a wheel filename into (name, version, build tag, tags).

    :raises InvalidWheelFilename: if any component of the filename is invalid.
    """
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename!r}"
        )

    # Work on the stem from here on.
    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename!r}"
        )

    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename!r}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename!r}"
        ) from e

    build: BuildTag = ()
    if dashes == 5:
        # Optional build tag: digits then an arbitrary suffix.
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in {filename!r}"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))

    return (name, version, build, parse_tag(parts[-1]))
|
135 |
+
|
136 |
+
|
137 |
+
def parse_sdist_filename(filename: str) -> tuple[NormalizedName, Version]:
    """Parse an sdist filename into (normalized name, version).

    :raises InvalidSdistFilename: if the extension, name or version is invalid.
    """
    for suffix in (".tar.gz", ".zip"):
        if filename.endswith(suffix):
            file_stem = filename[: -len(suffix)]
            break
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename!r}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename!r}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename!r}"
        ) from e

    return (name, version)
|
version.py
ADDED
@@ -0,0 +1,582 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
"""
|
5 |
+
.. testsetup::
|
6 |
+
|
7 |
+
from packaging.version import parse, Version
|
8 |
+
"""
|
9 |
+
|
10 |
+
from __future__ import annotations
|
11 |
+
|
12 |
+
import itertools
|
13 |
+
import re
|
14 |
+
from typing import Any, Callable, NamedTuple, SupportsInt, Tuple, Union
|
15 |
+
|
16 |
+
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
|
17 |
+
|
18 |
+
__all__ = ["VERSION_PATTERN", "InvalidVersion", "Version", "parse"]
|
19 |
+
|
20 |
+
# Parsed local-version segment: mixed ints and strings, e.g. ("abc", 1).
LocalType = Tuple[Union[int, str], ...]

# Comparison-key element types. Infinity/NegativeInfinity sentinels sort
# absent pre/post/dev/local segments correctly relative to present ones.
CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
CmpLocalType = Union[
    NegativeInfinityType,
    Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
]
# Full sort key: (epoch, release, pre, post, dev, local).
CmpKey = Tuple[
    int,
    Tuple[int, ...],
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpPrePostDevType,
    CmpLocalType,
]
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
|
36 |
+
|
37 |
+
|
38 |
+
class _Version(NamedTuple):
    # Parsed pieces of a PEP 440 version string. Field order matters:
    # it is the NamedTuple constructor signature.
    epoch: int
    release: tuple[int, ...]
    dev: tuple[str, int] | None  # ("dev", N) or None
    pre: tuple[str, int] | None  # (letter, N), e.g. ("a", 1), or None
    post: tuple[str, int] | None  # ("post", N) or None
    local: LocalType | None  # parsed local segment or None
|
45 |
+
|
46 |
+
|
47 |
+
def parse(version: str) -> Version:
    """Parse the given version string.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    # Thin convenience wrapper around the Version constructor.
    return Version(version)
|
57 |
+
|
58 |
+
|
59 |
+
class InvalidVersion(ValueError):
    """Raised when a version string is not a valid version.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """
|
67 |
+
|
68 |
+
|
69 |
+
class _BaseVersion:
    """Total-ordering base class driven by a precomputed sort key.

    Subclasses populate ``_key``; all six rich comparisons and ``__hash__``
    delegate to it.
    """

    _key: tuple[Any, ...]

    def __hash__(self) -> int:
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other: _BaseVersion) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other: _BaseVersion) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other: _BaseVersion) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other: _BaseVersion) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key
|
113 |
+
|
114 |
+
|
115 |
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse. Callers that need a full-string match
# (like Version below) add their own ^/$ anchors and whitespace handling.
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
|
159 |
+
|
160 |
+
|
161 |
+
class Version(_BaseVersion):
|
162 |
+
"""This class abstracts handling of a project's versions.
|
163 |
+
|
164 |
+
A :class:`Version` instance is comparison aware and can be compared and
|
165 |
+
sorted using the standard Python interfaces.
|
166 |
+
|
167 |
+
>>> v1 = Version("1.0a5")
|
168 |
+
>>> v2 = Version("1.0")
|
169 |
+
>>> v1
|
170 |
+
<Version('1.0a5')>
|
171 |
+
>>> v2
|
172 |
+
<Version('1.0')>
|
173 |
+
>>> v1 < v2
|
174 |
+
True
|
175 |
+
>>> v1 == v2
|
176 |
+
False
|
177 |
+
>>> v1 > v2
|
178 |
+
False
|
179 |
+
>>> v1 >= v2
|
180 |
+
False
|
181 |
+
>>> v1 <= v2
|
182 |
+
True
|
183 |
+
"""
|
184 |
+
|
185 |
+
_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
|
186 |
+
_key: CmpKey
|
187 |
+
|
188 |
+
def __init__(self, version: str) -> None:
|
189 |
+
"""Initialize a Version object.
|
190 |
+
|
191 |
+
:param version:
|
192 |
+
The string representation of a version which will be parsed and normalized
|
193 |
+
before use.
|
194 |
+
:raises InvalidVersion:
|
195 |
+
If the ``version`` does not conform to PEP 440 in any way then this
|
196 |
+
exception will be raised.
|
197 |
+
"""
|
198 |
+
|
199 |
+
# Validate the version and parse it into pieces
|
200 |
+
match = self._regex.search(version)
|
201 |
+
if not match:
|
202 |
+
raise InvalidVersion(f"Invalid version: {version!r}")
|
203 |
+
|
204 |
+
# Store the parsed out pieces of the version
|
205 |
+
self._version = _Version(
|
206 |
+
epoch=int(match.group("epoch")) if match.group("epoch") else 0,
|
207 |
+
release=tuple(int(i) for i in match.group("release").split(".")),
|
208 |
+
pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
|
209 |
+
post=_parse_letter_version(
|
210 |
+
match.group("post_l"), match.group("post_n1") or match.group("post_n2")
|
211 |
+
),
|
212 |
+
dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
|
213 |
+
local=_parse_local_version(match.group("local")),
|
214 |
+
)
|
215 |
+
|
216 |
+
# Generate a key which will be used for sorting
|
217 |
+
self._key = _cmpkey(
|
218 |
+
self._version.epoch,
|
219 |
+
self._version.release,
|
220 |
+
self._version.pre,
|
221 |
+
self._version.post,
|
222 |
+
self._version.dev,
|
223 |
+
self._version.local,
|
224 |
+
)
|
225 |
+
|
226 |
+
def __repr__(self) -> str:
|
227 |
+
"""A representation of the Version that shows all internal state.
|
228 |
+
|
229 |
+
>>> Version('1.0.0')
|
230 |
+
<Version('1.0.0')>
|
231 |
+
"""
|
232 |
+
return f"<Version('{self}')>"
|
233 |
+
|
234 |
+
def __str__(self) -> str:
|
235 |
+
"""A string representation of the version that can be round-tripped.
|
236 |
+
|
237 |
+
>>> str(Version("1.0a5"))
|
238 |
+
'1.0a5'
|
239 |
+
"""
|
240 |
+
parts = []
|
241 |
+
|
242 |
+
# Epoch
|
243 |
+
if self.epoch != 0:
|
244 |
+
parts.append(f"{self.epoch}!")
|
245 |
+
|
246 |
+
# Release segment
|
247 |
+
parts.append(".".join(str(x) for x in self.release))
|
248 |
+
|
249 |
+
# Pre-release
|
250 |
+
if self.pre is not None:
|
251 |
+
parts.append("".join(str(x) for x in self.pre))
|
252 |
+
|
253 |
+
# Post-release
|
254 |
+
if self.post is not None:
|
255 |
+
parts.append(f".post{self.post}")
|
256 |
+
|
257 |
+
# Development release
|
258 |
+
if self.dev is not None:
|
259 |
+
parts.append(f".dev{self.dev}")
|
260 |
+
|
261 |
+
# Local version segment
|
262 |
+
if self.local is not None:
|
263 |
+
parts.append(f"+{self.local}")
|
264 |
+
|
265 |
+
return "".join(parts)
|
266 |
+
|
267 |
+
@property
|
268 |
+
def epoch(self) -> int:
|
269 |
+
"""The epoch of the version.
|
270 |
+
|
271 |
+
>>> Version("2.0.0").epoch
|
272 |
+
0
|
273 |
+
>>> Version("1!2.0.0").epoch
|
274 |
+
1
|
275 |
+
"""
|
276 |
+
return self._version.epoch
|
277 |
+
|
278 |
+
@property
|
279 |
+
def release(self) -> tuple[int, ...]:
|
280 |
+
"""The components of the "release" segment of the version.
|
281 |
+
|
282 |
+
>>> Version("1.2.3").release
|
283 |
+
(1, 2, 3)
|
284 |
+
>>> Version("2.0.0").release
|
285 |
+
(2, 0, 0)
|
286 |
+
>>> Version("1!2.0.0.post0").release
|
287 |
+
(2, 0, 0)
|
288 |
+
|
289 |
+
Includes trailing zeroes but not the epoch or any pre-release / development /
|
290 |
+
post-release suffixes.
|
291 |
+
"""
|
292 |
+
return self._version.release
|
293 |
+
|
294 |
+
@property
|
295 |
+
def pre(self) -> tuple[str, int] | None:
|
296 |
+
"""The pre-release segment of the version.
|
297 |
+
|
298 |
+
>>> print(Version("1.2.3").pre)
|
299 |
+
None
|
300 |
+
>>> Version("1.2.3a1").pre
|
301 |
+
('a', 1)
|
302 |
+
>>> Version("1.2.3b1").pre
|
303 |
+
('b', 1)
|
304 |
+
>>> Version("1.2.3rc1").pre
|
305 |
+
('rc', 1)
|
306 |
+
"""
|
307 |
+
return self._version.pre
|
308 |
+
|
309 |
+
@property
|
310 |
+
def post(self) -> int | None:
|
311 |
+
"""The post-release number of the version.
|
312 |
+
|
313 |
+
>>> print(Version("1.2.3").post)
|
314 |
+
None
|
315 |
+
>>> Version("1.2.3.post1").post
|
316 |
+
1
|
317 |
+
"""
|
318 |
+
return self._version.post[1] if self._version.post else None
|
319 |
+
|
320 |
+
@property
|
321 |
+
def dev(self) -> int | None:
|
322 |
+
"""The development number of the version.
|
323 |
+
|
324 |
+
>>> print(Version("1.2.3").dev)
|
325 |
+
None
|
326 |
+
>>> Version("1.2.3.dev1").dev
|
327 |
+
1
|
328 |
+
"""
|
329 |
+
return self._version.dev[1] if self._version.dev else None
|
330 |
+
|
331 |
+
@property
|
332 |
+
def local(self) -> str | None:
|
333 |
+
"""The local version segment of the version.
|
334 |
+
|
335 |
+
>>> print(Version("1.2.3").local)
|
336 |
+
None
|
337 |
+
>>> Version("1.2.3+abc").local
|
338 |
+
'abc'
|
339 |
+
"""
|
340 |
+
if self._version.local:
|
341 |
+
return ".".join(str(x) for x in self._version.local)
|
342 |
+
else:
|
343 |
+
return None
|
344 |
+
|
345 |
+
@property
|
346 |
+
def public(self) -> str:
|
347 |
+
"""The public portion of the version.
|
348 |
+
|
349 |
+
>>> Version("1.2.3").public
|
350 |
+
'1.2.3'
|
351 |
+
>>> Version("1.2.3+abc").public
|
352 |
+
'1.2.3'
|
353 |
+
>>> Version("1!1.2.3dev1+abc").public
|
354 |
+
'1!1.2.3.dev1'
|
355 |
+
"""
|
356 |
+
return str(self).split("+", 1)[0]
|
357 |
+
|
358 |
+
@property
def base_version(self) -> str:
    """The epoch and release segment only, without any pre, post, dev,
    or local markers.

    >>> Version("1.2.3").base_version
    '1.2.3'
    >>> Version("1.2.3+abc").base_version
    '1.2.3'
    >>> Version("1!1.2.3dev1+abc").base_version
    '1!1.2.3'
    """
    # A zero epoch is implicit and therefore not rendered.
    epoch_prefix = f"{self.epoch}!" if self.epoch != 0 else ""
    return epoch_prefix + ".".join(str(part) for part in self.release)
|
382 |
+
|
383 |
+
@property
def is_prerelease(self) -> bool:
    """Whether the version carries a pre-release or development segment.

    >>> Version("1.2.3").is_prerelease
    False
    >>> Version("1.2.3a1").is_prerelease
    True
    >>> Version("1.2.3.dev1").is_prerelease
    True
    """
    # Equivalent to: self.dev is not None or self.pre is not None
    return not (self.dev is None and self.pre is None)
|
399 |
+
|
400 |
+
@property
def is_postrelease(self) -> bool:
    """Whether the version carries a post-release segment.

    >>> Version("1.2.3").is_postrelease
    False
    >>> Version("1.2.3.post1").is_postrelease
    True
    """
    post_number = self.post
    return post_number is not None
|
410 |
+
|
411 |
+
@property
def is_devrelease(self) -> bool:
    """Whether the version carries a development segment.

    >>> Version("1.2.3").is_devrelease
    False
    >>> Version("1.2.3.dev1").is_devrelease
    True
    """
    dev_number = self.dev
    return dev_number is not None
|
421 |
+
|
422 |
+
@property
def major(self) -> int:
    """First item of :attr:`release`, or ``0`` when unavailable.

    >>> Version("1.2.3").major
    1
    """
    components = self.release
    return components[0] if components else 0
|
430 |
+
|
431 |
+
@property
def minor(self) -> int:
    """Second item of :attr:`release`, or ``0`` when unavailable.

    >>> Version("1.2.3").minor
    2
    >>> Version("1").minor
    0
    """
    components = self.release
    return components[1] if len(components) > 1 else 0
|
441 |
+
|
442 |
+
@property
def micro(self) -> int:
    """Third item of :attr:`release`, or ``0`` when unavailable.

    >>> Version("1.2.3").micro
    3
    >>> Version("1").micro
    0
    """
    components = self.release
    return components[2] if len(components) > 2 else 0
|
452 |
+
|
453 |
+
|
454 |
+
class _TrimmedRelease(Version):
    @property
    def release(self) -> tuple[int, ...]:
        """
        Release segment with trailing zeros removed; never emptied below
        a single component.

        >>> _TrimmedRelease('1.0.0').release
        (1,)
        >>> _TrimmedRelease('0.0').release
        (0,)
        """
        full = super().release
        # Walk back from the end past trailing zeros, but always keep at
        # least one component (so "0.0" trims to (0,), not ()).
        end = len(full)
        while end > 1 and full[end - 1] == 0:
            end -= 1
        return full[:end]
|
469 |
+
|
470 |
+
|
471 |
+
def _parse_letter_version(
|
472 |
+
letter: str | None, number: str | bytes | SupportsInt | None
|
473 |
+
) -> tuple[str, int] | None:
|
474 |
+
if letter:
|
475 |
+
# We consider there to be an implicit 0 in a pre-release if there is
|
476 |
+
# not a numeral associated with it.
|
477 |
+
if number is None:
|
478 |
+
number = 0
|
479 |
+
|
480 |
+
# We normalize any letters to their lower case form
|
481 |
+
letter = letter.lower()
|
482 |
+
|
483 |
+
# We consider some words to be alternate spellings of other words and
|
484 |
+
# in those cases we want to normalize the spellings to our preferred
|
485 |
+
# spelling.
|
486 |
+
if letter == "alpha":
|
487 |
+
letter = "a"
|
488 |
+
elif letter == "beta":
|
489 |
+
letter = "b"
|
490 |
+
elif letter in ["c", "pre", "preview"]:
|
491 |
+
letter = "rc"
|
492 |
+
elif letter in ["rev", "r"]:
|
493 |
+
letter = "post"
|
494 |
+
|
495 |
+
return letter, int(number)
|
496 |
+
|
497 |
+
assert not letter
|
498 |
+
if number:
|
499 |
+
# We assume if we are given a number, but we are not given a letter
|
500 |
+
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
501 |
+
letter = "post"
|
502 |
+
|
503 |
+
return letter, int(number)
|
504 |
+
|
505 |
+
return None
|
506 |
+
|
507 |
+
|
508 |
+
_local_version_separators = re.compile(r"[\._-]")
|
509 |
+
|
510 |
+
|
511 |
+
def _parse_local_version(local: str | None) -> LocalType | None:
    """
    Split a local version string like ``abc.1.twelve`` into comparison
    segments: ``("abc", 1, "twelve")``. Numeric parts become ints,
    everything else is lower-cased.
    """
    if local is None:
        return None
    return tuple(
        int(segment) if segment.isdigit() else segment.lower()
        for segment in _local_version_separators.split(local)
    )
|
521 |
+
|
522 |
+
|
523 |
+
def _cmpkey(
    epoch: int,
    release: tuple[int, ...],
    pre: tuple[str, int] | None,
    post: tuple[str, int] | None,
    dev: tuple[str, int] | None,
    local: LocalType | None,
) -> CmpKey:
    """Build the tuple used to order versions per the PEP 440 rules.

    Missing segments are replaced with Infinity/NegativeInfinity sentinels
    so plain tuple comparison yields the correct ordering.
    """
    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: CmpPrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: CmpPrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: CmpPrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: CmpLocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
|