diff --git a/.gitattributes b/.gitattributes index 18d20be7f2f3890da7118c502889e148d91b198f..db0bbba34bdbd4be8e34fe1ea84693e1ed13383f 100644 --- a/.gitattributes +++ b/.gitattributes @@ -136,3 +136,7 @@ lib/python3.10/site-packages/babel/locale-data/yue.dat filter=lfs diff=lfs merge lib/python3.10/site-packages/babel/locale-data/hi.dat filter=lfs diff=lfs merge=lfs -text lib/python3.10/site-packages/babel/locale-data/lo.dat filter=lfs diff=lfs merge=lfs -text lib/python3.10/site-packages/babel/locale-data/ak.dat filter=lfs diff=lfs merge=lfs -text +lib/python3.10/site-packages/babel/locale-data/to.dat filter=lfs diff=lfs merge=lfs -text +lib/python3.10/site-packages/babel/locale-data/yrl.dat filter=lfs diff=lfs merge=lfs -text +lib/python3.10/site-packages/babel/locale-data/yo.dat filter=lfs diff=lfs merge=lfs -text +lib/python3.10/site-packages/babel/locale-data/uk.dat filter=lfs diff=lfs merge=lfs -text diff --git a/lib/python3.10/site-packages/babel/locale-data/to.dat b/lib/python3.10/site-packages/babel/locale-data/to.dat new file mode 100644 index 0000000000000000000000000000000000000000..f36af97a72791fd576255701bc47d7fdd303394c --- /dev/null +++ b/lib/python3.10/site-packages/babel/locale-data/to.dat @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:081afa81182ed2aa3ffa42283a5b8fc2efd9747e3e48f3c91eda3f111703cc70 +size 145255 diff --git a/lib/python3.10/site-packages/babel/locale-data/uk.dat b/lib/python3.10/site-packages/babel/locale-data/uk.dat new file mode 100644 index 0000000000000000000000000000000000000000..79ee78c9b7f0386c2f4aa0f4db327b0d088dba63 --- /dev/null +++ b/lib/python3.10/site-packages/babel/locale-data/uk.dat @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6fc88486f2dd431abbaed96aa71d5cf6fa7afa7de33463c64ae0563616409c50 +size 339125 diff --git a/lib/python3.10/site-packages/babel/locale-data/yo.dat b/lib/python3.10/site-packages/babel/locale-data/yo.dat new file mode 100644 index 0000000000000000000000000000000000000000..f0aa6c3556f74cdca3b56e137b8799e723aad38a --- /dev/null +++ b/lib/python3.10/site-packages/babel/locale-data/yo.dat @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8962e0fb4f0593d4092d93eea2481b09e5e4c9fc9035aa2fe754d3de2d692a06 +size 110427 diff --git a/lib/python3.10/site-packages/babel/locale-data/yrl.dat b/lib/python3.10/site-packages/babel/locale-data/yrl.dat new file mode 100644 index 0000000000000000000000000000000000000000..143454882bac99eae575c7bfc5d38d9339496365 --- /dev/null +++ b/lib/python3.10/site-packages/babel/locale-data/yrl.dat @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da7c03bb0e50fdd2f81f63b19790a5cd9536530d748fe9283f94607e727aef2d +size 186854 diff --git a/lib/python3.10/site-packages/binaryornot/__init__.py b/lib/python3.10/site-packages/binaryornot/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..518255b16b20d95222190745acdf28f717e1913e --- /dev/null +++ b/lib/python3.10/site-packages/binaryornot/__init__.py @@ -0,0 +1,3 @@ +__author__ = 'Audrey Roy' +__email__ = 'audreyr@gmail.com' +__version__ = '0.4.4' diff --git a/lib/python3.10/site-packages/binaryornot/check.py b/lib/python3.10/site-packages/binaryornot/check.py new file mode 100644 index 0000000000000000000000000000000000000000..a784e3a77f6730edf64ee3c9c908c30778c60083 --- /dev/null +++ b/lib/python3.10/site-packages/binaryornot/check.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- + +""" +binaryornot.check +----------------- + +Main code for 
checking if a file is binary or text. +""" + +import logging + +from .helpers import get_starting_chunk, is_binary_string + + +logger = logging.getLogger(__name__) + + +def is_binary(filename): + """ + :param filename: File to check. + :returns: True if it's a binary file, otherwise False. + """ + logger.debug('is_binary: %(filename)r', locals()) + + # Check if the file extension is in a list of known binary types + binary_extensions = ['.pyc', ] + for ext in binary_extensions: + if filename.endswith(ext): + return True + + # Check if the starting chunk is a binary string + chunk = get_starting_chunk(filename) + return is_binary_string(chunk) diff --git a/lib/python3.10/site-packages/binaryornot/helpers.py b/lib/python3.10/site-packages/binaryornot/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..3f034a695aa76fe736bce2dab34548fde654b6fa --- /dev/null +++ b/lib/python3.10/site-packages/binaryornot/helpers.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- + + +""" +binaryornot.helpers +------------------- + +Helper utilities used by BinaryOrNot. +""" + +import chardet +import logging + + +logger = logging.getLogger(__name__) + + +def print_as_hex(s): + """ + Print a string as hex bytes. + """ + print(":".join("{0:x}".format(ord(c)) for c in s)) + + +def get_starting_chunk(filename, length=1024): + """ + :param filename: File to open and get the first little chunk of. + :param length: Number of bytes to read, default 1024. + :returns: Starting chunk of bytes. + """ + # Ensure we open the file in binary mode + try: + with open(filename, 'rb') as f: + chunk = f.read(length) + return chunk + except IOError as e: + print(e) + + +_control_chars = b'\n\r\t\f\b' +if bytes is str: + # Python 2 means we need to invoke chr() explicitly + _printable_ascii = _control_chars + b''.join(map(chr, range(32, 127))) + _printable_high_ascii = b''.join(map(chr, range(127, 256))) +else: + # Python 3 means bytes accepts integer input directly + _printable_ascii = _control_chars + bytes(range(32, 127)) + _printable_high_ascii = bytes(range(127, 256)) + + +def is_binary_string(bytes_to_check): + """ + Uses a simplified version of the Perl detection algorithm, + based roughly on Eli Bendersky's translation to Python: + http://eli.thegreenplace.net/2011/10/19/perls-guess-if-file-is-text-or-binary-implemented-in-python/ + + This is biased slightly more in favour of deeming files as text + files than the Perl algorithm, since all ASCII-compatible character + sets are accepted as text, not just utf-8. + + :param bytes_to_check: A chunk of bytes to check. + :returns: True if the chunk appears to be binary, otherwise False. + """ + + # Empty files are considered text files + if not bytes_to_check: + return False + + # Now check for a high percentage of ASCII control characters + # Binary if control chars are > 30% of the string + low_chars = bytes_to_check.translate(None, _printable_ascii) + nontext_ratio1 = float(len(low_chars)) / float(len(bytes_to_check)) + logger.debug('nontext_ratio1: %(nontext_ratio1)r', locals()) + + # and check for a low percentage of high ASCII characters: + # Binary if high ASCII chars are < 5% of the string + # From: https://en.wikipedia.org/wiki/UTF-8 + # If the bytes are random, the chances of a byte with the high bit set + # starting a valid UTF-8 character is only 6.64%. The chances of finding 7 + # of these without finding an invalid sequence is actually lower than the + # chance of the first three bytes randomly being the UTF-8 BOM.
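+    # Worked example (hypothetical 100-byte chunk): if 97 bytes fall in the
+    # 128-255 range and the other 3 are printable ASCII, nontext_ratio1
+    # above comes out as 0.97 (97 bytes survive stripping _printable_ascii)
+    # and nontext_ratio2 below as 0.03 (3 bytes survive stripping
+    # _printable_high_ascii), so the first branch of the is_likely_binary
+    # test classifies the chunk as binary.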
+ + high_chars = bytes_to_check.translate(None, _printable_high_ascii) + nontext_ratio2 = float(len(high_chars)) / float(len(bytes_to_check)) + logger.debug('nontext_ratio2: %(nontext_ratio2)r', locals()) + + is_likely_binary = ( + (nontext_ratio1 > 0.3 and nontext_ratio2 < 0.05) or + (nontext_ratio1 > 0.8 and nontext_ratio2 > 0.8) + ) + logger.debug('is_likely_binary: %(is_likely_binary)r', locals()) + + # then run chardet to try to detect an encoding + detected_encoding = chardet.detect(bytes_to_check) + logger.debug('detected_encoding: %(detected_encoding)r', locals()) + + # finally use all the checks to decide binary or text + decodable_as_unicode = False + if (detected_encoding['confidence'] > 0.9 and + detected_encoding['encoding'] != 'ascii'): + try: + try: + bytes_to_check.decode(encoding=detected_encoding['encoding']) + except TypeError: + # happens only on Python 2.6 + unicode(bytes_to_check, encoding=detected_encoding['encoding']) # noqa + decodable_as_unicode = True + logger.debug('success: decodable_as_unicode: ' + '%(decodable_as_unicode)r', locals()) + except LookupError: + logger.debug('failure: could not look up encoding %(encoding)s', + detected_encoding) + except UnicodeDecodeError: + logger.debug('failure: decodable_as_unicode: ' + '%(decodable_as_unicode)r', locals()) + + if is_likely_binary: + if decodable_as_unicode: + return False + else: + return True + else: + if decodable_as_unicode: + return False + else: + if b'\x00' in bytes_to_check or b'\xff' in bytes_to_check: + # Check for NULL bytes last + logger.debug('has nulls:' + repr(b'\x00' in bytes_to_check)) + return True + return False diff --git a/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/INSTALLER b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/METADATA b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..acbd21a58d939bd0d19b44e48acf3b5be750a794 --- /dev/null +++ b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/METADATA @@ -0,0 +1,140 @@ +Metadata-Version: 2.3 +Name: cryptography +Version: 44.0.2 +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Natural Language :: English +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: POSIX +Classifier: Operating System :: POSIX :: BSD +Classifier: Operating System :: POSIX :: Linux +Classifier: Operating System :: Microsoft :: Windows +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming
Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Security :: Cryptography +Requires-Dist: cffi >=1.12 ; platform_python_implementation != 'PyPy' +Requires-Dist: bcrypt >=3.1.5 ; extra == 'ssh' +Requires-Dist: nox >=2024.4.15 ; extra == 'nox' +Requires-Dist: nox[uv] >=2024.3.2 ; python_version >= '3.8' and extra == 'nox' +Requires-Dist: cryptography-vectors ==44.0.2 ; extra == 'test' +Requires-Dist: pytest >=7.4.0 ; extra == 'test' +Requires-Dist: pytest-benchmark >=4.0 ; extra == 'test' +Requires-Dist: pytest-cov >=2.10.1 ; extra == 'test' +Requires-Dist: pytest-xdist >=3.5.0 ; extra == 'test' +Requires-Dist: pretend >=0.7 ; extra == 'test' +Requires-Dist: certifi >=2024 ; extra == 'test' +Requires-Dist: pytest-randomly ; extra == 'test-randomorder' +Requires-Dist: sphinx >=5.3.0 ; extra == 'docs' +Requires-Dist: sphinx-rtd-theme >=3.0.0 ; python_version >= '3.8' and extra == 'docs' +Requires-Dist: pyenchant >=3 ; extra == 'docstest' +Requires-Dist: readme-renderer >=30.0 ; extra == 'docstest' +Requires-Dist: sphinxcontrib-spelling >=7.3.1 ; extra == 'docstest' +Requires-Dist: build >=1.0.0 ; extra == 'sdist' +Requires-Dist: ruff >=0.3.6 ; extra == 'pep8test' +Requires-Dist: mypy >=1.4 ; extra == 'pep8test' +Requires-Dist: check-sdist ; python_version >= '3.8' and extra == 'pep8test' +Requires-Dist: click >=8.0.1 ; extra == 'pep8test' +Provides-Extra: ssh +Provides-Extra: nox +Provides-Extra: test +Provides-Extra: test-randomorder +Provides-Extra: docs +Provides-Extra: docstest +Provides-Extra: sdist +Provides-Extra: pep8test +License-File: LICENSE +License-File: LICENSE.APACHE +License-File: LICENSE.BSD +Summary: cryptography is a package which provides cryptographic recipes and primitives to Python developers. +Author: The cryptography developers +Author-email: The Python Cryptographic Authority and individual contributors +License: Apache-2.0 OR BSD-3-Clause +Requires-Python: >=3.7, !=3.9.0, !=3.9.1 +Description-Content-Type: text/x-rst; charset=UTF-8 +Project-URL: homepage, https://github.com/pyca/cryptography +Project-URL: documentation, https://cryptography.io/ +Project-URL: source, https://github.com/pyca/cryptography/ +Project-URL: issues, https://github.com/pyca/cryptography/issues +Project-URL: changelog, https://cryptography.io/en/latest/changelog/ + +pyca/cryptography +================= + +.. image:: https://img.shields.io/pypi/v/cryptography.svg + :target: https://pypi.org/project/cryptography/ + :alt: Latest Version + +.. image:: https://readthedocs.org/projects/cryptography/badge/?version=latest + :target: https://cryptography.io + :alt: Latest Docs + +.. image:: https://github.com/pyca/cryptography/workflows/CI/badge.svg?branch=main + :target: https://github.com/pyca/cryptography/actions?query=workflow%3ACI+branch%3Amain + + +``cryptography`` is a package which provides cryptographic recipes and +primitives to Python developers. Our goal is for it to be your "cryptographic +standard library". It supports Python 3.7+ and PyPy3 7.3.11+. + +``cryptography`` includes both high level recipes and low level interfaces to +common cryptographic algorithms such as symmetric ciphers, message digests, and +key derivation functions. For example, to encrypt something with +``cryptography``'s high level symmetric encryption recipe: + +.. code-block:: pycon + + >>> from cryptography.fernet import Fernet + >>> # Put this somewhere safe! 
+ >>> key = Fernet.generate_key() + >>> f = Fernet(key) + >>> token = f.encrypt(b"A really secret message. Not for prying eyes.") + >>> token + b'...' + >>> f.decrypt(token) + b'A really secret message. Not for prying eyes.' + +You can find more information in the `documentation`_. + +You can install ``cryptography`` with: + +.. code-block:: console + + $ pip install cryptography + +For full details see `the installation documentation`_. + +Discussion +~~~~~~~~~~ + +If you run into bugs, you can file them in our `issue tracker`_. + +We maintain a `cryptography-dev`_ mailing list for development discussion. + +You can also join ``#pyca`` on ``irc.libera.chat`` to ask questions or get +involved. + +Security +~~~~~~~~ + +Need to report a security issue? Please consult our `security reporting`_ +documentation. + + +.. _`documentation`: https://cryptography.io/ +.. _`the installation documentation`: https://cryptography.io/en/latest/installation/ +.. _`issue tracker`: https://github.com/pyca/cryptography/issues +.. _`cryptography-dev`: https://mail.python.org/mailman/listinfo/cryptography-dev +.. _`security reporting`: https://cryptography.io/en/latest/security/ + diff --git a/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/RECORD b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..79aa087f8b7b4924316b8e5439738f660d723c5a --- /dev/null +++ b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/RECORD @@ -0,0 +1,113 @@ +cryptography-44.0.2.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +cryptography-44.0.2.dist-info/METADATA,sha256=cgphHIHoULnRY3G2C7Eh6Wp2QMwMBmNMLXtz_1ld7ho,5724 +cryptography-44.0.2.dist-info/RECORD,, +cryptography-44.0.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cryptography-44.0.2.dist-info/WHEEL,sha256=UpI5w5M2-qnzZdHNjisIw1dag11TStMro3kowqBDR6k,107 +cryptography-44.0.2.dist-info/licenses/LICENSE,sha256=Pgx8CRqUi4JTO6mP18u0BDLW8amsv4X1ki0vmak65rs,197 +cryptography-44.0.2.dist-info/licenses/LICENSE.APACHE,sha256=qsc7MUj20dcRHbyjIJn2jSbGRMaBOuHk8F9leaomY_4,11360 +cryptography-44.0.2.dist-info/licenses/LICENSE.BSD,sha256=YCxMdILeZHndLpeTzaJ15eY9dz2s0eymiSMqtwCPtPs,1532 +cryptography/__about__.py,sha256=LsHy-0b4kwxfAD0ryobJhitlFn7Tk8Sepunxo8YcUZs,445 +cryptography/__init__.py,sha256=XsRL_PxbU6UgoyoglAgJQSrJCP97ovBA8YIEQ2-uI68,762 +cryptography/exceptions.py,sha256=835EWILc2fwxw-gyFMriciC2SqhViETB10LBSytnDIc,1087 +cryptography/fernet.py,sha256=aMU2HyDJ5oRGjg8AkFvHwE7BSmHY4fVUCaioxZcd8gA,6933 +cryptography/hazmat/__init__.py,sha256=5IwrLWrVp0AjEr_4FdWG_V057NSJGY_W4egNNsuct0g,455 +cryptography/hazmat/_oid.py,sha256=xcGtygUQX1p2ozVjhqKk016E5--BC7ituI1EGuoiWds,15294 +cryptography/hazmat/backends/__init__.py,sha256=O5jvKFQdZnXhKeqJ-HtulaEL9Ni7mr1mDzZY5kHlYhI,361 +cryptography/hazmat/backends/openssl/__init__.py,sha256=p3jmJfnCag9iE5sdMrN6VvVEu55u46xaS_IjoI0SrmA,305 +cryptography/hazmat/backends/openssl/backend.py,sha256=Bk_inezh7fBN3jsxMu1YIkf10zryfup6opBDLVFiNms,9413 +cryptography/hazmat/bindings/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/_rust.abi3.so,sha256=-fS4tbc3TKtgMDEaLqdxNdIjuJjKA2IIIkbtufR1Gd8,11514880 +cryptography/hazmat/bindings/_rust/__init__.pyi,sha256=s73-NWxZs-5r2vAzDT9Eqo9mRiWE__A4VJKyFBkjHdM,879 +cryptography/hazmat/bindings/_rust/_openssl.pyi,sha256=mpNJLuYLbCVrd5i33FBTmWwL_55Dw7JPkSLlSX9Q7oI,230 
+cryptography/hazmat/bindings/_rust/asn1.pyi,sha256=BrGjC8J6nwuS-r3EVcdXJB8ndotfY9mbQYOfpbPG0HA,354 +cryptography/hazmat/bindings/_rust/exceptions.pyi,sha256=exXr2xw_0pB1kk93cYbM3MohbzoUkjOms1ZMUi0uQZE,640 +cryptography/hazmat/bindings/_rust/ocsp.pyi,sha256=mNrMO5sYEnftD_b2-NvvR6M8QdYGZ1jpTdazpgzXgl0,4004 +cryptography/hazmat/bindings/_rust/openssl/__init__.pyi,sha256=FS2gi2eALVzqTTic8an8enD431pkwKbRxeAZaNMV4Ts,1410 +cryptography/hazmat/bindings/_rust/openssl/aead.pyi,sha256=i0gA3jUQ4rkJXTGGZrq-AuY-VQLN31lyDeWuDZ0zJYw,2553 +cryptography/hazmat/bindings/_rust/openssl/ciphers.pyi,sha256=iK0ZhQ-WyCQbjaraaFgK6q4PpD-7Rf5RDHkFD3YEW_g,1301 +cryptography/hazmat/bindings/_rust/openssl/cmac.pyi,sha256=nPH0X57RYpsAkRowVpjQiHE566ThUTx7YXrsadmrmHk,564 +cryptography/hazmat/bindings/_rust/openssl/dh.pyi,sha256=Z3TC-G04-THtSdAOPLM1h2G7ml5bda1ElZUcn5wpuhk,1564 +cryptography/hazmat/bindings/_rust/openssl/dsa.pyi,sha256=qBtkgj2albt2qFcnZ9UDrhzoNhCVO7HTby5VSf1EXMI,1299 +cryptography/hazmat/bindings/_rust/openssl/ec.pyi,sha256=zJy0pRa5n-_p2dm45PxECB_-B6SVZyNKfjxFDpPqT38,1691 +cryptography/hazmat/bindings/_rust/openssl/ed25519.pyi,sha256=OJsrblS2nHptZctva-pAKFL5q8yPEAkhmjPZpJ6TA94,493 +cryptography/hazmat/bindings/_rust/openssl/ed448.pyi,sha256=SkPHK2HdbYN02TVQEUOgW3iTdiEY7HBE4DijpdkAzmk,475 +cryptography/hazmat/bindings/_rust/openssl/hashes.pyi,sha256=p8sdf41mPBlV_W9v_18JItuMoHE8UkBxj9Tuqi0WiTE,639 +cryptography/hazmat/bindings/_rust/openssl/hmac.pyi,sha256=ZmLJ73pmxcZFC1XosWEiXMRYtvJJor3ZLdCQOJu85Cw,662 +cryptography/hazmat/bindings/_rust/openssl/kdf.pyi,sha256=hvZSV2C3MQd9jC1Tuh5Lsb0iGBgcLVF2xFYdTo7USO4,1129 +cryptography/hazmat/bindings/_rust/openssl/keys.pyi,sha256=JSrlGNaW49ZCZ1hcb-YJdS1EAbsMwRbVEcLL0P9OApA,872 +cryptography/hazmat/bindings/_rust/openssl/poly1305.pyi,sha256=9iogF7Q4i81IkOS-IMXp6HvxFF_3cNy_ucrAjVQnn14,540 +cryptography/hazmat/bindings/_rust/openssl/rsa.pyi,sha256=2OQCNSXkxgc-3uw1xiCCloIQTV6p9_kK79Yu0rhZgPc,1364 +cryptography/hazmat/bindings/_rust/openssl/x25519.pyi,sha256=2BKdbrddM_9SMUpdvHKGhb9MNjURCarPxccbUDzHeoA,484 +cryptography/hazmat/bindings/_rust/openssl/x448.pyi,sha256=AoRMWNvCJTiH5L-lkIkCdPlrPLUdJvvfXpIvf1GmxpM,466 +cryptography/hazmat/bindings/_rust/pkcs12.pyi,sha256=afhB_6M8xI1MIE5vxkaDF1jSxA48ib1--NiOxtf6boM,1394 +cryptography/hazmat/bindings/_rust/pkcs7.pyi,sha256=Ag9coB8kRwrUJEg1do6BJABs9DqxZiY8WJIFUVa7StE,1545 +cryptography/hazmat/bindings/_rust/test_support.pyi,sha256=FXe7t_tqI3e9ULirYcr5Zlw5szGY7TiZyb7W83ak0Nk,718 +cryptography/hazmat/bindings/_rust/x509.pyi,sha256=0p-Ak_zj-9WfyZKPo08YT6cOx1c-lhjeYd0jJ8c4oT0,8318 +cryptography/hazmat/bindings/openssl/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/bindings/openssl/_conditional.py,sha256=dkGKGU-22uR2ZKeOOwaSxEJCGaafgUjb2romWcu03QE,5163 +cryptography/hazmat/bindings/openssl/binding.py,sha256=e1gnFAZBPrkJ3CsiZV-ug6kaPdNTAEROaUFiFrUh71M,4042 +cryptography/hazmat/decrepit/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/ciphers/__init__.py,sha256=wHCbWfaefa-fk6THSw9th9fJUsStJo7245wfFBqmduA,216 +cryptography/hazmat/decrepit/ciphers/algorithms.py,sha256=HWA4PKDS2w4D2dQoRerpLRU7Kntt5vJeJC7j--AlZVU,2520 +cryptography/hazmat/primitives/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/_asymmetric.py,sha256=RhgcouUB6HTiFDBrR1LxqkMjpUxIiNvQ1r_zJjRG6qQ,532 +cryptography/hazmat/primitives/_cipheralgorithm.py,sha256=gKa0WrLz6K4fqhnGbfBYKDSxgLxsPU0uj_EK2UT47W4,1495 
+cryptography/hazmat/primitives/_serialization.py,sha256=qrozc8fw2WZSbjk3DAlSl3ResxpauwJ74ZgGoUL-mj0,5142 +cryptography/hazmat/primitives/asymmetric/__init__.py,sha256=s9oKCQ2ycFdXoERdS1imafueSkBsL9kvbyfghaauZ9Y,180 +cryptography/hazmat/primitives/asymmetric/dh.py,sha256=OOCjMClH1Bf14Sy7jAdwzEeCxFPb8XUe2qePbExvXwc,3420 +cryptography/hazmat/primitives/asymmetric/dsa.py,sha256=xBwdf0pZOgvqjUKcO7Q0L3NxwalYj0SJDUqThemhSmI,3945 +cryptography/hazmat/primitives/asymmetric/ec.py,sha256=lwZmtAwi3PM8lsY1MsNaby_bVi--49OCxwE_1yqKC-A,10428 +cryptography/hazmat/primitives/asymmetric/ed25519.py,sha256=kl63fg7myuMjNTmMoVFeH6iVr0x5FkjNmggxIRTloJk,3423 +cryptography/hazmat/primitives/asymmetric/ed448.py,sha256=2UzEDzzfkPn83UFVFlMZfIMbAixxY09WmQyrwinWTn8,3456 +cryptography/hazmat/primitives/asymmetric/padding.py,sha256=eZcvUqVLbe3u48SunLdeniaPlV4-k6pwBl67OW4jSy8,2885 +cryptography/hazmat/primitives/asymmetric/rsa.py,sha256=dvj4i2js78qpgotEKn3SU5Eh2unDSMiZpTVo2kx_NWU,7668 +cryptography/hazmat/primitives/asymmetric/types.py,sha256=LnsOJym-wmPUJ7Knu_7bCNU3kIiELCd6krOaW_JU08I,2996 +cryptography/hazmat/primitives/asymmetric/utils.py,sha256=DPTs6T4F-UhwzFQTh-1fSEpQzazH2jf2xpIro3ItF4o,790 +cryptography/hazmat/primitives/asymmetric/x25519.py,sha256=VGYuRdIYuVBtizpFdNWd2bTrT10JRa1admQdBr08xz8,3341 +cryptography/hazmat/primitives/asymmetric/x448.py,sha256=GKKJBqYLr03VewMF18bXIM941aaWcZIQ4rC02GLLEmw,3374 +cryptography/hazmat/primitives/ciphers/__init__.py,sha256=eyEXmjk6_CZXaOPYDr7vAYGXr29QvzgWL2-4CSolLFs,680 +cryptography/hazmat/primitives/ciphers/aead.py,sha256=Fzlyx7w8KYQakzDp1zWgJnIr62zgZrgVh1u2h4exB54,634 +cryptography/hazmat/primitives/ciphers/algorithms.py,sha256=cPzrUizm_RfUi7DDqf3WNezkFy2IxfllsJv6s16bWS8,4493 +cryptography/hazmat/primitives/ciphers/base.py,sha256=tg-XNaKUyETBi7ounGDEL1_ICn-s4FF9LR7moV58blI,4211 +cryptography/hazmat/primitives/ciphers/modes.py,sha256=BFpxEGSaxoeZjrQ4sqpyPDvKClrqfDKIBv7kYtFURhE,8192 +cryptography/hazmat/primitives/cmac.py,sha256=sz_s6H_cYnOvx-VNWdIKhRhe3Ymp8z8J0D3CBqOX3gg,338 +cryptography/hazmat/primitives/constant_time.py,sha256=xdunWT0nf8OvKdcqUhhlFKayGp4_PgVJRU2W1wLSr_A,422 +cryptography/hazmat/primitives/hashes.py,sha256=EvDIJBhj83Z7f-oHbsA0TzZLFSDV_Yv8hQRdM4o8FD0,5091 +cryptography/hazmat/primitives/hmac.py,sha256=RpB3z9z5skirCQrm7zQbtnp9pLMnAjrlTUvKqF5aDDc,423 +cryptography/hazmat/primitives/kdf/__init__.py,sha256=4XibZnrYq4hh5xBjWiIXzaYW6FKx8hPbVaa_cB9zS64,750 +cryptography/hazmat/primitives/kdf/argon2.py,sha256=UFDNXG0v-rw3DqAQTB1UQAsQC2M5Ejg0k_6OCyhLKus,460 +cryptography/hazmat/primitives/kdf/concatkdf.py,sha256=bcn4NGXse-EsFl7nlU83e5ilop7TSHcX-CJJS107W80,3686 +cryptography/hazmat/primitives/kdf/hkdf.py,sha256=uhN5L87w4JvtAqQcPh_Ji2TPSc18IDThpaYJiHOWy3A,3015 +cryptography/hazmat/primitives/kdf/kbkdf.py,sha256=eSuLK1sATkamgCAit794jLr7sDNlu5X0USdcWhwJdmk,9146 +cryptography/hazmat/primitives/kdf/pbkdf2.py,sha256=Xj3YIeX30h2BUaoJAtOo1RMXV_em0-eCG0PU_0FHJzM,1950 +cryptography/hazmat/primitives/kdf/scrypt.py,sha256=XyWUdUUmhuI9V6TqAPOvujCSMGv1XQdg0a21IWCmO-U,590 +cryptography/hazmat/primitives/kdf/x963kdf.py,sha256=wCpWmwQjZ2vAu2rlk3R_PX0nINl8WGXYBmlyMOC5iPw,1992 +cryptography/hazmat/primitives/keywrap.py,sha256=XV4Pj2fqSeD-RqZVvY2cA3j5_7RwJSFygYuLfk2ujCo,5650 +cryptography/hazmat/primitives/padding.py,sha256=Qu1VVsCiqfQMPPqU0qU6ig9Y802jZlXVOUDLIxN5KeQ,4932 +cryptography/hazmat/primitives/poly1305.py,sha256=P5EPQV-RB_FJPahpg01u0Ts4S_PnAmsroxIGXbGeRRo,355 +cryptography/hazmat/primitives/serialization/__init__.py,sha256=jyNx_7NcOEbVRBY4nP9ks0IVXBafbcYnTK27vafPLW8,1653 
+cryptography/hazmat/primitives/serialization/base.py,sha256=ikq5MJIwp_oUnjiaBco_PmQwOTYuGi-XkYUYHKy8Vo0,615 +cryptography/hazmat/primitives/serialization/pkcs12.py,sha256=7vVXbiP7qhhvKAHJT_M8-LBZdbpOwrpWRHWxNrNqzXE,4492 +cryptography/hazmat/primitives/serialization/pkcs7.py,sha256=n25jEw__vkZWSlumwgYnqJ0lzyPh5xljMsJDyp2QomM,12346 +cryptography/hazmat/primitives/serialization/ssh.py,sha256=VKscMrVdYK5B9PQISjjdRMglRvqa_L3sDNm5vdjVHJY,51915 +cryptography/hazmat/primitives/twofactor/__init__.py,sha256=tmMZGB-g4IU1r7lIFqASU019zr0uPp_wEBYcwdDCKCA,258 +cryptography/hazmat/primitives/twofactor/hotp.py,sha256=rv507uNznUs22XlaqGBbZKkkGjmiTUAcwghTYMem6uM,3219 +cryptography/hazmat/primitives/twofactor/totp.py,sha256=BQ0oPTp2JW1SMZqdgv95NBG3u_ODiDtzVJENHWYhvXY,1613 +cryptography/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cryptography/utils.py,sha256=Rp7ppg4XIBVVzNQ6XngGndwkICJoYp6FoFOOgTWLJ7g,3925 +cryptography/x509/__init__.py,sha256=Q8P-MnUGrgFxRt5423bE-gzSvgZLAdddWuPheHnuA_c,8132 +cryptography/x509/base.py,sha256=-F5KWjxbyjSqluUSr7LRC_sqN_s-qHP5K0rW-41PI4E,26909 +cryptography/x509/certificate_transparency.py,sha256=JqoOIDhlwInrYMFW6IFn77WJ0viF-PB_rlZV3vs9MYc,797 +cryptography/x509/extensions.py,sha256=iX-3WFm4yFjstFIs1F30f3tixIp6i0WgGdc6GwJ-QiQ,76158 +cryptography/x509/general_name.py,sha256=sP_rV11Qlpsk4x3XXGJY_Mv0Q_s9dtjeLckHsjpLQoQ,7836 +cryptography/x509/name.py,sha256=MYCxCSTQTpzhjxFPZaANqJ9fGrhESH73vPkoay8HSWM,14830 +cryptography/x509/ocsp.py,sha256=vbrg3p1hBJQEEFIZ35GHcjbGwTrsxPhlot-OVpyP-C8,11390 +cryptography/x509/oid.py,sha256=X8EbhkRTLrGuv9vHZSGqPd9zpvRVsonU_joWAL5LLY8,885 +cryptography/x509/verification.py,sha256=alfx3VaTSb2bMz7_7s788oL90vzgHwBjVINssdz0Gv0,796 +rust/Cargo.toml,sha256=gaBJTn9TwBCG7U3JgETYbTmK8DNUxl4gKKS65nDWuwM,1320 +rust/cryptography-cffi/Cargo.toml,sha256=CjVBJTYW1TwzXgLgY8TZ92NP_9XSmHzSfRIzVaZh9Bk,386 +rust/cryptography-keepalive/Cargo.toml,sha256=_ABt1o-uFnxDqhb7YzNynb6YEQ2eW2QpnPD1RXBUsrI,210 +rust/cryptography-key-parsing/Cargo.toml,sha256=yLWh172kspq6BJVZA2PjFw17Rt0xTYKn_TTzp3IVhxg,455 +rust/cryptography-openssl/Cargo.toml,sha256=mI0cIDv-kQTl24C-bLvDCqiWn6QobBdqCMYSi_UWPE0,545 +rust/cryptography-x509-verification/Cargo.toml,sha256=vECbxPiNu-dQhW4baTuSPzgqaBnBgwZYnJCSaJQbIUA,426 +rust/cryptography-x509/Cargo.toml,sha256=wAuwnc1eKnSUNFjf4GpQM__FTig-hqF2ZPXJPmqb6cA,248 diff --git a/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/REQUESTED b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/WHEEL b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..cec5cd708218dd5a0816512e5d562f258698b125 --- /dev/null +++ b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: maturin (1.7.5) +Root-Is-Purelib: false +Tag: cp39-abi3-manylinux_2_34_x86_64 + diff --git a/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..b11f379efe1504d235b4d2d42685ba5dc6af6e9f --- /dev/null +++ b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses 
+found in LICENSE.APACHE or LICENSE.BSD. Contributions to cryptography are made +under the terms of *both* these licenses. diff --git a/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE.APACHE b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE.APACHE new file mode 100644 index 0000000000000000000000000000000000000000..62589edd12a37dd28b6b6fed1e2d728ac9f05c8d --- /dev/null +++ b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE.APACHE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + https://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE.BSD b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE.BSD new file mode 100644 index 0000000000000000000000000000000000000000..ec1a29d34d6e419411c75523408aca72f705345c --- /dev/null +++ b/lib/python3.10/site-packages/cryptography-44.0.2.dist-info/licenses/LICENSE.BSD @@ -0,0 +1,27 @@ +Copyright (c) Individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + + 3. Neither the name of PyCA Cryptography nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/lib/python3.10/site-packages/csvw-3.5.1.dist-info/LICENSE b/lib/python3.10/site-packages/csvw-3.5.1.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..261eeb9e9f8b2b4b0d119366dda99c6fd7d35c64 --- /dev/null +++ b/lib/python3.10/site-packages/csvw-3.5.1.dist-info/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. 
We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/python3.10/site-packages/csvw-3.5.1.dist-info/METADATA b/lib/python3.10/site-packages/csvw-3.5.1.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..c36bdf25c12b7d5e577b5b8a0b6ea271ae0636f4 --- /dev/null +++ b/lib/python3.10/site-packages/csvw-3.5.1.dist-info/METADATA @@ -0,0 +1,301 @@ +Metadata-Version: 2.1 +Name: csvw +Version: 3.5.1 +Summary: Python library to work with CSVW described tabular data +Home-page: https://github.com/cldf/csvw +Author: Robert Forkel +Author-email: robert_forkel@eva.mpg.de +License: Apache 2.0 +Project-URL: Bug Tracker, https://github.com/cldf/csvw/issues +Keywords: csv,w3c,tabular-data +Platform: any +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: Natural Language :: English +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: License :: OSI Approved :: Apache Software License +Requires-Python: >=3.8 +Description-Content-Type: text/markdown +License-File: LICENSE +Requires-Dist: attrs>=18.1 +Requires-Dist: isodate +Requires-Dist: python-dateutil +Requires-Dist: rfc3986<2 +Requires-Dist: uritemplate>=3.0.0 +Requires-Dist: babel +Requires-Dist: requests +Requires-Dist: language-tags +Requires-Dist: rdflib +Requires-Dist: colorama +Requires-Dist: jsonschema +Provides-Extra: dev +Requires-Dist: flake8; extra == "dev" +Requires-Dist: wheel; extra == "dev" +Requires-Dist: twine; extra == "dev" +Requires-Dist: build; extra == "dev" +Provides-Extra: docs +Requires-Dist: sphinx<7; extra == "docs" +Requires-Dist: sphinx-autodoc-typehints; extra == "docs" +Requires-Dist: sphinx-rtd-theme; extra == "docs" +Provides-Extra: test +Requires-Dist: frictionless; extra == "test" +Requires-Dist: pytest>=5; extra == "test" +Requires-Dist: pytest-mock; extra == "test" +Requires-Dist: requests-mock; extra == "test" +Requires-Dist: pytest-cov; extra == "test" + +# csvw + +[![Build Status](https://github.com/cldf/csvw/workflows/tests/badge.svg)](https://github.com/cldf/csvw/actions?query=workflow%3Atests) +[![PyPI](https://img.shields.io/pypi/v/csvw.svg)](https://pypi.org/project/csvw) +[![Documentation 
Status](https://readthedocs.org/projects/csvw/badge/?version=latest)](https://csvw.readthedocs.io/en/latest/?badge=latest) + + +This package provides +- a Python API to read and write relational, tabular data according to the [CSV on the Web](https://csvw.org/) specification and +- commandline tools for reading and validating CSVW data. + + +## Links + +- GitHub: https://github.com/cldf/csvw +- PyPI: https://pypi.org/project/csvw +- Issue Tracker: https://github.com/cldf/csvw/issues + + +## Installation + +This package runs under Python >=3.8, use pip to install: + +```bash +$ pip install csvw +``` + + +## CLI + +### `csvw2json` + +Converting CSVW data [to JSON](https://www.w3.org/TR/csv2json/) + +```shell +$ csvw2json tests/fixtures/zipped-metadata.json +{ + "tables": [ + { + "url": "tests/fixtures/zipped.csv", + "row": [ + { + "url": "tests/fixtures/zipped.csv#row=2", + "rownum": 1, + "describes": [ + { + "ID": "abc", + "Value": "the value" + } + ] + }, + { + "url": "tests/fixtures/zipped.csv#row=3", + "rownum": 2, + "describes": [ + { + "ID": "cde", + "Value": "another one" + } + ] + } + ] + } + ] +} +``` + +### `csvwvalidate` + +Validating CSVW data + +```shell +$ csvwvalidate tests/fixtures/zipped-metadata.json +OK +``` + +### `csvwdescribe` + +Describing tabular-data files with CSVW metadata + +```shell +$ csvwdescribe --delimiter "|" tests/fixtures/frictionless-data.csv +{ + "@context": "http://www.w3.org/ns/csvw", + "dc:conformsTo": "data-package", + "tables": [ + { + "dialect": { + "delimiter": "|" + }, + "tableSchema": { + "columns": [ + { + "datatype": "string", + "name": "FK" + }, + { + "datatype": "integer", + "name": "Year" + }, + { + "datatype": "string", + "name": "Location name" + }, + { + "datatype": "string", + "name": "Value" + }, + { + "datatype": "string", + "name": "binary" + }, + { + "datatype": "string", + "name": "anyURI" + }, + { + "datatype": "string", + "name": "email" + }, + { + "datatype": "string", + "name": "boolean" + }, + { + "datatype": { + "dc:format": "application/json", + "base": "json" + }, + "name": "array" + }, + { + "datatype": { + "dc:format": "application/json", + "base": "json" + }, + "name": "geojson" + } + ] + }, + "url": "tests/fixtures/frictionless-data.csv" + } + ] +} +``` + + +## Python API + +Find the Python API documentation at [csvw.readthedocs.io](https://csvw.readthedocs.io/en/latest/). + +A quick example for using `csvw` from Python code: + +```python +import json +from csvw import CSVW +data = CSVW('https://raw.githubusercontent.com/cldf/csvw/master/tests/fixtures/test.tsv') +print(json.dumps(data.to_json(minimal=True), indent=4)) +[ + { + "province": "Hello", + "territory": "world", + "precinct": "1" + } +] +``` + + +## Known limitations + +- We read **all** data which is specified as UTF-8 encoded using the + [`utf-8-sig` codecs](https://docs.python.org/3/library/codecs.html#module-encodings.utf_8_sig). + Thus, if such data starts with `U+FEFF` this will be interpreted as [BOM](https://en.wikipedia.org/wiki/Byte_order_mark) + and skipped. +- Low level CSV parsing is delegated to the `csv` module in Python's standard library. Thus, if a `commentPrefix` + is specified in a `Dialect` instance, this will lead to skipping rows where the first value starts + with `commentPrefix`, **even if the value was quoted**. +- Also, cell content containing `escapechar` may not be round-tripped as expected (when specifying + `escapechar` or a `csvw.Dialect` with `quoteChar` but `doubleQuote==False`), + when minimal quoting is specified. 
This is due to inconsistent `csv` behaviour + across Python versions (see https://bugs.python.org/issue44861). + + + ## CSVW conformance + + While we use the CSVW specification as a guideline, this package does not (and + probably never will) implement the full extent of this spec. + + - When CSV files with a header are read, columns are not matched in order with + column descriptions in the `tableSchema`, but instead are matched based on the + CSV column header and the column descriptions' `name` and `titles` attributes. + This allows for more flexibility, because columns in the CSV file may be + re-ordered without invalidating the metadata. A stricter matching can be forced + by specifying `"header": false` and `"skipRows": 1` in the table's dialect + description. + + However, `csvw.CSVW` works correctly for + - 269 out of 270 [JSON tests](https://w3c.github.io/csvw/tests/#manifest-json), + - 280 out of 282 [validation tests](https://w3c.github.io/csvw/tests/#manifest-validation), + - 10 out of 18 [non-normative tests](https://w3c.github.io/csvw/tests/#manifest-nonnorm) + + from the [CSVW Test suites](https://w3c.github.io/csvw/tests/). + + + ## Compatibility with [Frictionless Data Specs](https://specs.frictionlessdata.io/) + + A CSVW-described dataset is basically equivalent to a Frictionless DataPackage where all + [Data Resources](https://specs.frictionlessdata.io/data-resource/) are [Tabular Data](https://specs.frictionlessdata.io/tabular-data-resource/). + Thus, the `csvw` package provides some conversion functionality. To + "read CSVW data from a Data Package", there's the `csvw.TableGroup.from_frictionless_datapackage` method: + ```python + from csvw import TableGroup + tg = TableGroup.from_frictionless_datapackage('PATH/TO/datapackage.json') + ``` + To convert the metadata, the `TableGroup` can then be serialized: + ```python + tg.to_file('csvw-metadata.json') + ``` + + Note that the CSVW metadata file must be written to the Data Package's directory + to make sure relative paths to data resources work. + + This functionality - together with the schema inference capabilities + of [`frictionless describe`](https://framework.frictionlessdata.io/docs/guides/describing-data/) - provides + a convenient way to bootstrap CSVW metadata for a set of "raw" CSV + files, implemented in the [`csvwdescribe` command described above](#csvwdescribe). + A combined sketch of this conversion appears in the [Example](#example) section below. + + + ## See also + + - https://www.w3.org/2013/csvw/wiki/Main_Page + - https://csvw.org + - https://github.com/CLARIAH/COW + - https://github.com/CLARIAH/ruminator + - https://github.com/bloomberg/pycsvw + - https://specs.frictionlessdata.io/table-schema/ + - https://github.com/theodi/csvlint.rb + - https://github.com/ruby-rdf/rdf-tabular + - https://github.com/rdf-ext/rdf-parser-csvw + - https://github.com/Robsteranium/csvwr + + + ## License + + This package is distributed under the [Apache 2.0 license](https://opensource.org/licenses/Apache-2.0).
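+ + ## Example + + Putting the pieces above together: a minimal end-to-end sketch (the `pkg/` paths are illustrative) that converts a Frictionless Data Package to CSVW and reads the rows back through the Python API: + ```python + import json + from csvw import CSVW, TableGroup + + # Convert the Frictionless metadata to CSVW. + tg = TableGroup.from_frictionless_datapackage('pkg/datapackage.json') + # Write the CSVW metadata into the Data Package's directory so that + # relative paths to the data resources keep working. + tg.to_file('pkg/csvw-metadata.json') + + # Read the described tables back. + data = CSVW('pkg/csvw-metadata.json') + print(json.dumps(data.to_json(minimal=True), indent=4)) + ```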
diff --git a/lib/python3.10/site-packages/csvw-3.5.1.dist-info/top_level.txt b/lib/python3.10/site-packages/csvw-3.5.1.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..1c9a63e96d032d440164fc41a82657b92a2bdf92 --- /dev/null +++ b/lib/python3.10/site-packages/csvw-3.5.1.dist-info/top_level.txt @@ -0,0 +1 @@ +csvw diff --git a/lib/python3.10/site-packages/fsspec/implementations/arrow.py b/lib/python3.10/site-packages/fsspec/implementations/arrow.py new file mode 100644 index 0000000000000000000000000000000000000000..530df901a7225bab4afb9f08d06540bc18e91aef --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/arrow.py @@ -0,0 +1,304 @@ +import errno +import io +import os +import secrets +import shutil +from contextlib import suppress +from functools import cached_property, wraps +from urllib.parse import parse_qs + +from fsspec.spec import AbstractFileSystem +from fsspec.utils import ( + get_package_version_without_import, + infer_storage_options, + mirror_from, + tokenize, +) + + +def wrap_exceptions(func): + @wraps(func) + def wrapper(*args, **kwargs): + try: + return func(*args, **kwargs) + except OSError as exception: + if not exception.args: + raise + + message, *args = exception.args + if isinstance(message, str) and "does not exist" in message: + raise FileNotFoundError(errno.ENOENT, message) from exception + else: + raise + + return wrapper + + +PYARROW_VERSION = None + + +class ArrowFSWrapper(AbstractFileSystem): + """FSSpec-compatible wrapper of pyarrow.fs.FileSystem. + + Parameters + ---------- + fs : pyarrow.fs.FileSystem + + """ + + root_marker = "/" + + def __init__(self, fs, **kwargs): + global PYARROW_VERSION + PYARROW_VERSION = get_package_version_without_import("pyarrow") + self.fs = fs + super().__init__(**kwargs) + + @property + def protocol(self): + return self.fs.type_name + + @cached_property + def fsid(self): + return "hdfs_" + tokenize(self.fs.host, self.fs.port) + + @classmethod + def _strip_protocol(cls, path): + ops = infer_storage_options(path) + path = ops["path"] + if path.startswith("//"): + # special case for "hdfs://path" (without the triple slash) + path = path[1:] + return path + + def ls(self, path, detail=False, **kwargs): + path = self._strip_protocol(path) + from pyarrow.fs import FileSelector + + entries = [ + self._make_entry(entry) + for entry in self.fs.get_file_info(FileSelector(path)) + ] + if detail: + return entries + else: + return [entry["name"] for entry in entries] + + def info(self, path, **kwargs): + path = self._strip_protocol(path) + [info] = self.fs.get_file_info([path]) + return self._make_entry(info) + + def exists(self, path): + path = self._strip_protocol(path) + try: + self.info(path) + except FileNotFoundError: + return False + else: + return True + + def _make_entry(self, info): + from pyarrow.fs import FileType + + if info.type is FileType.Directory: + kind = "directory" + elif info.type is FileType.File: + kind = "file" + elif info.type is FileType.NotFound: + raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), info.path) + else: + kind = "other" + + return { + "name": info.path, + "size": info.size, + "type": kind, + "mtime": info.mtime, + } + + @wrap_exceptions + def cp_file(self, path1, path2, **kwargs): + path1 = self._strip_protocol(path1).rstrip("/") + path2 = self._strip_protocol(path2).rstrip("/") + + with self._open(path1, "rb") as lstream: + tmp_fname = f"{path2}.tmp.{secrets.token_hex(6)}" + try: + with self.open(tmp_fname, "wb") as rstream: + 
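+ # Copy into a uniquely named temporary file and move it into place only after the copy completes, so a failed transfer never leaves a partial destination file behind.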
shutil.copyfileobj(lstream, rstream) + self.fs.move(tmp_fname, path2) + except BaseException: + with suppress(FileNotFoundError): + self.fs.delete_file(tmp_fname) + raise + + @wrap_exceptions + def mv(self, path1, path2, **kwargs): + path1 = self._strip_protocol(path1).rstrip("/") + path2 = self._strip_protocol(path2).rstrip("/") + self.fs.move(path1, path2) + + @wrap_exceptions + def rm_file(self, path): + path = self._strip_protocol(path) + self.fs.delete_file(path) + + @wrap_exceptions + def rm(self, path, recursive=False, maxdepth=None): + path = self._strip_protocol(path).rstrip("/") + if self.isdir(path): + if recursive: + self.fs.delete_dir(path) + else: + raise ValueError("Can't delete directories without recursive=True") + else: + self.fs.delete_file(path) + + @wrap_exceptions + def _open(self, path, mode="rb", block_size=None, seekable=True, **kwargs): + if mode == "rb": + if seekable: + method = self.fs.open_input_file + else: + method = self.fs.open_input_stream + elif mode == "wb": + method = self.fs.open_output_stream + elif mode == "ab": + method = self.fs.open_append_stream + else: + raise ValueError(f"unsupported mode for Arrow filesystem: {mode!r}") + + _kwargs = {} + if mode != "rb" or not seekable: + if int(PYARROW_VERSION.split(".")[0]) >= 4: + # disable compression auto-detection + _kwargs["compression"] = None + stream = method(path, **_kwargs) + + return ArrowFile(self, stream, path, mode, block_size, **kwargs) + + @wrap_exceptions + def mkdir(self, path, create_parents=True, **kwargs): + path = self._strip_protocol(path) + if create_parents: + self.makedirs(path, exist_ok=True) + else: + self.fs.create_dir(path, recursive=False) + + @wrap_exceptions + def makedirs(self, path, exist_ok=False): + path = self._strip_protocol(path) + self.fs.create_dir(path, recursive=True) + + @wrap_exceptions + def rmdir(self, path): + path = self._strip_protocol(path) + self.fs.delete_dir(path) + + @wrap_exceptions + def modified(self, path): + path = self._strip_protocol(path) + return self.fs.get_file_info(path).mtime + + def cat_file(self, path, start=None, end=None, **kwargs): + kwargs["seekable"] = start not in [None, 0] + return super().cat_file(path, start=start, end=end, **kwargs) + + def get_file(self, rpath, lpath, **kwargs): + kwargs["seekable"] = False + super().get_file(rpath, lpath, **kwargs) + + +@mirror_from( + "stream", + [ + "read", + "seek", + "tell", + "write", + "readable", + "writable", + "close", + "size", + "seekable", + ], +) +class ArrowFile(io.IOBase): + def __init__(self, fs, stream, path, mode, block_size=None, **kwargs): + self.path = path + self.mode = mode + + self.fs = fs + self.stream = stream + + self.blocksize = self.block_size = block_size + self.kwargs = kwargs + + def __enter__(self): + return self + + def __exit__(self, *args): + return self.close() + + +class HadoopFileSystem(ArrowFSWrapper): + """A wrapper on top of the pyarrow.fs.HadoopFileSystem + to connect its interface with fsspec""" + + protocol = "hdfs" + + def __init__( + self, + host="default", + port=0, + user=None, + kerb_ticket=None, + replication=3, + extra_conf=None, + **kwargs, + ): + """ + + Parameters + ---------- + host: str + Hostname, IP or "default" to try to read from Hadoop config + port: int + Port to connect on, or default from Hadoop config if 0 + user: str or None + If given, connect as this username + kerb_ticket: str or None + If given, use this ticket for authentication + replication: int + set replication factor of file for write operations.
default value is 3. + extra_conf: None or dict + Passed on to HadoopFileSystem + """ + from pyarrow.fs import HadoopFileSystem + + fs = HadoopFileSystem( + host=host, + port=port, + user=user, + kerb_ticket=kerb_ticket, + replication=replication, + extra_conf=extra_conf, + ) + super().__init__(fs=fs, **kwargs) + + @staticmethod + def _get_kwargs_from_urls(path): + ops = infer_storage_options(path) + out = {} + if ops.get("host", None): + out["host"] = ops["host"] + if ops.get("username", None): + out["user"] = ops["username"] + if ops.get("port", None): + out["port"] = ops["port"] + if ops.get("url_query", None): + queries = parse_qs(ops["url_query"]) + if queries.get("replication", None): + out["replication"] = int(queries["replication"][0]) + return out diff --git a/lib/python3.10/site-packages/fsspec/implementations/asyn_wrapper.py b/lib/python3.10/site-packages/fsspec/implementations/asyn_wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..ee009f3bbcb5718e6c534f36f53ce4307999d54e --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/asyn_wrapper.py @@ -0,0 +1,103 @@ +import asyncio +import functools +import inspect + +from fsspec.asyn import AsyncFileSystem, running_async + + +def async_wrapper(func, obj=None): + """ + Wraps a synchronous function to make it awaitable. + + Parameters + ---------- + func : callable + The synchronous function to wrap. + obj : object, optional + The instance to bind the function to, if applicable. + + Returns + ------- + coroutine + An awaitable version of the function. + """ + + @functools.wraps(func) + async def wrapper(*args, **kwargs): + return await asyncio.to_thread(func, *args, **kwargs) + + return wrapper + + +class AsyncFileSystemWrapper(AsyncFileSystem): + """ + A wrapper class to convert a synchronous filesystem into an asynchronous one. + + This class takes an existing synchronous filesystem implementation and wraps all + its methods to provide an asynchronous interface. + + Parameters + ---------- + sync_fs : AbstractFileSystem + The synchronous filesystem instance to wrap. + """ + + protocol = "async_wrapper" + cachable = False + + def __init__(self, fs, *args, asynchronous=None, **kwargs): + if asynchronous is None: + asynchronous = running_async() + super().__init__(*args, asynchronous=asynchronous, **kwargs) + self.sync_fs = fs + self.protocol = self.sync_fs.protocol + self._wrap_all_sync_methods() + + @property + def fsid(self): + return f"async_{self.sync_fs.fsid}" + + def _wrap_all_sync_methods(self): + """ + Wrap all synchronous methods of the underlying filesystem with asynchronous versions. + """ + excluded_methods = {"open"} + for method_name in dir(self.sync_fs): + if method_name.startswith("_") or method_name in excluded_methods: + continue + + attr = inspect.getattr_static(self.sync_fs, method_name) + if isinstance(attr, property): + continue + + method = getattr(self.sync_fs, method_name) + if callable(method) and not asyncio.iscoroutinefunction(method): + async_method = async_wrapper(method, obj=self) + setattr(self, f"_{method_name}", async_method) + + @classmethod + def wrap_class(cls, sync_fs_class): + """ + Create a new class that can be used to instantiate an AsyncFileSystemWrapper + with lazy instantiation of the underlying synchronous filesystem. + + Parameters + ---------- + sync_fs_class : type + The class of the synchronous filesystem to wrap. + + Returns + ------- + type + A new class that wraps the provided synchronous filesystem class. 
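+ + Example (illustrative):: + + from fsspec.implementations.local import LocalFileSystem + + AsyncLocal = AsyncFileSystemWrapper.wrap_class(LocalFileSystem) + fs = AsyncLocal() # the LocalFileSystem is only instantiated here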
+ """ + + class GeneratedAsyncFileSystemWrapper(cls): + def __init__(self, *args, **kwargs): + sync_fs = sync_fs_class(*args, **kwargs) + super().__init__(sync_fs) + + GeneratedAsyncFileSystemWrapper.__name__ = ( + f"Async{sync_fs_class.__name__}Wrapper" + ) + return GeneratedAsyncFileSystemWrapper diff --git a/lib/python3.10/site-packages/fsspec/implementations/cache_mapper.py b/lib/python3.10/site-packages/fsspec/implementations/cache_mapper.py new file mode 100644 index 0000000000000000000000000000000000000000..6e7c7d88afdddf12f77b26bb635bd8bf1e2bd7f1 --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/cache_mapper.py @@ -0,0 +1,75 @@ +from __future__ import annotations + +import abc +import hashlib + +from fsspec.implementations.local import make_path_posix + + +class AbstractCacheMapper(abc.ABC): + """Abstract super-class for mappers from remote URLs to local cached + basenames. + """ + + @abc.abstractmethod + def __call__(self, path: str) -> str: ... + + def __eq__(self, other: object) -> bool: + # Identity only depends on class. When derived classes have attributes + # they will need to be included. + return isinstance(other, type(self)) + + def __hash__(self) -> int: + # Identity only depends on class. When derived classes have attributes + # they will need to be included. + return hash(type(self)) + + +class BasenameCacheMapper(AbstractCacheMapper): + """Cache mapper that uses the basename of the remote URL and a fixed number + of directory levels above this. + + The default is zero directory levels, meaning different paths with the same + basename will have the same cached basename. + """ + + def __init__(self, directory_levels: int = 0): + if directory_levels < 0: + raise ValueError( + "BasenameCacheMapper requires zero or positive directory_levels" + ) + self.directory_levels = directory_levels + + # Separator for directories when encoded as strings. + self._separator = "_@_" + + def __call__(self, path: str) -> str: + path = make_path_posix(path) + prefix, *bits = path.rsplit("/", self.directory_levels + 1) + if bits: + return self._separator.join(bits) + else: + return prefix # No separator found, simple filename + + def __eq__(self, other: object) -> bool: + return super().__eq__(other) and self.directory_levels == other.directory_levels + + def __hash__(self) -> int: + return super().__hash__() ^ hash(self.directory_levels) + + +class HashCacheMapper(AbstractCacheMapper): + """Cache mapper that uses a hash of the remote URL.""" + + def __call__(self, path: str) -> str: + return hashlib.sha256(path.encode()).hexdigest() + + +def create_cache_mapper(same_names: bool) -> AbstractCacheMapper: + """Factory method to create cache mapper for backward compatibility with + ``CachingFileSystem`` constructor using ``same_names`` kwarg. 
+ """ + if same_names: + return BasenameCacheMapper() + else: + return HashCacheMapper() diff --git a/lib/python3.10/site-packages/fsspec/implementations/cache_metadata.py b/lib/python3.10/site-packages/fsspec/implementations/cache_metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..bd9b5cdd99d7f4a0a989c0f7d0c70ddcf324816a --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/cache_metadata.py @@ -0,0 +1,232 @@ +from __future__ import annotations + +import os +import pickle +import time +from typing import TYPE_CHECKING + +from fsspec.utils import atomic_write + +try: + import ujson as json +except ImportError: + if not TYPE_CHECKING: + import json + +if TYPE_CHECKING: + from typing import Any, Dict, Iterator, Literal + + from typing_extensions import TypeAlias + + from .cached import CachingFileSystem + + Detail: TypeAlias = Dict[str, Any] + + +class CacheMetadata: + """Cache metadata. + + All reading and writing of cache metadata is performed by this class, + accessing the cached files and blocks is not. + + Metadata is stored in a single file per storage directory in JSON format. + For backward compatibility, also reads metadata stored in pickle format + which is converted to JSON when next saved. + """ + + def __init__(self, storage: list[str]): + """ + + Parameters + ---------- + storage: list[str] + Directories containing cached files, must be at least one. Metadata + is stored in the last of these directories by convention. + """ + if not storage: + raise ValueError("CacheMetadata expects at least one storage location") + + self._storage = storage + self.cached_files: list[Detail] = [{}] + + # Private attribute to force saving of metadata in pickle format rather than + # JSON for use in tests to confirm can read both pickle and JSON formats. + self._force_save_pickle = False + + def _load(self, fn: str) -> Detail: + """Low-level function to load metadata from specific file""" + try: + with open(fn, "r") as f: + loaded = json.load(f) + except ValueError: + with open(fn, "rb") as f: + loaded = pickle.load(f) + for c in loaded.values(): + if isinstance(c.get("blocks"), list): + c["blocks"] = set(c["blocks"]) + return loaded + + def _save(self, metadata_to_save: Detail, fn: str) -> None: + """Low-level function to save metadata to specific file""" + if self._force_save_pickle: + with atomic_write(fn) as f: + pickle.dump(metadata_to_save, f) + else: + with atomic_write(fn, mode="w") as f: + json.dump(metadata_to_save, f) + + def _scan_locations( + self, writable_only: bool = False + ) -> Iterator[tuple[str, str, bool]]: + """Yield locations (filenames) where metadata is stored, and whether + writable or not. + + Parameters + ---------- + writable: bool + Set to True to only yield writable locations. + + Returns + ------- + Yields (str, str, bool) + """ + n = len(self._storage) + for i, storage in enumerate(self._storage): + writable = i == n - 1 + if writable_only and not writable: + continue + yield os.path.join(storage, "cache"), storage, writable + + def check_file( + self, path: str, cfs: CachingFileSystem | None + ) -> Literal[False] | tuple[Detail, str]: + """If path is in cache return its details, otherwise return ``False``. + + If the optional CachingFileSystem is specified then it is used to + perform extra checks to reject possible matches, such as if they are + too old. 
+ """ + for (fn, base, _), cache in zip(self._scan_locations(), self.cached_files): + if path not in cache: + continue + detail = cache[path].copy() + + if cfs is not None: + if cfs.check_files and detail["uid"] != cfs.fs.ukey(path): + # Wrong file as determined by hash of file properties + continue + if cfs.expiry and time.time() - detail["time"] > cfs.expiry: + # Cached file has expired + continue + + fn = os.path.join(base, detail["fn"]) + if os.path.exists(fn): + return detail, fn + return False + + def clear_expired(self, expiry_time: int) -> tuple[list[str], bool]: + """Remove expired metadata from the cache. + + Returns names of files corresponding to expired metadata and a boolean + flag indicating whether the writable cache is empty. Caller is + responsible for deleting the expired files. + """ + expired_files = [] + for path, detail in self.cached_files[-1].copy().items(): + if time.time() - detail["time"] > expiry_time: + fn = detail.get("fn", "") + if not fn: + raise RuntimeError( + f"Cache metadata does not contain 'fn' for {path}" + ) + fn = os.path.join(self._storage[-1], fn) + expired_files.append(fn) + self.cached_files[-1].pop(path) + + if self.cached_files[-1]: + cache_path = os.path.join(self._storage[-1], "cache") + self._save(self.cached_files[-1], cache_path) + + writable_cache_empty = not self.cached_files[-1] + return expired_files, writable_cache_empty + + def load(self) -> None: + """Load all metadata from disk and store in ``self.cached_files``""" + cached_files = [] + for fn, _, _ in self._scan_locations(): + if os.path.exists(fn): + # TODO: consolidate blocks here + cached_files.append(self._load(fn)) + else: + cached_files.append({}) + self.cached_files = cached_files or [{}] + + def on_close_cached_file(self, f: Any, path: str) -> None: + """Perform side-effect actions on closing a cached file. + + The actual closing of the file is the responsibility of the caller. + """ + # File must be writeble, so in self.cached_files[-1] + c = self.cached_files[-1][path] + if c["blocks"] is not True and len(c["blocks"]) * f.blocksize >= f.size: + c["blocks"] = True + + def pop_file(self, path: str) -> str | None: + """Remove metadata of cached file. + + If path is in the cache, return the filename of the cached file, + otherwise return ``None``. Caller is responsible for deleting the + cached file. + """ + details = self.check_file(path, None) + if not details: + return None + _, fn = details + if fn.startswith(self._storage[-1]): + self.cached_files[-1].pop(path) + self.save() + else: + raise PermissionError( + "Can only delete cached file in last, writable cache location" + ) + return fn + + def save(self) -> None: + """Save metadata to disk""" + for (fn, _, writable), cache in zip(self._scan_locations(), self.cached_files): + if not writable: + continue + + if os.path.exists(fn): + cached_files = self._load(fn) + for k, c in cached_files.items(): + if k in cache: + if c["blocks"] is True or cache[k]["blocks"] is True: + c["blocks"] = True + else: + # self.cached_files[*][*]["blocks"] must continue to + # point to the same set object so that updates + # performed by MMapCache are propagated back to + # self.cached_files. 
+ blocks = cache[k]["blocks"] + blocks.update(c["blocks"]) + c["blocks"] = blocks + c["time"] = max(c["time"], cache[k]["time"]) + c["uid"] = cache[k]["uid"] + + # Files can be added to the cache after it has been written once + for k, c in cache.items(): + if k not in cached_files: + cached_files[k] = c + else: + cached_files = cache + cache = {k: v.copy() for k, v in cached_files.items()} + for c in cache.values(): + if isinstance(c["blocks"], set): + c["blocks"] = list(c["blocks"]) + self._save(cache, fn) + self.cached_files[-1] = cached_files + + def update_file(self, path: str, detail: Detail) -> None: + """Update metadata for specific file in memory, do not save""" + self.cached_files[-1][path] = detail diff --git a/lib/python3.10/site-packages/fsspec/implementations/dask.py b/lib/python3.10/site-packages/fsspec/implementations/dask.py new file mode 100644 index 0000000000000000000000000000000000000000..3e1276463db6866665e6a0fe114efc247971b57e --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/dask.py @@ -0,0 +1,152 @@ +import dask +from distributed.client import Client, _get_global_client +from distributed.worker import Worker + +from fsspec import filesystem +from fsspec.spec import AbstractBufferedFile, AbstractFileSystem +from fsspec.utils import infer_storage_options + + +def _get_client(client): + if client is None: + return _get_global_client() + elif isinstance(client, Client): + return client + else: + # e.g., connection string + return Client(client) + + +def _in_worker(): + return bool(Worker._instances) + + +class DaskWorkerFileSystem(AbstractFileSystem): + """View files accessible to a worker as any other remote file-system + + When instances are run on the worker, they use the real filesystem. When + run on the client, they call the worker to provide information or data. + + **Warning** this implementation is experimental, and read-only for now.
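+ + Example (illustrative; "dask" is this class's registered protocol name):: + + import fsspec + + fs = fsspec.filesystem("dask", target_protocol="s3") + fs.ls("bucket/path")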
+ """ + + def __init__( + self, target_protocol=None, target_options=None, fs=None, client=None, **kwargs + ): + super().__init__(**kwargs) + if not (fs is None) ^ (target_protocol is None): + raise ValueError( + "Please provide one of filesystem instance (fs) or" + " target_protocol, not both" + ) + self.target_protocol = target_protocol + self.target_options = target_options + self.worker = None + self.client = client + self.fs = fs + self._determine_worker() + + @staticmethod + def _get_kwargs_from_urls(path): + so = infer_storage_options(path) + if "host" in so and "port" in so: + return {"client": f"{so['host']}:{so['port']}"} + else: + return {} + + def _determine_worker(self): + if _in_worker(): + self.worker = True + if self.fs is None: + self.fs = filesystem( + self.target_protocol, **(self.target_options or {}) + ) + else: + self.worker = False + self.client = _get_client(self.client) + self.rfs = dask.delayed(self) + + def mkdir(self, *args, **kwargs): + if self.worker: + self.fs.mkdir(*args, **kwargs) + else: + self.rfs.mkdir(*args, **kwargs).compute() + + def rm(self, *args, **kwargs): + if self.worker: + self.fs.rm(*args, **kwargs) + else: + self.rfs.rm(*args, **kwargs).compute() + + def copy(self, *args, **kwargs): + if self.worker: + self.fs.copy(*args, **kwargs) + else: + self.rfs.copy(*args, **kwargs).compute() + + def mv(self, *args, **kwargs): + if self.worker: + self.fs.mv(*args, **kwargs) + else: + self.rfs.mv(*args, **kwargs).compute() + + def ls(self, *args, **kwargs): + if self.worker: + return self.fs.ls(*args, **kwargs) + else: + return self.rfs.ls(*args, **kwargs).compute() + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + **kwargs, + ): + if self.worker: + return self.fs._open( + path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + cache_options=cache_options, + **kwargs, + ) + else: + return DaskFile( + fs=self, + path=path, + mode=mode, + block_size=block_size, + autocommit=autocommit, + cache_options=cache_options, + **kwargs, + ) + + def fetch_range(self, path, mode, start, end): + if self.worker: + with self._open(path, mode) as f: + f.seek(start) + return f.read(end - start) + else: + return self.rfs.fetch_range(path, mode, start, end).compute() + + +class DaskFile(AbstractBufferedFile): + def __init__(self, mode="rb", **kwargs): + if mode != "rb": + raise ValueError('Remote dask files can only be opened in "rb" mode') + super().__init__(**kwargs) + + def _upload_chunk(self, final=False): + pass + + def _initiate_upload(self): + """Create remote file/upload""" + pass + + def _fetch_range(self, start, end): + """Get the specified set of bytes from remote""" + return self.fs.fetch_range(self.path, self.mode, start, end) diff --git a/lib/python3.10/site-packages/fsspec/implementations/data.py b/lib/python3.10/site-packages/fsspec/implementations/data.py new file mode 100644 index 0000000000000000000000000000000000000000..519032305bed633f2ba8a6148076433caf81710b --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/data.py @@ -0,0 +1,58 @@ +import base64 +import io +from typing import Optional +from urllib.parse import unquote + +from fsspec import AbstractFileSystem + + +class DataFileSystem(AbstractFileSystem): + """A handy decoder for data-URLs + + Example + ------- + >>> with fsspec.open("data:,Hello%2C%20World%21") as f: + ... print(f.read()) + b"Hello, World!" 
+ + See https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/Data_URLs + """ + + protocol = "data" + + def __init__(self, **kwargs): + """No parameters for this filesystem""" + super().__init__(**kwargs) + + def cat_file(self, path, start=None, end=None, **kwargs): + pref, data = path.split(",", 1) + if pref.endswith("base64"): + return base64.b64decode(data)[start:end] + return unquote(data).encode()[start:end] + + def info(self, path, **kwargs): + pref, name = path.split(",", 1) + data = self.cat_file(path) + mime = pref.split(":", 1)[1].split(";", 1)[0] + return {"name": name, "size": len(data), "type": "file", "mimetype": mime} + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + **kwargs, + ): + if "r" not in mode: + raise ValueError("Read only filesystem") + return io.BytesIO(self.cat_file(path)) + + @staticmethod + def encode(data: bytes, mime: Optional[str] = None): + """Format the given data into data-URL syntax + + This version always base64 encodes, even when the data is ascii/url-safe. + """ + return f"data:{mime or ''};base64,{base64.b64encode(data).decode()}" diff --git a/lib/python3.10/site-packages/fsspec/implementations/git.py b/lib/python3.10/site-packages/fsspec/implementations/git.py new file mode 100644 index 0000000000000000000000000000000000000000..7b9d3539a013935e9a5652605cdd1c21cee0f0ee --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/git.py @@ -0,0 +1,115 @@ +import os + +import pygit2 + +from fsspec.spec import AbstractFileSystem + +from .memory import MemoryFile + + +class GitFileSystem(AbstractFileSystem): + """Browse the files of a local git repo at any hash/tag/branch + + (experimental backend) + """ + + root_marker = "" + cachable = True + + def __init__(self, path=None, fo=None, ref=None, **kwargs): + """ + + Parameters + ---------- + path: str (optional) + Local location of the repo (uses current directory if not given). + May be deprecated in favour of ``fo``. When used with a higher + level function such as fsspec.open(), may be of the form + "git://[path-to-repo[:]][ref@]path/to/file" (but the actual + file path should not contain "@" or ":"). + fo: str (optional) + Same as ``path``, but passed as part of a chained URL. This one + takes precedence if both are given. + ref: str (optional) + Reference to work with, could be a hash, tag or branch name. Defaults + to "master".
Note that ``ls`` and ``open`` also take hash, + so this becomes the default for those operations + kwargs + """ + super().__init__(**kwargs) + self.repo = pygit2.Repository(fo or path or os.getcwd()) + self.ref = ref or "master" + + @classmethod + def _strip_protocol(cls, path): + path = super()._strip_protocol(path).lstrip("/") + if ":" in path: + path = path.split(":", 1)[1] + if "@" in path: + path = path.split("@", 1)[1] + return path.lstrip("/") + + def _path_to_object(self, path, ref): + comm, ref = self.repo.resolve_refish(ref or self.ref) + parts = path.split("/") + tree = comm.tree + for part in parts: + if part and isinstance(tree, pygit2.Tree): + if part not in tree: + raise FileNotFoundError(path) + tree = tree[part] + return tree + + @staticmethod + def _get_kwargs_from_urls(path): + if path.startswith("git://"): + path = path[6:] + out = {} + if ":" in path: + out["path"], path = path.split(":", 1) + if "@" in path: + out["ref"], path = path.split("@", 1) + return out + + @staticmethod + def _object_to_info(obj, path=None): + # obj.name and obj.filemode are None for the root tree! + is_dir = isinstance(obj, pygit2.Tree) + return { + "type": "directory" if is_dir else "file", + "name": ( + "/".join([path, obj.name or ""]).lstrip("/") if path else obj.name + ), + "hex": str(obj.id), + "mode": "100644" if obj.filemode is None else f"{obj.filemode:o}", + "size": 0 if is_dir else obj.size, + } + + def ls(self, path, detail=True, ref=None, **kwargs): + tree = self._path_to_object(self._strip_protocol(path), ref) + return [ + GitFileSystem._object_to_info(obj, path) + if detail + else GitFileSystem._object_to_info(obj, path)["name"] + for obj in (tree if isinstance(tree, pygit2.Tree) else [tree]) + ] + + def info(self, path, ref=None, **kwargs): + tree = self._path_to_object(self._strip_protocol(path), ref) + return GitFileSystem._object_to_info(tree, path) + + def ukey(self, path, ref=None): + return self.info(path, ref=ref)["hex"] + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + ref=None, + **kwargs, + ): + obj = self._path_to_object(path, ref or self.ref) + return MemoryFile(data=obj.data) diff --git a/lib/python3.10/site-packages/fsspec/implementations/github.py b/lib/python3.10/site-packages/fsspec/implementations/github.py new file mode 100644 index 0000000000000000000000000000000000000000..3650b8ebaa4eae3caa75a5290305fefe0a80d30b --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/github.py @@ -0,0 +1,239 @@ +import requests + +import fsspec + +from ..spec import AbstractFileSystem +from ..utils import infer_storage_options +from .memory import MemoryFile + +# TODO: add GIST backend, would be very similar + + +class GithubFileSystem(AbstractFileSystem): + """Interface to files in github + + An instance of this class provides the files residing within a remote github + repository. You may specify a point in the repo's history, by SHA, branch + or tag (default is the repo's default branch). + + Given that code files tend to be small, and that github does not support + retrieving partial content, we always fetch whole files.
+ + When using fsspec.open, allows URIs of the form: + + - "github://path/file", in which case you must specify org, repo and + may specify sha in the extra args + - 'github://org:repo@/precip/catalog.yml', where the org and repo are + part of the URI + - 'github://org:repo@sha/precip/catalog.yml', where the sha is also included + + ``sha`` can be the full or abbreviated hex of the commit you want to fetch + from, or a branch or tag name (so long as it doesn't contain special characters + like "/", "?", which would have to be HTTP-encoded). + + For authorised access, you must provide username and token, which can be created + at https://github.com/settings/tokens + """ + + url = "https://api.github.com/repos/{org}/{repo}/git/trees/{sha}" + rurl = "https://raw.githubusercontent.com/{org}/{repo}/{sha}/{path}" + protocol = "github" + timeout = (60, 60) # connect, read timeouts + + def __init__( + self, org, repo, sha=None, username=None, token=None, timeout=None, **kwargs + ): + super().__init__(**kwargs) + self.org = org + self.repo = repo + if (username is None) ^ (token is None): + raise ValueError("Auth requires both username and token") + self.username = username + self.token = token + if timeout is not None: + self.timeout = timeout + if sha is None: + # look up default branch (not necessarily "master") + u = "https://api.github.com/repos/{org}/{repo}" + r = requests.get( + u.format(org=org, repo=repo), timeout=self.timeout, **self.kw + ) + r.raise_for_status() + sha = r.json()["default_branch"] + + self.root = sha + self.ls("") + + @property + def kw(self): + if self.username: + return {"auth": (self.username, self.token)} + return {} + + @classmethod + def repos(cls, org_or_user, is_org=True): + """List repo names for given org or user + + This may become the top level of the FS + + Parameters + ---------- + org_or_user: str + Name of the github org or user to query + is_org: bool (default True) + Whether the name is an organisation (True) or user (False) + + Returns + ------- + List of string + """ + r = requests.get( + f"https://api.github.com/{['users', 'orgs'][is_org]}/{org_or_user}/repos", + timeout=cls.timeout, + ) + r.raise_for_status() + return [repo["name"] for repo in r.json()] + + @property + def tags(self): + """Names of tags in the repo""" + r = requests.get( + f"https://api.github.com/repos/{self.org}/{self.repo}/tags", + timeout=self.timeout, + **self.kw, + ) + r.raise_for_status() + return [t["name"] for t in r.json()] + + @property + def branches(self): + """Names of branches in the repo""" + r = requests.get( + f"https://api.github.com/repos/{self.org}/{self.repo}/branches", + timeout=self.timeout, + **self.kw, + ) + r.raise_for_status() + return [t["name"] for t in r.json()] + + @property + def refs(self): + """Named references, tags and branches""" + return {"tags": self.tags, "branches": self.branches} + + def ls(self, path, detail=False, sha=None, _sha=None, **kwargs): + """List files at given path + + Parameters + ---------- + path: str + Location to list, relative to repo root + detail: bool + If True, returns list of dicts, one per file; if False, returns + list of full filenames only + sha: str (optional) + List at the given point in the repo history, branch or tag name or commit + SHA + _sha: str (optional) + List this specific tree object (used internally to descend into trees) + """ + path = self._strip_protocol(path) + if path == "": + _sha = sha or self.root + if _sha is None: + parts = path.rstrip("/").split("/") + so_far = "" + _sha = sha or self.root + 
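+ # Walk the tree one path component at a time: list the level seen so far, locate the entry for the next component, and descend using that entry's SHA.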
for part in parts: + out = self.ls(so_far, True, sha=sha, _sha=_sha) + so_far += "/" + part if so_far else part + out = [o for o in out if o["name"] == so_far] + if not out: + raise FileNotFoundError(path) + out = out[0] + if out["type"] == "file": + if detail: + return [out] + else: + return path + _sha = out["sha"] + if path not in self.dircache or sha not in [self.root, None]: + r = requests.get( + self.url.format(org=self.org, repo=self.repo, sha=_sha), + timeout=self.timeout, + **self.kw, + ) + if r.status_code == 404: + raise FileNotFoundError(path) + r.raise_for_status() + types = {"blob": "file", "tree": "directory"} + out = [ + { + "name": path + "/" + f["path"] if path else f["path"], + "mode": f["mode"], + "type": types[f["type"]], + "size": f.get("size", 0), + "sha": f["sha"], + } + for f in r.json()["tree"] + if f["type"] in types + ] + if sha in [self.root, None]: + self.dircache[path] = out + else: + out = self.dircache[path] + if detail: + return out + else: + return sorted([f["name"] for f in out]) + + def invalidate_cache(self, path=None): + self.dircache.clear() + + @classmethod + def _strip_protocol(cls, path): + opts = infer_storage_options(path) + if "username" not in opts: + return super()._strip_protocol(path) + return opts["path"].lstrip("/") + + @staticmethod + def _get_kwargs_from_urls(path): + opts = infer_storage_options(path) + if "username" not in opts: + return {} + out = {"org": opts["username"], "repo": opts["password"]} + if opts["host"]: + out["sha"] = opts["host"] + return out + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=True, + cache_options=None, + sha=None, + **kwargs, + ): + if mode != "rb": + raise NotImplementedError + url = self.rurl.format( + org=self.org, repo=self.repo, path=path, sha=sha or self.root + ) + r = requests.get(url, timeout=self.timeout, **self.kw) + if r.status_code == 404: + raise FileNotFoundError(path) + r.raise_for_status() + return MemoryFile(None, None, r.content) + + def cat(self, path, recursive=False, on_error="raise", **kwargs): + paths = self.expand_path(path, recursive=recursive) + urls = [ + self.rurl.format(org=self.org, repo=self.repo, path=u, sha=self.root) + for u in paths + ] + fs = fsspec.filesystem("http") + data = fs.cat(urls, on_error="return") + return {u: v for ((k, v), u) in zip(data.items(), urls)} diff --git a/lib/python3.10/site-packages/fsspec/implementations/http_sync.py b/lib/python3.10/site-packages/fsspec/implementations/http_sync.py new file mode 100644 index 0000000000000000000000000000000000000000..2a1caf1b0aaa0c8b2bc58f5364aac5ee5cf44d1a --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/http_sync.py @@ -0,0 +1,932 @@ +"""This file is largely copied from http.py""" + +import io +import logging +import re +import urllib.error +import urllib.parse +from copy import copy +from json import dumps, loads +from urllib.parse import urlparse + +try: + import yarl +except (ImportError, ModuleNotFoundError, OSError): + yarl = False + +from fsspec.callbacks import _DEFAULT_CALLBACK +from fsspec.registry import register_implementation +from fsspec.spec import AbstractBufferedFile, AbstractFileSystem +from fsspec.utils import DEFAULT_BLOCK_SIZE, isfilelike, nullcontext, tokenize + +from ..caching import AllBytes + +# https://stackoverflow.com/a/15926317/3821154 +ex = re.compile(r"""<(a|A)\s+(?:[^>]*?\s+)?(href|HREF)=["'](?P<url>[^"']+)""") +ex2 = re.compile(r"""(?P<url>http[s]?://[-a-zA-Z0-9@:%_+.~#?&/=]+)""") +logger = logging.getLogger("fsspec.http") + + 
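+# Illustrative, doctest-style examples of what the two patterns above match +# on an HTML listing page: +# +#     >>> ex.findall('<a href="file.csv">file</a>')[0][2] +#     'file.csv' +#     >>> ex2.findall('see http://example.com/data.bin for the raw bytes')[0] +#     'http://example.com/data.bin' + 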
+class JsHttpException(urllib.error.HTTPError): ... + + +class StreamIO(io.BytesIO): + # fake class, so you can set attributes on it + # will eventually actually stream + ... + + +class ResponseProxy: + """Looks like a requests response""" + + def __init__(self, req, stream=False): + self.request = req + self.stream = stream + self._data = None + self._headers = None + + @property + def raw(self): + if self._data is None: + b = self.request.response.to_bytes() + if self.stream: + self._data = StreamIO(b) + else: + self._data = b + return self._data + + def close(self): + if hasattr(self, "_data"): + del self._data + + @property + def headers(self): + if self._headers is None: + self._headers = dict( + [ + _.split(": ") + for _ in self.request.getAllResponseHeaders().strip().split("\r\n") + ] + ) + return self._headers + + @property + def status_code(self): + return int(self.request.status) + + def raise_for_status(self): + if not self.ok: + raise JsHttpException( + self.url, self.status_code, self.reason, self.headers, None + ) + + def iter_content(self, chunksize, *_, **__): + while True: + out = self.raw.read(chunksize) + if out: + yield out + else: + break + + @property + def reason(self): + return self.request.statusText + + @property + def ok(self): + return self.status_code < 400 + + @property + def url(self): + return self.request.response.responseURL + + @property + def text(self): + # TODO: encoding from headers + return self.content.decode() + + @property + def content(self): + self.stream = False + return self.raw + + @property + def json(self): + return loads(self.text) + + +class RequestsSessionShim: + def __init__(self): + self.headers = {} + + def request( + self, + method, + url, + params=None, + data=None, + headers=None, + cookies=None, + files=None, + auth=None, + timeout=None, + allow_redirects=None, + proxies=None, + hooks=None, + stream=None, + verify=None, + cert=None, + json=None, + ): + from js import Blob, XMLHttpRequest + + logger.debug("JS request: %s %s", method, url) + + if cert or verify or proxies or files or cookies or hooks: + raise NotImplementedError + if data and json: + raise ValueError("Use json= or data=, not both") + req = XMLHttpRequest.new() + extra = auth if auth else () + if params: + url = f"{url}?{urllib.parse.urlencode(params)}" + req.open(method, url, False, *extra) + if timeout: + req.timeout = timeout + if headers: + for k, v in headers.items(): + req.setRequestHeader(k, v) + + req.setRequestHeader("Accept", "application/octet-stream") + req.responseType = "arraybuffer" + if json: + blob = Blob.new([dumps(json)], {type: "application/json"}) + req.send(blob) + elif data: + if isinstance(data, io.IOBase): + data = data.read() + blob = Blob.new([data], {type: "application/octet-stream"}) + req.send(blob) + else: + req.send(None) + return ResponseProxy(req, stream=stream) + + def get(self, url, **kwargs): + return self.request("GET", url, **kwargs) + + def head(self, url, **kwargs): + return self.request("HEAD", url, **kwargs) + + def post(self, url, **kwargs): + return self.request("POST", url, **kwargs) + + def put(self, url, **kwargs): + return self.request("PUT", url, **kwargs) + + def patch(self, url, **kwargs): + return self.request("PATCH", url, **kwargs) + + def delete(self, url, **kwargs): + return self.request("DELETE", url, **kwargs) + + +class HTTPFileSystem(AbstractFileSystem): + """ + Simple File-System for fetching data via HTTP(S) + + This is the BLOCKING version of the normal HTTPFileSystem.
It uses + requests in normal Python and the JS runtime in pyodide. + + ***This implementation is extremely experimental, do not use unless + you are testing pyodide/pyscript integration*** + """ + + protocol = ("http", "https", "sync_http", "sync_https") + sep = "/" + + def __init__( + self, + simple_links=True, + block_size=None, + same_scheme=True, + cache_type="readahead", + cache_options=None, + client_kwargs=None, + encoded=False, + **storage_options, + ): + """ + + Parameters + ---------- + block_size: int + Blocks to read bytes; if 0, will default to raw requests file-like + objects instead of HTTPFile instances + simple_links: bool + If True, will consider both HTML <a> tags and anything that looks + like a URL; if False, will consider only the former. + same_scheme: bool (default True) + When doing ls/glob, if this is True, only consider paths that have + http/https matching the input URLs. + size_policy: this argument is deprecated + client_kwargs: dict + Passed to aiohttp.ClientSession, see + https://docs.aiohttp.org/en/stable/client_reference.html + For example, ``{'auth': aiohttp.BasicAuth('user', 'pass')}`` + storage_options: key-value + Any other parameters passed on to requests + cache_type, cache_options: defaults used in open + """ + super().__init__(self, **storage_options) + self.block_size = block_size if block_size is not None else DEFAULT_BLOCK_SIZE + self.simple_links = simple_links + self.same_schema = same_scheme + self.cache_type = cache_type + self.cache_options = cache_options + self.client_kwargs = client_kwargs or {} + self.encoded = encoded + self.kwargs = storage_options + + try: + import js # noqa: F401 + + logger.debug("Starting JS session") + self.session = RequestsSessionShim() + self.js = True + except Exception as e: + import requests + + logger.debug("Starting cpython session because of: %s", e) + self.session = requests.Session(**(client_kwargs or {})) + self.js = False + + request_options = copy(storage_options) + self.use_listings_cache = request_options.pop("use_listings_cache", False) + request_options.pop("listings_expiry_time", None) + request_options.pop("max_paths", None) + request_options.pop("skip_instance_cache", None) + self.kwargs = request_options + + @property + def fsid(self): + return "http_sync" + + def encode_url(self, url): + if yarl: + return yarl.URL(url, encoded=self.encoded) + return url + + @classmethod + def _strip_protocol(cls, path: str) -> str: + """For HTTP, we always want to keep the full URL""" + path = path.replace("http_sync://", "http://").replace( + "https_sync://", "https://" + ) + return path + + @classmethod + def _parent(cls, path): + # override, since _strip_protocol is different for URLs + par = super()._parent(path) + if len(par) > 7: # "http://..." 
+ return par + return "" + + def _ls_real(self, url, detail=True, **kwargs): + # ignoring URL-encoded arguments + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(url) + r = self.session.get(self.encode_url(url), **kw) + self._raise_not_found_for_status(r, url) + text = r.text + if self.simple_links: + links = ex2.findall(text) + [u[2] for u in ex.findall(text)] + else: + links = [u[2] for u in ex.findall(text)] + out = set() + parts = urlparse(url) + for l in links: + if isinstance(l, tuple): + l = l[1] + if l.startswith("/") and len(l) > 1: + # absolute URL on this server + l = parts.scheme + "://" + parts.netloc + l + if l.startswith("http"): + if self.same_schema and l.startswith(url.rstrip("/") + "/"): + out.add(l) + elif l.replace("https", "http").startswith( + url.replace("https", "http").rstrip("/") + "/" + ): + # allowed to cross http <-> https + out.add(l) + else: + if l not in ["..", "../"]: + # Ignore FTP-like "parent" + out.add("/".join([url.rstrip("/"), l.lstrip("/")])) + if not out and url.endswith("/"): + out = self._ls_real(url.rstrip("/"), detail=False) + if detail: + return [ + { + "name": u, + "size": None, + "type": "directory" if u.endswith("/") else "file", + } + for u in out + ] + else: + return sorted(out) + + def ls(self, url, detail=True, **kwargs): + if self.use_listings_cache and url in self.dircache: + out = self.dircache[url] + else: + out = self._ls_real(url, detail=detail, **kwargs) + self.dircache[url] = out + return out + + def _raise_not_found_for_status(self, response, url): + """ + Raises FileNotFoundError for 404s, otherwise uses raise_for_status. + """ + if response.status_code == 404: + raise FileNotFoundError(url) + response.raise_for_status() + + def cat_file(self, url, start=None, end=None, **kwargs): + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(url) + + if start is not None or end is not None: + if start == end: + return b"" + headers = kw.pop("headers", {}).copy() + + headers["Range"] = self._process_limits(url, start, end) + kw["headers"] = headers + r = self.session.get(self.encode_url(url), **kw) + self._raise_not_found_for_status(r, url) + return r.content + + def get_file( + self, rpath, lpath, chunk_size=5 * 2**20, callback=_DEFAULT_CALLBACK, **kwargs + ): + kw = self.kwargs.copy() + kw.update(kwargs) + logger.debug(rpath) + r = self.session.get(self.encode_url(rpath), **kw) + try: + size = int( + r.headers.get("content-length", None) + or r.headers.get("Content-Length", None) + ) + except (ValueError, KeyError, TypeError): + size = None + + callback.set_size(size) + self._raise_not_found_for_status(r, rpath) + if not isfilelike(lpath): + lpath = open(lpath, "wb") + for chunk in r.iter_content(chunk_size, decode_unicode=False): + lpath.write(chunk) + callback.relative_update(len(chunk)) + + def put_file( + self, + lpath, + rpath, + chunk_size=5 * 2**20, + callback=_DEFAULT_CALLBACK, + method="post", + **kwargs, + ): + def gen_chunks(): + # Support passing arbitrary file-like objects + # and use them instead of streams. 
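+ # A plain path is opened below and sized by seeking to its end; a caller-supplied file object may not be seekable, so its ``size`` attribute (if any) is reported instead.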
+ if isinstance(lpath, io.IOBase): + context = nullcontext(lpath) + use_seek = False # might not support seeking + else: + context = open(lpath, "rb") + use_seek = True + + with context as f: + if use_seek: + callback.set_size(f.seek(0, 2)) + f.seek(0) + else: + callback.set_size(getattr(f, "size", None)) + + chunk = f.read(chunk_size) + while chunk: + yield chunk + callback.relative_update(len(chunk)) + chunk = f.read(chunk_size) + + kw = self.kwargs.copy() + kw.update(kwargs) + + method = method.lower() + if method not in ("post", "put"): + raise ValueError( + f"method has to be either 'post' or 'put', not: {method!r}" + ) + + meth = getattr(self.session, method) + resp = meth(rpath, data=gen_chunks(), **kw) + self._raise_not_found_for_status(resp, rpath) + + def _process_limits(self, url, start, end): + """Helper for "Range"-based _cat_file""" + size = None + suff = False + if start is not None and start < 0: + # if start is negative and end None, end is the "suffix length" + if end is None: + end = -start + start = "" + suff = True + else: + size = size or self.info(url)["size"] + start = size + start + elif start is None: + start = 0 + if not suff: + if end is not None and end < 0: + if start is not None: + size = size or self.info(url)["size"] + end = size + end + elif end is None: + end = "" + if isinstance(end, int): + end -= 1 # bytes range is inclusive + return f"bytes={start}-{end}" + + def exists(self, path, **kwargs): + kw = self.kwargs.copy() + kw.update(kwargs) + try: + logger.debug(path) + r = self.session.get(self.encode_url(path), **kw) + return r.status_code < 400 + except Exception: + return False + + def isfile(self, path, **kwargs): + return self.exists(path, **kwargs) + + def _open( + self, + path, + mode="rb", + block_size=None, + autocommit=None, # XXX: This differs from the base class. + cache_type=None, + cache_options=None, + size=None, + **kwargs, + ): + """Make a file-like object + + Parameters + ---------- + path: str + Full URL with protocol + mode: string + must be "rb" + block_size: int or None + Bytes to download in one request; use instance value if None. If + zero, will return a streaming Requests file-like instance. + kwargs: key-value + Any other parameters, passed to requests calls + """ + if mode != "rb": + raise NotImplementedError + block_size = block_size if block_size is not None else self.block_size + kw = self.kwargs.copy() + kw.update(kwargs) + size = size or self.info(path, **kwargs)["size"] + if block_size and size: + return HTTPFile( + self, + path, + session=self.session, + block_size=block_size, + mode=mode, + size=size, + cache_type=cache_type or self.cache_type, + cache_options=cache_options or self.cache_options, + **kw, + ) + else: + return HTTPStreamFile( + self, + path, + mode=mode, + session=self.session, + **kw, + ) + + def ukey(self, url): + """Unique identifier; assume HTTP files are static, unchanging""" + return tokenize(url, self.kwargs, self.protocol) + + def info(self, url, **kwargs): + """Get info of URL + + Tries to access location via HEAD, and then GET methods, but does + not fetch the data. + + It is possible that the server does not supply any size information, in + which case size will be given as None (and certain operations on the + corresponding file will not work). 
+ """ + info = {} + for policy in ["head", "get"]: + try: + info.update( + _file_info( + self.encode_url(url), + size_policy=policy, + session=self.session, + **self.kwargs, + **kwargs, + ) + ) + if info.get("size") is not None: + break + except Exception as exc: + if policy == "get": + # If get failed, then raise a FileNotFoundError + raise FileNotFoundError(url) from exc + logger.debug(str(exc)) + + return {"name": url, "size": None, **info, "type": "file"} + + def glob(self, path, maxdepth=None, **kwargs): + """ + Find files by glob-matching. + + This implementation is idntical to the one in AbstractFileSystem, + but "?" is not considered as a character for globbing, because it is + so common in URLs, often identifying the "query" part. + """ + import re + + ends = path.endswith("/") + path = self._strip_protocol(path) + indstar = path.find("*") if path.find("*") >= 0 else len(path) + indbrace = path.find("[") if path.find("[") >= 0 else len(path) + + ind = min(indstar, indbrace) + + detail = kwargs.pop("detail", False) + + if not has_magic(path): + root = path + depth = 1 + if ends: + path += "/*" + elif self.exists(path): + if not detail: + return [path] + else: + return {path: self.info(path)} + else: + if not detail: + return [] # glob of non-existent returns empty + else: + return {} + elif "/" in path[:ind]: + ind2 = path[:ind].rindex("/") + root = path[: ind2 + 1] + depth = None if "**" in path else path[ind2 + 1 :].count("/") + 1 + else: + root = "" + depth = None if "**" in path else path[ind + 1 :].count("/") + 1 + + allpaths = self.find( + root, maxdepth=maxdepth or depth, withdirs=True, detail=True, **kwargs + ) + # Escape characters special to python regex, leaving our supported + # special characters in place. + # See https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html + # for shell globbing details. + pattern = ( + "^" + + ( + path.replace("\\", r"\\") + .replace(".", r"\.") + .replace("+", r"\+") + .replace("//", "/") + .replace("(", r"\(") + .replace(")", r"\)") + .replace("|", r"\|") + .replace("^", r"\^") + .replace("$", r"\$") + .replace("{", r"\{") + .replace("}", r"\}") + .rstrip("/") + ) + + "$" + ) + pattern = re.sub("[*]{2}", "=PLACEHOLDER=", pattern) + pattern = re.sub("[*]", "[^/]*", pattern) + pattern = re.compile(pattern.replace("=PLACEHOLDER=", ".*")) + out = { + p: allpaths[p] + for p in sorted(allpaths) + if pattern.match(p.replace("//", "/").rstrip("/")) + } + if detail: + return out + else: + return list(out) + + def isdir(self, path): + # override, since all URLs are (also) files + try: + return bool(self.ls(path)) + except (FileNotFoundError, ValueError): + return False + + +class HTTPFile(AbstractBufferedFile): + """ + A file-like object pointing to a remove HTTP(S) resource + + Supports only reading, with read-ahead of a predermined block-size. + + In the case that the server does not supply the filesize, only reading of + the complete file in one go is supported. + + Parameters + ---------- + url: str + Full URL of the remote resource, including the protocol + session: requests.Session or None + All calls will be made within this session, to avoid restarting + connections where the server allows this + block_size: int or None + The amount of read-ahead to do, in bytes. Default is 5MB, or the value + configured for the FileSystem creating this file + size: None or int + If given, this is the size of the file in bytes, and we don't attempt + to call the server to find the value. 
+ kwargs: all other key-values are passed to requests calls. + """ + + def __init__( + self, + fs, + url, + session=None, + block_size=None, + mode="rb", + cache_type="bytes", + cache_options=None, + size=None, + **kwargs, + ): + if mode != "rb": + raise NotImplementedError("File mode not supported") + self.url = url + self.session = session + self.details = {"name": url, "size": size, "type": "file"} + super().__init__( + fs=fs, + path=url, + mode=mode, + block_size=block_size, + cache_type=cache_type, + cache_options=cache_options, + **kwargs, + ) + + def read(self, length=-1): + """Read bytes from file + + Parameters + ---------- + length: int + Read up to this many bytes. If negative, read all content to end of + file. If the server has not supplied the filesize, attempting to + read only part of the data will raise a ValueError. + """ + if ( + (length < 0 and self.loc == 0) # explicit read all + # but not when the size is known and fits into a block anyways + and not (self.size is not None and self.size <= self.blocksize) + ): + self._fetch_all() + if self.size is None: + if length < 0: + self._fetch_all() + else: + length = min(self.size - self.loc, length) + return super().read(length) + + def _fetch_all(self): + """Read whole file in one shot, without caching + + This is only called when position is still at zero, + and read() is called without a byte-count. + """ + logger.debug(f"Fetch all for {self}") + if not isinstance(self.cache, AllBytes): + r = self.session.get(self.fs.encode_url(self.url), **self.kwargs) + r.raise_for_status() + out = r.content + self.cache = AllBytes(size=len(out), fetcher=None, blocksize=None, data=out) + self.size = len(out) + + def _parse_content_range(self, headers): + """Parse the Content-Range header""" + s = headers.get("Content-Range", "") + m = re.match(r"bytes (\d+-\d+|\*)/(\d+|\*)", s) + if not m: + return None, None, None + + if m[1] == "*": + start = end = None + else: + start, end = [int(x) for x in m[1].split("-")] + total = None if m[2] == "*" else int(m[2]) + return start, end, total + + def _fetch_range(self, start, end): + """Download a block of data + + The expectation is that the server returns only the requested bytes, + with HTTP code 206. If this is not the case, we first check the headers, + and then stream the output - if the data size is bigger than we + requested, an exception is raised. + """ + logger.debug(f"Fetch range for {self}: {start}-{end}") + kwargs = self.kwargs.copy() + headers = kwargs.pop("headers", {}).copy() + headers["Range"] = f"bytes={start}-{end - 1}" + logger.debug("%s : %s", self.url, headers["Range"]) + r = self.session.get(self.fs.encode_url(self.url), headers=headers, **kwargs) + if r.status_code == 416: + # range request outside file + return b"" + r.raise_for_status() + + # If the server has handled the range request, it should reply + # with status 206 (partial content). But we'll guess that a suitable + # Content-Range header or a Content-Length no more than the + # requested range also mean we have got the desired range. + cl = r.headers.get("Content-Length", r.headers.get("content-length", end + 1)) + response_is_range = ( + r.status_code == 206 + or self._parse_content_range(r.headers)[0] == start + or int(cl) <= end - start + ) + + if response_is_range: + # partial content, as expected + out = r.content + elif start > 0: + raise ValueError( + "The HTTP server doesn't appear to support range requests. " + "Only reading this file from the beginning is supported. 
" + "Open with block_size=0 for a streaming file interface." + ) + else: + # Response is not a range, but we want the start of the file, + # so we can read the required amount anyway. + cl = 0 + out = [] + for chunk in r.iter_content(2**20, False): + out.append(chunk) + cl += len(chunk) + out = b"".join(out)[: end - start] + return out + + +magic_check = re.compile("([*[])") + + +def has_magic(s): + match = magic_check.search(s) + return match is not None + + +class HTTPStreamFile(AbstractBufferedFile): + def __init__(self, fs, url, mode="rb", session=None, **kwargs): + self.url = url + self.session = session + if mode != "rb": + raise ValueError + self.details = {"name": url, "size": None} + super().__init__(fs=fs, path=url, mode=mode, cache_type="readahead", **kwargs) + + r = self.session.get(self.fs.encode_url(url), stream=True, **kwargs) + self.fs._raise_not_found_for_status(r, url) + self.it = r.iter_content(1024, False) + self.leftover = b"" + + self.r = r + + def seek(self, *args, **kwargs): + raise ValueError("Cannot seek streaming HTTP file") + + def read(self, num=-1): + bufs = [self.leftover] + leng = len(self.leftover) + while leng < num or num < 0: + try: + out = self.it.__next__() + except StopIteration: + break + if out: + bufs.append(out) + else: + break + leng += len(out) + out = b"".join(bufs) + if num >= 0: + self.leftover = out[num:] + out = out[:num] + else: + self.leftover = b"" + self.loc += len(out) + return out + + def close(self): + self.r.close() + self.closed = True + + +def get_range(session, url, start, end, **kwargs): + # explicit get a range when we know it must be safe + kwargs = kwargs.copy() + headers = kwargs.pop("headers", {}).copy() + headers["Range"] = f"bytes={start}-{end - 1}" + r = session.get(url, headers=headers, **kwargs) + r.raise_for_status() + return r.content + + +def _file_info(url, session, size_policy="head", **kwargs): + """Call HEAD on the server to get details about the file (size/checksum etc.) + + Default operation is to explicitly allow redirects and use encoding + 'identity' (no compression) to get the true size of the target. 
+ """ + logger.debug("Retrieve file size for %s", url) + kwargs = kwargs.copy() + ar = kwargs.pop("allow_redirects", True) + head = kwargs.get("headers", {}).copy() + # TODO: not allowed in JS + # head["Accept-Encoding"] = "identity" + kwargs["headers"] = head + + info = {} + if size_policy == "head": + r = session.head(url, allow_redirects=ar, **kwargs) + elif size_policy == "get": + r = session.get(url, allow_redirects=ar, **kwargs) + else: + raise TypeError(f'size_policy must be "head" or "get", got {size_policy}') + r.raise_for_status() + + # TODO: + # recognise lack of 'Accept-Ranges', + # or 'Accept-Ranges': 'none' (not 'bytes') + # to mean streaming only, no random access => return None + if "Content-Length" in r.headers: + info["size"] = int(r.headers["Content-Length"]) + elif "Content-Range" in r.headers: + info["size"] = int(r.headers["Content-Range"].split("/")[1]) + elif "content-length" in r.headers: + info["size"] = int(r.headers["content-length"]) + elif "content-range" in r.headers: + info["size"] = int(r.headers["content-range"].split("/")[1]) + + for checksum_field in ["ETag", "Content-MD5", "Digest"]: + if r.headers.get(checksum_field): + info[checksum_field] = r.headers[checksum_field] + + return info + + +# importing this is enough to register it +def register(): + register_implementation("http", HTTPFileSystem, clobber=True) + register_implementation("https", HTTPFileSystem, clobber=True) + register_implementation("sync_http", HTTPFileSystem, clobber=True) + register_implementation("sync_https", HTTPFileSystem, clobber=True) + + +register() + + +def unregister(): + from fsspec.implementations.http import HTTPFileSystem + + register_implementation("http", HTTPFileSystem, clobber=True) + register_implementation("https", HTTPFileSystem, clobber=True) diff --git a/lib/python3.10/site-packages/fsspec/implementations/jupyter.py b/lib/python3.10/site-packages/fsspec/implementations/jupyter.py new file mode 100644 index 0000000000000000000000000000000000000000..2839f4c1feea56dddd54bdc00f0b884c8461d29e --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/jupyter.py @@ -0,0 +1,124 @@ +import base64 +import io +import re + +import requests + +import fsspec + + +class JupyterFileSystem(fsspec.AbstractFileSystem): + """View of the files as seen by a Jupyter server (notebook or lab)""" + + protocol = ("jupyter", "jlab") + + def __init__(self, url, tok=None, **kwargs): + """ + + Parameters + ---------- + url : str + Base URL of the server, like "http://127.0.0.1:8888". May include + token in the string, which is given by the process when starting up + tok : str + If the token is obtained separately, can be given here + kwargs + """ + if "?" 
in url:
+            if tok is None:
+                try:
+                    tok = re.findall("token=([a-z0-9]+)", url)[0]
+                except IndexError as e:
+                    raise ValueError("Could not determine token") from e
+            url = url.split("?", 1)[0]
+        self.url = url.rstrip("/") + "/api/contents"
+        self.session = requests.Session()
+        if tok:
+            self.session.headers["Authorization"] = f"token {tok}"
+
+        super().__init__(**kwargs)
+
+    def ls(self, path, detail=True, **kwargs):
+        path = self._strip_protocol(path)
+        r = self.session.get(f"{self.url}/{path}")
+        if r.status_code == 404:
+            raise FileNotFoundError(path)
+        r.raise_for_status()
+        out = r.json()
+
+        if out["type"] == "directory":
+            out = out["content"]
+        else:
+            out = [out]
+        for o in out:
+            o["name"] = o.pop("path")
+            o.pop("content")
+            if o["type"] == "notebook":
+                o["type"] = "file"
+        if detail:
+            return out
+        return [o["name"] for o in out]
+
+    def cat_file(self, path, start=None, end=None, **kwargs):
+        path = self._strip_protocol(path)
+        r = self.session.get(f"{self.url}/{path}")
+        if r.status_code == 404:
+            raise FileNotFoundError(path)
+        r.raise_for_status()
+        out = r.json()
+        if out["format"] == "text":
+            # data should be binary
+            b = out["content"].encode()
+        else:
+            b = base64.b64decode(out["content"])
+        return b[start:end]
+
+    def pipe_file(self, path, value, **_):
+        path = self._strip_protocol(path)
+        json = {
+            "name": path.rsplit("/", 1)[-1],
+            "path": path,
+            "size": len(value),
+            "content": base64.b64encode(value).decode(),
+            "format": "base64",
+            "type": "file",
+        }
+        self.session.put(f"{self.url}/{path}", json=json)
+
+    def mkdir(self, path, create_parents=True, **kwargs):
+        path = self._strip_protocol(path)
+        if create_parents and "/" in path:
+            self.mkdir(path.rsplit("/", 1)[0], True)
+        json = {
+            "name": path.rsplit("/", 1)[-1],
+            "path": path,
+            "size": None,
+            "content": None,
+            "type": "directory",
+        }
+        self.session.put(f"{self.url}/{path}", json=json)
+
+    def _rm(self, path):
+        path = self._strip_protocol(path)
+        self.session.delete(f"{self.url}/{path}")
+
+    def _open(self, path, mode="rb", **kwargs):
+        path = self._strip_protocol(path)
+        if mode == "rb":
+            data = self.cat_file(path)
+            return io.BytesIO(data)
+        else:
+            return SimpleFileWriter(self, path, mode="wb")
+
+
+class SimpleFileWriter(fsspec.spec.AbstractBufferedFile):
+    def _upload_chunk(self, final=False):
+        """Never uploads a chunk until file is done
+
+        Not suitable for large files
+        """
+        if final is False:
+            return False
+        self.buffer.seek(0)
+        data = self.buffer.read()
+        self.fs.pipe_file(self.path, data)
diff --git a/lib/python3.10/site-packages/fsspec/implementations/tar.py b/lib/python3.10/site-packages/fsspec/implementations/tar.py
new file mode 100644
index 0000000000000000000000000000000000000000..412e5ba4d2cdea7db090dc96412e697909a38d78
--- /dev/null
+++ b/lib/python3.10/site-packages/fsspec/implementations/tar.py
@@ -0,0 +1,124 @@
+import logging
+import tarfile
+
+import fsspec
+from fsspec.archive import AbstractArchiveFileSystem
+from fsspec.compression import compr
+from fsspec.utils import infer_compression
+
+typemap = {b"0": "file", b"5": "directory"}
+
+logger = logging.getLogger("tar")
+
+
+class TarFileSystem(AbstractArchiveFileSystem):
+    """Compressed Tar archives as a file-system (read-only)
+
+    Supports the following formats:
+    tar.gz, tar.bz2, tar.xz
+    """
+
+    root_marker = ""
+    protocol = "tar"
+    cachable = False
+
+    def __init__(
+        self,
+        fo="",
+        index_store=None,
+        target_options=None,
+        target_protocol=None,
+        compression=None,
+        **kwargs,
+    ):
+        super().__init__(**kwargs)
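+
+        # A sketch of typical use (the archive name below is hypothetical):
+        #
+        #     fs = TarFileSystem("archive.tar.gz")  # or pass an open file-like
+        #     fs.ls("")                             # list the archive root
+        #
+        # The steps below open the target if it was given as a URL/path,
+        # infer compression from the file name where possible, and build an
+        # index of member offsets.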
+        target_options = target_options or {}
+
+        if isinstance(fo, str):
+            self.of = fsspec.open(fo, protocol=target_protocol, **target_options)
+            fo = self.of.open()  # keep the reference
+
+        # Try to infer compression.
+        if compression is None:
+            name = None
+
+            # Try different ways to get hold of the filename. `fo` might either
+            # be a `fsspec.LocalFileOpener`, an `io.BufferedReader` or an
+            # `fsspec.AbstractFileSystem` instance.
+            try:
+                # Amended io.BufferedReader or similar.
+                # This uses a "protocol extension" where original filenames are
+                # propagated to archive-like filesystems in order to let them
+                # infer the right compression appropriately.
+                if hasattr(fo, "original"):
+                    name = fo.original
+
+                # fsspec.LocalFileOpener
+                elif hasattr(fo, "path"):
+                    name = fo.path
+
+                # io.BufferedReader
+                elif hasattr(fo, "name"):
+                    name = fo.name
+
+                # fsspec.AbstractFileSystem
+                elif hasattr(fo, "info"):
+                    name = fo.info()["name"]
+
+            except Exception as ex:
+                logger.warning(
+                    f"Unable to determine file name, not inferring compression: {ex}"
+                )
+
+            if name is not None:
+                compression = infer_compression(name)
+                logger.info(f"Inferred compression {compression} from file name {name}")
+
+        if compression is not None:
+            # TODO: tarfile already implements compression with modes like "'r:gz'",
+            # but would seeking to an offset in the file then work?
+            fo = compr[compression](fo)
+
+        self._fo_ref = fo
+        self.fo = fo  # the whole instance is a context
+        self.tar = tarfile.TarFile(fileobj=self.fo)
+        self.dir_cache = None
+
+        self.index_store = index_store
+        self.index = None
+        self._index()
+
+    def _index(self):
+        # TODO: load and set saved index, if exists
+        out = {}
+        for ti in self.tar:
+            info = ti.get_info()
+            info["type"] = typemap.get(info["type"], "file")
+            name = ti.get_info()["name"].rstrip("/")
+            out[name] = (info, ti.offset_data)
+
+        self.index = out
+        # TODO: save index to self.index_store here, if set
+
+    def _get_dirs(self):
+        if self.dir_cache is not None:
+            return
+
+        # This enables ls to get directories as children as well as files
+        self.dir_cache = {
+            dirname: {"name": dirname, "size": 0, "type": "directory"}
+            for dirname in self._all_dirnames(self.tar.getnames())
+        }
+        for member in self.tar.getmembers():
+            info = member.get_info()
+            info["name"] = info["name"].rstrip("/")
+            info["type"] = typemap.get(info["type"], "file")
+            self.dir_cache[info["name"]] = info
+
+    def _open(self, path, mode="rb", **kwargs):
+        if mode != "rb":
+            raise ValueError("Read-only filesystem implementation")
+        details, offset = self.index[path]
+        if details["type"] != "file":
+            raise ValueError("Can only handle regular files")
+        return self.tar.extractfile(path)
diff --git a/lib/python3.10/site-packages/fsspec/implementations/webhdfs.py b/lib/python3.10/site-packages/fsspec/implementations/webhdfs.py
new file mode 100644
index 0000000000000000000000000000000000000000..c6e0d8446f875ec9578cccf055aeb47cd1c2e996
--- /dev/null
+++ b/lib/python3.10/site-packages/fsspec/implementations/webhdfs.py
@@ -0,0 +1,485 @@
+# https://hadoop.apache.org/docs/r1.0.4/webhdfs.html
+
+import logging
+import os
+import secrets
+import shutil
+import tempfile
+import uuid
+from contextlib import suppress
+from urllib.parse import quote
+
+import requests
+
+from ..spec import AbstractBufferedFile, AbstractFileSystem
+from ..utils import infer_storage_options, tokenize
+
+logger = logging.getLogger("webhdfs")
+
+
+class WebHDFS(AbstractFileSystem):
+    """
+    Interface to HDFS over HTTP using the WebHDFS API. Also supports HttpFS gateways.
+
+    Four auth mechanisms are supported:
+
+    insecure: no auth is done, and the user is assumed to be whoever they
+        say they are (parameter ``user``), or a predefined value such as
+        "dr.who" if not given
+    spnego: when kerberos authentication is enabled, auth is negotiated by
+        requests_kerberos https://github.com/requests/requests-kerberos .
+        This establishes a session based on existing kinit login and/or
+        specified principal/password; parameters are passed with ``kerb_kwargs``
+    token: uses an existing Hadoop delegation token from another secured
+        service. This client can also generate such tokens when not in
+        insecure mode. Note that tokens expire, but can be renewed (by a
+        previously specified user) and may allow for proxying.
+    basic-auth: used when both parameter ``user`` and parameter ``password``
+        are provided.
+
+    """
+
+    tempdir = str(tempfile.gettempdir())
+    protocol = "webhdfs", "webHDFS"
+
+    def __init__(
+        self,
+        host,
+        port=50070,
+        kerberos=False,
+        token=None,
+        user=None,
+        password=None,
+        proxy_to=None,
+        kerb_kwargs=None,
+        data_proxy=None,
+        use_https=False,
+        session_cert=None,
+        session_verify=True,
+        **kwargs,
+    ):
+        """
+        Parameters
+        ----------
+        host: str
+            Name-node address
+        port: int
+            Port for webHDFS
+        kerberos: bool
+            Whether to authenticate with kerberos for this connection
+        token: str or None
+            If given, use this token on every call to authenticate. A user
+            and user-proxy may be encoded in the token and should not also
+            be given
+        user: str or None
+            If given, assert the user name to connect with
+        password: str or None
+            If given, assert the password to use for basic auth. If password
+            is provided, user must be provided also
+        proxy_to: str or None
+            If given, the user has the authority to proxy, and this value is
+            the user in whose name actions are taken
+        kerb_kwargs: dict
+            Any extra arguments for HTTPKerberosAuth; see the
+            ``requests_kerberos`` documentation
+        data_proxy: dict, callable or None
+            If given, map data-node addresses. This can be necessary if the
+            HDFS cluster is behind a proxy, running on Docker or otherwise has
+            a mismatch between the host-names given by the name-node and the
+            address by which to refer to them from the client. If a dict,
+            maps host names ``host->data_proxy[host]``; if a callable, full
+            URLs are passed, and function must conform to
+            ``url->data_proxy(url)``.
+        use_https: bool
+            Whether to connect to the Name-node using HTTPS instead of HTTP
+        session_cert: str or Tuple[str, str] or None
+            Path to a certificate file, or tuple of (cert, key) files to use
+            for the requests.Session
+        session_verify: str, bool or None
+            Path to a certificate file to use for verifying the requests.Session.
+        kwargs
+        """
+        if self._cached:
+            return
+        super().__init__(**kwargs)
+        self.url = f"{'https' if use_https else 'http'}://{host}:{port}/webhdfs/v1"
+        self.kerb = kerberos
+        self.kerb_kwargs = kerb_kwargs or {}
+        self.pars = {}
+        self.proxy = data_proxy or {}
+        if token is not None:
+            if user is not None or proxy_to is not None:
+                raise ValueError(
+                    "If passing a delegation token, must not set "
+                    "user or proxy_to, as these are encoded in the"
+                    " token"
+                )
+            self.pars["delegation"] = token
+        self.user = user
+        self.password = password
+
+        if password is not None:
+            if user is None:
+                raise ValueError(
+                    "If passing a password, the user must also be "
+                    "set in order to set up the basic-auth"
+                )
+        else:
+            if user is not None:
+                self.pars["user.name"] = user
+
+        if proxy_to is not None:
+            self.pars["doas"] = proxy_to
+        if kerberos and user is not None:
+            raise ValueError(
+                "If using Kerberos auth, do not specify the "
+                "user, this is handled by kinit."
+            )
+
+        self.session_cert = session_cert
+        self.session_verify = session_verify
+
+        self._connect()
+
+        self._fsid = f"webhdfs_{tokenize(host, port)}"
+
+    @property
+    def fsid(self):
+        return self._fsid
+
+    def _connect(self):
+        self.session = requests.Session()
+
+        if self.session_cert:
+            self.session.cert = self.session_cert
+
+        self.session.verify = self.session_verify
+
+        if self.kerb:
+            from requests_kerberos import HTTPKerberosAuth
+
+            self.session.auth = HTTPKerberosAuth(**self.kerb_kwargs)
+
+        if self.user is not None and self.password is not None:
+            from requests.auth import HTTPBasicAuth
+
+            self.session.auth = HTTPBasicAuth(self.user, self.password)
+
+    def _call(self, op, method="get", path=None, data=None, redirect=True, **kwargs):
+        path = self._strip_protocol(path) if path is not None else ""
+        url = self._apply_proxy(self.url + quote(path, safe="/="))
+        args = kwargs.copy()
+        args.update(self.pars)
+        args["op"] = op.upper()
+        logger.debug("sending %s with %s", url, method)
+        out = self.session.request(
+            method=method.upper(),
+            url=url,
+            params=args,
+            data=data,
+            allow_redirects=redirect,
+        )
+        if out.status_code in [400, 401, 403, 404, 500]:
+            try:
+                err = out.json()
+                msg = err["RemoteException"]["message"]
+                exp = err["RemoteException"]["exception"]
+            except (ValueError, KeyError):
+                pass
+            else:
+                if exp in ["IllegalArgumentException", "UnsupportedOperationException"]:
+                    raise ValueError(msg)
+                elif exp in ["SecurityException", "AccessControlException"]:
+                    raise PermissionError(msg)
+                elif exp in ["FileNotFoundException"]:
+                    raise FileNotFoundError(msg)
+                else:
+                    raise RuntimeError(msg)
+        out.raise_for_status()
+        return out
+
+    def _open(
+        self,
+        path,
+        mode="rb",
+        block_size=None,
+        autocommit=True,
+        replication=None,
+        permissions=None,
+        **kwargs,
+    ):
+        """
+
+        Parameters
+        ----------
+        path: str
+            File location
+        mode: str
+            'rb', 'wb', etc.
+        block_size: int
+            Client buffer size for read-ahead or write buffer
+        autocommit: bool
+            If False, writes to temporary file that only gets put in final
+            location upon commit
+        replication: int
+            Number of copies of file on the cluster, write mode only
+        permissions: str or int
+            posix permissions, write mode only
+        kwargs
+
+        Returns
+        -------
+        WebHDFile instance
+        """
+        block_size = block_size or self.blocksize
+        return WebHDFile(
+            self,
+            path,
+            mode=mode,
+            block_size=block_size,
+            tempdir=self.tempdir,
+            autocommit=autocommit,
+            replication=replication,
+            permissions=permissions,
+        )
+
+    @staticmethod
+    def _process_info(info):
+        info["type"] = info["type"].lower()
+        info["size"] = info["length"]
+        return info
+
+    @classmethod
+    def _strip_protocol(cls, path):
+        return infer_storage_options(path)["path"]
+
+    @staticmethod
+    def _get_kwargs_from_urls(urlpath):
+        out = infer_storage_options(urlpath)
+        out.pop("path", None)
+        out.pop("protocol", None)
+        if "username" in out:
+            out["user"] = out.pop("username")
+        return out
+
+    def info(self, path):
+        out = self._call("GETFILESTATUS", path=path)
+        info = out.json()["FileStatus"]
+        info["name"] = path
+        return self._process_info(info)
+
+    def ls(self, path, detail=False):
+        out = self._call("LISTSTATUS", path=path)
+        infos = out.json()["FileStatuses"]["FileStatus"]
+        for info in infos:
+            self._process_info(info)
+            info["name"] = path.rstrip("/") + "/" + info["pathSuffix"]
+        if detail:
+            return sorted(infos, key=lambda i: i["name"])
+        else:
+            return sorted(info["name"] for info in infos)
+
+    def content_summary(self, path):
+        """Total numbers of files, directories and bytes under path"""
+        out = self._call("GETCONTENTSUMMARY", path=path)
+        return out.json()["ContentSummary"]
+
+    def ukey(self, path):
+        """Checksum info of file, giving method and result"""
+        out = self._call("GETFILECHECKSUM", path=path, redirect=False)
+        if "Location" in out.headers:
+            location = self._apply_proxy(out.headers["Location"])
+            out2 = self.session.get(location)
+            out2.raise_for_status()
+            return out2.json()["FileChecksum"]
+        else:
+            out.raise_for_status()
+            return out.json()["FileChecksum"]
+
+    def home_directory(self):
+        """Get user's home directory"""
+        out = self._call("GETHOMEDIRECTORY")
+        return out.json()["Path"]
+
+    def get_delegation_token(self, renewer=None):
+        """Retrieve token which can give the same authority to other users
+
+        Parameters
+        ----------
+        renewer: str or None
+            User who may use this token; if None, will be current user
+        """
+        if renewer:
+            out = self._call("GETDELEGATIONTOKEN", renewer=renewer)
+        else:
+            out = self._call("GETDELEGATIONTOKEN")
+        t = out.json()["Token"]
+        if t is None:
+            raise ValueError("No token available for this user/security context")
+        return t["urlString"]
+
+    def renew_delegation_token(self, token):
+        """Make token live longer. Returns new expiry time"""
+        out = self._call("RENEWDELEGATIONTOKEN", method="put", token=token)
+        return out.json()["long"]
+
+    def cancel_delegation_token(self, token):
+        """Stop the token from being useful"""
+        self._call("CANCELDELEGATIONTOKEN", method="put", token=token)
+
+    def chmod(self, path, mod):
+        """Set the permission at path
+
+        Parameters
+        ----------
+        path: str
+            location to set (file or directory)
+        mod: str or int
+            posix representation of permission, given as an octal string,
+            e.g. '777', or as an int, e.g. 0o777
+        """
+        self._call("SETPERMISSION", method="put", path=path, permission=mod)
+
+    def chown(self, path, owner=None, group=None):
+        """Change owning user and/or group"""
+        kwargs = {}
+        if owner is not None:
+            kwargs["owner"] = owner
+        if group is not None:
+            kwargs["group"] = group
+        self._call("SETOWNER", method="put", path=path, **kwargs)
+
+    def set_replication(self, path, replication):
+        """
+        Set file replication factor
+
+        Parameters
+        ----------
+        path: str
+            File location (not for directories)
+        replication: int
+            Number of copies of file on the cluster. Should be smaller than
+            number of data nodes; normally 3 on most systems.
+        """
+        self._call("SETREPLICATION", path=path, method="put", replication=replication)
+
+    def mkdir(self, path, **kwargs):
+        self._call("MKDIRS", method="put", path=path)
+
+    def makedirs(self, path, exist_ok=False):
+        if exist_ok is False and self.exists(path):
+            raise FileExistsError(path)
+        self.mkdir(path)
+
+    def mv(self, path1, path2, **kwargs):
+        self._call("RENAME", method="put", path=path1, destination=path2)
+
+    def rm(self, path, recursive=False, **kwargs):
+        self._call(
+            "DELETE",
+            method="delete",
+            path=path,
+            recursive="true" if recursive else "false",
+        )
+
+    def rm_file(self, path, **kwargs):
+        self.rm(path)
+
+    def cp_file(self, lpath, rpath, **kwargs):
+        with self.open(lpath) as lstream:
+            tmp_fname = "/".join([self._parent(rpath), f".tmp.{secrets.token_hex(16)}"])
+            # Perform an atomic copy (stream to a temporary file and
+            # move it to the actual destination).
+            try:
+                with self.open(tmp_fname, "wb") as rstream:
+                    shutil.copyfileobj(lstream, rstream)
+                self.mv(tmp_fname, rpath)
+            except BaseException:
+                with suppress(FileNotFoundError):
+                    self.rm(tmp_fname)
+                raise
+
+    def _apply_proxy(self, location):
+        if self.proxy and callable(self.proxy):
+            location = self.proxy(location)
+        elif self.proxy:
+            # as a dict
+            for k, v in self.proxy.items():
+                location = location.replace(k, v, 1)
+        return location
+
+
+class WebHDFile(AbstractBufferedFile):
+    """A file living in HDFS over webHDFS"""
+
+    def __init__(self, fs, path, **kwargs):
+        super().__init__(fs, path, **kwargs)
+        kwargs = kwargs.copy()
+        if kwargs.get("permissions", None) is None:
+            kwargs.pop("permissions", None)
+        if kwargs.get("replication", None) is None:
+            kwargs.pop("replication", None)
+        self.permissions = kwargs.pop("permissions", 511)
+        tempdir = kwargs.pop("tempdir")
+        if kwargs.pop("autocommit", False) is False:
+            self.target = self.path
+            self.path = os.path.join(tempdir, str(uuid.uuid4()))
+
+    def _upload_chunk(self, final=False):
+        """Write one part of a multi-block file upload
+
+        Parameters
+        ----------
+        final: bool
+            This is the last block, so it should complete the file, if
+            self.autocommit is True.
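+
+        The buffered data is POSTed to ``self.location``, the redirect URL
+        obtained from the name-node by ``_initiate_upload``.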
+ """ + out = self.fs.session.post( + self.location, + data=self.buffer.getvalue(), + headers={"content-type": "application/octet-stream"}, + ) + out.raise_for_status() + return True + + def _initiate_upload(self): + """Create remote file/upload""" + kwargs = self.kwargs.copy() + if "a" in self.mode: + op, method = "APPEND", "POST" + else: + op, method = "CREATE", "PUT" + kwargs["overwrite"] = "true" + out = self.fs._call(op, method, self.path, redirect=False, **kwargs) + location = self.fs._apply_proxy(out.headers["Location"]) + if "w" in self.mode: + # create empty file to append to + out2 = self.fs.session.put( + location, headers={"content-type": "application/octet-stream"} + ) + out2.raise_for_status() + # after creating empty file, change location to append to + out2 = self.fs._call("APPEND", "POST", self.path, redirect=False, **kwargs) + self.location = self.fs._apply_proxy(out2.headers["Location"]) + + def _fetch_range(self, start, end): + start = max(start, 0) + end = min(self.size, end) + if start >= end or start >= self.size: + return b"" + out = self.fs._call( + "OPEN", path=self.path, offset=start, length=end - start, redirect=False + ) + out.raise_for_status() + if "Location" in out.headers: + location = out.headers["Location"] + out2 = self.fs.session.get(self.fs._apply_proxy(location)) + return out2.content + else: + return out.content + + def commit(self): + self.fs.mv(self.path, self.target) + + def discard(self): + self.fs.rm(self.path) diff --git a/lib/python3.10/site-packages/fsspec/implementations/zip.py b/lib/python3.10/site-packages/fsspec/implementations/zip.py new file mode 100644 index 0000000000000000000000000000000000000000..6db3ae27806106a19a366886ab4b183f85c1cb1a --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/implementations/zip.py @@ -0,0 +1,177 @@ +import os +import zipfile + +import fsspec +from fsspec.archive import AbstractArchiveFileSystem + + +class ZipFileSystem(AbstractArchiveFileSystem): + """Read/Write contents of ZIP archive as a file-system + + Keeps file object open while instance lives. + + This class is pickleable, but not necessarily thread-safe + """ + + root_marker = "" + protocol = "zip" + cachable = False + + def __init__( + self, + fo="", + mode="r", + target_protocol=None, + target_options=None, + compression=zipfile.ZIP_STORED, + allowZip64=True, + compresslevel=None, + **kwargs, + ): + """ + Parameters + ---------- + fo: str or file-like + Contains ZIP, and must exist. If a str, will fetch file using + :meth:`~fsspec.open_files`, which must return one file exactly. + mode: str + Accept: "r", "w", "a" + target_protocol: str (optional) + If ``fo`` is a string, this value can be used to override the + FS protocol inferred from a URL + target_options: dict (optional) + Kwargs passed when instantiating the target FS, if ``fo`` is + a string. 
+    compression, allowZip64, compresslevel: passed to ZipFile
+        Only relevant when creating a ZIP
+    """
+        super().__init__(**kwargs)
+        if mode not in set("rwa"):
+            raise ValueError(f"mode '{mode}' not understood")
+        self.mode = mode
+        if isinstance(fo, (str, os.PathLike)):
+            if mode == "a":
+                m = "r+b"
+            else:
+                m = mode + "b"
+            fo = fsspec.open(
+                fo, mode=m, protocol=target_protocol, **(target_options or {})
+            )
+        self.force_zip_64 = allowZip64
+        self.of = fo
+        self.fo = fo.__enter__()  # the whole instance is a context
+        self.zip = zipfile.ZipFile(
+            self.fo,
+            mode=mode,
+            compression=compression,
+            allowZip64=allowZip64,
+            compresslevel=compresslevel,
+        )
+        self.dir_cache = None
+
+    @classmethod
+    def _strip_protocol(cls, path):
+        # zip file paths are always relative to the archive root
+        return super()._strip_protocol(path).lstrip("/")
+
+    def __del__(self):
+        if hasattr(self, "zip"):
+            self.close()
+            del self.zip
+
+    def close(self):
+        """Commits any write changes to the file. Done on ``del`` too."""
+        self.zip.close()
+
+    def _get_dirs(self):
+        if self.dir_cache is None or self.mode in set("wa"):
+            # when writing, dir_cache is always in the ZipFile's attributes,
+            # not read from the file.
+            files = self.zip.infolist()
+            self.dir_cache = {
+                dirname.rstrip("/"): {
+                    "name": dirname.rstrip("/"),
+                    "size": 0,
+                    "type": "directory",
+                }
+                for dirname in self._all_dirnames(self.zip.namelist())
+            }
+            for z in files:
+                f = {s: getattr(z, s, None) for s in zipfile.ZipInfo.__slots__}
+                f.update(
+                    {
+                        "name": z.filename.rstrip("/"),
+                        "size": z.file_size,
+                        "type": ("directory" if z.is_dir() else "file"),
+                    }
+                )
+                self.dir_cache[f["name"]] = f
+
+    def pipe_file(self, path, value, **kwargs):
+        # override upstream, because we know the exact file size in this case
+        self.zip.writestr(path, value, **kwargs)
+
+    def _open(
+        self,
+        path,
+        mode="rb",
+        block_size=None,
+        autocommit=True,
+        cache_options=None,
+        **kwargs,
+    ):
+        path = self._strip_protocol(path)
+        if "r" in mode and self.mode in set("wa"):
+            if self.exists(path):
+                raise OSError("ZipFS can only be open for reading or writing, not both")
+            raise FileNotFoundError(path)
+        if "r" in self.mode and "w" in mode:
+            raise OSError("ZipFS can only be open for reading or writing, not both")
+        out = self.zip.open(path, mode.strip("b"), force_zip64=self.force_zip_64)
+        if "r" in mode:
+            info = self.info(path)
+            out.size = info["size"]
+            out.name = info["name"]
+        return out
+
+    def find(self, path, maxdepth=None, withdirs=False, detail=False, **kwargs):
+        if maxdepth is not None and maxdepth < 1:
+            raise ValueError("maxdepth must be at least 1")
+
+        # Remove the leading slash, as the zip file paths are always
+        # given without a leading slash
+        path = path.lstrip("/")
+        path_parts = list(filter(lambda s: bool(s), path.split("/")))
+
+        def _matching_starts(file_path):
+            file_parts = filter(lambda s: bool(s), file_path.split("/"))
+            return all(a == b for a, b in zip(path_parts, file_parts))
+
+        self._get_dirs()
+
+        result = {}
+        # To match posix find, if an exact file name is given, we should
+        # return only that file
+        if path in self.dir_cache and self.dir_cache[path]["type"] == "file":
+            result[path] = self.dir_cache[path]
+            return result if detail else [path]
+
+        for file_path, file_info in self.dir_cache.items():
+            if not (path == "" or _matching_starts(file_path)):
+                continue
+
+            if file_info["type"] == "directory":
+                if withdirs:
+                    if file_path not in result:
+                        result[file_path.strip("/")] = file_info
+                continue
+
+            if file_path not in result:
+                result[file_path] = file_info if detail else None
+
+        if maxdepth:
+            path_depth = path.count("/")
+            result = {
+                k: v for k, v in result.items() if k.count("/") - path_depth < maxdepth
+            }
+        return result if detail else sorted(result)
diff --git a/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py b/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ed2ad802ecaf021106c25c03112f29e75c7b2f8
--- /dev/null
+++ b/lib/python3.10/site-packages/fsspec/tests/abstract/__init__.py
@@ -0,0 +1,289 @@
+import os
+from hashlib import md5
+
+import pytest
+
+from fsspec.implementations.local import LocalFileSystem
+from fsspec.tests.abstract.copy import AbstractCopyTests  # noqa: F401
+from fsspec.tests.abstract.get import AbstractGetTests  # noqa: F401
+from fsspec.tests.abstract.open import AbstractOpenTests  # noqa: F401
+from fsspec.tests.abstract.pipe import AbstractPipeTests  # noqa: F401
+from fsspec.tests.abstract.put import AbstractPutTests  # noqa: F401
+
+
+class BaseAbstractFixtures:
+    """
+    Abstract base class containing fixtures that are used by but never need to
+    be overridden in derived filesystem-specific classes to run the abstract
+    tests on such filesystems.
+    """
+
+    @pytest.fixture
+    def fs_bulk_operations_scenario_0(self, fs, fs_join, fs_path):
+        """
+        Scenario on remote filesystem that is used for many cp/get/put tests.
+
+        Cleans up at the end of each test in which it is used.
+        """
+        source = self._bulk_operations_scenario_0(fs, fs_join, fs_path)
+        yield source
+        fs.rm(source, recursive=True)
+
+    @pytest.fixture
+    def fs_glob_edge_cases_files(self, fs, fs_join, fs_path):
+        """
+        Scenario on remote filesystem that is used for glob edge cases cp/get/put tests.
+
+        Cleans up at the end of each test in which it is used.
+        """
+        source = self._glob_edge_cases_files(fs, fs_join, fs_path)
+        yield source
+        fs.rm(source, recursive=True)
+
+    @pytest.fixture
+    def fs_dir_and_file_with_same_name_prefix(self, fs, fs_join, fs_path):
+        """
+        Scenario on remote filesystem that is used to check cp/get/put on directory
+        and file with the same name prefixes.
+
+        Cleans up at the end of each test in which it is used.
+        """
+        source = self._dir_and_file_with_same_name_prefix(fs, fs_join, fs_path)
+        yield source
+        fs.rm(source, recursive=True)
+
+    @pytest.fixture
+    def fs_10_files_with_hashed_names(self, fs, fs_join, fs_path):
+        """
+        Scenario on remote filesystem that is used to check cp/get/put files order
+        when source and destination are lists.
+
+        Cleans up at the end of each test in which it is used.
+        """
+        source = self._10_files_with_hashed_names(fs, fs_join, fs_path)
+        yield source
+        fs.rm(source, recursive=True)
+
+    @pytest.fixture
+    def fs_target(self, fs, fs_join, fs_path):
+        """
+        Return name of remote directory that does not yet exist to copy into.
+
+        Cleans up at the end of each test in which it is used.
+        """
+        target = fs_join(fs_path, "target")
+        yield target
+        if fs.exists(target):
+            fs.rm(target, recursive=True)
+
+    @pytest.fixture
+    def local_bulk_operations_scenario_0(self, local_fs, local_join, local_path):
+        """
+        Scenario on local filesystem that is used for many cp/get/put tests.
+
+        Cleans up at the end of each test in which it is used.
+ """ + source = self._bulk_operations_scenario_0(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_glob_edge_cases_files(self, local_fs, local_join, local_path): + """ + Scenario on local filesystem that is used for glob edge cases cp/get/put tests. + + Cleans up at the end of each test it which it is used. + """ + source = self._glob_edge_cases_files(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_dir_and_file_with_same_name_prefix( + self, local_fs, local_join, local_path + ): + """ + Scenario on local filesystem that is used to check cp/get/put on directory + and file with the same name prefixes. + + Cleans up at the end of each test it which it is used. + """ + source = self._dir_and_file_with_same_name_prefix( + local_fs, local_join, local_path + ) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_10_files_with_hashed_names(self, local_fs, local_join, local_path): + """ + Scenario on local filesystem that is used to check cp/get/put files order + when source and destination are lists. + + Cleans up at the end of each test it which it is used. + """ + source = self._10_files_with_hashed_names(local_fs, local_join, local_path) + yield source + local_fs.rm(source, recursive=True) + + @pytest.fixture + def local_target(self, local_fs, local_join, local_path): + """ + Return name of local directory that does not yet exist to copy into. + + Cleans up at the end of each test it which it is used. + """ + target = local_join(local_path, "target") + yield target + if local_fs.exists(target): + local_fs.rm(target, recursive=True) + + def _glob_edge_cases_files(self, some_fs, some_join, some_path): + """ + Scenario that is used for glob edge cases cp/get/put tests. + Creates the following directory and file structure: + + 📁 source + ├── 📄 file1 + ├── 📄 file2 + ├── 📁 subdir0 + │ ├── 📄 subfile1 + │ ├── 📄 subfile2 + │ └── 📁 nesteddir + │ └── 📄 nestedfile + └── 📁 subdir1 + ├── 📄 subfile1 + ├── 📄 subfile2 + └── 📁 nesteddir + └── 📄 nestedfile + """ + source = some_join(some_path, "source") + some_fs.touch(some_join(source, "file1")) + some_fs.touch(some_join(source, "file2")) + + for subdir_idx in range(2): + subdir = some_join(source, f"subdir{subdir_idx}") + nesteddir = some_join(subdir, "nesteddir") + some_fs.makedirs(nesteddir) + some_fs.touch(some_join(subdir, "subfile1")) + some_fs.touch(some_join(subdir, "subfile2")) + some_fs.touch(some_join(nesteddir, "nestedfile")) + + return source + + def _bulk_operations_scenario_0(self, some_fs, some_join, some_path): + """ + Scenario that is used for many cp/get/put tests. Creates the following + directory and file structure: + + 📁 source + ├── 📄 file1 + ├── 📄 file2 + └── 📁 subdir + ├── 📄 subfile1 + ├── 📄 subfile2 + └── 📁 nesteddir + └── 📄 nestedfile + """ + source = some_join(some_path, "source") + subdir = some_join(source, "subdir") + nesteddir = some_join(subdir, "nesteddir") + some_fs.makedirs(nesteddir) + some_fs.touch(some_join(source, "file1")) + some_fs.touch(some_join(source, "file2")) + some_fs.touch(some_join(subdir, "subfile1")) + some_fs.touch(some_join(subdir, "subfile2")) + some_fs.touch(some_join(nesteddir, "nestedfile")) + return source + + def _dir_and_file_with_same_name_prefix(self, some_fs, some_join, some_path): + """ + Scenario that is used to check cp/get/put on directory and file with + the same name prefixes. 
Creates the following directory and file structure: + + 📁 source + ├── 📄 subdir.txt + └── 📁 subdir + └── 📄 subfile.txt + """ + source = some_join(some_path, "source") + subdir = some_join(source, "subdir") + file = some_join(source, "subdir.txt") + subfile = some_join(subdir, "subfile.txt") + some_fs.makedirs(subdir) + some_fs.touch(file) + some_fs.touch(subfile) + return source + + def _10_files_with_hashed_names(self, some_fs, some_join, some_path): + """ + Scenario that is used to check cp/get/put files order when source and + destination are lists. Creates the following directory and file structure: + + 📁 source + └── 📄 {hashed([0-9])}.txt + """ + source = some_join(some_path, "source") + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + path = some_join(source, f"{hashed_i}.txt") + some_fs.pipe(path=path, value=f"{i}".encode()) + return source + + +class AbstractFixtures(BaseAbstractFixtures): + """ + Abstract base class containing fixtures that may be overridden in derived + filesystem-specific classes to run the abstract tests on such filesystems. + + For any particular filesystem some of these fixtures must be overridden, + such as ``fs`` and ``fs_path``, and others may be overridden if the + default functions here are not appropriate, such as ``fs_join``. + """ + + @pytest.fixture + def fs(self): + raise NotImplementedError("This function must be overridden in derived classes") + + @pytest.fixture + def fs_join(self): + """ + Return a function that joins its arguments together into a path. + + Most fsspec implementations join paths in a platform-dependent way, + but some will override this to always use a forward slash. + """ + return os.path.join + + @pytest.fixture + def fs_path(self): + raise NotImplementedError("This function must be overridden in derived classes") + + @pytest.fixture(scope="class") + def local_fs(self): + # Maybe need an option for auto_mkdir=False? This is only relevant + # for certain implementations. + return LocalFileSystem(auto_mkdir=True) + + @pytest.fixture + def local_join(self): + """ + Return a function that joins its arguments together into a path, on + the local filesystem. + """ + return os.path.join + + @pytest.fixture + def local_path(self, tmpdir): + return tmpdir + + @pytest.fixture + def supports_empty_directories(self): + """ + Return whether this implementation supports empty directories. 
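+
+        Implementations may override this, for example to return False for
+        stores that cannot represent a directory without contents.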
+ """ + return True + + @pytest.fixture + def fs_sanitize_path(self): + return lambda x: x diff --git a/lib/python3.10/site-packages/fsspec/tests/abstract/common.py b/lib/python3.10/site-packages/fsspec/tests/abstract/common.py new file mode 100644 index 0000000000000000000000000000000000000000..22e7c4140404ab2a8928689721419cf05c2760b9 --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/tests/abstract/common.py @@ -0,0 +1,175 @@ +GLOB_EDGE_CASES_TESTS = { + "argnames": ("path", "recursive", "maxdepth", "expected"), + "argvalues": [ + ("fil?1", False, None, ["file1"]), + ("fil?1", True, None, ["file1"]), + ("file[1-2]", False, None, ["file1", "file2"]), + ("file[1-2]", True, None, ["file1", "file2"]), + ("*", False, None, ["file1", "file2"]), + ( + "*", + True, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("*", True, 1, ["file1", "file2"]), + ( + "*", + True, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ("*1", False, None, ["file1"]), + ( + "*1", + True, + None, + [ + "file1", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("*1", True, 2, ["file1", "subdir1/subfile1", "subdir1/subfile2"]), + ( + "**", + False, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "**", + True, + None, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("**", True, 1, ["file1", "file2"]), + ( + "**", + True, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "**", + False, + 2, + [ + "file1", + "file2", + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ("**/*1", False, None, ["file1", "subdir0/subfile1", "subdir1/subfile1"]), + ( + "**/*1", + True, + None, + [ + "file1", + "subdir0/subfile1", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ("**/*1", True, 1, ["file1"]), + ( + "**/*1", + True, + 2, + ["file1", "subdir0/subfile1", "subdir1/subfile1", "subdir1/subfile2"], + ), + ("**/*1", False, 2, ["file1", "subdir0/subfile1", "subdir1/subfile1"]), + ("**/subdir0", False, None, []), + ("**/subdir0", True, None, ["subfile1", "subfile2", "nesteddir/nestedfile"]), + ("**/subdir0/nested*", False, 2, []), + ("**/subdir0/nested*", True, 2, ["nestedfile"]), + ("subdir[1-2]", False, None, []), + ("subdir[1-2]", True, None, ["subfile1", "subfile2", "nesteddir/nestedfile"]), + ("subdir[1-2]", True, 2, ["subfile1", "subfile2"]), + ("subdir[0-1]", False, None, []), + ( + "subdir[0-1]", + True, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + "subdir0/nesteddir/nestedfile", + "subdir1/subfile1", + "subdir1/subfile2", + "subdir1/nesteddir/nestedfile", + ], + ), + ( + "subdir[0-1]/*fil[e]*", + False, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + "subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ( + "subdir[0-1]/*fil[e]*", + True, + None, + [ + "subdir0/subfile1", + "subdir0/subfile2", + 
"subdir1/subfile1", + "subdir1/subfile2", + ], + ), + ], +} diff --git a/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py b/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py new file mode 100644 index 0000000000000000000000000000000000000000..e39e57e5f7d52bfda8ab5e2398b04cc2303630a0 --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/tests/abstract/copy.py @@ -0,0 +1,557 @@ +from hashlib import md5 +from itertools import product + +import pytest + +from fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractCopyTests: + def test_copy_file_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1a + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + target_file2 = fs_join(target, "file2") + target_subfile1 = fs_join(target, "subfile1") + + # Copy from source directory + fs.cp(fs_join(source, "file2"), target) + assert fs.isfile(target_file2) + + # Copy from sub directory + fs.cp(fs_join(source, "subdir", "subfile1"), target) + assert fs.isfile(target_subfile1) + + # Remove copied files + fs.rm([target_file2, target_subfile1]) + assert not fs.exists(target_file2) + assert not fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.cp(fs_join(source, "file2"), target + "/") + assert fs.isdir(target) + assert fs.isfile(target_file2) + + fs.cp(fs_join(source, "subdir", "subfile1"), target + "/") + assert fs.isfile(target_subfile1) + + def test_copy_file_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 1b + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.cp( + fs_join(source, "subdir", "subfile1"), fs_join(target, "newdir/") + ) # Note trailing slash + assert fs.isdir(target) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_copy_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1c + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + fs.cp(fs_join(source, "subdir", "subfile1"), fs_join(target, "newfile")) + assert fs.isfile(fs_join(target, "newfile")) + + def test_copy_file_to_file_in_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 1d + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.cp( + fs_join(source, "subdir", "subfile1"), fs_join(target, "newdir", "newfile") + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "newfile")) + + def test_copy_directory_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1e + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for 
source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.cp(s, t) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + fs.cp(s, t, recursive=True) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert fs.isdir(fs_join(target, "subdir", "nesteddir")) + assert fs.isfile(fs_join(target, "subdir", "nesteddir", "nestedfile")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.cp(s, t, recursive=True, maxdepth=1) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert not fs.exists(fs_join(target, "subdir", "nesteddir")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_directory_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1f + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.cp(s, t) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + with pytest.raises(FileNotFoundError): + fs.ls(target) + + # With recursive + fs.cp(s, t, recursive=True) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.cp(s, t, recursive=True, maxdepth=1) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not 
fs.exists(fs_join(target, "newdir")) + + def test_copy_glob_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 1g + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.cp(fs_join(source, "subdir", "*"), t) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.isdir(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.cp(fs_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.cp( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_glob_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 1h + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive + fs.cp(fs_join(source, "subdir", "*"), t) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.cp(fs_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", 
"nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.cp( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + @pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def test_copy_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_glob_edge_cases_files, + fs_target, + fs_sanitize_path, + ): + # Copy scenario 1g + source = fs_glob_edge_cases_files + + target = fs_target + + for new_dir, target_slash in product([True, False], [True, False]): + fs.mkdir(target) + + t = fs_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.copy(fs_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = fs.find(target) + if new_dir: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_copy_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + fs_target, + supports_empty_directories, + ): + # Copy scenario 2a + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + fs.cp(source_files, t) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, "file2")) + assert fs.isfile(fs_join(target, "subfile1")) + + fs.rm( + [ + fs_join(target, "file1"), + fs_join(target, "file2"), + fs_join(target, "subfile1"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_copy_list_of_files_to_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # Copy scenario 2b + source = fs_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + fs.cp(source_files, fs_join(target, "newdir") + "/") # Note trailing slash + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "file1")) + assert fs.isfile(fs_join(target, "newdir", "file2")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_copy_two_files_new_directory( + self, fs, fs_join, fs_bulk_operations_scenario_0, fs_target + ): + # This 
is a duplicate of test_copy_list_of_files_to_new_directory and + # can eventually be removed. + source = fs_bulk_operations_scenario_0 + + target = fs_target + assert not fs.exists(target) + fs.cp([fs_join(source, "file1"), fs_join(source, "file2")], target) + + assert fs.isdir(target) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, "file2")) + + def test_copy_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + fs_target, + fs_dir_and_file_with_same_name_prefix, + supports_empty_directories, + ): + # Create the test dirs + source = fs_dir_and_file_with_same_name_prefix + target = fs_target + + # Test without glob + fs.cp(fs_join(source, "subdir"), target, recursive=True) + + assert fs.isfile(fs_join(target, "subfile.txt")) + assert not fs.isfile(fs_join(target, "subdir.txt")) + + fs.rm([fs_join(target, "subfile.txt")]) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + assert not fs.exists(target) + + # Test with glob + fs.cp(fs_join(source, "subdir*"), target, recursive=True) + + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile.txt")) + assert fs.isfile(fs_join(target, "subdir.txt")) + + def test_copy_with_source_and_destination_as_list( + self, fs, fs_target, fs_join, fs_10_files_with_hashed_names + ): + # Create the test dir + source = fs_10_files_with_hashed_names + target = fs_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(fs_join(source, f"{hashed_i}.txt")) + destination_files.append(fs_join(target, f"{hashed_i}.txt")) + + # Copy and assert order was kept + fs.copy(path1=source_files, path2=destination_files) + + for i in range(10): + file_content = fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/lib/python3.10/site-packages/fsspec/tests/abstract/get.py b/lib/python3.10/site-packages/fsspec/tests/abstract/get.py new file mode 100644 index 0000000000000000000000000000000000000000..851ab81ee581e74cac41c64c83ef0af75826d6b0 --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/tests/abstract/get.py @@ -0,0 +1,587 @@ +from hashlib import md5 +from itertools import product + +import pytest + +from fsspec.implementations.local import make_path_posix +from fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractGetTests: + def test_get_file_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1a + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + assert local_fs.isdir(target) + + target_file2 = local_join(target, "file2") + target_subfile1 = local_join(target, "subfile1") + + # Copy from source directory + fs.get(fs_join(source, "file2"), target) + assert local_fs.isfile(target_file2) + + # Copy from sub directory + fs.get(fs_join(source, "subdir", "subfile1"), target) + assert local_fs.isfile(target_subfile1) + + # Remove copied files + local_fs.rm([target_file2, target_subfile1]) + assert not local_fs.exists(target_file2) + assert not local_fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.get(fs_join(source, "file2"), target + "/") + assert local_fs.isdir(target) + assert local_fs.isfile(target_file2) + + fs.get(fs_join(source, "subdir", "subfile1"), target + "/") + assert 
local_fs.isfile(target_subfile1) + + def test_get_file_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1b + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get( + fs_join(source, "subdir", "subfile1"), local_join(target, "newdir/") + ) # Note trailing slash + + assert local_fs.isdir(target) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + + def test_get_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1c + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get(fs_join(source, "subdir", "subfile1"), local_join(target, "newfile")) + assert local_fs.isfile(local_join(target, "newfile")) + + def test_get_file_to_file_in_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1d + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + fs.get( + fs_join(source, "subdir", "subfile1"), + local_join(target, "newdir", "newfile"), + ) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "newfile")) + + def test_get_directory_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1e + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + assert local_fs.isdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.get(s, t) + assert local_fs.ls(target) == [] + + # With recursive + fs.get(s, t, recursive=True) + if source_slash: + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert local_fs.isdir(local_join(target, "nesteddir")) + assert local_fs.isfile(local_join(target, "nesteddir", "nestedfile")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + local_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", "subfile1")) + assert local_fs.isfile(local_join(target, "subdir", "subfile2")) + assert local_fs.isdir(local_join(target, "subdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "subdir", "nesteddir", "nestedfile") + ) + + local_fs.rm(local_join(target, "subdir"), recursive=True) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get(s, t, recursive=True, maxdepth=1) + if source_slash: + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.exists(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", 
"subfile1")) + assert local_fs.isfile(local_join(target, "subdir", "subfile2")) + assert not local_fs.exists(local_join(target, "subdir", "nesteddir")) + + local_fs.rm(local_join(target, "subdir"), recursive=True) + assert local_fs.ls(target) == [] + + def test_get_directory_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1f + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = local_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.get(s, t) + assert local_fs.ls(target) == [] + + # With recursive + fs.get(s, t, recursive=True) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert local_fs.isdir(local_join(target, "newdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get(s, t, recursive=True, maxdepth=1) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + def test_get_glob_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1g + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.get(fs_join(source, "subdir", "*"), t) + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.isdir(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "nesteddir", "nestedfile")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.get(fs_join(source, "subdir", glob), t, recursive=recursive) + assert local_fs.isfile(local_join(target, "subfile1")) + assert local_fs.isfile(local_join(target, "subfile2")) + assert local_fs.isdir(local_join(target, "nesteddir")) + assert local_fs.isfile(local_join(target, "nesteddir", "nestedfile")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + local_join(target, "nesteddir"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + # Limit recursive by maxdepth + fs.get( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert local_fs.isfile(local_join(target, "subfile1")) + 
assert local_fs.isfile(local_join(target, "subfile2")) + assert not local_fs.exists(local_join(target, "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + + local_fs.rm( + [ + local_join(target, "subfile1"), + local_join(target, "subfile2"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + def test_get_glob_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1h + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive + fs.get(fs_join(source, "subdir", "*"), t) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert local_fs.ls(target) == [] + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.get(fs_join(source, "subdir", glob), t, recursive=recursive) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert local_fs.isdir(local_join(target, "newdir", "nesteddir")) + assert local_fs.isfile( + local_join(target, "newdir", "nesteddir", "nestedfile") + ) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_join(target, "newdir"), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.get( + fs_join(source, "subdir", glob), t, recursive=recursive, maxdepth=1 + ) + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + assert local_fs.isfile(local_join(target, "newdir", "subfile2")) + assert not local_fs.exists(local_join(target, "newdir", "nesteddir")) + assert not local_fs.exists(local_join(target, "subdir")) + assert not local_fs.exists(local_join(target, "newdir", "subdir")) + + local_fs.rm(local_fs.ls(target, detail=False), recursive=True) + assert not local_fs.exists(local_join(target, "newdir")) + + @pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def test_get_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_glob_edge_cases_files, + local_fs, + local_join, + local_target, + ): + # Copy scenario 1g + source = fs_glob_edge_cases_files + + target = local_target + + for new_dir, target_slash in product([True, False], [True, False]): + local_fs.mkdir(target) + + t = local_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.get(fs_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = local_fs.find(target) + if new_dir: + prefixed_expected = [ + make_path_posix(local_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + make_path_posix(local_join(target, 
p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + local_fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_get_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 2a + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + fs.get(source_files, t) + assert local_fs.isfile(local_join(target, "file1")) + assert local_fs.isfile(local_join(target, "file2")) + assert local_fs.isfile(local_join(target, "subfile1")) + + local_fs.rm( + [ + local_join(target, "file1"), + local_join(target, "file2"), + local_join(target, "subfile1"), + ], + recursive=True, + ) + assert local_fs.ls(target) == [] + + def test_get_list_of_files_to_new_directory( + self, + fs, + fs_join, + fs_bulk_operations_scenario_0, + local_fs, + local_join, + local_target, + ): + # Copy scenario 2b + source = fs_bulk_operations_scenario_0 + + target = local_target + local_fs.mkdir(target) + + source_files = [ + fs_join(source, "file1"), + fs_join(source, "file2"), + fs_join(source, "subdir", "subfile1"), + ] + + fs.get(source_files, local_join(target, "newdir") + "/") # Note trailing slash + assert local_fs.isdir(local_join(target, "newdir")) + assert local_fs.isfile(local_join(target, "newdir", "file1")) + assert local_fs.isfile(local_join(target, "newdir", "file2")) + assert local_fs.isfile(local_join(target, "newdir", "subfile1")) + + def test_get_directory_recursive( + self, fs, fs_join, fs_path, local_fs, local_join, local_target + ): + # https://github.com/fsspec/filesystem_spec/issues/1062 + # Recursive cp/get/put of source directory into non-existent target directory. 
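+ # Expected semantics, mirrored by the asserts below: the first get creates + # the missing target and copies the *contents* of "src" into it; the second + # get finds the target already present, so it copies the "src" directory + # itself inside it. A trailing slash on "src" always copies contents only.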
+ src = fs_join(fs_path, "src") + src_file = fs_join(src, "file") + fs.mkdir(src) + fs.touch(src_file) + + target = local_target + + # get without slash + assert not local_fs.exists(target) + for loop in range(2): + fs.get(src, target, recursive=True) + assert local_fs.isdir(target) + + if loop == 0: + assert local_fs.isfile(local_join(target, "file")) + assert not local_fs.exists(local_join(target, "src")) + else: + assert local_fs.isfile(local_join(target, "file")) + assert local_fs.isdir(local_join(target, "src")) + assert local_fs.isfile(local_join(target, "src", "file")) + + local_fs.rm(target, recursive=True) + + # get with slash + assert not local_fs.exists(target) + for loop in range(2): + fs.get(src + "/", target, recursive=True) + assert local_fs.isdir(target) + assert local_fs.isfile(local_join(target, "file")) + assert not local_fs.exists(local_join(target, "src")) + + def test_get_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + local_fs, + local_join, + local_target, + fs_dir_and_file_with_same_name_prefix, + ): + # Create the test dirs + source = fs_dir_and_file_with_same_name_prefix + target = local_target + + # Test without glob + fs.get(fs_join(source, "subdir"), target, recursive=True) + + assert local_fs.isfile(local_join(target, "subfile.txt")) + assert not local_fs.isfile(local_join(target, "subdir.txt")) + + local_fs.rm([local_join(target, "subfile.txt")]) + assert local_fs.ls(target) == [] + + # Test with glob + fs.get(fs_join(source, "subdir*"), target, recursive=True) + + assert local_fs.isdir(local_join(target, "subdir")) + assert local_fs.isfile(local_join(target, "subdir", "subfile.txt")) + assert local_fs.isfile(local_join(target, "subdir.txt")) + + def test_get_with_source_and_destination_as_list( + self, + fs, + fs_join, + local_fs, + local_join, + local_target, + fs_10_files_with_hashed_names, + ): + # Create the test dir + source = fs_10_files_with_hashed_names + target = local_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(fs_join(source, f"{hashed_i}.txt")) + destination_files.append( + make_path_posix(local_join(target, f"{hashed_i}.txt")) + ) + + # Copy and assert order was kept + fs.get(rpath=source_files, lpath=destination_files) + + for i in range(10): + file_content = local_fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py b/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py new file mode 100644 index 0000000000000000000000000000000000000000..39f6caa3de815e024fa84de2acecc986c823ed29 --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/tests/abstract/mv.py @@ -0,0 +1,57 @@ +import os + +import pytest + +import fsspec + + +def test_move_raises_error_with_tmpdir(tmpdir): + # Create a file in the temporary directory + source = tmpdir.join("source_file.txt") + source.write("content") + + # Define a destination that simulates a protected or invalid path + destination = tmpdir.join("non_existent_directory/destination_file.txt") + + # Instantiate the filesystem (assuming the local file system interface) + fs = fsspec.filesystem("file") + + # Use the actual file paths as string + with pytest.raises(FileNotFoundError): + fs.mv(str(source), str(destination)) + + +@pytest.mark.parametrize("recursive", (True, False)) +def test_move_raises_error_with_tmpdir_permission(recursive, 
tmpdir): + # Create a file in the temporary directory + source = tmpdir.join("source_file.txt") + source.write("content") + + # Create a protected directory (non-writable) + protected_dir = tmpdir.mkdir("protected_directory") + protected_path = str(protected_dir) + + # Set the directory to read-only + if os.name == "nt": + os.system(f'icacls "{protected_path}" /deny Everyone:(W)') + else: + os.chmod(protected_path, 0o555) # Sets the directory to read-only + + # Define a destination inside the protected directory + destination = protected_dir.join("destination_file.txt") + + # Instantiate the filesystem (assuming the local file system interface) + fs = fsspec.filesystem("file") + + # Try to move the file to the read-only directory, expecting a permission error + with pytest.raises(PermissionError): + fs.mv(str(source), str(destination), recursive=recursive) + + # Assert the file was not created in the destination + assert not os.path.exists(destination) + + # Cleanup: Restore permissions so the directory can be cleaned up + if os.name == "nt": + os.system(f'icacls "{protected_path}" /remove:d Everyone') + else: + os.chmod(protected_path, 0o755) # Restore write permission for cleanup diff --git a/lib/python3.10/site-packages/fsspec/tests/abstract/open.py b/lib/python3.10/site-packages/fsspec/tests/abstract/open.py new file mode 100644 index 0000000000000000000000000000000000000000..bb75ea852276fb8d834345883813b8e27a0ae24c --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/tests/abstract/open.py @@ -0,0 +1,11 @@ +import pytest + + +class AbstractOpenTests: + def test_open_exclusive(self, fs, fs_target): + with fs.open(fs_target, "wb") as f: + f.write(b"data") + with fs.open(fs_target, "rb") as f: + assert f.read() == b"data" + with pytest.raises(FileExistsError): + fs.open(fs_target, "xb") diff --git a/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py b/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py new file mode 100644 index 0000000000000000000000000000000000000000..8ecca96e9d23ff268a253c48269d5cca451ea270 --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/tests/abstract/pipe.py @@ -0,0 +1,11 @@ +import pytest + + +class AbstractPipeTests: + def test_pipe_exclusive(self, fs, fs_target): + fs.pipe_file(fs_target, b"data") + assert fs.cat_file(fs_target) == b"data" + with pytest.raises(FileExistsError): + fs.pipe_file(fs_target, b"data", mode="create") + fs.pipe_file(fs_target, b"new data", mode="overwrite") + assert fs.cat_file(fs_target) == b"new data" diff --git a/lib/python3.10/site-packages/fsspec/tests/abstract/put.py b/lib/python3.10/site-packages/fsspec/tests/abstract/put.py new file mode 100644 index 0000000000000000000000000000000000000000..9fc349977f0384d9fc86126498be5c6ad99a21d3 --- /dev/null +++ b/lib/python3.10/site-packages/fsspec/tests/abstract/put.py @@ -0,0 +1,591 @@ +from hashlib import md5 +from itertools import product + +import pytest + +from fsspec.tests.abstract.common import GLOB_EDGE_CASES_TESTS + + +class AbstractPutTests: + def test_put_file_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1a + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + target_file2 = fs_join(target, "file2") + target_subfile1 = fs_join(target, "subfile1") 
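+ # Putting onto an existing directory should drop each file *inside* the + # directory under its original basename, as asserted below.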
+ + # Copy from source directory + fs.put(local_join(source, "file2"), target) + assert fs.isfile(target_file2) + + # Copy from sub directory + fs.put(local_join(source, "subdir", "subfile1"), target) + assert fs.isfile(target_subfile1) + + # Remove copied files + fs.rm([target_file2, target_subfile1]) + assert not fs.exists(target_file2) + assert not fs.exists(target_subfile1) + + # Repeat with trailing slash on target + fs.put(local_join(source, "file2"), target + "/") + assert fs.isdir(target) + assert fs.isfile(target_file2) + + fs.put(local_join(source, "subdir", "subfile1"), target + "/") + assert fs.isfile(target_subfile1) + + def test_put_file_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1b + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.put( + local_join(source, "subdir", "subfile1"), fs_join(target, "newdir/") + ) # Note trailing slash + assert fs.isdir(target) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_put_file_to_file_in_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + supports_empty_directories, + local_bulk_operations_scenario_0, + ): + # Copy scenario 1c + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + fs.touch(fs_join(target, "dummy")) + assert fs.isdir(target) + + fs.put(local_join(source, "subdir", "subfile1"), fs_join(target, "newfile")) + assert fs.isfile(fs_join(target, "newfile")) + + def test_put_file_to_file_in_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1d + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + fs.put( + local_join(source, "subdir", "subfile1"), + fs_join(target, "newdir", "newfile"), + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "newfile")) + + def test_put_directory_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1e + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = target + "/" if target_slash else target + + # Without recursive does nothing + fs.put(s, t) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + fs.put(s, t, recursive=True) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert fs.isdir(fs_join(target, 
"subdir", "nesteddir")) + assert fs.isfile(fs_join(target, "subdir", "nesteddir", "nestedfile")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.put(s, t, recursive=True, maxdepth=1) + if source_slash: + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + else: + assert fs.isdir(fs_join(target, "subdir")) + assert fs.isfile(fs_join(target, "subdir", "subfile1")) + assert fs.isfile(fs_join(target, "subdir", "subfile2")) + assert not fs.exists(fs_join(target, "subdir", "nesteddir")) + + fs.rm(fs_join(target, "subdir"), recursive=True) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_directory_to_new_directory( + self, + fs, + fs_join, + fs_target, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 1f + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for source_slash, target_slash in zip([False, True], [False, True]): + s = fs_join(source, "subdir") + if source_slash: + s += "/" + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive does nothing + fs.put(s, t) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + with pytest.raises(FileNotFoundError): + fs.ls(target) + + # With recursive + fs.put(s, t, recursive=True) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.put(s, t, recursive=True, maxdepth=1) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + def test_put_glob_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + supports_empty_directories, + local_bulk_operations_scenario_0, + ): + # Copy scenario 1g + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + # Without recursive + fs.put(local_join(source, "subdir", "*"), t) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.isdir(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, 
"subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.put(local_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert fs.isdir(fs_join(target, "nesteddir")) + assert fs.isfile(fs_join(target, "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + fs_join(target, "nesteddir"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + # Limit recursive by maxdepth + fs.put( + local_join(source, "subdir", glob), + t, + recursive=recursive, + maxdepth=1, + ) + assert fs.isfile(fs_join(target, "subfile1")) + assert fs.isfile(fs_join(target, "subfile2")) + assert not fs.exists(fs_join(target, "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + + fs.rm( + [ + fs_join(target, "subfile1"), + fs_join(target, "subfile2"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_glob_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 1h + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + for target_slash in [False, True]: + t = fs_join(target, "newdir") + if target_slash: + t += "/" + + # Without recursive + fs.put(local_join(source, "subdir", "*"), t) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # With recursive + for glob, recursive in zip(["*", "**"], [True, False]): + fs.put(local_join(source, "subdir", glob), t, recursive=recursive) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert fs.isdir(fs_join(target, "newdir", "nesteddir")) + assert fs.isfile(fs_join(target, "newdir", "nesteddir", "nestedfile")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + # Limit recursive by maxdepth + fs.put( + local_join(source, "subdir", glob), + t, + recursive=recursive, + maxdepth=1, + ) + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + assert fs.isfile(fs_join(target, "newdir", "subfile2")) + assert not fs.exists(fs_join(target, "newdir", "nesteddir")) + assert not fs.exists(fs_join(target, "subdir")) + assert not fs.exists(fs_join(target, "newdir", "subdir")) + + fs.rm(fs_join(target, "newdir"), recursive=True) + assert not fs.exists(fs_join(target, "newdir")) + + @pytest.mark.parametrize( + GLOB_EDGE_CASES_TESTS["argnames"], + GLOB_EDGE_CASES_TESTS["argvalues"], + ) + def 
test_put_glob_edge_cases( + self, + path, + recursive, + maxdepth, + expected, + fs, + fs_join, + fs_target, + local_glob_edge_cases_files, + local_join, + fs_sanitize_path, + ): + # Copy scenario 1g + source = local_glob_edge_cases_files + + target = fs_target + + for new_dir, target_slash in product([True, False], [True, False]): + fs.mkdir(target) + + t = fs_join(target, "newdir") if new_dir else target + t = t + "/" if target_slash else t + + fs.put(local_join(source, path), t, recursive=recursive, maxdepth=maxdepth) + + output = fs.find(target) + if new_dir: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, "newdir", p)) for p in expected + ] + else: + prefixed_expected = [ + fs_sanitize_path(fs_join(target, p)) for p in expected + ] + assert sorted(output) == sorted(prefixed_expected) + + try: + fs.rm(target, recursive=True) + except FileNotFoundError: + pass + + def test_put_list_of_files_to_existing_directory( + self, + fs, + fs_join, + fs_target, + local_join, + local_bulk_operations_scenario_0, + supports_empty_directories, + ): + # Copy scenario 2a + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + if not supports_empty_directories: + # Force target directory to exist by adding a dummy file + dummy = fs_join(target, "dummy") + fs.touch(dummy) + assert fs.isdir(target) + + source_files = [ + local_join(source, "file1"), + local_join(source, "file2"), + local_join(source, "subdir", "subfile1"), + ] + + for target_slash in [False, True]: + t = target + "/" if target_slash else target + + fs.put(source_files, t) + assert fs.isfile(fs_join(target, "file1")) + assert fs.isfile(fs_join(target, "file2")) + assert fs.isfile(fs_join(target, "subfile1")) + + fs.rm( + [ + fs_join(target, "file1"), + fs_join(target, "file2"), + fs_join(target, "subfile1"), + ], + recursive=True, + ) + assert fs.ls(target, detail=False) == ( + [] if supports_empty_directories else [dummy] + ) + + def test_put_list_of_files_to_new_directory( + self, fs, fs_join, fs_target, local_join, local_bulk_operations_scenario_0 + ): + # Copy scenario 2b + source = local_bulk_operations_scenario_0 + + target = fs_target + fs.mkdir(target) + + source_files = [ + local_join(source, "file1"), + local_join(source, "file2"), + local_join(source, "subdir", "subfile1"), + ] + + fs.put(source_files, fs_join(target, "newdir") + "/") # Note trailing slash + assert fs.isdir(fs_join(target, "newdir")) + assert fs.isfile(fs_join(target, "newdir", "file1")) + assert fs.isfile(fs_join(target, "newdir", "file2")) + assert fs.isfile(fs_join(target, "newdir", "subfile1")) + + def test_put_directory_recursive( + self, fs, fs_join, fs_target, local_fs, local_join, local_path + ): + # https://github.com/fsspec/filesystem_spec/issues/1062 + # Recursive cp/get/put of source directory into non-existent target directory. 
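+ # Same rsync-like semantics as cp/get, asserted below: the first put copies + # the *contents* of "src" into the newly created target; once the target + # exists, a second put copies the "src" directory itself inside it. A + # trailing slash on "src" always copies contents only.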
+ src = local_join(local_path, "src") + src_file = local_join(src, "file") + local_fs.mkdir(src) + local_fs.touch(src_file) + + target = fs_target + + # put without slash + assert not fs.exists(target) + for loop in range(2): + fs.put(src, target, recursive=True) + assert fs.isdir(target) + + if loop == 0: + assert fs.isfile(fs_join(target, "file")) + assert not fs.exists(fs_join(target, "src")) + else: + assert fs.isfile(fs_join(target, "file")) + assert fs.isdir(fs_join(target, "src")) + assert fs.isfile(fs_join(target, "src", "file")) + + fs.rm(target, recursive=True) + + # put with slash + assert not fs.exists(target) + for loop in range(2): + fs.put(src + "/", target, recursive=True) + assert fs.isdir(target) + assert fs.isfile(fs_join(target, "file")) + assert not fs.exists(fs_join(target, "src")) + + def test_put_directory_without_files_with_same_name_prefix( + self, + fs, + fs_join, + fs_target, + local_join, + local_dir_and_file_with_same_name_prefix, + supports_empty_directories, + ): + # Create the test dirs + source = local_dir_and_file_with_same_name_prefix + target = fs_target + + # Test without glob + fs.put(local_join(source, "subdir"), fs_target, recursive=True) + + assert fs.isfile(fs_join(fs_target, "subfile.txt")) + assert not fs.isfile(fs_join(fs_target, "subdir.txt")) + + fs.rm([fs_join(target, "subfile.txt")]) + if supports_empty_directories: + assert fs.ls(target) == [] + else: + assert not fs.exists(target) + + # Test with glob + fs.put(local_join(source, "subdir*"), fs_target, recursive=True) + + assert fs.isdir(fs_join(fs_target, "subdir")) + assert fs.isfile(fs_join(fs_target, "subdir", "subfile.txt")) + assert fs.isfile(fs_join(fs_target, "subdir.txt")) + + def test_copy_with_source_and_destination_as_list( + self, fs, fs_target, fs_join, local_join, local_10_files_with_hashed_names + ): + # Create the test dir + source = local_10_files_with_hashed_names + target = fs_target + + # Create list of files for source and destination + source_files = [] + destination_files = [] + for i in range(10): + hashed_i = md5(str(i).encode("utf-8")).hexdigest() + source_files.append(local_join(source, f"{hashed_i}.txt")) + destination_files.append(fs_join(target, f"{hashed_i}.txt")) + + # Copy and assert order was kept + fs.put(lpath=source_files, rpath=destination_files) + + for i in range(10): + file_content = fs.cat(destination_files[i]).decode("utf-8") + assert file_content == str(i) diff --git a/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/INSTALLER b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/METADATA b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..3a8ef46a3b22574c64fbed725c17c3bf2bf78c4b --- /dev/null +++ b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/METADATA @@ -0,0 +1,81 @@ +Metadata-Version: 2.4 +Name: iniconfig +Version: 2.1.0 +Summary: brain-dead simple config-ini parsing +Project-URL: Homepage, https://github.com/pytest-dev/iniconfig +Author-email: Ronny Pfannschmidt , Holger Krekel +License-Expression: MIT +License-File: LICENSE +Classifier: Development Status :: 4 - Beta +Classifier: Intended Audience :: Developers +Classifier: 
License :: OSI Approved :: MIT License +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: POSIX +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst + +iniconfig: brain-dead simple parsing of ini files +======================================================= + +iniconfig is a small and simple INI-file parser module +having a unique set of features: + +* maintains order of sections and entries +* supports multi-line values with or without line-continuations +* supports "#" comments everywhere +* raises errors with proper line-numbers +* no bells and whistles like automatic substitutions +* iniconfig raises an Error if two sections have the same name. + +If you encounter issues or have feature wishes please report them to: + + https://github.com/RonnyPfannschmidt/iniconfig/issues + +Basic Example +=================================== + +If you have an ini file like this: + +.. code-block:: ini + + # content of example.ini + [section1] # comment + name1=value1 # comment + name1b=value1,value2 # comment + + [section2] + name2= + line1 + line2 + +then you can do: + +.. code-block:: pycon + + >>> import iniconfig + >>> ini = iniconfig.IniConfig("example.ini") + >>> ini['section1']['name1'] # raises KeyError if not exists + 'value1' + >>> ini.get('section1', 'name1b', [], lambda x: x.split(",")) + ['value1', 'value2'] + >>> ini.get('section1', 'notexist', [], lambda x: x.split(",")) + [] + >>> [x.name for x in list(ini)] + ['section1', 'section2'] + >>> list(list(ini)[0].items()) + [('name1', 'value1'), ('name1b', 'value1,value2')] + >>> 'section1' in ini + True + >>> 'inexistendsection' in ini + False diff --git a/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/RECORD b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..68aebaa6569abb9b02318b03ec1be74ad7d54fb6 --- /dev/null +++ b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/RECORD @@ -0,0 +1,11 @@ +iniconfig-2.1.0.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +iniconfig-2.1.0.dist-info/METADATA,sha256=uS-Ec4h2hMZZFTrbd_4EGKcxBQHnQ3CfwSYjzQPn5cs,2651 +iniconfig-2.1.0.dist-info/RECORD,, +iniconfig-2.1.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +iniconfig-2.1.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87 +iniconfig-2.1.0.dist-info/licenses/LICENSE,sha256=NAn6kfes5VeJRjJnZlbjImT-XvdYFTVyXcmiN3RVG9Q,1098 +iniconfig/__init__.py,sha256=H1UqjEmX-GytGCsqCafTLG-q1CPc_okvCKGairRFMq0,5462 +iniconfig/_parse.py,sha256=OWGLbmE8GjxcoMWTvnGbck1RoNsTm5bt5ficIRZqWJ8,2436 +iniconfig/_version.py,sha256=dseuoOPG9WZ1Ezr1SC3wS9_hczkX-b1NdE4TQPHFJso,511 +iniconfig/exceptions.py,sha256=BJguifCkPayz-n0hI2D5ym1USoAWYNIdi05Jc4r2r4o,490 +iniconfig/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/REQUESTED 
b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/WHEEL b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..12228d414b6cfed7c39d3781c85c63256a1d7fb5 --- /dev/null +++ b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: hatchling 1.27.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/licenses/LICENSE b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..46f4b2846fd708ecb81b2d665434ce6379aa1101 --- /dev/null +++ b/lib/python3.10/site-packages/iniconfig-2.1.0.dist-info/licenses/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2010 - 2023 Holger Krekel and others + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/COPYING.txt b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/COPYING.txt new file mode 100644 index 0000000000000000000000000000000000000000..17f34bc3d8ae0889ae327ae0c16bf78870c41527 --- /dev/null +++ b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/COPYING.txt @@ -0,0 +1,28 @@ +Copyright (c) 2006-2008, R Oudkerk + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions +are met: + +1. Redistributions of source code must retain the above copyright + notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. +3. Neither the name of author nor the names of any contributors may be + used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE +ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS +OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) +HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY +OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF +SUCH DAMAGE. diff --git a/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/INSTALLER b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/METADATA b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..cc659acaab69844b5e4adc6330de6ed65c6d58bc --- /dev/null +++ b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/METADATA @@ -0,0 +1,211 @@ +Metadata-Version: 2.1 +Name: multiprocess +Version: 0.70.12.2 +Summary: better multiprocessing and multithreading in python +Home-page: https://github.com/uqfoundation/multiprocess +Author: Mike McKerns +Maintainer: Mike McKerns +License: BSD +Download-URL: https://github.com/uqfoundation/multiprocess/releases/download/multiprocess-0.70.12.2/multiprocess-0.70.12.2.tar.gz +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Software Development +Requires-Dist: dill (>=0.3.4) + +----------------------------------------------------------------- +multiprocess: better multiprocessing and multithreading in python +----------------------------------------------------------------- + +About Multiprocess +==================== + +``multiprocess`` is a fork of ``multiprocessing``, and is developed as part of ``pathos``: +https://github.com/uqfoundation/pathos + +``multiprocessing`` is a package for the Python language which supports the +spawning of processes using the API of the standard library's +``threading`` module. ``multiprocessing`` has been distributed in the standard +library since python 2.6. + +Features: + + - Objects can be transferred between processes using pipes or multi-producer/multi-consumer queues. + - Objects can be shared between processes using a server process or (for simple data) shared memory. + - Equivalents of all the synchronization primitives in ``threading`` are available. + - A ``Pool`` class makes it easy to submit tasks to a pool of worker processes. + + +``multiprocess`` is part of ``pathos``, a python framework for heterogeneous computing. 
+``multiprocess`` is in active development, so any user feedback, bug reports, comments, +or suggestions are highly appreciated. A list of issues is located at https://github.com/uqfoundation/multiprocess/issues, with a legacy list maintained at https://uqfoundation.github.io/project/pathos/query. + +NOTE: A C compiler is required to build the included extension module. For python 3.3 and above, a C compiler is suggested, but not required. + + +Major Changes +============== + + - enhanced serialization, using ``dill`` + + +Current Release +=============== + +This documentation is for version ``multiprocess-0.70.12.2`` (a fork of ``multiprocessing-0.70a1``). + +The latest released version of ``multiprocess`` is available from:: + + https://pypi.org/project/multiprocess + +``Multiprocessing`` is distributed under a BSD license. + + +Development Version +=================== + +You can get the latest development version with all the shiny new features at:: + + https://github.com/uqfoundation + +If you have a new contribution, please submit a pull request. + + +Installation +============ + +``multiprocess`` is packaged to install from source, so you must +download the tarball, unzip, and run the installer:: + + [download] + $ tar -xvzf multiprocess-0.70.12.2.tgz + $ cd multiprocess-0.70.12.2 + $ python setup.py build + $ python setup.py install + +You will be warned of any missing dependencies and/or settings +after you run the "build" step above. + +Alternately, ``multiprocess`` can be installed with ``pip`` or ``easy_install``:: + + $ pip install multiprocess + +NOTE: A C compiler is required to build the included extension module from source. For python 3.3 and above, a C compiler is suggested, but not required. Binary installs do not require a C compiler. + + +Requirements +============ + +``multiprocess`` requires:: + + - ``python``, **version == 2.7** or **version >= 3.6**, or ``pypy`` + - ``dill``, **version >= 0.3.4** + +Optional requirements:: + + - ``setuptools``, **version >= 0.6** + + +Basic Usage +=========== + +The ``multiprocess.Process`` class follows the API of ``threading.Thread``. +For example :: + + from multiprocess import Process, Queue + + def f(q): + q.put('hello world') + + if __name__ == '__main__': + q = Queue() + p = Process(target=f, args=[q]) + p.start() + print (q.get()) + p.join() + +Synchronization primitives like locks, semaphores and conditions are +available, for example :: + + >>> from multiprocess import Condition + >>> c = Condition() + >>> print (c) + <Condition(<RLock(None, 0)>), 0> + >>> c.acquire() + True + >>> print (c) + <Condition(<RLock(MainProcess, 1)>), 0> + +One can also use a manager to create shared objects either in shared +memory or in a server process, for example :: + + >>> from multiprocess import Manager + >>> manager = Manager() + >>> l = manager.list(range(10)) + >>> l.reverse() + >>> print (l) + [9, 8, 7, 6, 5, 4, 3, 2, 1, 0] + >>> print (repr(l)) + <ListProxy object, typeid 'list' at 0x...> + +Tasks can be offloaded to a pool of worker processes in various ways, +for example :: + + >>> from multiprocess import Pool + >>> def f(x): return x*x + ... 
+ >>> p = Pool(4) + >>> result = p.map_async(f, range(10)) + >>> print (result.get(timeout=1)) + [0, 1, 4, 9, 16, 25, 36, 49, 64, 81] + +When ``dill`` is installed, serialization is extended to most objects, +for example :: + + >>> from multiprocess import Pool + >>> p = Pool(4) + >>> print (p.map(lambda x: (lambda y:y**2)(x) + x, range(10))) + [0, 2, 6, 12, 20, 30, 42, 56, 72, 90] + + +More Information +================ + +Probably the best way to get started is to look at the documentation at +http://multiprocess.rtfd.io. See ``multiprocess.examples`` for a set of example +scripts. You can also run the test suite with ``python -m multiprocess.tests``. +Please feel free to submit a ticket on github, or ask a question on +stackoverflow (**@Mike McKerns**). If you would like to share how you use +``multiprocess`` in your work, please send an email +(to **mmckerns at uqfoundation dot org**). + + +Citation +======== + +If you use ``multiprocess`` to do research that leads to publication, we ask that you +acknowledge use of ``multiprocess`` by citing the following in your publication:: + + M.M. McKerns, L. Strand, T. Sullivan, A. Fang, M.A.G. Aivazis, + "Building a framework for predictive science", Proceedings of + the 10th Python in Science Conference, 2011; + http://arxiv.org/pdf/1202.1056 + + Michael McKerns and Michael Aivazis, + "pathos: a framework for heterogeneous computing", 2010- ; + https://uqfoundation.github.io/project/pathos + +Please see https://uqfoundation.github.io/project/pathos or +http://arxiv.org/pdf/1202.1056 for further information. + + diff --git a/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/RECORD b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..136f627289f69bf71a40617700ca1ef02327f559 --- /dev/null +++ b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/RECORD @@ -0,0 +1,40 @@ +_multiprocess/__init__.py,sha256=zuJ1_0yr-gCp0oAe5-vLNCp1myHAXuKVc5MRLv9lzLA,31 +multiprocess-0.70.12.2.dist-info/COPYING.txt,sha256=n3_yfLkw0sMgLuB-PS1hRvTeZ20GmjPaMWbJjNuoOpU,1493 +multiprocess-0.70.12.2.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +multiprocess-0.70.12.2.dist-info/LICENSE,sha256=JxI4GBBqj5Kc1mivfwcAYeMERyc1g7s3lEuKRbitHrw,1934 +multiprocess-0.70.12.2.dist-info/METADATA,sha256=_I_CULDdj7EcO-xCtYoXLMbhfAvST1Vtzw9iMJvkJTY,6919 +multiprocess-0.70.12.2.dist-info/RECORD,, +multiprocess-0.70.12.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +multiprocess-0.70.12.2.dist-info/WHEEL,sha256=7QEGVCap_ZxScbUFuCWpZks0RoQUFFqL2hOoEAOaKLE,93 +multiprocess-0.70.12.2.dist-info/top_level.txt,sha256=qtJc8GNdvi6suNpISX0Myln9AXJBYrNuas1MCqRPPqg,27 +multiprocess/__init__.py,sha256=IPMNMbDOPugZSGkgZlVV50OvjG9B4VJDbTRKW_PLVrU,943 +multiprocess/connection.py,sha256=GhvxnMNj7sk8Jb0S90gfTx8SHK9YKbquiNFmTZ6uBA4,32182 +multiprocess/context.py,sha256=zpJw0Rb1QhPO_OUHW7PIoXhWt0yAC2g9P0htI1ExNzY,11270 +multiprocess/dummy/__init__.py,sha256=kSekDqD_NCy0FDg7XnxZSgW-Ldg1_iRr07sNwDajKpA,3061 +multiprocess/dummy/connection.py,sha256=1j3Rl5_enBM-_kMO6HDmum3kPAoFE4Zs485HV5H-V6s,1598 +multiprocess/forkserver.py,sha256=hiltKfLImDYJyAcezNAgMDaQznB2LtYWgwre0QroLRg,12138 +multiprocess/heap.py,sha256=9rt5u5m5rkhJNfDWiCLpYDoWIt0LbElmx52yMqk7phQ,11626 +multiprocess/managers.py,sha256=Jy6sf_lW81InSpV-GrQf8_koAL3zWZX8TTtqG3i8sK8,47311 +multiprocess/pool.py,sha256=s8-RW_sBWjZUonbgOyrWJDEr-yW-n7gPspJTWRceKbg,32555
+multiprocess/popen_fork.py,sha256=Nvq5vVId24UfkOQxXhxZbcXuo8d6YMc409yRXAamTd0,2374 +multiprocess/popen_forkserver.py,sha256=SrEbV8Wv0Uu_UegkaW-cayXRdjTGXr560Yyy90pj-yE,2227 +multiprocess/popen_spawn_posix.py,sha256=l7XSWqR5UWiUSJh35qeSElLuNfUeEYwvH5HzKRnnyqg,2029 +multiprocess/popen_spawn_win32.py,sha256=A9uvlPmhO8JBzNcEU_Gmix2Q_qYJW1NXZgXPwtN5Ao0,4011 +multiprocess/process.py,sha256=3ODgVhevgOl2RXWMv3V_ESX5_CHJMOUkOQftt61-qrE,12000 +multiprocess/queues.py,sha256=XRZqsorfG9TlM8B6tKqwkIb03dnyGQ2a7W7TZkBhLYM,12109 +multiprocess/reduction.py,sha256=NQQ6KbDhmuAyaDeWaIarTZQokGPhcFda1poNnPm5uNc,9637 +multiprocess/resource_sharer.py,sha256=nEApLhMQqd8KunfaNKl3n8vdeiCGPxKrSL1Ja0nNAEk,5132 +multiprocess/resource_tracker.py,sha256=AUypNVano3I0_mEA1GXmdg0Vfy0bsKMUU8mxCwg6uCs,8696 +multiprocess/shared_memory.py,sha256=3c-lnw0tGQaqWlsPGyfpkCHQh_KQvy1JX6WF1IRMzJ0,18521 +multiprocess/sharedctypes.py,sha256=d-9SKRJHRlJJC331IxEoWOUXIeY9zxCbhWejXOmzGw0,6306 +multiprocess/spawn.py,sha256=cgtV66HhV_yIVzvdblc8bVdSpem16Ks0BOFu_bV5PDQ,9293 +multiprocess/synchronize.py,sha256=6q1ijwWyWLWLO8uUtaYT9MKepAYKfdzWPSEZGyJFP4s,11829 +multiprocess/tests/__init__.py,sha256=qVV0YnbbgqccqONtDolmgSdXX3eM9EIjzYFudfBiITQ,190661 +multiprocess/tests/__main__.py,sha256=3mqbVdRWqb9lCQ0Nndn1m2TIvqqk8P_QAGEXNxlcd3I,948 +multiprocess/tests/mp_fork_bomb.py,sha256=6ADOEzh1aXHZ21aOGoBPhKcgB5sj15G9tQVgSc6GrlY,448 +multiprocess/tests/mp_preload.py,sha256=cj2tUiPQQqGhPrXBO9LfaY8l0Dk29UdlHMJdG-7LTpQ,351 +multiprocess/tests/test_multiprocessing_fork.py,sha256=BzF6mmub8lAnOGbJF888YrWjKdzcg5TP-v63pckKGqs,479 +multiprocess/tests/test_multiprocessing_forkserver.py,sha256=aefqw98Z4nriFWxijdQqJ9x1iK3zN1RW51Dd5NO4XUU,394 +multiprocess/tests/test_multiprocessing_main_handling.py,sha256=sdavO-pion69T5Cyc6Cl91hsPoc-V5JMrvFD3fhow6M,11812 +multiprocess/tests/test_multiprocessing_spawn.py,sha256=jbm4_yI_Dxj3CAl83dwqbNBDwhPyKPtPW65p9KlSGWA,279 +multiprocess/util.py,sha256=Et2Rtd_Hc7-wXgtLOqlgHCQrK4wGm3LkmfYWBXOLsxw,13993 diff --git a/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/REQUESTED b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/WHEEL b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..5c92bea317aef808b4fe33564ffbbf2774659b06 --- /dev/null +++ b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.35.1) +Root-Is-Purelib: true +Tag: py39-none-any + diff --git a/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/top_level.txt b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..d547cb06bfcaf32e902f6d3c00ec331470ab2f71 --- /dev/null +++ b/lib/python3.10/site-packages/multiprocess-0.70.12.2.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_multiprocess +multiprocess diff --git a/lib/python3.10/site-packages/onnx-1.17.0.dist-info/INSTALLER b/lib/python3.10/site-packages/onnx-1.17.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/lib/python3.10/site-packages/onnx-1.17.0.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git 
a/lib/python3.10/site-packages/onnx-1.17.0.dist-info/REQUESTED b/lib/python3.10/site-packages/onnx-1.17.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lib/python3.10/site-packages/onnx-1.17.0.dist-info/entry_points.txt b/lib/python3.10/site-packages/onnx-1.17.0.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..e513d7dc56c333d0c1b6df78909609635e32d254 --- /dev/null +++ b/lib/python3.10/site-packages/onnx-1.17.0.dist-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] +backend-test-tools = onnx.backend.test.cmd_tools:main +check-model = onnx.bin.checker:check_model +check-node = onnx.bin.checker:check_node diff --git a/lib/python3.10/site-packages/packaging-24.2.dist-info/INSTALLER b/lib/python3.10/site-packages/packaging-24.2.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/lib/python3.10/site-packages/packaging-24.2.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE b/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..6f62d44e4ef733c0e713afcd2371fed7f2b3de67 --- /dev/null +++ b/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE @@ -0,0 +1,3 @@ +This software is made available under the terms of *either* of the licenses +found in LICENSE.APACHE or LICENSE.BSD. Contributions to this software is made +under the terms of *both* these licenses. diff --git a/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.APACHE b/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.APACHE new file mode 100644 index 0000000000000000000000000000000000000000..f433b1a53f5b830a205fd2df78e2b34974656c7b --- /dev/null +++ b/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.APACHE @@ -0,0 +1,177 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS diff --git a/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.BSD b/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.BSD new file mode 100644 index 0000000000000000000000000000000000000000..42ce7b75c92fb01a3f6ed17eea363f756b7da582 --- /dev/null +++ b/lib/python3.10/site-packages/packaging-24.2.dist-info/LICENSE.BSD @@ -0,0 +1,23 @@ +Copyright (c) Donald Stufft and individual contributors. +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + + 1. Redistributions of source code must retain the above copyright notice, + this list of conditions and the following disclaimer. + + 2. Redistributions in binary form must reproduce the above copyright + notice, this list of conditions and the following disclaimer in the + documentation and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/lib/python3.10/site-packages/packaging-24.2.dist-info/METADATA b/lib/python3.10/site-packages/packaging-24.2.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..1479c8694bfbd583a896dbe9bd33cdb6d7e7371e --- /dev/null +++ b/lib/python3.10/site-packages/packaging-24.2.dist-info/METADATA @@ -0,0 +1,102 @@ +Metadata-Version: 2.3 +Name: packaging +Version: 24.2 +Summary: Core utilities for Python packages +Author-email: Donald Stufft +Requires-Python: >=3.8 +Description-Content-Type: text/x-rst +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: Apache Software License +Classifier: License :: OSI Approved :: BSD License +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3 :: Only +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Programming Language :: Python :: 3.13 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Typing :: Typed +Project-URL: Documentation, https://packaging.pypa.io/ +Project-URL: Source, https://github.com/pypa/packaging + +packaging +========= + +.. start-intro + +Reusable core utilities for various Python Packaging +`interoperability specifications <https://packaging.python.org/specifications/>`_. + +This library provides utilities that implement the interoperability +specifications which have exactly one correct behaviour (eg: :pep:`440`) +or benefit greatly from having a single shared implementation (eg: :pep:`425`). + +.. end-intro + +The ``packaging`` project includes the following: version handling, specifiers, +markers, requirements, tags, utilities. A short usage sketch appears at the end of this description. + +Documentation +------------- + +The `documentation`_ provides information and the API for the following: + +- Version Handling +- Specifiers +- Markers +- Requirements +- Tags +- Utilities + +Installation +------------ + +Use ``pip`` to install these utilities:: + + pip install packaging + +The ``packaging`` library uses calendar-based versioning (``YY.N``). + +Discussion +---------- + +If you run into bugs, you can file them in our `issue tracker`_. + +You can also join ``#pypa`` on Freenode to ask questions or get involved. + + +.. _`documentation`: https://packaging.pypa.io/ +.. _`issue tracker`: https://github.com/pypa/packaging/issues + + +Code of Conduct +--------------- + +Everyone interacting in the packaging project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PSF Code of Conduct`_. + +.. _PSF Code of Conduct: https://github.com/pypa/.github/blob/main/CODE_OF_CONDUCT.md + +Contributing +------------ + +The ``CONTRIBUTING.rst`` file outlines how to contribute to this project as +well as how to report a potential security issue. The documentation for this +project also covers information about `project development`_ and `security`_. + +.. _`project development`: https://packaging.pypa.io/en/latest/development/ +.. _`security`: https://packaging.pypa.io/en/latest/security/ + +Project History +--------------- + +Please review the ``CHANGELOG.rst`` file or the `Changelog documentation`_ for +recent changes and project history. +
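+Usage +----- + +A short usage sketch (illustrative, not part of the upstream description; it exercises only the public version, specifier, and requirement APIs listed above):: + + from packaging.version import Version + from packaging.specifiers import SpecifierSet + from packaging.requirements import Requirement + + # PEP 440 version parsing and ordering + assert Version("1.0.post1") > Version("1.0") + + # PEP 440 version specifiers + spec = SpecifierSet(">=1.0,<2.0") + assert Version("1.5") in spec + + # PEP 508 requirement strings, including environment markers + req = Requirement('packaging>=24.0; python_version >= "3.8"') + print(req.name, req.specifier) # packaging >=24.0 + +..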
_`Changelog documentation`: https://packaging.pypa.io/en/latest/changelog/ + diff --git a/lib/python3.10/site-packages/packaging-24.2.dist-info/RECORD b/lib/python3.10/site-packages/packaging-24.2.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..678aa5a501dfda29cfdd8a585e038754f008d380 --- /dev/null +++ b/lib/python3.10/site-packages/packaging-24.2.dist-info/RECORD @@ -0,0 +1,25 @@ +packaging-24.2.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +packaging-24.2.dist-info/LICENSE,sha256=ytHvW9NA1z4HS6YU0m996spceUDD2MNIUuZcSQlobEg,197 +packaging-24.2.dist-info/LICENSE.APACHE,sha256=DVQuDIgE45qn836wDaWnYhSdxoLXgpRRKH4RuTjpRZQ,10174 +packaging-24.2.dist-info/LICENSE.BSD,sha256=tw5-m3QvHMb5SLNMFqo5_-zpQZY2S8iP8NIYDwAo-sU,1344 +packaging-24.2.dist-info/METADATA,sha256=ohH86s6k5mIfQxY2TS0LcSfADeOFa4BiCC-bxZV-pNs,3204 +packaging-24.2.dist-info/RECORD,, +packaging-24.2.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging-24.2.dist-info/WHEEL,sha256=CpUCUxeHQbRN5UGRQHYRJorO5Af-Qy_fHMctcQ8DSGI,82 +packaging/__init__.py,sha256=dk4Ta_vmdVJxYHDcfyhvQNw8V3PgSBomKNXqg-D2JDY,494 +packaging/_elffile.py,sha256=cflAQAkE25tzhYmq_aCi72QfbT_tn891tPzfpbeHOwE,3306 +packaging/_manylinux.py,sha256=vl5OCoz4kx80H5rwXKeXWjl9WNISGmr4ZgTpTP9lU9c,9612 +packaging/_musllinux.py,sha256=p9ZqNYiOItGee8KcZFeHF_YcdhVwGHdK6r-8lgixvGQ,2694 +packaging/_parser.py,sha256=s_TvTvDNK0NrM2QB3VKThdWFM4Nc0P6JnkObkl3MjpM,10236 +packaging/_structures.py,sha256=q3eVNmbWJGG_S0Dit_S3Ao8qQqz_5PYTXFAKBZe5yr4,1431 +packaging/_tokenizer.py,sha256=J6v5H7Jzvb-g81xp_2QACKwO7LxHQA6ikryMU7zXwN8,5273 +packaging/licenses/__init__.py,sha256=1x5M1nEYjcgwEbLt0dXwz2ukjr18DiCzC0sraQqJ-Ww,5715 +packaging/licenses/_spdx.py,sha256=oAm1ztPFwlsmCKe7lAAsv_OIOfS1cWDu9bNBkeu-2ns,48398 +packaging/markers.py,sha256=c89TNzB7ZdGYhkovm6PYmqGyHxXlYVaLW591PHUNKD8,10561 +packaging/metadata.py,sha256=YJibM7GYe4re8-0a3OlXmGS-XDgTEoO4tlBt2q25Bng,34762 +packaging/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +packaging/requirements.py,sha256=gYyRSAdbrIyKDY66ugIDUQjRMvxkH2ALioTmX3tnL6o,2947 +packaging/specifiers.py,sha256=GG1wPNMcL0fMJO68vF53wKMdwnfehDcaI-r9NpTfilA,40074 +packaging/tags.py,sha256=CFqrJzAzc2XNGexerH__T-Y5Iwq7WbsYXsiLERLWxY0,21014 +packaging/utils.py,sha256=0F3Hh9OFuRgrhTgGZUl5K22Fv1YP2tZl1z_2gO6kJiA,5050 +packaging/version.py,sha256=olfyuk_DPbflNkJ4wBWetXQ17c74x3DB501degUv7DY,16676 diff --git a/lib/python3.10/site-packages/packaging-24.2.dist-info/REQUESTED b/lib/python3.10/site-packages/packaging-24.2.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lib/python3.10/site-packages/packaging-24.2.dist-info/WHEEL b/lib/python3.10/site-packages/packaging-24.2.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..e3c6feefa22927866e3fd5575379ea972b432aaf --- /dev/null +++ b/lib/python3.10/site-packages/packaging-24.2.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: flit 3.10.1 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/INSTALLER b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git 
a/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/LICENSE b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..adb8038169c5adc677ad36f63c3bbe9b91668690 --- /dev/null +++ b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/LICENSE @@ -0,0 +1,48 @@ +PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2 +-------------------------------------------- + +1. This LICENSE AGREEMENT is between the Python Software Foundation +("PSF"), and the Individual or Organization ("Licensee") accessing and +otherwise using this software ("Python") in source or binary form and +its associated documentation. + +2. Subject to the terms and conditions of this License Agreement, PSF hereby +grants Licensee a nonexclusive, royalty-free, world-wide license to reproduce, +analyze, test, perform and/or display publicly, prepare derivative works, +distribute, and otherwise use Python alone or in any derivative version, +provided, however, that PSF's License Agreement and PSF's notice of copyright, +i.e., "Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 +Python Software Foundation; All Rights Reserved" are retained in Python alone or +in any derivative version prepared by Licensee. + +3. In the event Licensee prepares a derivative work that is based on +or incorporates Python or any part thereof, and wants to make +the derivative work available to others as provided herein, then +Licensee hereby agrees to include in any such work a brief summary of +the changes made to Python. + +4. PSF is making Python available to Licensee on an "AS IS" +basis. PSF MAKES NO REPRESENTATIONS OR WARRANTIES, EXPRESS OR +IMPLIED. BY WAY OF EXAMPLE, BUT NOT LIMITATION, PSF MAKES NO AND +DISCLAIMS ANY REPRESENTATION OR WARRANTY OF MERCHANTABILITY OR FITNESS +FOR ANY PARTICULAR PURPOSE OR THAT THE USE OF PYTHON WILL NOT +INFRINGE ANY THIRD PARTY RIGHTS. + +5. PSF SHALL NOT BE LIABLE TO LICENSEE OR ANY OTHER USERS OF PYTHON +FOR ANY INCIDENTAL, SPECIAL, OR CONSEQUENTIAL DAMAGES OR LOSS AS +A RESULT OF MODIFYING, DISTRIBUTING, OR OTHERWISE USING PYTHON, +OR ANY DERIVATIVE THEREOF, EVEN IF ADVISED OF THE POSSIBILITY THEREOF. + +6. This License Agreement will automatically terminate upon a material +breach of its terms and conditions. + +7. Nothing in this License Agreement shall be deemed to create any +relationship of agency, partnership, or joint venture between PSF and +Licensee. This License Agreement does not grant permission to use PSF +trademarks or trade name in a trademark sense to endorse or promote +products or services of Licensee, or any third party. + +8. By copying, installing or otherwise using Python, Licensee +agrees to be bound by the terms and conditions of this License +Agreement. 
+ diff --git a/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/METADATA b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..3e2e1cc963f9ddf41d063ab79bbb19102504f290 --- /dev/null +++ b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/METADATA @@ -0,0 +1,159 @@ +Metadata-Version: 2.1 +Name: portalocker +Version: 2.0.0 +Summary: Wraps the portalocker recipe for easy usage +Home-page: https://github.com/WoLpH/portalocker +Author: Rick van Hattem +Author-email: wolph@wol.ph +License: PSF +Keywords: locking,locks,with statement,windows,linux,unix +Platform: any +Classifier: Intended Audience :: Developers +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Requires-Dist: pywin32 (!=226) ; platform_system == "Windows" +Provides-Extra: docs +Requires-Dist: sphinx (>=1.7.1) ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: pytest (>=4.6.9) ; extra == 'tests' +Requires-Dist: pytest-cov (>=2.8.1) ; extra == 'tests' +Requires-Dist: sphinx (>=1.8.5) ; extra == 'tests' +Requires-Dist: pytest-flake8 (>=1.0.5) ; extra == 'tests' + +############################################ +portalocker - Cross-platform locking library +############################################ + +.. image:: https://travis-ci.org/WoLpH/portalocker.svg?branch=master + :alt: Linux Test Status + :target: https://travis-ci.org/WoLpH/portalocker + +.. image:: https://ci.appveyor.com/api/projects/status/mgqry98hgpy4prhh?svg=true + :alt: Windows Tests Status + :target: https://ci.appveyor.com/project/WoLpH/portalocker + +.. image:: https://coveralls.io/repos/WoLpH/portalocker/badge.svg?branch=master + :alt: Coverage Status + :target: https://coveralls.io/r/WoLpH/portalocker?branch=master + +Overview +-------- + +Portalocker is a library that provides an easy API for file locking. + +An important detail to note is that on Linux and Unix systems the locks are +advisory by default. By specifying the `-o mand` option to the mount command it +is possible to enable mandatory file locking on Linux. This is generally not +recommended, however. For more information about the subject: + + - https://en.wikipedia.org/wiki/File_locking + - http://stackoverflow.com/questions/39292051/portalocker-does-not-seem-to-lock + - https://stackoverflow.com/questions/12062466/mandatory-file-lock-on-linux + +The module is currently maintained by Rick van Hattem <wolph@wol.ph>. +The project resides at https://github.com/WoLpH/portalocker . Bugs and feature +requests can be submitted there. Patches are also very welcome. + +Tips +---- + +On some networked filesystems it might be necessary to force an `os.fsync()` before +closing the file so it's actually written before another client reads the file. +Effectively this comes down to: + +:: + + with portalocker.Lock('some_file', 'rb+', timeout=60) as fh: + # do what you need to do + ...
+ + # flush and sync to filesystem + fh.flush() + os.fsync(fh.fileno()) + +Links +----- + +* Documentation + - http://portalocker.readthedocs.org/en/latest/ +* Source + - https://github.com/WoLpH/portalocker +* Bug reports + - https://github.com/WoLpH/portalocker/issues +* Package homepage + - https://pypi.python.org/pypi/portalocker +* My blog + - http://w.wol.ph/ + +Examples +-------- + +To make sure your cache generation scripts don't race, use the `Lock` class: + +>>> import portalocker +>>> with portalocker.Lock('somefile', timeout=1) as fh: +...     fh.write('writing some stuff to my cache...') + +To customize the opening and locking, a manual approach is also possible: + +>>> import portalocker +>>> file = open('somefile', 'r+') +>>> portalocker.lock(file, portalocker.LOCK_EX) +>>> file.seek(12) +>>> file.write('foo') +>>> file.close() + +Explicitly unlocking is not needed in most cases but omitting it has been known +to cause issues: +https://github.com/AzureAD/microsoft-authentication-extensions-for-python/issues/42#issuecomment-601108266 + +If needed, it can be done through: + +>>> portalocker.unlock(file) + +Do note that your data might still be in a buffer, so it might not be +available until you `flush()` or `close()` the file. + +To create a cross-platform bounded semaphore across multiple processes you can +use the `BoundedSemaphore` class, which functions somewhat similarly to +`threading.BoundedSemaphore`: + +>>> import portalocker +>>> n = 2 +>>> timeout = 0.1 + +>>> semaphore_a = portalocker.BoundedSemaphore(n, timeout=timeout) +>>> semaphore_b = portalocker.BoundedSemaphore(n, timeout=timeout) +>>> semaphore_c = portalocker.BoundedSemaphore(n, timeout=timeout) + +>>> semaphore_a.acquire() + +>>> semaphore_b.acquire() + +>>> semaphore_c.acquire() +Traceback (most recent call last): + ... +portalocker.exceptions.AlreadyLocked + + +More examples can be found in the +`tests `_. + +Changelog +--------- + +See the `changelog `_ page. + +License +------- + +See the `LICENSE `_ file.
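+ +A non-blocking variant of the manual approach above (an illustrative sketch, not part of the upstream README; it uses the public `LOCK_NB` flag and `portalocker.exceptions.LockException`): + +>>> import portalocker +>>> f = open('somefile', 'r+') +>>> try: +...     # fail immediately instead of blocking if another process holds the lock +...     portalocker.lock(f, portalocker.LOCK_EX | portalocker.LOCK_NB) +... except portalocker.exceptions.LockException: +...     print('somefile is locked by another process') +... else: +...     f.write('foo') +...     portalocker.unlock(f) +>>> f.close()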
+ + + diff --git a/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/RECORD b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..217c7c0147db20c30d8d7423187a9a465bb0b294 --- /dev/null +++ b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/RECORD @@ -0,0 +1,13 @@ +portalocker-2.0.0.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +portalocker-2.0.0.dist-info/LICENSE,sha256=vXLG6qLW6cjDmlp_mg--JW_5hZgQ6RQnqNZh9TlZvWM,2419 +portalocker-2.0.0.dist-info/METADATA,sha256=XP_IiK7kdQTA88G8qPt1pwS5XPTROsnU165WvDKy-hU,5154 +portalocker-2.0.0.dist-info/RECORD,, +portalocker-2.0.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +portalocker-2.0.0.dist-info/WHEEL,sha256=kGT74LWyRUZrL4VgLh6_g12IeVl_9u9ZVhadrgXZUEY,110 +portalocker-2.0.0.dist-info/top_level.txt,sha256=qfIEwW2X8cgtD0cFJIIWaR-cnKNo4ESR7Raiwxf-UNA,12 +portalocker/__about__.py,sha256=osxcys64T4hR81aAyJTmjMxwfXbJMFBuw2Dk4SCtli8,231 +portalocker/__init__.py,sha256=pTr3v0V25kiu_ZRST6nHgSw-gf6xGH_ITCyZKSqlzIE,1784 +portalocker/constants.py,sha256=2TX72Y7s27AaUyGshLwkqn30bduPpH2OHe7Vu9_4abs,871 +portalocker/exceptions.py,sha256=brdecUFHjIj7ORrLjSwMteLqAvhPk2207nENK5SNum4,359 +portalocker/portalocker.py,sha256=WVCJo8Sz7wFecyybwY4qsuSWnQQbun4KiGiVC7VMkIE,5427 +portalocker/utils.py,sha256=zB6VAWwe7Drcn17R-ieBFu_fIytC-iK3X2VRFmaZHQA,10894 diff --git a/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/REQUESTED b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/WHEEL b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..ef99c6cf3283b50a273ac4c6d009a0aa85597070 --- /dev/null +++ b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.34.2) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/top_level.txt b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/top_level.txt new file mode 100644 index 0000000000000000000000000000000000000000..7bbc14e6fa689626ddcf52c84dd711ef31061774 --- /dev/null +++ b/lib/python3.10/site-packages/portalocker-2.0.0.dist-info/top_level.txt @@ -0,0 +1 @@ +portalocker diff --git a/lib/python3.10/site-packages/timm-1.0.11.dist-info/INSTALLER b/lib/python3.10/site-packages/timm-1.0.11.dist-info/INSTALLER new file mode 100644 index 0000000000000000000000000000000000000000..5c69047b2eb8235994febeeae1da4a82365a240a --- /dev/null +++ b/lib/python3.10/site-packages/timm-1.0.11.dist-info/INSTALLER @@ -0,0 +1 @@ +uv \ No newline at end of file diff --git a/lib/python3.10/site-packages/timm-1.0.11.dist-info/METADATA b/lib/python3.10/site-packages/timm-1.0.11.dist-info/METADATA new file mode 100644 index 0000000000000000000000000000000000000000..bf89501f27c2e50d759fc7fd2f008d3cdbdee189 --- /dev/null +++ b/lib/python3.10/site-packages/timm-1.0.11.dist-info/METADATA @@ -0,0 +1,620 @@ +Metadata-Version: 2.1 +Name: timm +Version: 1.0.11 +Summary: PyTorch Image Models +Keywords: pytorch,image-classification +Home-page: https://github.com/huggingface/pytorch-image-models +Author-Email: Ross Wightman +License: Apache-2.0 +Classifier: Development Status :: 5 - 
Production/Stable +Classifier: Intended Audience :: Education +Classifier: Intended Audience :: Science/Research +Classifier: License :: OSI Approved :: Apache Software License +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: 3.12 +Classifier: Topic :: Scientific/Engineering +Classifier: Topic :: Scientific/Engineering :: Artificial Intelligence +Classifier: Topic :: Software Development +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Project-URL: Homepage, https://github.com/huggingface/pytorch-image-models +Project-URL: Documentation, https://huggingface.co/docs/timm/en/index +Project-URL: Repository, https://github.com/huggingface/pytorch-image-models +Requires-Python: >=3.8 +Requires-Dist: torch +Requires-Dist: torchvision +Requires-Dist: pyyaml +Requires-Dist: huggingface_hub +Requires-Dist: safetensors +Description-Content-Type: text/markdown + +# PyTorch Image Models +- [What's New](#whats-new) +- [Introduction](#introduction) +- [Models](#models) +- [Features](#features) +- [Results](#results) +- [Getting Started (Documentation)](#getting-started-documentation) +- [Train, Validation, Inference Scripts](#train-validation-inference-scripts) +- [Awesome PyTorch Resources](#awesome-pytorch-resources) +- [Licenses](#licenses) +- [Citing](#citing) + +## What's New + +### Oct 16, 2024 +* Fix error on importing from deprecated path `timm.models.registry`, increased priority of existing deprecation warnings to be visible +* Port weights of InternViT-300M (https://huggingface.co/OpenGVLab/InternViT-300M-448px) to `timm` as `vit_intern300m_patch14_448` + +### Oct 14, 2024 +* Pre-activation (ResNetV2) version of 18/18d/34/34d ResNet model defs added by request (weights pending) +* Release 1.0.10 + +### Oct 11, 2024 +* MambaOut (https://github.com/yuweihao/MambaOut) model & weights added. A cheeky take on SSM vision models w/o the SSM (essentially ConvNeXt w/ gating). A mix of original weights + custom variations & weights.
+ +|model |img_size|top1 |top5 |param_count| +|---------------------------------------------------------------------------------------------------------------------|--------|------|------|-----------| +|[mambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k](http://huggingface.co/timm/mambaout_base_plus_rw.sw_e150_r384_in12k_ft_in1k)|384 |87.506|98.428|101.66 | +|[mambaout_base_plus_rw.sw_e150_in12k_ft_in1k](http://huggingface.co/timm/mambaout_base_plus_rw.sw_e150_in12k_ft_in1k)|288 |86.912|98.236|101.66 | +|[mambaout_base_plus_rw.sw_e150_in12k_ft_in1k](http://huggingface.co/timm/mambaout_base_plus_rw.sw_e150_in12k_ft_in1k)|224 |86.632|98.156|101.66 | +|[mambaout_base_tall_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_tall_rw.sw_e500_in1k) |288 |84.974|97.332|86.48 | +|[mambaout_base_wide_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_wide_rw.sw_e500_in1k) |288 |84.962|97.208|94.45 | +|[mambaout_base_short_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_short_rw.sw_e500_in1k) |288 |84.832|97.27 |88.83 | +|[mambaout_base.in1k](http://huggingface.co/timm/mambaout_base.in1k) |288 |84.72 |96.93 |84.81 | +|[mambaout_small_rw.sw_e450_in1k](http://huggingface.co/timm/mambaout_small_rw.sw_e450_in1k) |288 |84.598|97.098|48.5 | +|[mambaout_small.in1k](http://huggingface.co/timm/mambaout_small.in1k) |288 |84.5 |96.974|48.49 | +|[mambaout_base_wide_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_wide_rw.sw_e500_in1k) |224 |84.454|96.864|94.45 | +|[mambaout_base_tall_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_tall_rw.sw_e500_in1k) |224 |84.434|96.958|86.48 | +|[mambaout_base_short_rw.sw_e500_in1k](http://huggingface.co/timm/mambaout_base_short_rw.sw_e500_in1k) |224 |84.362|96.952|88.83 | +|[mambaout_base.in1k](http://huggingface.co/timm/mambaout_base.in1k) |224 |84.168|96.68 |84.81 | +|[mambaout_small.in1k](http://huggingface.co/timm/mambaout_small.in1k) |224 |84.086|96.63 |48.49 | +|[mambaout_small_rw.sw_e450_in1k](http://huggingface.co/timm/mambaout_small_rw.sw_e450_in1k) |224 |84.024|96.752|48.5 | +|[mambaout_tiny.in1k](http://huggingface.co/timm/mambaout_tiny.in1k) |288 |83.448|96.538|26.55 | +|[mambaout_tiny.in1k](http://huggingface.co/timm/mambaout_tiny.in1k) |224 |82.736|96.1 |26.55 | +|[mambaout_kobe.in1k](http://huggingface.co/timm/mambaout_kobe.in1k) |288 |81.054|95.718|9.14 | +|[mambaout_kobe.in1k](http://huggingface.co/timm/mambaout_kobe.in1k) |224 |79.986|94.986|9.14 | +|[mambaout_femto.in1k](http://huggingface.co/timm/mambaout_femto.in1k) |288 |79.848|95.14 |7.3 | +|[mambaout_femto.in1k](http://huggingface.co/timm/mambaout_femto.in1k) |224 |78.87 |94.408|7.3 | + +* SigLIP SO400M ViT fine-tunes on ImageNet-1k @ 378x378, added 378x378 option for existing SigLIP 384x384 models + * [vit_so400m_patch14_siglip_378.webli_ft_in1k](https://huggingface.co/timm/vit_so400m_patch14_siglip_378.webli_ft_in1k) - 89.42 top-1 + * [vit_so400m_patch14_siglip_gap_378.webli_ft_in1k](https://huggingface.co/timm/vit_so400m_patch14_siglip_gap_378.webli_ft_in1k) - 89.03 +* SigLIP SO400M ViT encoder from recent multi-lingual (i18n) variant, patch16 @ 256x256 (https://huggingface.co/timm/ViT-SO400M-16-SigLIP-i18n-256). OpenCLIP update pending. +* Add two ConvNeXt 'Zepto' models & weights (one w/ overlapped stem and one w/ patch stem). Uses RMSNorm, smaller than previous 'Atto', 2.2M params. 
+ * [convnext_zepto_rms_ols.ra4_e3600_r224_in1k](https://huggingface.co/timm/convnext_zepto_rms_ols.ra4_e3600_r224_in1k) - 73.20 top-1 @ 224 + * [convnext_zepto_rms.ra4_e3600_r224_in1k](https://huggingface.co/timm/convnext_zepto_rms.ra4_e3600_r224_in1k) - 72.81 @ 224 + +### Sept 2024 +* Add a suite of tiny test models for improved unit tests and niche low-resource applications (https://huggingface.co/blog/rwightman/timm-tiny-test) +* Add MobileNetV4-Conv-Small (0.5x) model (https://huggingface.co/posts/rwightman/793053396198664) + * [mobilenetv4_conv_small_050.e3000_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small_050.e3000_r224_in1k) - 65.81 top-1 @ 256, 64.76 @ 224 +* Add MobileNetV3-Large variants trained with MNV4 Small recipe + * [mobilenetv3_large_150d.ra4_e3600_r256_in1k](http://hf.co/timm/mobilenetv3_large_150d.ra4_e3600_r256_in1k) - 81.81 @ 320, 80.94 @ 256 + * [mobilenetv3_large_100.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv3_large_100.ra4_e3600_r224_in1k) - 77.16 @ 256, 76.31 @ 224 + + +### Aug 21, 2024 +* Updated SBB ViT models trained on ImageNet-12k and fine-tuned on ImageNet-1k, challenging quite a number of much larger, slower models + +| model | top1 | top5 | param_count | img_size | +| -------------------------------------------------- | ------ | ------ | ----------- | -------- | +| [vit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k](https://huggingface.co/timm/vit_mediumd_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k) | 87.438 | 98.256 | 64.11 | 384 | +| [vit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k](https://huggingface.co/timm/vit_mediumd_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k) | 86.608 | 97.934 | 64.11 | 256 | +| [vit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_384.sbb2_e200_in12k_ft_in1k) | 86.594 | 98.02 | 60.4 | 384 | +| [vit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_256.sbb2_e200_in12k_ft_in1k) | 85.734 | 97.61 | 60.4 | 256 | +* MobileNet-V1 1.25, EfficientNet-B1, & ResNet50-D weights w/ MNV4 baseline challenge recipe + +| model | top1 | top5 | param_count | img_size | +|--------------------------------------------------------------------------------------------------------------------------|--------|--------|-------------|----------| +| [resnet50d.ra4_e3600_r224_in1k](http://hf.co/timm/resnet50d.ra4_e3600_r224_in1k) | 81.838 | 95.922 | 25.58 | 288 | +| [efficientnet_b1.ra4_e3600_r240_in1k](http://hf.co/timm/efficientnet_b1.ra4_e3600_r240_in1k) | 81.440 | 95.700 | 7.79 | 288 | +| [resnet50d.ra4_e3600_r224_in1k](http://hf.co/timm/resnet50d.ra4_e3600_r224_in1k) | 80.952 | 95.384 | 25.58 | 224 | +| [efficientnet_b1.ra4_e3600_r240_in1k](http://hf.co/timm/efficientnet_b1.ra4_e3600_r240_in1k) | 80.406 | 95.152 | 7.79 | 240 | +| [mobilenetv1_125.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_125.ra4_e3600_r224_in1k) | 77.600 | 93.804 | 6.27 | 256 | +| [mobilenetv1_125.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_125.ra4_e3600_r224_in1k) | 76.924 | 93.234 | 6.27 | 224 | + +* Add SAM2 (HieraDet) backbone arch & weight loading support +* Add Hiera Small weights trained w/ abswin pos embed on in12k & fine-tuned on 1k + +|model |top1 |top5 |param_count| +|---------------------------------|------|------|-----------| +|hiera_small_abswin_256.sbb2_e200_in12k_ft_in1k |84.912|97.260|35.01 | +|hiera_small_abswin_256.sbb2_pd_e200_in12k_ft_in1k |84.560|97.106|35.01 | + +### Aug 8, 2024 +* Add RDNet 
('DenseNets Reloaded', https://arxiv.org/abs/2403.19588), thanks [Donghyun Kim](https://github.com/dhkim0225) + +### July 28, 2024 +* Add `mobilenet_edgetpu_v2_m` weights w/ `ra4` mnv4-small based recipe. 80.1% top-1 @ 224 and 80.7 @ 256. +* Release 1.0.8 + +### July 26, 2024 +* More MobileNet-v4 weights, ImageNet-12k pretrain w/ fine-tunes, and anti-aliased ConvLarge models + +| model |top1 |top1_err|top5 |top5_err|param_count|img_size| +|--------------------------------------------------------------------------------------------------|------|--------|------|--------|-----------|--------| +| [mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k)|84.99 |15.01 |97.294|2.706 |32.59 |544 | +| [mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k)|84.772|15.228 |97.344|2.656 |32.59 |480 | +| [mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e230_r448_in12k_ft_in1k)|84.64 |15.36 |97.114|2.886 |32.59 |448 | +| [mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e230_r384_in12k_ft_in1k)|84.314|15.686 |97.102|2.898 |32.59 |384 | +| [mobilenetv4_conv_aa_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e600_r384_in1k) |83.824|16.176 |96.734|3.266 |32.59 |480 | +| [mobilenetv4_conv_aa_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_conv_aa_large.e600_r384_in1k) |83.244|16.756 |96.392|3.608 |32.59 |384 | +| [mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k)|82.99 |17.01 |96.67 |3.33 |11.07 |320 | +| [mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.e200_r256_in12k_ft_in1k)|82.364|17.636 |96.256|3.744 |11.07 |256 | + +* Impressive MobileNet-V1 and EfficientNet-B0 baseline challenges (https://huggingface.co/blog/rwightman/mobilenet-baselines) + +| model |top1 |top1_err|top5 |top5_err|param_count|img_size| +|--------------------------------------------------------------------------------------------------|------|--------|------|--------|-----------|--------| +| [efficientnet_b0.ra4_e3600_r224_in1k](http://hf.co/timm/efficientnet_b0.ra4_e3600_r224_in1k) |79.364|20.636 |94.754|5.246 |5.29 |256 | +| [efficientnet_b0.ra4_e3600_r224_in1k](http://hf.co/timm/efficientnet_b0.ra4_e3600_r224_in1k) |78.584|21.416 |94.338|5.662 |5.29 |224 | +| [mobilenetv1_100h.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_100h.ra4_e3600_r224_in1k) |76.596|23.404 |93.272|6.728 |5.28 |256 | +| [mobilenetv1_100.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_100.ra4_e3600_r224_in1k) |76.094|23.906 |93.004|6.996 |4.23 |256 | +| [mobilenetv1_100h.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_100h.ra4_e3600_r224_in1k) |75.662|24.338 |92.504|7.496 |5.28 |224 | +| [mobilenetv1_100.ra4_e3600_r224_in1k](http://hf.co/timm/mobilenetv1_100.ra4_e3600_r224_in1k) |75.382|24.618 |92.312|7.688 |4.23 |224 | + +* Prototype of `set_input_size()` added to vit and swin v1/v2 models to allow changing image size, patch size, window size after model creation. +* Improved support in swin for different size handling; in addition to `set_input_size`, `always_partition` and `strict_img_size` args have been added to `__init__` to allow more flexible input size constraints +* Fix out-of-order indices info for intermediate 'Getter' feature wrapper, check out-of-range indices for same.
+* Add several `tiny` < .5M param models for testing that are actually trained on ImageNet-1k + +|model |top1 |top1_err|top5 |top5_err|param_count|img_size|crop_pct| +|----------------------------|------|--------|------|--------|-----------|--------|--------| +|test_efficientnet.r160_in1k |47.156|52.844 |71.726|28.274 |0.36 |192 |1.0 | +|test_byobnet.r160_in1k |46.698|53.302 |71.674|28.326 |0.46 |192 |1.0 | +|test_efficientnet.r160_in1k |46.426|53.574 |70.928|29.072 |0.36 |160 |0.875 | +|test_byobnet.r160_in1k |45.378|54.622 |70.572|29.428 |0.46 |160 |0.875 | +|test_vit.r160_in1k|42.0 |58.0 |68.664|31.336 |0.37 |192 |1.0 | +|test_vit.r160_in1k|40.822|59.178 |67.212|32.788 |0.37 |160 |0.875 | + +* Fix vit reg token init, thanks [Promisery](https://github.com/Promisery) +* Other misc fixes + +### June 24, 2024 +* 3 more MobileNetV4 hybrid weights with different MQA weight init scheme + +| model |top1 |top1_err|top5 |top5_err|param_count|img_size| +|--------------------------------------------------------------------------------------------------|------|--------|------|--------|-----------|--------| +| [mobilenetv4_hybrid_large.ix_e600_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_large.ix_e600_r384_in1k) |84.356|15.644 |96.892 |3.108 |37.76 |448 | +| [mobilenetv4_hybrid_large.ix_e600_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_large.ix_e600_r384_in1k) |83.990|16.010 |96.702 |3.298 |37.76 |384 | +| [mobilenetv4_hybrid_medium.ix_e550_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.ix_e550_r384_in1k) |83.394|16.606 |96.760|3.240 |11.07 |448 | +| [mobilenetv4_hybrid_medium.ix_e550_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.ix_e550_r384_in1k) |82.968|17.032 |96.474|3.526 |11.07 |384 | +| [mobilenetv4_hybrid_medium.ix_e550_r256_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.ix_e550_r256_in1k) |82.492|17.508 |96.278|3.722 |11.07 |320 | +| [mobilenetv4_hybrid_medium.ix_e550_r256_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.ix_e550_r256_in1k) |81.446|18.554 |95.704|4.296 |11.07 |256 | +* florence2 weight loading in DaViT model + +### June 12, 2024 +* MobileNetV4 models and initial set of `timm` trained weights added: + +| model |top1 |top1_err|top5 |top5_err|param_count|img_size| +|--------------------------------------------------------------------------------------------------|------|--------|------|--------|-----------|--------| +| [mobilenetv4_hybrid_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_large.e600_r384_in1k) |84.266|15.734 |96.936 |3.064 |37.76 |448 | +| [mobilenetv4_hybrid_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_hybrid_large.e600_r384_in1k) |83.800|16.200 |96.770 |3.230 |37.76 |384 | +| [mobilenetv4_conv_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_conv_large.e600_r384_in1k) |83.392|16.608 |96.622 |3.378 |32.59 |448 | +| [mobilenetv4_conv_large.e600_r384_in1k](http://hf.co/timm/mobilenetv4_conv_large.e600_r384_in1k) |82.952|17.048 |96.266 |3.734 |32.59 |384 | +| [mobilenetv4_conv_large.e500_r256_in1k](http://hf.co/timm/mobilenetv4_conv_large.e500_r256_in1k) |82.674|17.326 |96.31 |3.69 |32.59 |320 | +| [mobilenetv4_conv_large.e500_r256_in1k](http://hf.co/timm/mobilenetv4_conv_large.e500_r256_in1k) |81.862|18.138 |95.69 |4.31 |32.59 |256 | +| [mobilenetv4_hybrid_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.e500_r224_in1k) |81.276|18.724 |95.742|4.258 |11.07 |256 | +| [mobilenetv4_conv_medium.e500_r256_in1k](http://hf.co/timm/mobilenetv4_conv_medium.e500_r256_in1k) |80.858|19.142 |95.768|4.232 |9.72 |320
| +| [mobilenetv4_hybrid_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_hybrid_medium.e500_r224_in1k) |80.442|19.558 |95.38 |4.62 |11.07 |224 | +| [mobilenetv4_conv_blur_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_conv_blur_medium.e500_r224_in1k) |80.142|19.858 |95.298|4.702 |9.72 |256 | +| [mobilenetv4_conv_medium.e500_r256_in1k](http://hf.co/timm/mobilenetv4_conv_medium.e500_r256_in1k) |79.928|20.072 |95.184|4.816 |9.72 |256 | +| [mobilenetv4_conv_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_conv_medium.e500_r224_in1k) |79.808|20.192 |95.186|4.814 |9.72 |256 | +| [mobilenetv4_conv_blur_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_conv_blur_medium.e500_r224_in1k) |79.438|20.562 |94.932|5.068 |9.72 |224 | +| [mobilenetv4_conv_medium.e500_r224_in1k](http://hf.co/timm/mobilenetv4_conv_medium.e500_r224_in1k) |79.094|20.906 |94.77 |5.23 |9.72 |224 | +| [mobilenetv4_conv_small.e2400_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small.e2400_r224_in1k) |74.616|25.384 |92.072|7.928 |3.77 |256 | +| [mobilenetv4_conv_small.e1200_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small.e1200_r224_in1k) |74.292|25.708 |92.116|7.884 |3.77 |256 | +| [mobilenetv4_conv_small.e2400_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small.e2400_r224_in1k) |73.756|26.244 |91.422|8.578 |3.77 |224 | +| [mobilenetv4_conv_small.e1200_r224_in1k](http://hf.co/timm/mobilenetv4_conv_small.e1200_r224_in1k) |73.454|26.546 |91.34 |8.66 |3.77 |224 | + +* Apple MobileCLIP (https://arxiv.org/pdf/2311.17049, FastViT and ViT-B) image tower model support & weights added (part of OpenCLIP support). +* ViTamin (https://arxiv.org/abs/2404.02132) CLIP image tower model & weights added (part of OpenCLIP support). +* OpenAI CLIP Modified ResNet image tower modelling & weight support (via ByobNet). Refactor AttentionPool2d. + +### May 14, 2024 +* Support loading PaliGemma jax weights into SigLIP ViT models with average pooling. +* Add Hiera models from Meta (https://github.com/facebookresearch/hiera). +* Add `normalize=` flag for transforms, return non-normalized torch.Tensor with original dtype (for `chug`) +* Version 1.0.3 release + +### May 11, 2024 +* `Searching for Better ViT Baselines (For the GPU Poor)` weights and vit variants released. Exploring model shapes between Tiny and Base.
+
+| model | top1 | top5 | param_count | img_size |
+| -------------------------------------------------- | ------ | ------ | ----------- | -------- |
+| [vit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k](https://huggingface.co/timm/vit_mediumd_patch16_reg4_gap_256.sbb_in12k_ft_in1k) | 86.202 | 97.874 | 64.11 | 256 |
+| [vit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_256.sbb_in12k_ft_in1k) | 85.418 | 97.48 | 60.4 | 256 |
+| [vit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_mediumd_patch16_rope_reg1_gap_256.sbb_in1k) | 84.322 | 96.812 | 63.95 | 256 |
+| [vit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k](https://huggingface.co/timm/vit_betwixt_patch16_rope_reg4_gap_256.sbb_in1k) | 83.906 | 96.684 | 60.23 | 256 |
+| [vit_base_patch16_rope_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_base_patch16_rope_reg1_gap_256.sbb_in1k) | 83.866 | 96.67 | 86.43 | 256 |
+| [vit_medium_patch16_rope_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_medium_patch16_rope_reg1_gap_256.sbb_in1k) | 83.81 | 96.824 | 38.74 | 256 |
+| [vit_betwixt_patch16_reg4_gap_256.sbb_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_256.sbb_in1k) | 83.706 | 96.616 | 60.4 | 256 |
+| [vit_betwixt_patch16_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_betwixt_patch16_reg1_gap_256.sbb_in1k) | 83.628 | 96.544 | 60.4 | 256 |
+| [vit_medium_patch16_reg4_gap_256.sbb_in1k](https://huggingface.co/timm/vit_medium_patch16_reg4_gap_256.sbb_in1k) | 83.47 | 96.622 | 38.88 | 256 |
+| [vit_medium_patch16_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_medium_patch16_reg1_gap_256.sbb_in1k) | 83.462 | 96.548 | 38.88 | 256 |
+| [vit_little_patch16_reg4_gap_256.sbb_in1k](https://huggingface.co/timm/vit_little_patch16_reg4_gap_256.sbb_in1k) | 82.514 | 96.262 | 22.52 | 256 |
+| [vit_wee_patch16_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_wee_patch16_reg1_gap_256.sbb_in1k) | 80.256 | 95.360 | 13.42 | 256 |
+| [vit_pwee_patch16_reg1_gap_256.sbb_in1k](https://huggingface.co/timm/vit_pwee_patch16_reg1_gap_256.sbb_in1k) | 80.072 | 95.136 | 15.25 | 256 |
+| [vit_mediumd_patch16_reg4_gap_256.sbb_in12k](https://huggingface.co/timm/vit_mediumd_patch16_reg4_gap_256.sbb_in12k) | N/A | N/A | 64.11 | 256 |
+| [vit_betwixt_patch16_reg4_gap_256.sbb_in12k](https://huggingface.co/timm/vit_betwixt_patch16_reg4_gap_256.sbb_in12k) | N/A | N/A | 60.4 | 256 |
+
+* AttentionExtract helper added to extract attention maps from `timm` models. See example in https://github.com/huggingface/pytorch-image-models/discussions/1232#discussioncomment-9320949
+* `forward_intermediates()` API refined and added to more models, including some ConvNets that have other extraction methods.
+* 1017 of 1047 model architectures support `features_only=True` feature extraction. The remaining 34 architectures can be supported, and will be added based on priority requests.
+* Removed torch.jit.script annotated functions, including old JIT activations. They conflict with dynamo, and dynamo does a much better job when used.
+
+### April 11, 2024
+* Prepping for a long overdue 1.0 release; things have been stable for a while now.
+* Significant feature that's been missing for a while: `features_only=True` support for ViT models with flat hidden states or non-std module layouts (so far covering `'vit_*', 'twins_*', 'deit*', 'beit*', 'mvitv2*', 'eva*', 'samvit_*', 'flexivit*'`)
+* The above feature support is achieved through a new `forward_intermediates()` API that can be used with a feature wrapping module or directly.
+```python
+import timm
+import torch
+
+model = timm.create_model('vit_base_patch16_224')
+x = torch.randn(2, 3, 224, 224)
+final_feat, intermediates = model.forward_intermediates(x)
+output = model.forward_head(final_feat)  # pooling + classifier head
+
+print(final_feat.shape)  # torch.Size([2, 197, 768])
+
+for f in intermediates:
+    print(f.shape)  # torch.Size([2, 768, 14, 14]), one per block (12x for this model)
+
+print(output.shape)  # torch.Size([2, 1000])
+```
+
+```python
+model = timm.create_model('eva02_base_patch16_clip_224', pretrained=True, img_size=512, features_only=True, out_indices=(-3, -2,))
+output = model(torch.randn(2, 3, 512, 512))
+
+for o in output:
+    print(o.shape)  # torch.Size([2, 768, 32, 32]) for each of the two selected indices
+```
+* TinyCLIP vision tower weights added, thanks [Thien Tran](https://github.com/gau-nernst)
+
+### Feb 19, 2024
+* Next-ViT models added. Adapted from https://github.com/bytedance/Next-ViT
+* HGNet and PP-HGNetV2 models added. Adapted from https://github.com/PaddlePaddle/PaddleClas by [SeeFun](https://github.com/seefun)
+* Removed setup.py, moved to pyproject.toml based build supported by PDM
+* Add updated model EMA impl using _for_each for less overhead
+* Support device args in train script for non-GPU devices
+* Other misc fixes and small additions
+* Min supported Python version increased to 3.8
+* Release 0.9.16
+
+### Jan 8, 2024
+Datasets & transform refactoring
+* HuggingFace streaming (iterable) dataset support (`--dataset hfids:org/dataset`)
+* Webdataset wrapper tweaks for improved split info fetching, can auto fetch splits from supported HF hub webdataset
+* Tested HF `datasets` and webdataset wrapper streaming from HF hub with recent `timm` ImageNet uploads to https://huggingface.co/timm
+* Make input & target column/field keys consistent across datasets and pass via args
+* Full monochrome support when using e.g. `--input-size 1 224 224` or `--in-chans 1`, sets PIL image conversion appropriately in dataset
+* Improved several alternate crop & resize transforms (ResizeKeepRatio, RandomCropOrPad, etc) for use in PixParse document AI project
+* Add SimCLR style color jitter prob along with grayscale and gaussian blur options to augmentations and args
+* Allow train without validation set (`--val-split ''`) in train script
+* Add `--bce-sum` (sum over class dim) and `--bce-pos-weight` (positive weighting) args for training as they're common BCE loss tweaks I was often hard coding
+
+### Nov 23, 2023
+* Added EfficientViT-Large models, thanks [SeeFun](https://github.com/seefun)
+* Fix Python 3.7 compat, will be dropping support for it soon
+* Other misc fixes
+* Release 0.9.12
+
+### Nov 20, 2023
+* Added significant flexibility for Hugging Face Hub based timm models via `model_args` config entry. `model_args` will be passed as kwargs through to models on creation (see the sketch at the end of this section).
+  * See example at https://huggingface.co/gaunernst/vit_base_patch16_1024_128.audiomae_as2m_ft_as20k/blob/main/config.json
+  * Usage: https://github.com/huggingface/pytorch-image-models/discussions/2035
+* Updated imagenet eval and test set csv files with latest models
+* `vision_transformer.py` typing and doc cleanup by [Laureηt](https://github.com/Laurent2916)
+* 0.9.11 release
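+
+As a rough illustration of the `model_args` mechanism above (the override values and hub repo name below are made-up examples, not real checkpoints), the same kwargs can either be passed directly at creation time or baked into a checkpoint's `config.json`:
+
+```python
+import timm
+
+# Creation kwargs flow straight through to the model constructor.
+model = timm.create_model(
+    'vit_base_patch16_224',
+    num_classes=10,       # replace the classifier head
+    img_size=384,         # build for a non-default input resolution
+    drop_path_rate=0.1,   # stochastic depth rate
+)
+
+# A hub checkpoint can carry equivalent overrides in its config.json, e.g.
+#   {"architecture": "vit_base_patch16_224",
+#    "model_args": {"img_size": 384, "drop_path_rate": 0.1}}
+# so the exact variant is rebuilt on load (hypothetical repo name):
+# model = timm.create_model('hf-hub:some-org/vit-base-custom', pretrained=True)
+```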
+
+### Nov 3, 2023
+* [DFN (Data Filtering Networks)](https://huggingface.co/papers/2309.17425) and [MetaCLIP](https://huggingface.co/papers/2309.16671) ViT weights added
+* DINOv2 'register' ViT model weights added (https://huggingface.co/papers/2309.16588, https://huggingface.co/papers/2304.07193)
+* Add `quickgelu` ViT variants for OpenAI, DFN, MetaCLIP weights that use it (less efficient)
+* Improved typing added to ResNet, MobileNet-v3 thanks to [Aryan](https://github.com/a-r-r-o-w)
+* ImageNet-12k fine-tuned (from LAION-2B CLIP) `convnext_xxlarge` weights added
+* 0.9.9 release
+
+### Oct 20, 2023
+* [SigLIP](https://huggingface.co/papers/2303.15343) image tower weights supported in `vision_transformer.py`.
+  * Great potential for fine-tune and downstream feature use.
+* Experimental 'register' support in vit models as per [Vision Transformers Need Registers](https://huggingface.co/papers/2309.16588)
+* Updated RepViT with new weight release. Thanks [wangao](https://github.com/jameslahm)
+* Add patch resizing support (on pretrained weight load) to Swin models
+* 0.9.8 release pending
+
+### Sep 1, 2023
+* TinyViT added by [SeeFun](https://github.com/seefun)
+* Fix EfficientViT (MIT) to use torch.autocast so it works back to PT 1.10
+* 0.9.7 release
+
+## Introduction
+
+Py**T**orch **Im**age **M**odels (`timm`) is a collection of image models, layers, utilities, optimizers, schedulers, data-loaders / augmentations, and reference training / validation scripts that aim to pull together a wide variety of SOTA models with the ability to reproduce ImageNet training results.
+
+The work of many others is present here. I've tried to make sure all source material is acknowledged via links to github, arxiv papers, etc in the README, documentation, and code docstrings. Please let me know if I missed anything.
+
+## Features
+
+### Models
+
+All model architecture families include variants with pretrained weights. Some specific model variants ship without any weights; that is NOT a bug. Help training new or better weights is always appreciated. A short usage sketch follows, and the full set of supported architecture families is listed after it.
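+
+As a quick, hedged sketch of discovering what's available (the wildcard pattern and weight tag below are just examples), architectures and their pretrained weight tags can be listed and instantiated by name:
+
+```python
+import timm
+
+# List architectures matching a wildcard; pretrained=True limits the result
+# to variants that have pretrained weights available.
+print(timm.list_models('*convnext*', pretrained=True)[:5])
+
+# Create any listed model by name; num_classes=0 returns pooled features
+# instead of classification logits.
+model = timm.create_model('convnext_tiny.fb_in22k', pretrained=True, num_classes=0)
+```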
+
+* Aggregating Nested Transformers - https://arxiv.org/abs/2105.12723
+* BEiT - https://arxiv.org/abs/2106.08254
+* Big Transfer ResNetV2 (BiT) - https://arxiv.org/abs/1912.11370
+* Bottleneck Transformers - https://arxiv.org/abs/2101.11605
+* CaiT (Class-Attention in Image Transformers) - https://arxiv.org/abs/2103.17239
+* CoaT (Co-Scale Conv-Attentional Image Transformers) - https://arxiv.org/abs/2104.06399
+* CoAtNet (Convolution and Attention) - https://arxiv.org/abs/2106.04803
+* ConvNeXt - https://arxiv.org/abs/2201.03545
+* ConvNeXt-V2 - https://arxiv.org/abs/2301.00808
+* ConViT (Soft Convolutional Inductive Biases Vision Transformers) - https://arxiv.org/abs/2103.10697
+* CspNet (Cross-Stage Partial Networks) - https://arxiv.org/abs/1911.11929
+* DeiT - https://arxiv.org/abs/2012.12877
+* DeiT-III - https://arxiv.org/pdf/2204.07118.pdf
+* DenseNet - https://arxiv.org/abs/1608.06993
+* DLA - https://arxiv.org/abs/1707.06484
+* DPN (Dual-Path Network) - https://arxiv.org/abs/1707.01629
+* EdgeNeXt - https://arxiv.org/abs/2206.10589
+* EfficientFormer - https://arxiv.org/abs/2206.01191
+* EfficientNet (MBConvNet Family)
+  * EfficientNet NoisyStudent (B0-B7, L2) - https://arxiv.org/abs/1911.04252
+  * EfficientNet AdvProp (B0-B8) - https://arxiv.org/abs/1911.09665
+  * EfficientNet (B0-B7) - https://arxiv.org/abs/1905.11946
+  * EfficientNet-EdgeTPU (S, M, L) - https://ai.googleblog.com/2019/08/efficientnet-edgetpu-creating.html
+  * EfficientNet V2 - https://arxiv.org/abs/2104.00298
+  * FBNet-C - https://arxiv.org/abs/1812.03443
+  * MixNet - https://arxiv.org/abs/1907.09595
+  * MNASNet B1, A1 (Squeeze-Excite), and Small - https://arxiv.org/abs/1807.11626
+  * MobileNet-V2 - https://arxiv.org/abs/1801.04381
+  * Single-Path NAS - https://arxiv.org/abs/1904.02877
+  * TinyNet - https://arxiv.org/abs/2010.14819
+* EfficientViT (MIT) - https://arxiv.org/abs/2205.14756
+* EfficientViT (MSRA) - https://arxiv.org/abs/2305.07027
+* EVA - https://arxiv.org/abs/2211.07636
+* EVA-02 - https://arxiv.org/abs/2303.11331
+* FastViT - https://arxiv.org/abs/2303.14189
+* FlexiViT - https://arxiv.org/abs/2212.08013
+* FocalNet (Focal Modulation Networks) - https://arxiv.org/abs/2203.11926
+* GCViT (Global Context Vision Transformer) - https://arxiv.org/abs/2206.09959
+* GhostNet - https://arxiv.org/abs/1911.11907
+* GhostNet-V2 - https://arxiv.org/abs/2211.12905
+* gMLP - https://arxiv.org/abs/2105.08050
+* GPU-Efficient Networks - https://arxiv.org/abs/2006.14090
+* Halo Nets - https://arxiv.org/abs/2103.12731
+* HGNet / HGNet-V2 - TBD
+* HRNet - https://arxiv.org/abs/1908.07919
+* InceptionNeXt - https://arxiv.org/abs/2303.16900
+* Inception-V3 - https://arxiv.org/abs/1512.00567
+* Inception-ResNet-V2 and Inception-V4 - https://arxiv.org/abs/1602.07261
+* Lambda Networks - https://arxiv.org/abs/2102.08602
+* LeViT (Vision Transformer in ConvNet's Clothing) - https://arxiv.org/abs/2104.01136
+* MaxViT (Multi-Axis Vision Transformer) - https://arxiv.org/abs/2204.01697
+* MetaFormer (PoolFormer-v2, ConvFormer, CAFormer) - https://arxiv.org/abs/2210.13452
+* MLP-Mixer - https://arxiv.org/abs/2105.01601
+* MobileCLIP - https://arxiv.org/abs/2311.17049
+* MobileNet-V3 (MBConvNet w/ Efficient Head) - https://arxiv.org/abs/1905.02244
+  * FBNet-V3 - https://arxiv.org/abs/2006.02049
+  * HardCoRe-NAS - https://arxiv.org/abs/2102.11646
+  * LCNet - https://arxiv.org/abs/2109.15099
+* MobileNetV4 - https://arxiv.org/abs/2404.10518
+* MobileOne - https://arxiv.org/abs/2206.04040
+* MobileViT - 
https://arxiv.org/abs/2110.02178 +* MobileViT-V2 - https://arxiv.org/abs/2206.02680 +* MViT-V2 (Improved Multiscale Vision Transformer) - https://arxiv.org/abs/2112.01526 +* NASNet-A - https://arxiv.org/abs/1707.07012 +* NesT - https://arxiv.org/abs/2105.12723 +* Next-ViT - https://arxiv.org/abs/2207.05501 +* NFNet-F - https://arxiv.org/abs/2102.06171 +* NF-RegNet / NF-ResNet - https://arxiv.org/abs/2101.08692 +* PNasNet - https://arxiv.org/abs/1712.00559 +* PoolFormer (MetaFormer) - https://arxiv.org/abs/2111.11418 +* Pooling-based Vision Transformer (PiT) - https://arxiv.org/abs/2103.16302 +* PVT-V2 (Improved Pyramid Vision Transformer) - https://arxiv.org/abs/2106.13797 +* RDNet (DenseNets Reloaded) - https://arxiv.org/abs/2403.19588 +* RegNet - https://arxiv.org/abs/2003.13678 +* RegNetZ - https://arxiv.org/abs/2103.06877 +* RepVGG - https://arxiv.org/abs/2101.03697 +* RepGhostNet - https://arxiv.org/abs/2211.06088 +* RepViT - https://arxiv.org/abs/2307.09283 +* ResMLP - https://arxiv.org/abs/2105.03404 +* ResNet/ResNeXt + * ResNet (v1b/v1.5) - https://arxiv.org/abs/1512.03385 + * ResNeXt - https://arxiv.org/abs/1611.05431 + * 'Bag of Tricks' / Gluon C, D, E, S variations - https://arxiv.org/abs/1812.01187 + * Weakly-supervised (WSL) Instagram pretrained / ImageNet tuned ResNeXt101 - https://arxiv.org/abs/1805.00932 + * Semi-supervised (SSL) / Semi-weakly Supervised (SWSL) ResNet/ResNeXts - https://arxiv.org/abs/1905.00546 + * ECA-Net (ECAResNet) - https://arxiv.org/abs/1910.03151v4 + * Squeeze-and-Excitation Networks (SEResNet) - https://arxiv.org/abs/1709.01507 + * ResNet-RS - https://arxiv.org/abs/2103.07579 +* Res2Net - https://arxiv.org/abs/1904.01169 +* ResNeSt - https://arxiv.org/abs/2004.08955 +* ReXNet - https://arxiv.org/abs/2007.00992 +* SelecSLS - https://arxiv.org/abs/1907.00837 +* Selective Kernel Networks - https://arxiv.org/abs/1903.06586 +* Sequencer2D - https://arxiv.org/abs/2205.01972 +* Swin S3 (AutoFormerV2) - https://arxiv.org/abs/2111.14725 +* Swin Transformer - https://arxiv.org/abs/2103.14030 +* Swin Transformer V2 - https://arxiv.org/abs/2111.09883 +* Transformer-iN-Transformer (TNT) - https://arxiv.org/abs/2103.00112 +* TResNet - https://arxiv.org/abs/2003.13630 +* Twins (Spatial Attention in Vision Transformers) - https://arxiv.org/pdf/2104.13840.pdf +* Visformer - https://arxiv.org/abs/2104.12533 +* Vision Transformer - https://arxiv.org/abs/2010.11929 +* ViTamin - https://arxiv.org/abs/2404.02132 +* VOLO (Vision Outlooker) - https://arxiv.org/abs/2106.13112 +* VovNet V2 and V1 - https://arxiv.org/abs/1911.06667 +* Xception - https://arxiv.org/abs/1610.02357 +* Xception (Modified Aligned, Gluon) - https://arxiv.org/abs/1802.02611 +* Xception (Modified Aligned, TF) - https://arxiv.org/abs/1802.02611 +* XCiT (Cross-Covariance Image Transformers) - https://arxiv.org/abs/2106.09681 + +### Optimizers + +Included optimizers available via `create_optimizer` / `create_optimizer_v2` factory methods: +* `adabelief` an implementation of AdaBelief adapted from https://github.com/juntang-zhuang/Adabelief-Optimizer - https://arxiv.org/abs/2010.07468 +* `adafactor` adapted from [FAIRSeq impl](https://github.com/pytorch/fairseq/blob/master/fairseq/optim/adafactor.py) - https://arxiv.org/abs/1804.04235 +* `adahessian` by [David Samuel](https://github.com/davda54/ada-hessian) - https://arxiv.org/abs/2006.00719 +* `adamp` and `sgdp` by [Naver ClovAI](https://github.com/clovaai) - https://arxiv.org/abs/2006.08217 +* `adan` an implementation of Adan adapted from 
https://github.com/sail-sg/Adan - https://arxiv.org/abs/2208.06677
+* `lamb` an implementation of Lamb and LambC (w/ trust-clipping) cleaned up and modified to support use with XLA - https://arxiv.org/abs/1904.00962
+* `lars` an implementation of LARS and LARC (w/ trust-clipping) - https://arxiv.org/abs/1708.03888
+* `lion` an implementation of Lion adapted from https://github.com/google/automl/tree/master/lion - https://arxiv.org/abs/2302.06675
+* `lookahead` adapted from impl by [Liam](https://github.com/alphadl/lookahead.pytorch) - https://arxiv.org/abs/1907.08610
+* `madgrad` an implementation of MADGRAD adapted from https://github.com/facebookresearch/madgrad - https://arxiv.org/abs/2101.11075
+* `nadam` an implementation of Adam w/ Nesterov momentum
+* `nadamw` an implementation of AdamW (Adam w/ decoupled weight-decay) w/ Nesterov momentum. A simplified impl based on https://github.com/mlcommons/algorithmic-efficiency
+* `novograd` by [Masashi Kimura](https://github.com/convergence-lab/novograd) - https://arxiv.org/abs/1905.11286
+* `radam` by [Liyuan Liu](https://github.com/LiyuanLucasLiu/RAdam) - https://arxiv.org/abs/1908.03265
+* `rmsprop_tf` adapted from PyTorch RMSProp by myself. Reproduces much improved TensorFlow RMSProp behaviour
+* `sgdw` an implementation of SGD w/ decoupled weight-decay
+* `fused` optimizers by name with [NVIDIA Apex](https://github.com/NVIDIA/apex/tree/master/apex/optimizers) installed
+* `bits` optimizers by name with [BitsAndBytes](https://github.com/TimDettmers/bitsandbytes) installed
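+
+A minimal sketch of the factory usage (the optimizer choice and hyper-parameter values here are arbitrary examples):
+
+```python
+import timm
+from timm.optim import create_optimizer_v2
+
+model = timm.create_model('resnet50')
+
+# The opt string selects one of the implementations listed above.
+optimizer = create_optimizer_v2(model, opt='adamw', lr=1e-3, weight_decay=0.05)
+
+# Lookahead can wrap a base optimizer via a 'lookahead_' prefix on the name.
+optimizer = create_optimizer_v2(model, opt='lookahead_adamw', lr=1e-3)
+```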
+
+### Augmentations
+* Random Erasing from [Zhun Zhong](https://github.com/zhunzhong07/Random-Erasing/blob/master/transforms.py) - https://arxiv.org/abs/1708.04896
+* Mixup - https://arxiv.org/abs/1710.09412
+* CutMix - https://arxiv.org/abs/1905.04899
+* AutoAugment (https://arxiv.org/abs/1805.09501) and RandAugment (https://arxiv.org/abs/1909.13719) ImageNet configurations modeled after impl for EfficientNet training (https://github.com/tensorflow/tpu/blob/master/models/official/efficientnet/autoaugment.py)
+* AugMix w/ JSD loss; JSD w/ clean + augmented mixing support also works with AutoAugment and RandAugment - https://arxiv.org/abs/1912.02781
+* SplitBatchNorm - allows splitting batch norm layers between clean and augmented (auxiliary batch norm) data
+
+### Regularization
+* DropPath aka "Stochastic Depth" - https://arxiv.org/abs/1603.09382
+* DropBlock - https://arxiv.org/abs/1810.12890
+* Blur Pooling - https://arxiv.org/abs/1904.11486
+
+### Other
+
+Several (less common) features that I often utilize in my projects are included. Many of their additions are the reason why I maintain my own set of models, instead of using others' via PIP:
+
+* All models have a common default configuration interface and API for
+  * accessing/changing the classifier - `get_classifier` and `reset_classifier`
+  * doing a forward pass on just the features - `forward_features` (see [documentation](https://huggingface.co/docs/timm/feature_extraction))
+  * these make it easy to write consistent network wrappers that work with any of the models
+* All models support multi-scale feature map extraction (feature pyramids) via create_model (see [documentation](https://huggingface.co/docs/timm/feature_extraction))
+  * `create_model(name, features_only=True, out_indices=..., output_stride=...)`
+  * `out_indices` creation arg specifies which feature maps to return; these indices are 0 based and generally correspond to the `C(i + 1)` feature level
+  * `output_stride` creation arg controls output stride of the network by using dilated convolutions. Most networks are stride 32 by default. Not all networks support this.
+  * feature map channel counts and reduction level (stride) can be queried AFTER model creation via the `.feature_info` member
+* All models have a consistent pretrained weight loader that adapts the last linear layer if necessary, and from 3 to 1 channel input if desired
+* High performance [reference training, validation, and inference scripts](https://huggingface.co/docs/timm/training_script) that work in several process/GPU modes:
+  * NVIDIA DDP w/ a single GPU per process, multiple processes with APEX present (AMP mixed-precision optional)
+  * PyTorch DistributedDataParallel w/ multi-gpu, single process (AMP disabled as it crashes when enabled)
+  * PyTorch w/ single GPU, single process (AMP optional)
+* A dynamic global pool implementation that allows selecting from average pooling, max pooling, average + max, or concat([average, max]) at model creation. All global pooling is adaptive average by default and compatible with pretrained weights.
+* A 'Test Time Pool' wrapper that can wrap any of the included models and usually provides improved performance doing inference with input images larger than the training size. Idea adapted from the original DPN implementation when I ported it (https://github.com/cypw/DPNs)
+* Learning rate schedulers
+  * Ideas adopted from
+    * [AllenNLP schedulers](https://github.com/allenai/allennlp/tree/master/allennlp/training/learning_rate_schedulers)
+    * [FAIRseq lr_scheduler](https://github.com/pytorch/fairseq/tree/master/fairseq/optim/lr_scheduler)
+    * SGDR: Stochastic Gradient Descent with Warm Restarts (https://arxiv.org/abs/1608.03983)
+  * Schedulers include `step`, `cosine` w/ restarts, `tanh` w/ restarts, `plateau`
+* Space-to-Depth by [mrT23](https://github.com/mrT23/TResNet/blob/master/src/models/tresnet/layers/space_to_depth.py) (https://arxiv.org/abs/1801.04590) -- original paper?
+* Adaptive Gradient Clipping (https://arxiv.org/abs/2102.06171, https://github.com/deepmind/deepmind-research/tree/master/nfnets)
+* An extensive selection of channel and/or spatial attention modules:
+  * Bottleneck Transformer - https://arxiv.org/abs/2101.11605
+  * CBAM - https://arxiv.org/abs/1807.06521
+  * Effective Squeeze-Excitation (ESE) - https://arxiv.org/abs/1911.06667
+  * Efficient Channel Attention (ECA) - https://arxiv.org/abs/1910.03151
+  * Gather-Excite (GE) - https://arxiv.org/abs/1810.12348
+  * Global Context (GC) - https://arxiv.org/abs/1904.11492
+  * Halo - https://arxiv.org/abs/2103.12731
+  * Involution - https://arxiv.org/abs/2103.06255
+  * Lambda Layer - https://arxiv.org/abs/2102.08602
+  * Non-Local (NL) - https://arxiv.org/abs/1711.07971
+  * Squeeze-and-Excitation (SE) - https://arxiv.org/abs/1709.01507
+  * Selective Kernel (SK) - https://arxiv.org/abs/1903.06586
+  * Split (SPLAT) - https://arxiv.org/abs/2004.08955
+  * Shifted Window (SWIN) - https://arxiv.org/abs/2103.14030
+
+## Results
+
+Model validation results can be found in the [results tables](results/README.md).
+
+## Getting Started (Documentation)
+
+The official documentation can be found at https://huggingface.co/docs/hub/timm. Documentation contributions are welcome. A minimal end-to-end usage sketch follows.
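+
+As a small getting-started sketch (the checkpoint name is just one of the weights listed above; any `timm` model name works), pretrained inference typically looks like this:
+
+```python
+import timm
+import torch
+
+model = timm.create_model('mobilenetv4_conv_small.e2400_r224_in1k', pretrained=True)
+model = model.eval()
+
+# Resolve the preprocessing config baked into the pretrained weights
+# and build the matching eval-time transform.
+data_config = timm.data.resolve_model_data_config(model)
+transform = timm.data.create_transform(**data_config, is_training=False)
+
+# The transform would normally be applied to a PIL image:
+#   x = transform(img).unsqueeze(0)
+# A random tensor stands in for that batch here so the snippet is self-contained.
+x = torch.randn(1, *data_config['input_size'])
+with torch.inference_mode():
+    top5_prob, top5_idx = model(x).softmax(dim=-1).topk(5)
+```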
+
+[Getting Started with PyTorch Image Models (timm): A Practitioner’s Guide](https://towardsdatascience.com/getting-started-with-pytorch-image-models-timm-a-practitioners-guide-4e77b4bf9055) by [Chris Hughes](https://github.com/Chris-hughes10) is an extensive blog post covering many aspects of `timm` in detail.
+
+[timmdocs](http://timm.fast.ai/) is an alternate set of documentation for `timm`. A big thanks to [Aman Arora](https://github.com/amaarora) for his efforts creating timmdocs.
+
+[paperswithcode](https://paperswithcode.com/lib/timm) is a good resource for browsing the models within `timm`.
+
+## Train, Validation, Inference Scripts
+
+The root folder of the repository contains reference train, validation, and inference scripts that work with the included models and other features here. They are adaptable for other datasets and use cases with a little hacking. See [documentation](https://huggingface.co/docs/timm/training_script).
+
+## Awesome PyTorch Resources
+
+One of the greatest assets of PyTorch is the community and their contributions. A few of my favourite resources that pair well with the models and components here are listed below.
+
+### Object Detection, Instance and Semantic Segmentation
+* Detectron2 - https://github.com/facebookresearch/detectron2
+* Segmentation Models (Semantic) - https://github.com/qubvel/segmentation_models.pytorch
+* EfficientDet (Obj Det, Semantic soon) - https://github.com/rwightman/efficientdet-pytorch
+
+### Computer Vision / Image Augmentation
+* Albumentations - https://github.com/albumentations-team/albumentations
+* Kornia - https://github.com/kornia/kornia
+
+### Knowledge Distillation
+* RepDistiller - https://github.com/HobbitLong/RepDistiller
+* torchdistill - https://github.com/yoshitomo-matsubara/torchdistill
+
+### Metric Learning
+* PyTorch Metric Learning - https://github.com/KevinMusgrave/pytorch-metric-learning
+
+### Training / Frameworks
+* fastai - https://github.com/fastai/fastai
+
+## Licenses
+
+### Code
+The code here is licensed Apache 2.0. I've taken care to make sure any third party code included or adapted has compatible (permissive) licenses such as MIT, BSD, etc. I've made an effort to avoid any GPL / LGPL conflicts. That said, it is your responsibility to ensure you comply with licenses here and conditions of any dependent licenses. Where applicable, I've linked the sources/references for various components in docstrings. If you think I've missed anything please create an issue.
+
+### Pretrained Weights
+So far all of the pretrained weights available here are pretrained on ImageNet with a select few that have some additional pretraining (see extra note below). ImageNet was released for non-commercial research purposes only (https://image-net.org/download). It's not clear what the implications of that are for the use of pretrained weights from that dataset. Any models I have trained with ImageNet are done for research purposes and one should assume that the original dataset license applies to the weights. It's best to seek legal advice if you intend to use the pretrained weights in a commercial product.
+
+#### Pretrained on more than ImageNet
+Several weights included or referenced here were pretrained with proprietary datasets that I do not have access to. These include the Facebook WSL, SSL, SWSL ResNe(Xt) and the Google Noisy Student EfficientNet models. 
The Facebook models have an explicit non-commercial license (CC-BY-NC 4.0, https://github.com/facebookresearch/semi-supervised-ImageNet1K-models, https://github.com/facebookresearch/WSL-Images). The Google models do not appear to have any restriction beyond the Apache 2.0 license (and ImageNet concerns). In either case, you should contact Facebook or Google with any questions. + +## Citing + +### BibTeX + +```bibtex +@misc{rw2019timm, + author = {Ross Wightman}, + title = {PyTorch Image Models}, + year = {2019}, + publisher = {GitHub}, + journal = {GitHub repository}, + doi = {10.5281/zenodo.4414861}, + howpublished = {\url{https://github.com/rwightman/pytorch-image-models}} +} +``` + +### Latest DOI + +[![DOI](https://zenodo.org/badge/168799526.svg)](https://zenodo.org/badge/latestdoi/168799526) diff --git a/lib/python3.10/site-packages/timm-1.0.11.dist-info/RECORD b/lib/python3.10/site-packages/timm-1.0.11.dist-info/RECORD new file mode 100644 index 0000000000000000000000000000000000000000..66d1d4bbde7424129336ccbe914ef51fc3eb3c29 --- /dev/null +++ b/lib/python3.10/site-packages/timm-1.0.11.dist-info/RECORD @@ -0,0 +1,278 @@ +timm-1.0.11.dist-info/INSTALLER,sha256=5hhM4Q4mYTT9z6QB6PGpUAW81PGNFrYrdXMj4oM_6ak,2 +timm-1.0.11.dist-info/METADATA,sha256=0Ckg62AAHiBWvmUFlAQEJ288czj69wgHp16mGeX0ABQ,48363 +timm-1.0.11.dist-info/RECORD,, +timm-1.0.11.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +timm-1.0.11.dist-info/WHEEL,sha256=pM0IBB6ZwH3nkEPhtcp50KvKNX-07jYtnb1g1m6Z4Co,90 +timm-1.0.11.dist-info/entry_points.txt,sha256=6OYgBcLyFCUgeqLgnvMyOJxPCWzgy7se4rLPKtNonMs,34 +timm-1.0.11.dist-info/licenses/LICENSE,sha256=cbERYg-jLBeoDM1tstp1nTGlkeSX2LXzghdPWdG1nUk,11343 +timm/__init__.py,sha256=0bkaVTGjSB6BZhqG8Fw6PFcmGXI47BdNt4cg-eA_QdA,292 +timm/data/__init__.py,sha256=z6Rudp7e489CWoOw3TXBMG0A1mzXSgGBWz5MptnPAW0,819 +timm/data/_info/imagenet12k_synsets.txt,sha256=9kg-efGKm2cNQ9kV3bpAu8r4LPxOkqB71EBXudRJNfM,118210 +timm/data/_info/imagenet21k_goog_synsets.txt,sha256=ZjYr3t822TM4LtylST_FYtzBcSjONkA8nnMKdfSMsvI,218430 +timm/data/_info/imagenet21k_goog_to_12k_indices.txt,sha256=a1iOnVXH_tZ6rILpG-6r5-rOLeHtZxFQEMoHYXnoxQg,64070 +timm/data/_info/imagenet21k_goog_to_22k_indices.txt,sha256=hqdngDZvgHqfgEYX6iRMob9YF0FNAzDPEbop4oXBzFc,119937 +timm/data/_info/imagenet21k_miil_synsets.txt,sha256=zXbZd9qqLxOkqR_KD1NxPy8pZ1MI1M4PDuDlsHoxZMY,112210 +timm/data/_info/imagenet21k_miil_w21_synsets.txt,sha256=9J3ePWmUa4Y1WGQy8081mF-AfRaChs5f3nRqrDp9lMg,104500 +timm/data/_info/imagenet22k_ms_synsets.txt,sha256=8sesR1AOHR1wQy08suKyGHxogBJUOYMr9LQDj3Fsong,218420 +timm/data/_info/imagenet22k_ms_to_12k_indices.txt,sha256=wMkQkjtw2JKm-KzN6bDKvaqN1nuMD8_QuC1IFVei1js,63625 +timm/data/_info/imagenet22k_ms_to_22k_indices.txt,sha256=Sp_dB0M-ncOF_j_rAhWGMI-JSj3LhfSIKyXtIEz2MyE,119938 +timm/data/_info/imagenet22k_synsets.txt,sha256=CuJFOHUZc3l7KdjexBKyvcYbxOw-zQgeugc1gEI2u9I,218410 +timm/data/_info/imagenet22k_to_12k_indices.txt,sha256=0_-qtIBkurHqfOFibHL1oBHuRQPKxhK5DuHaDRR8joc,64070 +timm/data/_info/imagenet_a_indices.txt,sha256=6BIjo1rdbnj5r-byFWUsGSbr7-HY78JsbpI9eskUOTc,774 +timm/data/_info/imagenet_a_synsets.txt,sha256=mCaiQWbnTOYvuHsniJh0yiaRdULHRXEjCdbiWFXWO8I,2000 +timm/data/_info/imagenet_r_indices.txt,sha256=keCdL_CgAhBLwNN6iSp-df_CzA20eytQYEejSTuRFtI,769 +timm/data/_info/imagenet_r_synsets.txt,sha256=pqByn3qZIwKAY5yrblHgFIWjtBpnbqNWIaUBydboPKQ,2000 +timm/data/_info/imagenet_real_labels.json,sha256=2D6b_zdMYxquhDnrBkxwGazFbhs7w_Vrg4DCpxCwIgs,388478 
+timm/data/_info/imagenet_synset_to_definition.txt,sha256=GR4ifUj8jHt-LSo8rEH8iy61F9nSgZ0O28kcV2clDu0,1748917 +timm/data/_info/imagenet_synset_to_lemma.txt,sha256=G4ur2hh0IaS94MnFoZfDb2vdqWL3yhH_soE4BsuyF48,741457 +timm/data/_info/imagenet_synsets.txt,sha256=cAArD_XeYKOheoLb_P8pGTH5YiXd-UGtLhgvw54YPRU,10000 +timm/data/auto_augment.py,sha256=E1JZWZLV6GZowIkeAFU0ZuaF7MhHkoaSbKVQogLDinA,35550 +timm/data/config.py,sha256=uf2p-mNW_wntCe34K_Xr3J45cJh8IWEl_cMwgyKGR-g,4616 +timm/data/constants.py,sha256=ZkSr7QArcsDyOOBRYQpLe4c2VOJKRAYoDRU1JwYJseE,442 +timm/data/dataset.py,sha256=7j-H1KY0HtZTjzjREe5xPbedBdC8rR5AX6qwEIV6oSs,6259 +timm/data/dataset_factory.py,sha256=DakZNEZlCYeKnizm-hXzGtEUEHxg1XypgRCEXS4jLNc,8180 +timm/data/dataset_info.py,sha256=fAjTkbi-3oEZveKg-n1cxOYM_gcD5kcUeWMWWRRkhOQ,2391 +timm/data/distributed_sampler.py,sha256=mfcS_bzL_zEIREpRer8LAdXw1WvUFYJrXgqKw2fqlHE,5540 +timm/data/imagenet_info.py,sha256=c_DKVQQyzHYftgoUO-12BuqhsBJI_h6Scyg6Gzue-fY,4167 +timm/data/loader.py,sha256=H3QYKjfpK1IoxRZWEPP1eOuzKr2BlZd0qlYXPi8g7io,15511 +timm/data/mixup.py,sha256=-U4kqPGFOZCz_u2rXVXLXvLkdRX1rhIjHmqJfVjzZgs,14634 +timm/data/random_erasing.py,sha256=jSTJq-1zGZT1zRWv5ByyBuA4lQdj_EFcoibdLjRQaDo,4964 +timm/data/readers/__init__.py,sha256=AuKgQjZ5q9XILIZu6iRDIJy-Xydig_zC1TlaVRmRcCY,72 +timm/data/readers/class_map.py,sha256=dMXs4PuEDqJroaOmlBttTpjMlaeXMiB4H4barDKtzvc,895 +timm/data/readers/img_extensions.py,sha256=KTtkCyfrOhLwH7vws7k7xshkEqekdrEGZ-rmErySvcQ,1482 +timm/data/readers/reader.py,sha256=_Fp97wLI1cxFFI1QlOkvWIKwdmtQaPEgH97KmtQDZoI,487 +timm/data/readers/reader_factory.py,sha256=cYqgl-7Ix-xslL83N8Ae6kzAIKlpdd2qkMYciRBO4PA,1732 +timm/data/readers/reader_hfds.py,sha256=LUqRuSiS1Ji0dF972UV1RfDVGEoSQXMr-P8G_4gbn48,2608 +timm/data/readers/reader_hfids.py,sha256=Ok6dchV4HFPvry8gyELnyxgq_nk6EIKJlHwnjZxCBcE,8282 +timm/data/readers/reader_image_folder.py,sha256=_htQ_ouMibCeeemg0097v7nj14qXlFAtpS9HVQwoqNI,3508 +timm/data/readers/reader_image_in_tar.py,sha256=ok4IDPkpuu_3RKxK83EEo7zfLPnqJq_x7CwDmepAf3E,9182 +timm/data/readers/reader_image_tar.py,sha256=h0PlhmHU_1QzUy4p3psI9JoeaAao1_OBFK1_K6Re9e4,2644 +timm/data/readers/reader_tfds.py,sha256=5daaqhJzYq7hbYBAeuF06lXCO7A0tPipHbJVMFMv-Nk,17909 +timm/data/readers/reader_wds.py,sha256=S91U97D9aKpL1INHA4biFkD5NZiUSa2sLTz7onllDxQ,16947 +timm/data/readers/shared_count.py,sha256=___RvLR-johRZ0Ins6iKVAkKU44LxU3o4H7E0B0b25o,303 +timm/data/real_labels.py,sha256=xdexeoU_KE6c2HY0G81Ny2-T_iTQ_1HOWLj7R_WfAls,1800 +timm/data/tf_preprocessing.py,sha256=xClMeZyUPx8YATBvVnzig0bDJNBa8ZqzcpmVWUwefWg,9169 +timm/data/transforms.py,sha256=IGkJUgw044feTMkHXjsHT4RBLvFp8Tn7sjFzLw7TbI4,20120 +timm/data/transforms_factory.py,sha256=wrIXQ16phs6rqD3NJjPZNJ18_iHrC0itz0nZb5Bve-w,19049 +timm/layers/__init__.py,sha256=wKkQABGAxNfbWS_JjaEvmgL1gnzk3icCqTiTKyumyVU,4121 +timm/layers/activations.py,sha256=5dnM9Fr63FBHTBwmNk7Ps__fYL4aCJfqUbWhFLAYm0I,4882 +timm/layers/activations_me.py,sha256=cPNk1XdvfOunn3Rd9ohNbRh8LtHw5-DPKYp04FMtBZU,5504 +timm/layers/adaptive_avgmax_pool.py,sha256=58cPkSKechsJHJQvcfW_onZRMLntnDNduMitWjmqSMo,6583 +timm/layers/attention2d.py,sha256=nX8K6pe37ZW6E5_R240FSZoGnEV29EpMUGGvU_Oj8h0,12778 +timm/layers/attention_pool.py,sha256=qDSg67oXDnXOeFIjH7tG94Lm2N7VTfB2wsmXrpm1vDQ,3536 +timm/layers/attention_pool2d.py,sha256=gnp7OwzPWGd86zpSiBXuwNhTPoJ1I6DLC3whL-X_Vmc,11551 +timm/layers/blur_pool.py,sha256=KTnd_SS1zOA-7XQ8hk1og9QyRGgxwd4kLu73bEDSC9c,3082 +timm/layers/bottleneck_attn.py,sha256=HLuZbyep1Nf9Qq9Aei81kCzQMs6U1aQBQRLrOnjnkHo,6895 
+timm/layers/cbam.py,sha256=b6lo3KFOc88MV4ITw2pSokuvpLkAFpB9lNc1e20QdGI,4426 +timm/layers/classifier.py,sha256=ZNQSi2xX--beMr4VWJeR-hU4eItUHNOvLfUT3-o-4rk,10408 +timm/layers/cond_conv2d.py,sha256=bUfMYl3PRnfdIULJoXlLjh71UT8ZSsb8lQBa_n-q7Vo,5199 +timm/layers/config.py,sha256=_11jlMV9cDw--w1zSKxHeJINOf60N6IDqns0m2gPGts,4175 +timm/layers/conv2d_same.py,sha256=ssxoaAlxauEavfvR792KBfIImVEATlql7tDssv7xyRY,3216 +timm/layers/conv_bn_act.py,sha256=z0M_CBd8X0wi5n62u-w3ckynRJ18QHStqRXTgRrXooI,2873 +timm/layers/create_act.py,sha256=enLaTNA4BTPuKUClQW5ExwHNzTVXW1oFnJQ0YcFftiE,4482 +timm/layers/create_attn.py,sha256=d9-j6iPcFPpC2o7_DW4QLKKP1KWZBhZn6-AHH-iSfh4,3514 +timm/layers/create_conv2d.py,sha256=darxUzkvPYUxguuJ_WJd1PepkxOK4if5JwJzC5IC85k,1622 +timm/layers/create_norm.py,sha256=YKZgOwjgK9yVRdKA4aXiHGDaxXS_3gHBbM-OWVbYp4Y,1617 +timm/layers/create_norm_act.py,sha256=i5EIl-X8gr75TFJ0t7OQaExFIFc_OLMpDVHLhCJFfIo,3836 +timm/layers/drop.py,sha256=HocUkCNINxvSRHV0dQR7zpeRiJQG_Kkk8_ww2AHK2Zg,6973 +timm/layers/eca.py,sha256=MiVhboDUqLUfeubpypWfaR3LMLHwgLCNsWO3iemcQFs,6386 +timm/layers/evo_norm.py,sha256=mOJu-pMlBkVGjp3aKN0lhjnuED3lXLETNqbJIHpSSSA,13862 +timm/layers/fast_norm.py,sha256=IIlM2VUs-DSUNs_Iq0KET-5zjP_dTYMj8QTQ_c1WhBo,4008 +timm/layers/filter_response_norm.py,sha256=mDJ3nbu5nicP-5actvDiZa4HYd393Vq-_06ZTca5d4w,2540 +timm/layers/format.py,sha256=i02NLXbWXPv4WJCSUF4MnSjQp699-UGr5Z3rnMZk364,1109 +timm/layers/gather_excite.py,sha256=53DHt6cySjPqd9NW3voZuhw8b9nUzvsG9NVl_D-9NAo,3824 +timm/layers/global_context.py,sha256=aZWvij4J-T5I1rdTK725D6R0fDuJyYPDaXvl36QMmkw,2445 +timm/layers/grid.py,sha256=lMM8bM3ggxunvQFqQCB943SZAfY7Nw04w-lFaMBkxt8,1624 +timm/layers/grn.py,sha256=dxLWn-V48OiFlKLLKaU8Zt0mdBcR_AOg0mh1i8tmHKY,1319 +timm/layers/halo_attn.py,sha256=zMJkf9S-ocCvrfvWOe0I97UHTpEQIkP381DON3OXm-c,10662 +timm/layers/helpers.py,sha256=9VLqID8jjdw_Un270F3rQLvNz9vQMhN9mts7kk_Ma_Y,1053 +timm/layers/hybrid_embed.py,sha256=6IjQvGASNtME8rhr66L7l-ljFXmfaYB1B6Lbb9TarGg,9975 +timm/layers/inplace_abn.py,sha256=CTJcx3n_Ds6Q-Uds83I87DuQy2jgzX8ummBVKkrlyA0,3374 +timm/layers/interpolate.py,sha256=OsobWqF1CrpLe6YkXLmRzASbAsw_uzztyqsBZ6xUYrs,2439 +timm/layers/lambda_layer.py,sha256=-jB-uYoYqk0QjStAhaec30uyEAWp64N96_Bw33oY8H8,5958 +timm/layers/layer_scale.py,sha256=66PdUvfjgTxKXjjFpoETDnDRjQ6Dt1m8n-ysnk2ZFAM,1021 +timm/layers/linear.py,sha256=baS2Wpl0vYELnvhnQ6Lw65jVotaJz5iGbroJJ9JmIRM,743 +timm/layers/median_pool.py,sha256=b02v36VGvs_gCY9NhVwU7-mglcXJHzrJVzcEpEUuHBI,1737 +timm/layers/mixed_conv2d.py,sha256=mRSmAUtpgHya_RdnUq4j85K5QS7JFTdSPUXOUTKgpmA,1843 +timm/layers/ml_decoder.py,sha256=Kk7JBS8TIlVWsFE8o9iFgN70JzIOJNmJqXrWWvCxpb0,6701 +timm/layers/mlp.py,sha256=2vNBR6ML5BbS1wrleEuqmVZ7pRGvPaZZwn2rWPBV-Ls,8457 +timm/layers/non_local_attn.py,sha256=29ZunmS6vrMmkhPNwl7DtPoFYgGlDpuwF17coCEy7HU,6218 +timm/layers/norm.py,sha256=gwLrnN0Zg-IFaoynyQmG9MZMCDaSnXQgv6HJCS-JAOg,7558 +timm/layers/norm_act.py,sha256=famWugHwzfyg5WiZpRMHA3g-O-NaMMdGN_UEC4S0CwE,17274 +timm/layers/padding.py,sha256=7ToIOAk5HiOs8KpiPzqpNQI8UiGQHIbwWWkxknNnIZo,3471 +timm/layers/patch_dropout.py,sha256=R5v6e2tntFch_JcvEELlYdZ5gpEwyr7u-v34J_c6q00,1778 +timm/layers/patch_embed.py,sha256=NLoXcw5P_V5kIFMClMyohEi1zCIM9rU3F6W3xQ8lzh4,11461 +timm/layers/pool2d_same.py,sha256=UsmtWna5k5kfVTP25T1-OKJOgtcfBQCqSy0FmaZbjRw,3045 +timm/layers/pos_embed.py,sha256=v26pstIGAUvDKyAbZYPIt15rEWiFH7EusoAHdcSgyTM,2585 +timm/layers/pos_embed_rel.py,sha256=xIkPKYHQBxfXVr3-1xWy6-K8fKM-b9DoNS0WOHkzvfA,19370 
+timm/layers/pos_embed_sincos.py,sha256=FvP_ZeAbon2IZhTAgtN9aMm4scF0k5mfxgpOqBwnZmw,14436 +timm/layers/selective_kernel.py,sha256=oLsbqh3HYVjg8lW4AbKQplW0k-xHlbIlOgAMF6r4brQ,5383 +timm/layers/separable_conv.py,sha256=staVZPP-BxtO3q0Ka3_VnI1M1e-xtNAUUACP81rhF_Y,2620 +timm/layers/space_to_depth.py,sha256=BwTu9tEamsmqF-DHdkHgWBv8Paf3_CE8v-IGZiLU1Hc,1068 +timm/layers/split_attn.py,sha256=Cl2gx0lNVosX2zgieLgf_FtqUwuwtGKTyxSoWyvejeg,3076 +timm/layers/split_batchnorm.py,sha256=4ghGtliK5z0ZnzR29zJB_rN9BJPiGuy1PSltmVyF5Ww,3441 +timm/layers/squeeze_excite.py,sha256=YrJELkYE5cB1c-r4Ww9omezUp3dugbgz-qN8XsTbc3I,4327 +timm/layers/std_conv.py,sha256=zYhcKCbE0_Rqn422gEM9gr3LeBewu0CXKqvlsa9-M2Q,5887 +timm/layers/test_time_pool.py,sha256=Z5lPvVLI4IYqrJLGQhgJfxPasug9nts1y6mDD_rznBQ,1996 +timm/layers/trace_utils.py,sha256=cbZufOaGKmhTGEMc52QAnqzGRTfn4vvzqsAOJaLKJQ8,335 +timm/layers/typing.py,sha256=UYrThz9-g8PlmXr7LZutKft6seFaFVp_ZrD0ZGV2aP4,163 +timm/layers/weight_init.py,sha256=Pb4dJFpn4U5igKEZpIMCzsQg4zWP0TqSQlqDqbyIflc,6207 +timm/loss/__init__.py,sha256=iCNB9bUAf69neNe1_XO0eeg1QXuxu6jRTAuy4V9yFL8,245 +timm/loss/asymmetric_loss.py,sha256=3BajT94OJslw-MSrqQLRB67SLT2pbhb2vqj0CtOKN6w,3240 +timm/loss/binary_cross_entropy.py,sha256=9AgASCvD-URGZS1E6XyaaYBxf0UOx8AHHKC7tTLTQzU,2483 +timm/loss/cross_entropy.py,sha256=XDE19FnhYjeudAerb6UulIID34AmZoXQ1CPEAjEkCQM,1145 +timm/loss/jsd.py,sha256=MFe8H_JC1srFE_FKinF7jMVIQYgNWgeT7kZL9WeIXGI,1595 +timm/models/__init__.py,sha256=g4rppYCCWKVxezoGup2e5IliDk-CH3yFzJbl_GQUFdg,3462 +timm/models/_builder.py,sha256=1W-XJct1SoRzabNF5MlLyjqMHHeGa09LV9Rxl-wDaE8,20250 +timm/models/_efficientnet_blocks.py,sha256=vgIzyKVeEY3jlRU_Ly1z7H-BECDRuWXlcPvhEFk4o_Q,26272 +timm/models/_efficientnet_builder.py,sha256=3GKkift_ebPGWAQC2abHZ_i5-KjtbwcaTngnWJUBUKg,23834 +timm/models/_factory.py,sha256=1yiXCX6ATfvp_Bxs8ThAHktbmaAW5ErifrWJIoOLxa4,5188 +timm/models/_features.py,sha256=tqqHvrYTe_Kwu5CkaN2M8ivnFf6QsNuRbr42bUdKXJU,19788 +timm/models/_features_fx.py,sha256=plRe_d-VWm-DwEitx86MkCYjPM3wmgXIi6kYjfqAvAI,6281 +timm/models/_helpers.py,sha256=qVsRiiQecHKnUz11LBxeLy5a5ytox6kYHUQUvuP-fvc,6514 +timm/models/_hub.py,sha256=JGeToEzCg3vEwwWp4B03kVwqZiZyXWsrVAMOsxUB1t8,16352 +timm/models/_manipulate.py,sha256=PCrCJD-UPPageLuhHfDzIgEgax7pw96bEsr6d6x1_DE,10503 +timm/models/_pretrained.py,sha256=uS95ANJTn4eYOkKdLzuZUDz31BTn0oHMFkY-qs2rCjE,3525 +timm/models/_prune.py,sha256=r0LJI-UCYSDZrEAej8lN2OeDJxBEey9tA6FBh8uZyH4,4325 +timm/models/_pruned/ecaresnet101d_pruned.txt,sha256=1zA7XaxsTnFJxZ9PMbfMVST7wPSQcAV-UzSgdFfGgYY,8734 +timm/models/_pruned/ecaresnet50d_pruned.txt,sha256=J4AlTwabaSB6-XrINCPCDWMiM_FrdNjuJN_JJRb89WE,4520 +timm/models/_pruned/efficientnet_b1_pruned.txt,sha256=pNDm1EENJYMT8-GjXZ3kXWCXADLDun-4jfigh74RELE,18596 +timm/models/_pruned/efficientnet_b2_pruned.txt,sha256=e_oaSVM-Ux3NMVARynJ74YwjzxuBAX_w7kzOw9Ml3gM,18676 +timm/models/_pruned/efficientnet_b3_pruned.txt,sha256=A1DJEwjEmrg8oUr0QwzwBkdAJV3dVeUFjnO9pNC_0Pg,21133 +timm/models/_registry.py,sha256=TI0rs9L-YXTRA-8zuxrNWETzrdGwD5PekW2UUoUajbg,14465 +timm/models/beit.py,sha256=IAXfqg7Gm4MLGDZvNK7gVF-TBojBFoY0v56oQI1OCbg,29547 +timm/models/byoanet.py,sha256=fgvAAkxAHBto0zlUYX4KrH4Wa_T4POpxhWkGa9j4vOs,18985 +timm/models/byobnet.py,sha256=IisO0q3gYrIVBPIhgpzWPaA4X5FqLQKygh9ZvlcfSN0,105140 +timm/models/cait.py,sha256=AqGHMAUrCMGhex1NlUW9YeZzuG40X79VtIP5KiGL5Ms,21389 +timm/models/coat.py,sha256=1AOE_ALGIn7GEe8AN0fxogHnBeVjOu1eb1PTw5S0qzU,30054 +timm/models/convit.py,sha256=UIEc1iUUWpkYpium6sxveqY6I_83rMpoND-Peva2G_Q,15306 
+timm/models/convmixer.py,sha256=8sq26Y2oW8Eaam1yEMeFm2oe6p89BCFanx_lDO8LXoI,4686 +timm/models/convnext.py,sha256=Xvve54hnRNSg1gSIPsMf0F-5pa7_icHmoSGdVmj3a2I,56085 +timm/models/crossvit.py,sha256=xF0hfYntNoJMiJYYJ7qz-I_mAIvEpJJbwc_fjxNjJzU,24330 +timm/models/cspnet.py,sha256=86_UF17CfonT34qFXMm5swJoCMwiyEfbQAtgMPLdPPM,40110 +timm/models/davit.py,sha256=2jGTu3T6KU5_NK_3rJ6oF-ZEKongcwhr-_6fIiOTjTs,27756 +timm/models/deit.py,sha256=W4TDczX7hok5eqpMdjtlchO8NEY-78vNMaMgbZEL15o,18525 +timm/models/densenet.py,sha256=UwA2mXS2XZ2ZNAs6zyCNTEaTnnpUKDsfsYyq4XWY0qQ,16106 +timm/models/dla.py,sha256=d0-gAOElTsMUAgqVKy-gqw1KB6urtRrowItLECn0MPg,18648 +timm/models/dpn.py,sha256=sZeOD38MT4Qnxy-nlckc-pERl_hYHsWxYS62GRj7XNY,13642 +timm/models/edgenext.py,sha256=asxb0Vx3kGOu2YSdvGp3KZVe5lh_2ZWDGnqMzjojjYw,20990 +timm/models/efficientformer.py,sha256=gB-iLBR8qsr94vUg_ZMAwmp9z6OTqpcFDtrOsmKkXJw,21967 +timm/models/efficientformer_v2.py,sha256=o4J6-9-yS43SlAiXcIhj-3GCBmpcsoy6fp4zmFV0WyA,24943 +timm/models/efficientnet.py,sha256=LXf-l6SKgS7OqoKELPfeG7HpkBmW2YxBcp0J4to-MTY,121947 +timm/models/efficientvit_mit.py,sha256=BKsv-vxiIV1OSpfdMwXk6RxVgsf1GAsSzZSRSiX3d5k,33405 +timm/models/efficientvit_msra.py,sha256=lPTWXNN6CvOV1XhcMUgn6QMhf8riLHhN-c2lu36Web4,23474 +timm/models/eva.py,sha256=Wlj5_JModYHWX_jjzGAwdCRbLXQbcx5PAxZFUX_WHRI,48818 +timm/models/factory.py,sha256=vbXTq3VsGH8H71RlTi0jLMemHpNSn50fTT_4K5kNsb0,145 +timm/models/fastvit.py,sha256=oxmhwEnqGIhRPQ_PoMpvfV_Hp_AH9TBcXEYK4cLr8OY,57159 +timm/models/features.py,sha256=l-1pBa36r0ImSRnVRd6kLYOdoMx8TvUMCgbIkQAooT0,146 +timm/models/focalnet.py,sha256=NMjrSJRwl2g90chAT6h8qBrQ8RwUqpIA22yLjZooV0U,24177 +timm/models/fx_features.py,sha256=-xyHnT-WpuVFJPViVkhpdRFgkw5GNXSs_RzFYDHkIHk,149 +timm/models/gcvit.py,sha256=TW5AxQMa0oXyfZpa-QYoLBSu2evaZx_amyx05ZNrHZ8,21484 +timm/models/ghostnet.py,sha256=AzDBwQoPlEe3_rnfklpJ23VV5UZwusd8FoWM4WR9zNA,14690 +timm/models/hardcorenas.py,sha256=MdMhROScNZ92ysLQZP6AxRz1ND0o3hn920FLLbGFtt8,7697 +timm/models/helpers.py,sha256=J81ZF-lhp8L3opH8b02Iob-VFMm_36leF1W5p3RLWs4,218 +timm/models/hgnet.py,sha256=ZLgA8xlD1QIyLhqPx3KSCyEBOjWavJhH9HeZJxFmMbU,23624 +timm/models/hiera.py,sha256=dIq0e2mZmxW2slBbghonmp1ylinxW_OvVb9lzt77bA0,36658 +timm/models/hieradet_sam2.py,sha256=cWjDviJOqkT1LSlYZlXFxUxIe9ESttYEVwVHBh6qrzY,23374 +timm/models/hrnet.py,sha256=5zdNd5EhQzCETx0oVipLGOZUdjJsZJD06tyDTynwjyk,33368 +timm/models/hub.py,sha256=WdUls0bZlTfAtQ447Wk5W-jWE074bkzndOflKPJ1uyo,141 +timm/models/inception_next.py,sha256=t1YcnLETTIEYOaZldHrzRbpMhHdfunMYjEX052FbCZA,14659 +timm/models/inception_resnet_v2.py,sha256=f4wfH3hzhTDjg6f0cauHw-McpNczlpUppN9o60-HeNU,12126 +timm/models/inception_v3.py,sha256=QrzyWxdRTcJtXPb6pCoNFe6GMZb5r-ZkpZD1r9XYkmM,17251 +timm/models/inception_v4.py,sha256=GdmVRao8ag7tHA4e8_7kdsTKJXMVjwysfHfcmjjDNOA,11125 +timm/models/layers/__init__.py,sha256=fLlPJlSRaEjjYnW7mdBtwu5XIlchP6or3EZlZEzGFMs,3369 +timm/models/levit.py,sha256=iRugcKgF79Uwg2MRgtnawj-TLbwdpABLEfVJfPVuROo,34898 +timm/models/mambaout.py,sha256=IF3y-Lj7gS4ujj4cnGKq5q2_PjPwvFXrIHSR7u2HZvM,20569 +timm/models/maxxvit.py,sha256=E_lYIyexObRQXge5tTeWcBdjACjlz8OzgX3LNQCX0Qk,87893 +timm/models/metaformer.py,sha256=XHVmIxxgopHH7lIB1aFx8eTtjybmSDRsInLcHG8PqDw,35318 +timm/models/mlp_mixer.py,sha256=qnJDYhXU6cBYQC0XibK9lzeSq952ftlVwA--XZvXLMA,27269 +timm/models/mobilenetv3.py,sha256=_WMQBzNCc5336EA3ut-f_U0Micd3jYQlvIJ7FAiXdag,56469 +timm/models/mobilevit.py,sha256=ZHbGPG3vFhFCI0f3zLWzH2Jqiasr_Sacah_1MBi9KI8,25734 +timm/models/mvitv2.py,sha256=JtShBKs21UYgz5YGllw5sFB-0hUCwf9pLHvOjqAnP7s,39162 
+timm/models/nasnet.py,sha256=OvGWfaItUal0S8-E0RxSK0wQngSWMFzU5mYZnPzDYb4,26737 +timm/models/nest.py,sha256=ca7KzQQl_987b48LJsvRmZXoKi_y9NwBMoGcIHIBJU8,21554 +timm/models/nextvit.py,sha256=RRv7gx2Ky2Y1C3VoxDk2lZ5QhMszL508XKFfTU0FY5I,22829 +timm/models/nfnet.py,sha256=LFYTP6HLwgKyYdGMwt7hC0xaQ5dQ6Boq_2A2w0MJrtI,41712 +timm/models/pit.py,sha256=ZxMvRGQR0H-4iVnlpkXz5DEwQB8d976l-COILwzgJ2U,15083 +timm/models/pnasnet.py,sha256=aLDCm-n3-hAfB6hI62hdWGdM-KJKRjzaCE_YCpl_jOU,15438 +timm/models/pvt_v2.py,sha256=I84eejWLjpLfSBj5cZ29-BhN6023iK9C264yXziUmJ8,17346 +timm/models/rdnet.py,sha256=TE6MN6x5_YNisr6S2UtVLvprL56ZN5Pw_p9XXrgCAVY,19346 +timm/models/registry.py,sha256=JdbWI-bGwVhzgjdci73SMw8JQsjHEZYaS-EtutPz9g0,146 +timm/models/regnet.py,sha256=kP2sbXSYj2EuZnjZNV51918QzZtgAc-sc_SB1t_0D34,46556 +timm/models/repghost.py,sha256=ALRI_vJma4GfFRc5qyvVyS4DiuRYcPQDdthgcYQqeZ0,16534 +timm/models/repvit.py,sha256=qy8nRwKRd92nVRjikDyu5YFlaarcn1B-jzkr4hFuDGc,16463 +timm/models/res2net.py,sha256=PZeI435TG16x-_Zynvqdwfcvwy4b_1_9ydfu_vyBzOA,7691 +timm/models/resnest.py,sha256=65DB4DAErNleuJ6VTVb9HfSM7QFWsxmTs-HT-e6uEIk,9635 +timm/models/resnet.py,sha256=t3taxWJ_CjfSl-hmdoYuu6pnP516j3d8WlPae7r3Bu0,98683 +timm/models/resnetv2.py,sha256=21EYjUqHIs1YUXsGpKY10v_JLTWFQKPWEmhxAawY-5E,34864 +timm/models/rexnet.py,sha256=U5om__uXtQrxjTqDHd7SIHfN6B8vMVctmRxH7OfuOXk,11977 +timm/models/selecsls.py,sha256=xKNbj4clZZN5u-K3JZMc_gkJ1_uIODa6fi5h8bJEstE,13303 +timm/models/senet.py,sha256=fOY8qAP-c5L5gU5Di9AfJW_i6SMHzFxBeb1tHw77-uw,18214 +timm/models/sequencer.py,sha256=Wb7HK4_WuPx51Mfw-XgCyXUxNpEc6lyNaeQ16YKuCQo,17310 +timm/models/sknet.py,sha256=3FbRZv0QsBGQajF7lIs00vt_M8FZkk7PBsBZuRY3pNA,8777 +timm/models/swin_transformer.py,sha256=YxE6mtByK_3dcIQsZm3dGHAVmNYJXvKCOUeJbB5Y64M,43165 +timm/models/swin_transformer_v2.py,sha256=aMD-NxdNEFhhFTWQ9ZXafJ7Hw6jbUXPbjhB0AJPX3LA,44038 +timm/models/swin_transformer_v2_cr.py,sha256=MzlqdR_VHjM0E6xCFP1c4F5PncDxkJ9ocpTI7Teda0Q,46374 +timm/models/tiny_vit.py,sha256=qCWs3P-_BGlgEvpmkVQc34hSzfeGp31NQBlV7xuLdt0,23914 +timm/models/tnt.py,sha256=XFlTclk2z-ArCMRj5nUNXKDVqqZoSjaP8kAFulWGqko,13479 +timm/models/tresnet.py,sha256=Hqn6S-_4xDeq-yk_FXPomliPYSMOUlsBOIMbsADaeAA,12637 +timm/models/twins.py,sha256=0tjHCxUQ9YBDQVhkDBeugxgkz4yTdQRbIojsDndPbiA,21972 +timm/models/vgg.py,sha256=jm3aVT_XO6B374E7U7p9SlywLgKn2d9Uuab625q7bO0,11002 +timm/models/visformer.py,sha256=2cme-3zY5_X-MZXSjBcwwip8GkHbd3xyT9f16V7LA8o,19031 +timm/models/vision_transformer.py,sha256=ioSkrluh-v5mCRNXD7MnXE1ZqOJizLPwaelYfhL58KA,150938 +timm/models/vision_transformer_hybrid.py,sha256=SsX07m4Oh5wFq7oFfck3cvDzfQ1yVXYIc9XU0U4AFHM,17996 +timm/models/vision_transformer_relpos.py,sha256=2LDE4IrW8aG7CSMEQSIwvpuYvhoEPNNX-4IumaRLqnM,28521 +timm/models/vision_transformer_sam.py,sha256=9yuXlWp-Um4-m_TnJ4vGHIaJQzvAvaGn_peKu2BetOs,28292 +timm/models/vitamin.py,sha256=qrWlqhizyVtLD04E9mv4W5VGbGvI2A5pPcWlVTv9sGA,20487 +timm/models/volo.py,sha256=ZpO7X-8iNh8cK7cer431X1ures1LG_gNYvSoRW6JWQU,34231 +timm/models/vovnet.py,sha256=nNLZEagq7oDGw6V4xd6s6uvqpiKa4tcmBDvd4JKriKk,15699 +timm/models/xception.py,sha256=xj6Dxknlhhj2nYROEZoj45gibQROW5m9vB-XIDH3oyE,8147 +timm/models/xception_aligned.py,sha256=0wdXmr-z1sT_610kwJpJ6ay_YIF3aYhVxIIqHWJ92pg,15414 +timm/models/xcit.py,sha256=GAJKrfjGdRZwnZJ2ktGymbV4iCok1wBkTOa5ffODex8,41614 +timm/optim/__init__.py,sha256=PGUpQyjACNNOgKA_ciGsi3AZxlw7ScD0XAK2u4eIujM,530 +timm/optim/adabelief.py,sha256=n8nVbFX0TrCgkI98s7sV9D1l_rwPoqgVdfUW1KxGMPY,9827 +timm/optim/adafactor.py,sha256=UOYdbisCGOXJJF4sklBa4XEb3m68IyV6IkzcEopGack,7459 
+timm/optim/adahessian.py,sha256=vJtQ8bZTGLrkMYuGPOJdgO-5V8hjVvM2Il-HSqg59Ao,6535 +timm/optim/adamp.py,sha256=PSJYfobQvxy9K0tdU6-mjaiF4BqhIXY9sHV2vposx5I,3574 +timm/optim/adamw.py,sha256=OKSBGfaWs6DJC1aXJHadAp4FADAnDDwb-ZRKuPao7zk,5147 +timm/optim/adan.py,sha256=szYfq09xWZHHeHcpwLjU-T3nla96mUYA2olJqkI2JkI,5071 +timm/optim/lamb.py,sha256=II9zTpcxWzNqgk4K-bs5VGKlQPabUolSAmHkcSjsqSU,9184 +timm/optim/lars.py,sha256=Pc7laTbk6iDt4iRtL-ZZXtB0dGGCuho6AeBSvxvIYmY,5256 +timm/optim/lion.py,sha256=BhQnG7pL_C_Y1QEWjhiJhUAnWGfXi0CLuyQs3ECkqnA,7099 +timm/optim/lookahead.py,sha256=-fM1DEwFf_bpbNq2cXdkJyrobF4iaVNIBuman_RfRk8,2687 +timm/optim/madgrad.py,sha256=V3LJuPjGwiO7RdHAZFF0Qqa8JT8a9DJJLSEO2PCG7Ho,6893 +timm/optim/nadam.py,sha256=pLV8_hQ5jYGlY3eJNzPvGnFIEVNGkpi8wLgYS6RePHE,3929 +timm/optim/nadamw.py,sha256=ncAgp3F14PkCkZTb0xqMiV9PKg8c3tZe-10WZoAexas,12369 +timm/optim/nvnovograd.py,sha256=NkRLq007qqiRDrhqiZK1KP_kfCcFcDSYCWRcoYvddOQ,4856 +timm/optim/optim_factory.py,sha256=ULeNdcKHnz2SnID_pEw-_iAsgCQi9YBSWb0FROPhDO8,15868 +timm/optim/radam.py,sha256=dCeFJGKo5WC8w7Ad8tuldM6QFz41nYXJIYI5HkH6uxk,3468 +timm/optim/rmsprop_tf.py,sha256=SX47YRaLPNB-YpJpLUbXqx21ZFoDPeqvpJX2kin4wCc,6143 +timm/optim/sgdp.py,sha256=7f4ZMVHbjCTDTgPOZfE06S4lmdUBnIBCDr_Yzy1RFhY,2296 +timm/optim/sgdw.py,sha256=wiCGhF4oOj-dJJ9hD1BBCB-4U_3ZdCiDdqJhlEBKvso,9046 +timm/scheduler/__init__.py,sha256=O3yNRcgje9l_wUhxN5VteAARRGMpCxOmhbOmV_8j2Dw,330 +timm/scheduler/cosine_lr.py,sha256=E6wwe8NjWQAfOzzas6llX3_9dPXoYmxV1p5ZlLbMVVE,3887 +timm/scheduler/multistep_lr.py,sha256=m33HB6ToJQCOAfF7n0Ofab7tpzwlg6eB5EOIb_6v_Qc,1950 +timm/scheduler/plateau_lr.py,sha256=6SmHEBIfgzE83GMHK_ZMIz-L0478HJ3ahwRgDZ9CJbk,3603 +timm/scheduler/poly_lr.py,sha256=q5uedgbuQilvU3AICrRvBo_MMPpMGGle0KgheJpXQ0A,3717 +timm/scheduler/scheduler.py,sha256=Dy3Os_z2qI9AA0InPN0oroOdETW7weYb1Uk7C0ZXurk,5426 +timm/scheduler/scheduler_factory.py,sha256=Yz02A5PartDsUdxrBqzgmiKTbd46V9-Krz3Lgq3UwXM,6787 +timm/scheduler/step_lr.py,sha256=rHliA2nLVUQsOInjUnQgeGtH09aupY4uOXmQ-_ouNXc,1777 +timm/scheduler/tanh_lr.py,sha256=bhprzeUZZNJqqRT0PMY9cE7MbBg1R-pq52H-qEHeY7U,3651 +timm/utils/__init__.py,sha256=r-mgE7sAOYatFEYjgdXXKH0RjOLvQ5FH0rcxv8-gNR0,846 +timm/utils/agc.py,sha256=6lZCChfbW0KGNMfkzztWD_NP87ESopjk24Xtb3WbBqU,1624 +timm/utils/attention_extract.py,sha256=CCPMmnEk4dM1UrvOpY7sfGZI8KLQekJIT7rflg-4qDw,3226 +timm/utils/checkpoint_saver.py,sha256=RljigPicMAHnk48K2Qbl17cWnQepgO4QMZQ0FCjd8xw,6133 +timm/utils/clip_grad.py,sha256=iYFEf7fvPbpyh5K1SI-EKey5Gqs2gztR9VUUGja0GB0,796 +timm/utils/cuda.py,sha256=lmDfKZFUqMlkJlpMbba5tDlslSksUd6B2ouYlkd2Uz4,2029 +timm/utils/decay_batch.py,sha256=5fOrMO985Pw8uzvBK78RwYCoH3Nv2jb46OGa9GkJ6LA,1762 +timm/utils/distributed.py,sha256=hSp_k_OgcRBH975srSdC2tj7BHGsPqYdmlYrH9b0how,5762 +timm/utils/jit.py,sha256=ow8sPjtnlzcRDGR_xNL7E6BU0cdjjgUSEkhGXuvMNkw,2203 +timm/utils/log.py,sha256=BdZ2OqWo3v8d7wsDRJ-uACcoeNUhS8TJSwI3CYvq3Ss,1015 +timm/utils/metrics.py,sha256=RSHpbbkyW6FsbxT6TzcBL7MZh4sv4A_GG1Bo8aN5qKc,901 +timm/utils/misc.py,sha256=wh1RUZPEyVOtA3HFkbunNpAKPbrOKQYeJqhvODmSyyQ,1105 +timm/utils/model.py,sha256=2Qyg3-dw07sA_6qZEYe5L4iHhb_S1Dyd52XysBpmjgQ,10566 +timm/utils/model_ema.py,sha256=-O-HAZKLxo069T_fN3CceNEaxGjJvbCxIFWjxWHJ0SM,11244 +timm/utils/onnx.py,sha256=TjCKpjlBshVuRzC6my22kboyLz5a2BGyzMKmJxgn1aA,3922 +timm/utils/random.py,sha256=Ysv6F3nIO8JYE8j6UrDxGyJDp3uNpq5v8U0KqL_8dic,178 +timm/utils/summary.py,sha256=HYD5nJsTOD3DGqCPUu2L3sX4VjNFTybHbjwntSEWBi4,1325 +timm/version.py,sha256=FX1uB9A8GQrSIKdyM4MfRyU174ifD40UlsZkjgGR5OM,23 diff --git 
a/lib/python3.10/site-packages/timm-1.0.11.dist-info/REQUESTED b/lib/python3.10/site-packages/timm-1.0.11.dist-info/REQUESTED new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/lib/python3.10/site-packages/timm-1.0.11.dist-info/WHEEL b/lib/python3.10/site-packages/timm-1.0.11.dist-info/WHEEL new file mode 100644 index 0000000000000000000000000000000000000000..fe332ddaedfc25fff8fbbdb32efae0006bf78e39 --- /dev/null +++ b/lib/python3.10/site-packages/timm-1.0.11.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: pdm-backend (2.4.2) +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/lib/python3.10/site-packages/timm-1.0.11.dist-info/entry_points.txt b/lib/python3.10/site-packages/timm-1.0.11.dist-info/entry_points.txt new file mode 100644 index 0000000000000000000000000000000000000000..c3ad4726d437022e5c606a4206ffb6007347a008 --- /dev/null +++ b/lib/python3.10/site-packages/timm-1.0.11.dist-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] + +[gui_scripts] + diff --git a/lib/python3.10/site-packages/timm-1.0.11.dist-info/licenses/LICENSE b/lib/python3.10/site-packages/timm-1.0.11.dist-info/licenses/LICENSE new file mode 100644 index 0000000000000000000000000000000000000000..4336819097f8777b9ca64ebe49bc0bc77ae3c5f5 --- /dev/null +++ b/lib/python3.10/site-packages/timm-1.0.11.dist-info/licenses/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "{}" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2019 Ross Wightman + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/lib/python3.10/site-packages/uritemplate/template.py b/lib/python3.10/site-packages/uritemplate/template.py new file mode 100644 index 0000000000000000000000000000000000000000..73ef89c93ca169c60fa6b4029ea5e5b4c6db05bb --- /dev/null +++ b/lib/python3.10/site-packages/uritemplate/template.py @@ -0,0 +1,169 @@ +""" + +uritemplate.template +==================== + +This module contains the essential inner workings of uritemplate. + +What treasures await you: + +- URITemplate class + +You see a treasure chest of knowledge in front of you. +What do you do? 
>

"""
import re
import typing as t

from uritemplate import orderedset
from uritemplate import variable

template_re = re.compile("{([^}]+)}")


def _merge(
    var_dict: t.Optional[variable.VariableValueDict],
    overrides: variable.VariableValueDict,
) -> variable.VariableValueDict:
    if var_dict:
        opts = var_dict.copy()
        opts.update(overrides)
        return opts
    return overrides


class URITemplate:

    """This parses the template and will be used to expand it.

    This is the most important object, as it is the center of the API.

    Example::

        from uritemplate import URITemplate
        import requests


        t = URITemplate(
            'https://api.github.com/users/sigmavirus24/gists{/gist_id}'
        )
        uri = t.expand(gist_id=123456)
        resp = requests.get(uri)
        for gist in resp.json():
            print(gist['html_url'])

    Please note::

        str(t)
        # 'https://api.github.com/users/sigmavirus24/gists{/gist_id}'
        repr(t)  # is equivalent to
        # URITemplate(str(t))
        # Where str(t) is interpreted as the URI string.

    Also, ``URITemplates`` are hashable so they can be used as keys in
    dictionaries.

    """

    def __init__(self, uri: str):
        #: The original URI to be parsed.
        self.uri: str = uri
        #: A list of the variables in the URI. They are stored as
        #: :class:`~uritemplate.variable.URIVariable`\ s
        self.variables: t.List[variable.URIVariable] = [
            variable.URIVariable(m.groups()[0])
            for m in template_re.finditer(self.uri)
        ]
        #: A set of variable names in the URI.
        self.variable_names = orderedset.OrderedSet()
        for var in self.variables:
            for name in var.variable_names:
                self.variable_names.add(name)

    def __repr__(self) -> str:
        return 'URITemplate("%s")' % self

    def __str__(self) -> str:
        return self.uri

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, URITemplate):
            return NotImplemented
        return self.uri == other.uri

    def __hash__(self) -> int:
        return hash(self.uri)

    def _expand(
        self, var_dict: variable.VariableValueDict, replace: bool
    ) -> str:
        if not self.variables:
            return self.uri

        expansion = var_dict
        expanded: t.Dict[str, str] = {}
        for v in self.variables:
            expanded.update(v.expand(expansion))

        def replace_all(match: "re.Match[str]") -> str:
            return expanded.get(match.groups()[0], "")

        def replace_partial(match: "re.Match[str]") -> str:
            match_group = match.groups()[0]
            var = "{%s}" % match_group
            return expanded.get(match_group) or var

        replace_func = replace_partial if replace else replace_all

        return template_re.sub(replace_func, self.uri)

    def expand(
        self,
        var_dict: t.Optional[variable.VariableValueDict] = None,
        **kwargs: variable.VariableValue,
    ) -> str:
        """Expand the template with the given parameters.

        :param dict var_dict: Optional dictionary with variables and values
        :param kwargs: Alternative way to pass arguments
        :returns: str

        Example::

            t = URITemplate('https://api.github.com{/end}')
            t.expand({'end': 'users'})
            t.expand(end='gists')

        .. note:: Passing values both in ``var_dict`` and as keyword
            arguments will use the keyword arguments' values, overriding
            those in ``var_dict``. For example::

                URITemplate('https://{var}').expand({'var': 'val1'}, var='val2')

            ``val2`` will be used instead of ``val1``.

        """
        return self._expand(_merge(var_dict, kwargs), False)

    def partial(
        self,
        var_dict: t.Optional[variable.VariableValueDict] = None,
        **kwargs: variable.VariableValue,
    ) -> "URITemplate":
        """Partially expand the template with the given parameters.
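
        Unlike :meth:`expand`, variables that have no matching value are
        left in the URI as template expressions instead of being replaced
        with the empty string.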

        If not all of the parameters for the template are given, the
        result is a partially expanded template.

        :param dict var_dict: Optional dictionary with variables and values
        :param kwargs: Alternative way to pass arguments
        :returns: :class:`URITemplate`

        Example::

            t = URITemplate('https://api.github.com{/end}')
            t.partial()  # => URITemplate('https://api.github.com{/end}')

        """
        return URITemplate(self._expand(_merge(var_dict, kwargs), True))
diff --git a/lib/python3.10/site-packages/uritemplate/variable.py b/lib/python3.10/site-packages/uritemplate/variable.py
new file mode 100644
index 0000000000000000000000000000000000000000..da406cab262cf6065dd32e1ca2ebd2dde231823d
--- /dev/null
+++ b/lib/python3.10/site-packages/uritemplate/variable.py
@@ -0,0 +1,419 @@
"""

uritemplate.variable
====================

This module contains the URIVariable class which powers the URITemplate class.

What treasures await you:

- URIVariable class

You see a hammer in front of you.
What do you do?
>

"""
import collections.abc
import typing as t
import urllib.parse

ScalarVariableValue = t.Union[int, float, complex, str]
VariableValue = t.Union[
    t.Sequence[ScalarVariableValue],
    t.Mapping[str, ScalarVariableValue],
    t.Tuple[str, ScalarVariableValue],
    ScalarVariableValue,
]
VariableValueDict = t.Dict[str, VariableValue]


class URIVariable:

    """This object validates everything inside the URITemplate object.

    It validates template expansions and will truncate length as decided by
    the template.

    Please note that just like the :class:`URITemplate`, this object's
    ``__str__`` and ``__repr__`` methods do not return the same
    information. Calling ``str(var)`` will return the original variable.

    This object does the majority of the heavy lifting. The ``URITemplate``
    object finds the variables in the URI and then creates ``URIVariable``
    objects. Expansions of the URI are handled by each ``URIVariable``
    object. ``URIVariable.expand()`` returns a dictionary of the original
    variable and the expanded value. Check that method's documentation for
    more information.

    """

    operators = ("+", "#", ".", "/", ";", "?", "&", "|", "!", "@")
    reserved = ":/?#[]@!$&'()*+,;="

    def __init__(self, var: str):
        #: The original string that comes through with the variable
        self.original: str = var
        #: The operator for the variable
        self.operator: str = ""
        #: List of safe characters when quoting the string
        self.safe: str = ""
        #: List of variables in this variable
        self.variables: t.List[
            t.Tuple[str, t.MutableMapping[str, t.Any]]
        ] = []
        #: List of variable names
        self.variable_names: t.List[str] = []
        #: List of defaults passed in
        self.defaults: t.MutableMapping[str, ScalarVariableValue] = {}
        # Parse the variable itself.
        self.parse()
        self.post_parse()

    def __repr__(self) -> str:
        return "URIVariable(%s)" % self

    def __str__(self) -> str:
        return self.original

    def parse(self) -> None:
        """Parse the variable.

        This finds the:
            - operator,
            - set of safe characters,
            - variables, and
            - defaults.
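
        For example (an illustrative case): parsing the variable string
        ``?page=1`` finds the ``?`` operator, the single name ``page``,
        and the default value ``1``.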
+ + """ + var_list_str = self.original + if self.original[0] in URIVariable.operators: + self.operator = self.original[0] + var_list_str = self.original[1:] + + if self.operator in URIVariable.operators[:2]: + self.safe = URIVariable.reserved + + var_list = var_list_str.split(",") + + for var in var_list: + default_val = None + name = var + if "=" in var: + name, default_val = tuple(var.split("=", 1)) + + explode = False + if name.endswith("*"): + explode = True + name = name[:-1] + + prefix: t.Optional[int] = None + if ":" in name: + name, prefix_str = tuple(name.split(":", 1)) + prefix = int(prefix_str) + + if default_val: + self.defaults[name] = default_val + + self.variables.append( + (name, {"explode": explode, "prefix": prefix}) + ) + + self.variable_names = [varname for (varname, _) in self.variables] + + def post_parse(self) -> None: + """Set ``start``, ``join_str`` and ``safe`` attributes. + + After parsing the variable, we need to set up these attributes and it + only makes sense to do it in a more easily testable way. + """ + self.safe = "" + self.start = self.join_str = self.operator + if self.operator == "+": + self.start = "" + if self.operator in ("+", "#", ""): + self.join_str = "," + if self.operator == "#": + self.start = "#" + if self.operator == "?": + self.start = "?" + self.join_str = "&" + + if self.operator in ("+", "#"): + self.safe = URIVariable.reserved + + def _query_expansion( + self, + name: str, + value: VariableValue, + explode: bool, + prefix: t.Optional[int], + ) -> t.Optional[str]: + """Expansion method for the '?' and '&' operators.""" + if value is None: + return None + + tuples, items = is_list_of_tuples(value) + + safe = self.safe + if list_test(value) and not tuples: + if not value: + return None + value = t.cast(t.Sequence[ScalarVariableValue], value) + if explode: + return self.join_str.join( + f"{name}={quote(v, safe)}" for v in value + ) + else: + value = ",".join(quote(v, safe) for v in value) + return f"{name}={value}" + + if dict_test(value) or tuples: + if not value: + return None + value = t.cast(t.Mapping[str, ScalarVariableValue], value) + items = items or sorted(value.items()) + if explode: + return self.join_str.join( + f"{quote(k, safe)}={quote(v, safe)}" for k, v in items + ) + else: + value = ",".join( + f"{quote(k, safe)},{quote(v, safe)}" for k, v in items + ) + return f"{name}={value}" + + if value: + value = t.cast(t.Text, value) + value = value[:prefix] if prefix else value + return f"{name}={quote(value, safe)}" + return name + "=" + + def _label_path_expansion( + self, + name: str, + value: VariableValue, + explode: bool, + prefix: t.Optional[int], + ) -> t.Optional[str]: + """Label and path expansion method. + + Expands for operators: '/', '.' 
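
        For example (an illustrative case): with the ``/`` operator, a
        list value ``['a', 'b']`` joins with ``/`` when exploded
        (``a/b``) and with ``,`` otherwise (``a,b``); the leading ``/``
        is prepended later by :meth:`expand`.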
+ + """ + join_str = self.join_str + safe = self.safe + + if value is None or ( + not isinstance(value, (str, int, float, complex)) + and len(value) == 0 + ): + return None + + tuples, items = is_list_of_tuples(value) + + if list_test(value) and not tuples: + if not explode: + join_str = "," + + value = t.cast(t.Sequence[ScalarVariableValue], value) + fragments = [quote(v, safe) for v in value if v is not None] + return join_str.join(fragments) if fragments else None + + if dict_test(value) or tuples: + value = t.cast(t.Mapping[str, ScalarVariableValue], value) + items = items or sorted(value.items()) + format_str = "%s=%s" + if not explode: + format_str = "%s,%s" + join_str = "," + + expanded = join_str.join( + format_str % (quote(k, safe), quote(v, safe)) + for k, v in items + if v is not None + ) + return expanded if expanded else None + + value = t.cast(t.Text, value) + value = value[:prefix] if prefix else value + return quote(value, safe) + + def _semi_path_expansion( + self, + name: str, + value: VariableValue, + explode: bool, + prefix: t.Optional[int], + ) -> t.Optional[str]: + """Expansion method for ';' operator.""" + join_str = self.join_str + safe = self.safe + + if value is None: + return None + + if self.operator == "?": + join_str = "&" + + tuples, items = is_list_of_tuples(value) + + if list_test(value) and not tuples: + value = t.cast(t.Sequence[ScalarVariableValue], value) + if explode: + expanded = join_str.join( + f"{name}={quote(v, safe)}" for v in value if v is not None + ) + return expanded if expanded else None + else: + value = ",".join(quote(v, safe) for v in value) + return f"{name}={value}" + + if dict_test(value) or tuples: + value = t.cast(t.Mapping[str, ScalarVariableValue], value) + items = items or sorted(value.items()) + + if explode: + return join_str.join( + f"{quote(k, safe)}={quote(v, safe)}" + for k, v in items + if v is not None + ) + else: + expanded = ",".join( + f"{quote(k, safe)},{quote(v, safe)}" + for k, v in items + if v is not None + ) + return f"{name}={expanded}" + + value = t.cast(t.Text, value) + value = value[:prefix] if prefix else value + if value: + return f"{name}={quote(value, safe)}" + + return name + + def _string_expansion( + self, + name: str, + value: VariableValue, + explode: bool, + prefix: t.Optional[int], + ) -> t.Optional[str]: + if value is None: + return None + + tuples, items = is_list_of_tuples(value) + + if list_test(value) and not tuples: + value = t.cast(t.Sequence[ScalarVariableValue], value) + return ",".join(quote(v, self.safe) for v in value) + + if dict_test(value) or tuples: + value = t.cast(t.Mapping[str, ScalarVariableValue], value) + items = items or sorted(value.items()) + format_str = "%s=%s" if explode else "%s,%s" + + return ",".join( + format_str % (quote(k, self.safe), quote(v, self.safe)) + for k, v in items + ) + + value = t.cast(t.Text, value) + value = value[:prefix] if prefix else value + return quote(value, self.safe) + + def expand( + self, var_dict: t.Optional[VariableValueDict] = None + ) -> t.Mapping[str, str]: + """Expand the variable in question. + + Using ``var_dict`` and the previously parsed defaults, expand this + variable and subvariables. 
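
        Internally, this dispatches on the parsed operator: ``/`` and
        ``.`` use label/path expansion, ``?`` and ``&`` use query
        expansion, ``;`` uses semi-path expansion, and any other operator
        falls back to plain string expansion.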
+ + :param dict var_dict: dictionary of key-value pairs to be used during + expansion + :returns: dict(variable=value) + + Examples:: + + # (1) + v = URIVariable('/var') + expansion = v.expand({'var': 'value'}) + print(expansion) + # => {'/var': '/value'} + + # (2) + v = URIVariable('?var,hello,x,y') + expansion = v.expand({'var': 'value', 'hello': 'Hello World!', + 'x': '1024', 'y': '768'}) + print(expansion) + # => {'?var,hello,x,y': + # '?var=value&hello=Hello%20World%21&x=1024&y=768'} + + """ + return_values = [] + if var_dict is None: + return {self.original: self.original} + + for name, opts in self.variables: + value = var_dict.get(name, None) + if not value and value != "" and name in self.defaults: + value = self.defaults[name] + + if value is None: + continue + + expanded = None + if self.operator in ("/", "."): + expansion = self._label_path_expansion + elif self.operator in ("?", "&"): + expansion = self._query_expansion + elif self.operator == ";": + expansion = self._semi_path_expansion + else: + expansion = self._string_expansion + + expanded = expansion(name, value, opts["explode"], opts["prefix"]) + + if expanded is not None: + return_values.append(expanded) + + value = "" + if return_values: + value = self.start + self.join_str.join(return_values) + return {self.original: value} + + +def is_list_of_tuples( + value: t.Any, +) -> t.Tuple[bool, t.Optional[t.Sequence[t.Tuple[str, ScalarVariableValue]]]]: + if ( + not value + or not isinstance(value, (list, tuple)) + or not all(isinstance(t, tuple) and len(t) == 2 for t in value) + ): + return False, None + + return True, value + + +def list_test(value: t.Any) -> bool: + return isinstance(value, (list, tuple)) + + +def dict_test(value: t.Any) -> bool: + return isinstance(value, (dict, collections.abc.MutableMapping)) + + +def _encode(value: t.AnyStr, encoding: str = "utf-8") -> bytes: + if isinstance(value, str): + return value.encode(encoding) + return value + + +def quote(value: t.Any, safe: str) -> str: + if not isinstance(value, (str, bytes)): + value = str(value) + return urllib.parse.quote(_encode(value), safe)
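

# A minimal usage sketch (an illustration, not part of the upstream
# module): expanding single variable expressions directly. The variable
# names ('page', 'per_page', 'segments') are arbitrary examples.
if __name__ == "__main__":
    # The '?' operator joins name=value pairs with '&' and prefixes '?'.
    v = URIVariable("?page,per_page")
    print(v.expand({"page": 1, "per_page": 50}))
    # => {'?page,per_page': '?page=1&per_page=50'}

    # Explode ('*') on a '/' path variable joins list items with '/'.
    p = URIVariable("/segments*")
    print(p.expand({"segments": ["a", "b", "c"]}))
    # => {'/segments*': '/a/b/c'}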