diff --git a/.coveragerc b/.coveragerc
deleted file mode 100644
index ecef4efe..00000000
--- a/.coveragerc
+++ /dev/null
@@ -1,13 +0,0 @@
-[run]
-include =
-    s3fs/*
-
-omit =
-    s3fs/tests/test*
-
-[report]
-show_missing = True
-
-[html]
-directory = coverage_html_report
-
diff --git a/.gitattributes b/.gitattributes
deleted file mode 100644
index 852093f8..00000000
--- a/.gitattributes
+++ /dev/null
@@ -1 +0,0 @@
-s3fs/_version.py export-subst
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..6dca4666
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,13 @@
+version: 2
+updates:
+
+  # Maintain dependencies for GitHub Actions
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "daily"
+
+  - package-ecosystem: "pip"
+    directory: "/"
+    schedule:
+      interval: "daily"
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index a4e41132..10768169 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -1,6 +1,9 @@
 name: CI
 
-on: [push, pull_request]
+on:
+  pull_request:
+  push:
+    branches: [master]
 
 jobs:
   test:
@@ -9,7 +12,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.7", "3.8", "3.9"]
+        python-version: ["3.7", "3.8", "3.9", "3.10"]
 
     env:
       BOTO_CONFIG: /dev/null
@@ -22,32 +25,28 @@
         with:
           fetch-depth: 0
 
-      - name: Setup Conda Environment
-        uses: conda-incubator/setup-miniconda@v2
+      - uses: actions/setup-python@v2
         with:
-          auto-update-conda: true
-          miniconda-version: latest
-          activate-environment: test
           python-version: ${{ matrix.python-version }}
 
-      - name: Install dependencies
-        shell: bash -l {0}
-        run: |
-          conda install -c conda-forge pip botocore aiobotocore==2.1.0 "aiohttp<=4" "moto>=2.0,<3" pytest flake8 black -y
-          pip install git+https://github.com/fsspec/filesystem_spec --no-deps
-          conda list
-          conda --version
+      - name: Setup Poetry
+        uses: Gr1N/setup-poetry@v7
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - uses: actions/cache@v2
+        with:
+          path: ~/.cache/pypoetry/virtualenvs
+          key: ${{ runner.os }}-poetry-${{ hashFiles('poetry.lock') }}
 
-      - name: Install
-        shell: bash -l {0}
-        run: python setup.py install
+      - run: poetry install
 
       - name: Run Tests
-        shell: bash -l {0}
-        run: pytest -vv s3fs
-
-      - name: Lint
-        shell: bash -l {0}
-        run: |
-          flake8 s3fs
-          black s3fs --check --diff
+        run: poetry run pytest -vv
+
+  pre-commit:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v2
+      - uses: actions/setup-python@v2
+      - uses: pre-commit/action@v2.0.3
diff --git a/.gitignore b/.gitignore
index d95c3636..49cb5b18 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,4 @@ dist/
 *.egg-info
 build/
 venv/
+.venv/
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
new file mode 100644
index 00000000..abfe88a3
--- /dev/null
+++ b/.pre-commit-config.yaml
@@ -0,0 +1,17 @@
+repos:
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v2.3.0
+    hooks:
+      - id: check-yaml
+      - id: end-of-file-fixer
+      - id: trailing-whitespace
+  - repo: https://github.com/psf/black
+    rev: 22.1.0
+    hooks:
+      - id: black
+        exclude: ^docs/
+  - repo: https://github.com/pycqa/flake8
+    rev: '4.0.1'
+    hooks:
+      - id: flake8
+        exclude: ^tests/|^docs/|__init__.py
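For reference, the workflow changes above swap the Conda-based test environment for Poetry and move linting out of the test matrix into a dedicated pre-commit job. A local development loop that mirrors the new CI steps might look as follows (a sketch only: it assumes Poetry and pre-commit are available on PATH, and that the pyproject.toml/poetry.lock pair this diff relies on is present at the repository root)::

    $ pip install poetry pre-commit
    $ poetry install              # resolve and install from poetry.lock, like CI's "poetry install" step
    $ poetry run pytest -vv       # same invocation as the "Run Tests" step
    $ pre-commit run --all-files  # run the check-yaml/black/flake8 hooks configured above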
diff --git a/MANIFEST.in b/MANIFEST.in
deleted file mode 100644
index e5054bd5..00000000
--- a/MANIFEST.in
+++ /dev/null
@@ -1,12 +0,0 @@
-recursive-include s3fs *.py
-recursive-include docs *.rst
-
-include setup.py
-include README.rst
-include LICENSE.txt
-include MANIFEST.in
-include requirements.txt
-
-prune docs/_build
-include versioneer.py
-include s3fs/_version.py
diff --git a/docs/source/changelog.rst b/docs/source/changelog.rst
index 712a4d1d..10fc3629 100644
--- a/docs/source/changelog.rst
+++ b/docs/source/changelog.rst
@@ -119,7 +119,7 @@ Version 0.4.0
 - Always use multipart uploads when not autocommitting (:pr:`243`) by `Marius van Niekerk`_
 - Create ``CONTRIBUTING.md`` (:pr:`248`) by `Jacob Tomlinson`_
 - Use autofunction for ``S3Map`` sphinx autosummary (:pr:`251`) by `James Bourbeau`_
-- Miscellaneous doc updates (:pr:`252`) by `James Bourbeau`_ 
+- Miscellaneous doc updates (:pr:`252`) by `James Bourbeau`_
 - Support for Python 3.8 (:pr:`264`) by `Tom Augspurger`_
 - Improved performance for ``isdir`` (:pr:`259`) by `Nate Yoder`_
 * Increased the minimum required version of fsspec to 0.6.0
diff --git a/docs/source/development.rst b/docs/source/development.rst
index a8bcbe97..e574d2b2 100644
--- a/docs/source/development.rst
+++ b/docs/source/development.rst
@@ -7,4 +7,4 @@ Create a development environment::
 
 Run tests::
 
-    $ pytest
\ No newline at end of file
+    $ pytest
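Everything that follows is the generated poetry.lock. Lock files are not edited by hand; they are rebuilt from pyproject.toml with Poetry itself, for example (a sketch, assuming a Poetry 1.1-era CLI; ``boto3`` below is only an illustrative package name)::

    $ poetry lock          # re-resolve all dependencies and rewrite poetry.lock
    $ poetry show --tree   # inspect the locked dependency tree
    $ poetry update boto3  # re-lock a single package within its pyproject.toml constraints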
+category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "attrs" +version = "21.4.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.extras] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] + +[[package]] +name = "aws-sam-translator" +version = "1.42.0" +description = "AWS SAM Translator is a library that transform SAM templates into AWS CloudFormation templates" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +boto3 = ">=1.5,<2.0" +jsonschema = ">=3.2,<4.0" +six = ">=1.15,<2.0" + +[package.extras] +dev = ["coverage (>=5.3,<6.0)", "flake8 (>=3.8.4,<3.9.0)", "tox (>=3.20.1,<3.21.0)", "pytest-cov (>=2.10.1,<2.11.0)", "pylint (>=1.7.2,<2.0)", "pyyaml (>=5.4,<6.0)", "mock (>=3.0.5,<4.0.0)", "parameterized (>=0.7.4,<0.8.0)", "click (>=7.1,<8.0)", "dateparser (>=0.7,<1.0)", "boto3 (>=1.17,<2.0)", "requests (>=2.24.0,<2.25.0)", "docopt (>=0.6.2,<0.7.0)", "pathlib2 (>=2.3.5)", "pytest (>=4.6.11,<4.7.0)", "pytest (>=6.1.1,<6.2.0)", "black (==20.8b1)"] + +[[package]] +name = "aws-xray-sdk" +version = "2.9.0" +description = "The AWS X-Ray SDK for Python (the SDK) enables Python developers to record and emit information from within their applications to the AWS X-Ray service." +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +botocore = ">=1.11.3" +future = "*" +wrapt = "*" + +[[package]] +name = "boto3" +version = "1.20.24" +description = "The AWS SDK for Python" +category = "dev" +optional = false +python-versions = ">= 3.6" + +[package.dependencies] +botocore = ">=1.23.24,<1.24.0" +jmespath = ">=0.7.1,<1.0.0" +s3transfer = ">=0.5.0,<0.6.0" + +[package.extras] +crt = ["botocore[crt] (>=1.21.0,<2.0a0)"] + +[[package]] +name = "botocore" +version = "1.23.24" +description = "Low-level, data-driven core of boto 3." +category = "main" +optional = false +python-versions = ">= 3.6" + +[package.dependencies] +jmespath = ">=0.7.1,<1.0.0" +python-dateutil = ">=2.1,<3.0.0" +urllib3 = ">=1.25.4,<1.27" + +[package.extras] +crt = ["awscrt (==0.12.5)"] + +[[package]] +name = "bump2version" +version = "1.0.1" +description = "Version-bump your software with a single command!" +category = "dev" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "certifi" +version = "2021.10.8" +description = "Python package for providing Mozilla's CA Bundle." +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "cffi" +version = "1.15.0" +description = "Foreign Function Interface for Python calling C code." 
+category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pycparser = "*" + +[[package]] +name = "cfn-lint" +version = "0.58.2" +description = "Checks CloudFormation templates for practices and behaviour that could potentially be improved" +category = "dev" +optional = false +python-versions = ">=3.6, <=4.0, !=4.0" + +[package.dependencies] +aws-sam-translator = ">=1.42.0" +jschema-to-python = ">=1.2.3,<1.3.0" +jsonpatch = "*" +jsonschema = ">=3.0,<4.0" +junit-xml = ">=1.9,<2.0" +networkx = ">=2.4,<3.0" +pyyaml = ">5.4" +sarif-om = ">=1.0.4,<1.1.0" +six = ">=1.11" + +[[package]] +name = "charset-normalizer" +version = "2.0.12" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." +category = "main" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.0.4" +description = "Composable command line interface toolkit" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "cryptography" +version = "36.0.1" +description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cffi = ">=1.12" + +[package.extras] +docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] +docstest = ["pyenchant (>=1.6.11)", "twine (>=1.12.0)", "sphinxcontrib-spelling (>=4.0.1)"] +pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] +sdist = ["setuptools_rust (>=0.11.4)"] +ssh = ["bcrypt (>=3.1.5)"] +test = ["pytest (>=6.2.0)", "pytest-cov", "pytest-subtests", "pytest-xdist", "pretend", "iso8601", "pytz", "hypothesis (>=1.11.4,!=3.79.2)"] + +[[package]] +name = "docker" +version = "5.0.3" +description = "A Python library for the Docker Engine API." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pywin32 = {version = "227", markers = "sys_platform == \"win32\""} +requests = ">=2.14.2,<2.18.0 || >2.18.0" +websocket-client = ">=0.32.0" + +[package.extras] +ssh = ["paramiko (>=2.4.2)"] +tls = ["pyOpenSSL (>=17.5.0)", "cryptography (>=3.4.7)", "idna (>=2.0.0)"] + +[[package]] +name = "ecdsa" +version = "0.17.0" +description = "ECDSA cryptographic signature library (pure python)" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[package.dependencies] +six = ">=1.9.0" + +[package.extras] +gmpy = ["gmpy"] +gmpy2 = ["gmpy2"] + +[[package]] +name = "flask" +version = "2.0.3" +description = "A simple framework for building complex web applications." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +click = ">=7.1.2" +itsdangerous = ">=2.0" +Jinja2 = ">=3.0" +Werkzeug = ">=2.0" + +[package.extras] +async = ["asgiref (>=3.2)"] +dotenv = ["python-dotenv"] + +[[package]] +name = "flask-cors" +version = "3.0.10" +description = "A Flask extension adding a decorator for CORS support" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +Flask = ">=0.9" +Six = "*" + +[[package]] +name = "frozenlist" +version = "1.3.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "fsspec" +version = "2022.2.0" +description = "File-system specification" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +abfs = ["adlfs"] +adl = ["adlfs"] +arrow = ["pyarrow (>=1)"] +dask = ["dask", "distributed"] +dropbox = ["dropboxdrivefs", "requests", "dropbox"] +entrypoints = ["importlib-metadata"] +fuse = ["fusepy"] +gcs = ["gcsfs"] +git = ["pygit2"] +github = ["requests"] +gs = ["gcsfs"] +gui = ["panel"] +hdfs = ["pyarrow (>=1)"] +http = ["requests", "aiohttp"] +libarchive = ["libarchive-c"] +oci = ["ocifs"] +s3 = ["s3fs"] +sftp = ["paramiko"] +smb = ["smbprotocol"] +ssh = ["paramiko"] + +[[package]] +name = "future" +version = "0.18.2" +description = "Clean single-source support for Python 3 and 2" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "graphql-core" +version = "3.2.0" +description = "GraphQL implementation for Python, a port of GraphQL.js, the JavaScript reference implementation for GraphQL." +category = "dev" +optional = false +python-versions = ">=3.6,<4" + +[[package]] +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" +optional = false +python-versions = ">=3.5" + +[[package]] +name = "importlib-metadata" +version = "4.11.2" +description = "Read metadata from Python packages" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] + +[[package]] +name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "itsdangerous" +version = "2.1.0" +description = "Safely pass data to untrusted environments and back." +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "jinja2" +version = "3.0.3" +description = "A very fast and expressive template engine." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "jmespath" +version = "0.10.0" +description = "JSON Matching Expressions" +category = "main" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "jschema-to-python" +version = "1.2.3" +description = "Generate source code for Python classes from a JSON schema." +category = "dev" +optional = false +python-versions = ">= 2.7" + +[package.dependencies] +attrs = "*" +jsonpickle = "*" +pbr = "*" + +[[package]] +name = "jsondiff" +version = "1.3.1" +description = "Diff JSON and JSON-like structures in Python" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "jsonpatch" +version = "1.32" +description = "Apply JSON-Patches (RFC 6902)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[package.dependencies] +jsonpointer = ">=1.9" + +[[package]] +name = "jsonpickle" +version = "2.1.0" +description = "Python library for serializing any arbitrary object graph into JSON" +category = "dev" +optional = false +python-versions = ">=2.7" + +[package.dependencies] +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=3.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=3.5,!=3.7.3)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-black-multipy", "pytest-cov", "ecdsa", "feedparser", "numpy", "pandas", "pymongo", "scikit-learn", "sqlalchemy", "enum34", "jsonlib"] +"testing.libs" = ["demjson", "simplejson", "ujson", "yajl"] + +[[package]] +name = "jsonpointer" +version = "2.2" +description = "Identify specific nodes in a JSON document (RFC 6901)" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +attrs = ">=17.4.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +pyrsistent = ">=0.14.0" +six = ">=1.11.0" + +[package.extras] +format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors"] +format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] + +[[package]] +name = "junit-xml" +version = "1.9" +description = "Creates JUnit XML test result documents that can be read by tools such as Jenkins" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + +[[package]] +name = "markupsafe" +version = "2.1.0" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "moto" +version = "2.3.2" +description = "A library that allows your python tests to easily mock out the boto library" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +aws-xray-sdk = {version = ">=0.93,<0.96 || >0.96", optional = true, markers = "extra == \"server\""} +boto3 = ">=1.9.201" +botocore = ">=1.12.201" +cfn-lint = {version = ">=0.4.0", optional = true, markers = "extra == \"server\""} +cryptography = ">=3.3.1" +docker = {version = ">=2.5.1", optional = true, markers = "extra == \"server\""} +ecdsa = {version = "!=0.15", optional = true, markers = "extra == \"server\""} +flask = {version = "*", optional = true, markers = "extra == \"server\""} +flask-cors = {version = "*", optional = true, markers = "extra == \"server\""} +graphql-core = {version = "*", optional = true, markers = "extra == \"server\""} +idna = {version = ">=2.5,<4", optional = true, markers = "extra == \"server\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +Jinja2 = ">=2.10.1" +jsondiff = {version = ">=1.1.2", optional = true, markers = "extra == \"server\""} +MarkupSafe = "!=2.0.0a1" +python-dateutil = ">=2.1,<3.0.0" +python-jose = {version = ">=3.1.0,<4.0.0", extras = ["cryptography"], optional = true, markers = "extra == \"server\""} +pytz = "*" +PyYAML = {version = ">=5.1", optional = true, markers = "extra == \"server\""} +requests = ">=2.5" +responses = ">=0.9.0" +sshpubkeys = {version = ">=3.1.0", optional = true, markers = "extra == \"server\""} +werkzeug = "*" +xmltodict = "*" + +[package.extras] +all = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)", "docker (>=2.5.1)", "graphql-core", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools"] +apigateway = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)"] +appsync = ["graphql-core"] +awslambda = ["docker (>=2.5.1)"] +batch = ["docker (>=2.5.1)"] +cloudformation = ["docker (>=2.5.1)", "PyYAML (>=5.1)", "cfn-lint (>=0.4.0)"] +cognitoidp = ["python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)"] +ds = ["sshpubkeys (>=3.1.0)"] +dynamodb2 = ["docker (>=2.5.1)"] +dynamodbstreams = ["docker (>=2.5.1)"] +ec2 = ["sshpubkeys (>=3.1.0)"] +efs = ["sshpubkeys (>=3.1.0)"] +iotdata = ["jsondiff (>=1.1.2)"] +route53resolver = ["sshpubkeys (>=3.1.0)"] +s3 = ["PyYAML (>=5.1)"] +server = ["PyYAML (>=5.1)", "python-jose[cryptography] (>=3.1.0,<4.0.0)", "ecdsa (!=0.15)", "docker (>=2.5.1)", "graphql-core", "jsondiff (>=1.1.2)", "aws-xray-sdk (>=0.93,!=0.96)", "idna (>=2.5,<4)", "cfn-lint (>=0.4.0)", "sshpubkeys (>=3.1.0)", "setuptools", "flask", "flask-cors"] +ssm = ["PyYAML (>=5.1)", "dataclasses"] +xray = ["aws-xray-sdk (>=0.93,!=0.96)", "setuptools"] + +[[package]] +name = "multidict" +version = "6.0.2" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "networkx" +version = "2.6.3" +description = "Python package for creating and manipulating graphs and networks" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +default = ["numpy (>=1.19)", "scipy (>=1.5,!=1.6.1)", "matplotlib (>=3.3)", "pandas (>=1.1)"] +developer = ["black (==21.5b1)", "pre-commit (>=2.12)"] +doc = ["sphinx (>=4.0,<5.0)", "pydata-sphinx-theme (>=0.6,<1.0)", "sphinx-gallery (>=0.9,<1.0)", "numpydoc (>=1.1)", 
"pillow (>=8.2)", "nb2plots (>=0.6)", "texext (>=0.6.6)"] +extra = ["lxml (>=4.5)", "pygraphviz (>=1.7)", "pydot (>=1.4.1)"] +test = ["pytest (>=6.2)", "pytest-cov (>=2.12)", "codecov (>=2.1)"] + +[[package]] +name = "packaging" +version = "21.3" +description = "Core utilities for Python packages" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" + +[[package]] +name = "pbr" +version = "5.8.1" +description = "Python Build Reasonableness" +category = "dev" +optional = false +python-versions = ">=2.6" + +[[package]] +name = "pluggy" +version = "1.0.0" +description = "plugin and hook calling mechanisms for python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "py" +version = "1.11.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "pyasn1" +version = "0.4.8" +description = "ASN.1 types and codecs" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pycparser" +version = "2.21" +description = "C parser in Python" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pyparsing" +version = "3.0.7" +description = "Python parsing module" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pyrsistent" +version = "0.18.1" +description = "Persistent/Functional/Immutable data structures" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "pytest" +version = "7.0.1" +description = "pytest: simple powerful testing with Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +tomli = ">=1.0.0" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "xmlschema"] + +[[package]] +name = "pytest-env" +version = "0.6.2" +description = "py.test plugin that allows you to add environment variables." 
+category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +pytest = ">=2.6.0" + +[[package]] +name = "python-dateutil" +version = "2.8.2" +description = "Extensions to the standard Python datetime module" +category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-jose" +version = "3.3.0" +description = "JOSE implementation in Python" +category = "dev" +optional = false +python-versions = "*" + +[package.dependencies] +cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"cryptography\""} +ecdsa = "!=0.15" +pyasn1 = "*" +rsa = "*" + +[package.extras] +cryptography = ["cryptography (>=3.4.0)"] +pycrypto = ["pycrypto (>=2.6.0,<2.7.0)", "pyasn1"] +pycryptodome = ["pycryptodome (>=3.3.1,<4.0.0)", "pyasn1"] + +[[package]] +name = "pytz" +version = "2021.3" +description = "World timezone definitions, modern and historical" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pywin32" +version = "227" +description = "Python for Window Extensions" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "pyyaml" +version = "6.0" +description = "YAML parser and emitter for Python" +category = "dev" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "requests" +version = "2.27.1" +description = "Python HTTP for Humans." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + +[[package]] +name = "responses" +version = "0.19.0" +description = "A utility library for mocking out the `requests` Python library." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +requests = ">=2.0,<3.0" +urllib3 = ">=1.25.10" + +[package.extras] +tests = ["pytest (>=7.0.0)", "coverage (>=6.0.0)", "pytest-cov", "pytest-asyncio", "pytest-localserver", "flake8", "types-mock", "types-requests", "mypy"] + +[[package]] +name = "rsa" +version = "4.8" +description = "Pure-Python RSA implementation" +category = "dev" +optional = false +python-versions = ">=3.6,<4" + +[package.dependencies] +pyasn1 = ">=0.1.3" + +[[package]] +name = "s3transfer" +version = "0.5.2" +description = "An Amazon S3 Transfer Manager" +category = "dev" +optional = false +python-versions = ">= 3.6" + +[package.dependencies] +botocore = ">=1.12.36,<2.0a.0" + +[package.extras] +crt = ["botocore[crt] (>=1.20.29,<2.0a.0)"] + +[[package]] +name = "sarif-om" +version = "1.0.4" +description = "Classes implementing the SARIF 2.1.0 object model." 
+category = "dev" +optional = false +python-versions = ">= 2.7" + +[package.dependencies] +attrs = "*" +pbr = "*" + +[[package]] +name = "six" +version = "1.16.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "sshpubkeys" +version = "3.3.1" +description = "SSH public key parser" +category = "dev" +optional = false +python-versions = ">=3" + +[package.dependencies] +cryptography = ">=2.1.4" +ecdsa = ">=0.13" + +[package.extras] +dev = ["twine", "wheel", "yapf"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +category = "dev" +optional = false +python-versions = ">=3.7" + +[[package]] +name = "typing-extensions" +version = "4.1.1" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "urllib3" +version = "1.26.8" +description = "HTTP library with thread-safe connection pooling, file post, and more." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" + +[package.extras] +brotli = ["brotlipy (>=0.6.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "websocket-client" +version = "1.3.1" +description = "WebSocket client for Python with low level API options" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + +[[package]] +name = "werkzeug" +version = "2.0.3" +description = "The comprehensive WSGI web application library." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +watchdog = ["watchdog"] + +[[package]] +name = "wrapt" +version = "1.13.3" +description = "Module for decorators, wrappers and monkey patching." 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" + +[[package]] +name = "xmltodict" +version = "0.12.0" +description = "Makes working with XML feel like you are working with JSON" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "yarl" +version = "1.7.2" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +typing-extensions = {version = ">=3.7.4", markers = "python_version < \"3.8\""} + +[[package]] +name = "zipp" +version = "3.7.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[metadata] +lock-version = "1.1" +python-versions = ">=3.7,<4" +content-hash = "8ccf3740c4ff848bff118f7ca2d61f8a37cefcc8567a3ca7c228ccaf13a36d72" + +[metadata.files] +aiobotocore = [ + {file = "aiobotocore-2.1.2.tar.gz", hash = "sha256:00fd7f43cc0484d8ed274fd0be492aa16a7fea679dbea96a602bb722ca4c2d22"}, +] +aiohttp = [ + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:1ed0b6477896559f17b9eaeb6d38e07f7f9ffe40b9f0f9627ae8b9926ae260a8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7dadf3c307b31e0e61689cbf9e06be7a867c563d5a63ce9dca578f956609abf8"}, + {file = "aiohttp-3.8.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a79004bb58748f31ae1cbe9fa891054baaa46fb106c2dc7af9f8e3304dc30316"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:12de6add4038df8f72fac606dff775791a60f113a725c960f2bab01d8b8e6b15"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6f0d5f33feb5f69ddd57a4a4bd3d56c719a141080b445cbf18f238973c5c9923"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eaba923151d9deea315be1f3e2b31cc39a6d1d2f682f942905951f4e40200922"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:099ebd2c37ac74cce10a3527d2b49af80243e2a4fa39e7bce41617fbc35fa3c1"}, + {file = "aiohttp-3.8.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2e5d962cf7e1d426aa0e528a7e198658cdc8aa4fe87f781d039ad75dcd52c516"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:fa0ffcace9b3aa34d205d8130f7873fcfefcb6a4dd3dd705b0dab69af6712642"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:61bfc23df345d8c9716d03717c2ed5e27374e0fe6f659ea64edcd27b4b044cf7"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:31560d268ff62143e92423ef183680b9829b1b482c011713ae941997921eebc8"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:01d7bdb774a9acc838e6b8f1d114f45303841b89b95984cbb7d80ea41172a9e3"}, + {file = "aiohttp-3.8.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:97ef77eb6b044134c0b3a96e16abcb05ecce892965a2124c566af0fd60f717e2"}, + {file = 
"aiohttp-3.8.1-cp310-cp310-win32.whl", hash = "sha256:c2aef4703f1f2ddc6df17519885dbfa3514929149d3ff900b73f45998f2532fa"}, + {file = "aiohttp-3.8.1-cp310-cp310-win_amd64.whl", hash = "sha256:713ac174a629d39b7c6a3aa757b337599798da4c1157114a314e4e391cd28e32"}, + {file = "aiohttp-3.8.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:473d93d4450880fe278696549f2e7aed8cd23708c3c1997981464475f32137db"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99b5eeae8e019e7aad8af8bb314fb908dd2e028b3cdaad87ec05095394cce632"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3af642b43ce56c24d063325dd2cf20ee012d2b9ba4c3c008755a301aaea720ad"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c3630c3ef435c0a7c549ba170a0633a56e92629aeed0e707fec832dee313fb7a"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:4a4a4e30bf1edcad13fb0804300557aedd07a92cabc74382fdd0ba6ca2661091"}, + {file = "aiohttp-3.8.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6f8b01295e26c68b3a1b90efb7a89029110d3a4139270b24fda961893216c440"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:a25fa703a527158aaf10dafd956f7d42ac6d30ec80e9a70846253dd13e2f067b"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:5bfde62d1d2641a1f5173b8c8c2d96ceb4854f54a44c23102e2ccc7e02f003ec"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:51467000f3647d519272392f484126aa716f747859794ac9924a7aafa86cd411"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:03a6d5349c9ee8f79ab3ff3694d6ce1cfc3ced1c9d36200cb8f08ba06bd3b782"}, + {file = "aiohttp-3.8.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:102e487eeb82afac440581e5d7f8f44560b36cf0bdd11abc51a46c1cd88914d4"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win32.whl", hash = "sha256:4aed991a28ea3ce320dc8ce655875e1e00a11bdd29fe9444dd4f88c30d558602"}, + {file = "aiohttp-3.8.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b0e20cddbd676ab8a64c774fefa0ad787cc506afd844de95da56060348021e96"}, + {file = "aiohttp-3.8.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:37951ad2f4a6df6506750a23f7cbabad24c73c65f23f72e95897bb2cecbae676"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c23b1ad869653bc818e972b7a3a79852d0e494e9ab7e1a701a3decc49c20d51"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15b09b06dae900777833fe7fc4b4aa426556ce95847a3e8d7548e2d19e34edb8"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:477c3ea0ba410b2b56b7efb072c36fa91b1e6fc331761798fa3f28bb224830dd"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2f2f69dca064926e79997f45b2f34e202b320fd3782f17a91941f7eb85502ee2"}, + {file = "aiohttp-3.8.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ef9612483cb35171d51d9173647eed5d0069eaa2ee812793a75373447d487aa4"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:6d69f36d445c45cda7b3b26afef2fc34ef5ac0cdc75584a87ef307ee3c8c6d00"}, + {file = 
"aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:55c3d1072704d27401c92339144d199d9de7b52627f724a949fc7d5fc56d8b93"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:b9d00268fcb9f66fbcc7cd9fe423741d90c75ee029a1d15c09b22d23253c0a44"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:07b05cd3305e8a73112103c834e91cd27ce5b4bd07850c4b4dbd1877d3f45be7"}, + {file = "aiohttp-3.8.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c34dc4958b232ef6188c4318cb7b2c2d80521c9a56c52449f8f93ab7bc2a8a1c"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win32.whl", hash = "sha256:d2f9b69293c33aaa53d923032fe227feac867f81682f002ce33ffae978f0a9a9"}, + {file = "aiohttp-3.8.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6ae828d3a003f03ae31915c31fa684b9890ea44c9c989056fea96e3d12a9fa17"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0c7ebbbde809ff4e970824b2b6cb7e4222be6b95a296e46c03cf050878fc1785"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b7ef7cbd4fec9a1e811a5de813311ed4f7ac7d93e0fda233c9b3e1428f7dd7b"}, + {file = "aiohttp-3.8.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c3d6a4d0619e09dcd61021debf7059955c2004fa29f48788a3dfaf9c9901a7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:718626a174e7e467f0558954f94af117b7d4695d48eb980146016afa4b580b2e"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:589c72667a5febd36f1315aa6e5f56dd4aa4862df295cb51c769d16142ddd7cd"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2ed076098b171573161eb146afcb9129b5ff63308960aeca4b676d9d3c35e700"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:086f92daf51a032d062ec5f58af5ca6a44d082c35299c96376a41cbb33034675"}, + {file = "aiohttp-3.8.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:11691cf4dc5b94236ccc609b70fec991234e7ef8d4c02dd0c9668d1e486f5abf"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:31d1e1c0dbf19ebccbfd62eff461518dcb1e307b195e93bba60c965a4dcf1ba0"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:11a67c0d562e07067c4e86bffc1553f2cf5b664d6111c894671b2b8712f3aba5"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:bb01ba6b0d3f6c68b89fce7305080145d4877ad3acaed424bae4d4ee75faa950"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:44db35a9e15d6fe5c40d74952e803b1d96e964f683b5a78c3cc64eb177878155"}, + {file = "aiohttp-3.8.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:844a9b460871ee0a0b0b68a64890dae9c415e513db0f4a7e3cab41a0f2fedf33"}, + {file = "aiohttp-3.8.1-cp38-cp38-win32.whl", hash = "sha256:7d08744e9bae2ca9c382581f7dce1273fe3c9bae94ff572c3626e8da5b193c6a"}, + {file = "aiohttp-3.8.1-cp38-cp38-win_amd64.whl", hash = "sha256:04d48b8ce6ab3cf2097b1855e1505181bdd05586ca275f2505514a6e274e8e75"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f5315a2eb0239185af1bddb1abf472d877fede3cc8d143c6cddad37678293237"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a996d01ca39b8dfe77440f3cd600825d05841088fd6bc0144cc6c2ec14cc5f74"}, + {file = "aiohttp-3.8.1-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:13487abd2f761d4be7c8ff9080de2671e53fff69711d46de703c310c4c9317ca"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea302f34477fda3f85560a06d9ebdc7fa41e82420e892fc50b577e35fc6a50b2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a2f635ce61a89c5732537a7896b6319a8fcfa23ba09bec36e1b1ac0ab31270d2"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e999f2d0e12eea01caeecb17b653f3713d758f6dcc770417cf29ef08d3931421"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0770e2806a30e744b4e21c9d73b7bee18a1cfa3c47991ee2e5a65b887c49d5cf"}, + {file = "aiohttp-3.8.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d15367ce87c8e9e09b0f989bfd72dc641bcd04ba091c68cd305312d00962addd"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c7cefb4b0640703eb1069835c02486669312bf2f12b48a748e0a7756d0de33d"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:71927042ed6365a09a98a6377501af5c9f0a4d38083652bcd2281a06a5976724"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:28d490af82bc6b7ce53ff31337a18a10498303fe66f701ab65ef27e143c3b0ef"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:b6613280ccedf24354406caf785db748bebbddcf31408b20c0b48cb86af76866"}, + {file = "aiohttp-3.8.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81e3d8c34c623ca4e36c46524a3530e99c0bc95ed068fd6e9b55cb721d408fb2"}, + {file = "aiohttp-3.8.1-cp39-cp39-win32.whl", hash = "sha256:7187a76598bdb895af0adbd2fb7474d7f6025d170bc0a1130242da817ce9e7d1"}, + {file = "aiohttp-3.8.1-cp39-cp39-win_amd64.whl", hash = "sha256:1c182cb873bc91b411e184dab7a2b664d4fea2743df0e4d57402f7f3fa644bac"}, + {file = "aiohttp-3.8.1.tar.gz", hash = "sha256:fc5471e1a54de15ef71c1bc6ebe80d4dc681ea600e68bfd1cbce40427f0b7578"}, +] +aioitertools = [ + {file = "aioitertools-0.10.0-py3-none-any.whl", hash = "sha256:a2ea2a39ebf272a2fbb58bfdb73e1daeeb6686edbbc8082215dfc8b8ffffa6e8"}, + {file = "aioitertools-0.10.0.tar.gz", hash = "sha256:7d1d1d4a03d462c5a0840787d3df098f125847e0d38b833b30f8f8cbc45a1420"}, +] +aiosignal = [ + {file = "aiosignal-1.2.0-py3-none-any.whl", hash = "sha256:26e62109036cd181df6e6ad646f91f0dcfd05fe16d0cb924138ff2ab75d64e3a"}, + {file = "aiosignal-1.2.0.tar.gz", hash = "sha256:78ed67db6c7b7ced4f98e495e572106d5c432a93e1ddd1bf475e1dc05f5b7df2"}, +] +async-timeout = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] +asynctest = [ + {file = "asynctest-0.13.0-py3-none-any.whl", hash = "sha256:5da6118a7e6d6b54d83a8f7197769d046922a44d2a99c21382f0a6e4fadae676"}, + {file = "asynctest-0.13.0.tar.gz", hash = "sha256:c27862842d15d83e6a34eb0b2866c323880eb3a75e4485b079ea11748fd77fac"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = 
"sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +aws-sam-translator = [ + {file = "aws-sam-translator-1.42.0.tar.gz", hash = "sha256:8a7976c0ee2fca004a590e17d3551a49c8d8ba14ed0cb3674ea270d41d0dcd5b"}, + {file = "aws_sam_translator-1.42.0-py2-none-any.whl", hash = "sha256:4f5d3d5d0567fe728e75c5c8dff599f7c88313b3b8e85b9b17a2c00cb046b2e4"}, + {file = "aws_sam_translator-1.42.0-py3-none-any.whl", hash = "sha256:31875e4f639511f506d0c757a2a50756bd846440724079e867aafb12c534ac23"}, +] +aws-xray-sdk = [ + {file = "aws-xray-sdk-2.9.0.tar.gz", hash = "sha256:b0cd972db218d4d8f7b53ad806fc6184626b924c4997ae58fc9f2a8cd1281568"}, + {file = "aws_xray_sdk-2.9.0-py2.py3-none-any.whl", hash = "sha256:98216b3ac8281b51b59a8703f8ec561c460807d9d0679838f5c0179d381d7e58"}, +] +boto3 = [ + {file = "boto3-1.20.24-py3-none-any.whl", hash = "sha256:8f08e8e94bf107c5e9866684e9aadf8d9f60abed0cfe5c1dba4e7328674a1986"}, + {file = "boto3-1.20.24.tar.gz", hash = "sha256:739705b28e6b2329ea3b481ba801d439c296aaf176f7850729147ba99bbf8a9a"}, +] +botocore = [ + {file = "botocore-1.23.24-py3-none-any.whl", hash = "sha256:e78d48c50c8c013fb9b362c6202fece2fe868edfd89b51968080180bdff41617"}, + {file = "botocore-1.23.24.tar.gz", hash = "sha256:43006b4f52d7bb655319d3da0f615cdbee7762853acc1ebcb1d49f962e6b4806"}, +] +bump2version = [ + {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, + {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, +] +certifi = [ + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, +] +cffi = [ + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = "cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, +] +cfn-lint = [ + {file = "cfn-lint-0.58.2.tar.gz", hash = "sha256:484484e0a849b4f5d621bc55b1af7b46104370a2c1629b49376ab8281787e9eb"}, + {file = "cfn_lint-0.58.2-py3-none-any.whl", hash = "sha256:b77d8379dd339cce625f44990bfff2349920f1872261be7a1802e2a84ad77199"}, +] +charset-normalizer = [ + {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, + {file = "charset_normalizer-2.0.12-py3-none-any.whl", hash = "sha256:6881edbebdb17b39b4eaaa821b438bf6eddffb4468cf344f09f89def34a8b1df"}, +] +click = [ + {file = "click-8.0.4-py3-none-any.whl", hash = "sha256:6a7a62563bbfabfda3a38f3023a1db4a35978c0abd76f6c9605ecd6554d6d9b1"}, + {file = "click-8.0.4.tar.gz", hash = "sha256:8458d7b1287c5fb128c90e23381cf99dcde74beaf6c7ff6384ce84d6fe090adb"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = 
"sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +cryptography = [ + {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:73bc2d3f2444bcfeac67dd130ff2ea598ea5f20b40e36d19821b4df8c9c5037b"}, + {file = "cryptography-36.0.1-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:2d87cdcb378d3cfed944dac30596da1968f88fb96d7fc34fdae30a99054b2e31"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74d6c7e80609c0f4c2434b97b80c7f8fdfaa072ca4baab7e239a15d6d70ed73a"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:6c0c021f35b421ebf5976abf2daacc47e235f8b6082d3396a2fe3ccd537ab173"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59a9d55027a8b88fd9fd2826c4392bd487d74bf628bb9d39beecc62a644c12"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a817b961b46894c5ca8a66b599c745b9a3d9f822725221f0e0fe49dc043a3a3"}, + {file = "cryptography-36.0.1-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:94ae132f0e40fe48f310bba63f477f14a43116f05ddb69d6fa31e93f05848ae2"}, + {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7be0eec337359c155df191d6ae00a5e8bbb63933883f4f5dffc439dac5348c3f"}, + {file = "cryptography-36.0.1-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:e0344c14c9cb89e76eb6a060e67980c9e35b3f36691e15e1b7a9e58a0a6c6dc3"}, + {file = "cryptography-36.0.1-cp36-abi3-win32.whl", hash = "sha256:4caa4b893d8fad33cf1964d3e51842cd78ba87401ab1d2e44556826df849a8ca"}, + {file = "cryptography-36.0.1-cp36-abi3-win_amd64.whl", hash = "sha256:391432971a66cfaf94b21c24ab465a4cc3e8bf4a939c1ca5c3e3a6e0abebdbcf"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb5829d027ff82aa872d76158919045a7c1e91fbf241aec32cb07956e9ebd3c9"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebc15b1c22e55c4d5566e3ca4db8689470a0ca2babef8e3a9ee057a8b82ce4b1"}, + {file = "cryptography-36.0.1-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:596f3cd67e1b950bc372c33f1a28a0692080625592ea6392987dba7f09f17a94"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:30ee1eb3ebe1644d1c3f183d115a8c04e4e603ed6ce8e394ed39eea4a98469ac"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ec63da4e7e4a5f924b90af42eddf20b698a70e58d86a72d943857c4c6045b3ee"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca238ceb7ba0bdf6ce88c1b74a87bffcee5afbfa1e41e173b1ceb095b39add46"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:ca28641954f767f9822c24e927ad894d45d5a1e501767599647259cbf030b903"}, + {file = "cryptography-36.0.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:39bdf8e70eee6b1c7b289ec6e5d84d49a6bfa11f8b8646b5b3dfe41219153316"}, + {file = "cryptography-36.0.1.tar.gz", hash = "sha256:53e5c1dc3d7a953de055d77bef2ff607ceef7a2aac0353b5d630ab67f7423638"}, +] +docker = [ + {file = "docker-5.0.3-py2.py3-none-any.whl", hash = "sha256:7a79bb439e3df59d0a72621775d600bc8bc8b422d285824cb37103eab91d1ce0"}, + {file = "docker-5.0.3.tar.gz", hash = 
"sha256:d916a26b62970e7c2f554110ed6af04c7ccff8e9f81ad17d0d40c75637e227fb"}, +] +ecdsa = [ + {file = "ecdsa-0.17.0-py2.py3-none-any.whl", hash = "sha256:5cf31d5b33743abe0dfc28999036c849a69d548f994b535e527ee3cb7f3ef676"}, + {file = "ecdsa-0.17.0.tar.gz", hash = "sha256:b9f500bb439e4153d0330610f5d26baaf18d17b8ced1bc54410d189385ea68aa"}, +] +flask = [ + {file = "Flask-2.0.3-py3-none-any.whl", hash = "sha256:59da8a3170004800a2837844bfa84d49b022550616070f7cb1a659682b2e7c9f"}, + {file = "Flask-2.0.3.tar.gz", hash = "sha256:e1120c228ca2f553b470df4a5fa927ab66258467526069981b3eb0a91902687d"}, +] +flask-cors = [ + {file = "Flask-Cors-3.0.10.tar.gz", hash = "sha256:b60839393f3b84a0f3746f6cdca56c1ad7426aa738b70d6c61375857823181de"}, + {file = "Flask_Cors-3.0.10-py2.py3-none-any.whl", hash = "sha256:74efc975af1194fc7891ff5cd85b0f7478be4f7f59fe158102e91abb72bb4438"}, +] +frozenlist = [ + {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2257aaba9660f78c7b1d8fea963b68f3feffb1a9d5d05a18401ca9eb3e8d0a3"}, + {file = "frozenlist-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4a44ebbf601d7bac77976d429e9bdb5a4614f9f4027777f9e54fd765196e9d3b"}, + {file = "frozenlist-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:45334234ec30fc4ea677f43171b18a27505bfb2dba9aca4398a62692c0ea8868"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47be22dc27ed933d55ee55845d34a3e4e9f6fee93039e7f8ebadb0c2f60d403f"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:03a7dd1bfce30216a3f51a84e6dd0e4a573d23ca50f0346634916ff105ba6e6b"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:691ddf6dc50480ce49f68441f1d16a4c3325887453837036e0fb94736eae1e58"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bde99812f237f79eaf3f04ebffd74f6718bbd216101b35ac7955c2d47c17da02"}, + {file = "frozenlist-1.3.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a202458d1298ced3768f5a7d44301e7c86defac162ace0ab7434c2e961166e8"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b9e3e9e365991f8cc5f5edc1fd65b58b41d0514a6a7ad95ef5c7f34eb49b3d3e"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:04cb491c4b1c051734d41ea2552fde292f5f3a9c911363f74f39c23659c4af78"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:436496321dad302b8b27ca955364a439ed1f0999311c393dccb243e451ff66aa"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:754728d65f1acc61e0f4df784456106e35afb7bf39cfe37227ab00436fb38676"}, + {file = "frozenlist-1.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6eb275c6385dd72594758cbe96c07cdb9bd6becf84235f4a594bdf21e3596c9d"}, + {file = "frozenlist-1.3.0-cp310-cp310-win32.whl", hash = "sha256:e30b2f9683812eb30cf3f0a8e9f79f8d590a7999f731cf39f9105a7c4a39489d"}, + {file = "frozenlist-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:f7353ba3367473d1d616ee727945f439e027f0bb16ac1a750219a8344d1d5d3c"}, + {file = "frozenlist-1.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:88aafd445a233dbbf8a65a62bc3249a0acd0d81ab18f6feb461cc5a938610d24"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:4406cfabef8f07b3b3af0f50f70938ec06d9f0fc26cbdeaab431cbc3ca3caeaa"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8cf829bd2e2956066dd4de43fd8ec881d87842a06708c035b37ef632930505a2"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:603b9091bd70fae7be28bdb8aa5c9990f4241aa33abb673390a7f7329296695f"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25af28b560e0c76fa41f550eacb389905633e7ac02d6eb3c09017fa1c8cdfde1"}, + {file = "frozenlist-1.3.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94c7a8a9fc9383b52c410a2ec952521906d355d18fccc927fca52ab575ee8b93"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:65bc6e2fece04e2145ab6e3c47428d1bbc05aede61ae365b2c1bddd94906e478"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:3f7c935c7b58b0d78c0beea0c7358e165f95f1fd8a7e98baa40d22a05b4a8141"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd89acd1b8bb4f31b47072615d72e7f53a948d302b7c1d1455e42622de180eae"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:6983a31698490825171be44ffbafeaa930ddf590d3f051e397143a5045513b01"}, + {file = "frozenlist-1.3.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:adac9700675cf99e3615eb6a0eb5e9f5a4143c7d42c05cea2e7f71c27a3d0846"}, + {file = "frozenlist-1.3.0-cp37-cp37m-win32.whl", hash = "sha256:0c36e78b9509e97042ef869c0e1e6ef6429e55817c12d78245eb915e1cca7468"}, + {file = "frozenlist-1.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:57f4d3f03a18facacb2a6bcd21bccd011e3b75d463dc49f838fd699d074fabd1"}, + {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8c905a5186d77111f02144fab5b849ab524f1e876a1e75205cd1386a9be4b00a"}, + {file = "frozenlist-1.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b5009062d78a8c6890d50b4e53b0ddda31841b3935c1937e2ed8c1bda1c7fb9d"}, + {file = "frozenlist-1.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2fdc3cd845e5a1f71a0c3518528bfdbfe2efaf9886d6f49eacc5ee4fd9a10953"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e650bd09b5dda929523b9f8e7f99b24deac61240ecc1a32aeba487afcd970f"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40dff8962b8eba91fd3848d857203f0bd704b5f1fa2b3fc9af64901a190bba08"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:768efd082074bb203c934e83a61654ed4931ef02412c2fbdecea0cff7ecd0274"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:006d3595e7d4108a12025ddf415ae0f6c9e736e726a5db0183326fd191b14c5e"}, + {file = "frozenlist-1.3.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:871d42623ae15eb0b0e9df65baeee6976b2e161d0ba93155411d58ff27483ad8"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aff388be97ef2677ae185e72dc500d19ecaf31b698986800d3fc4f399a5e30a5"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f892d6a94ec5c7b785e548e42722e6f3a52f5f32a8461e82ac3e67a3bd073f1"}, + {file = 
"frozenlist-1.3.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e982878792c971cbd60ee510c4ee5bf089a8246226dea1f2138aa0bb67aff148"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:c6c321dd013e8fc20735b92cb4892c115f5cdb82c817b1e5b07f6b95d952b2f0"}, + {file = "frozenlist-1.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:30530930410855c451bea83f7b272fb1c495ed9d5cc72895ac29e91279401db3"}, + {file = "frozenlist-1.3.0-cp38-cp38-win32.whl", hash = "sha256:40ec383bc194accba825fbb7d0ef3dda5736ceab2375462f1d8672d9f6b68d07"}, + {file = "frozenlist-1.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:f20baa05eaa2bcd5404c445ec51aed1c268d62600362dc6cfe04fae34a424bd9"}, + {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0437fe763fb5d4adad1756050cbf855bbb2bf0d9385c7bb13d7a10b0dd550486"}, + {file = "frozenlist-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b684c68077b84522b5c7eafc1dc735bfa5b341fb011d5552ebe0968e22ed641c"}, + {file = "frozenlist-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93641a51f89473837333b2f8100f3f89795295b858cd4c7d4a1f18e299dc0a4f"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d6d32ff213aef0fd0bcf803bffe15cfa2d4fde237d1d4838e62aec242a8362fa"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31977f84828b5bb856ca1eb07bf7e3a34f33a5cddce981d880240ba06639b94d"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3c62964192a1c0c30b49f403495911298810bada64e4f03249ca35a33ca0417a"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eda49bea3602812518765810af732229b4291d2695ed24a0a20e098c45a707b"}, + {file = "frozenlist-1.3.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acb267b09a509c1df5a4ca04140da96016f40d2ed183cdc356d237286c971b51"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e1e26ac0a253a2907d654a37e390904426d5ae5483150ce3adedb35c8c06614a"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f96293d6f982c58ebebb428c50163d010c2f05de0cde99fd681bfdc18d4b2dc2"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e84cb61b0ac40a0c3e0e8b79c575161c5300d1d89e13c0e02f76193982f066ed"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:ff9310f05b9d9c5c4dd472983dc956901ee6cb2c3ec1ab116ecdde25f3ce4951"}, + {file = "frozenlist-1.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d26b650b71fdc88065b7a21f8ace70175bcf3b5bdba5ea22df4bfd893e795a3b"}, + {file = "frozenlist-1.3.0-cp39-cp39-win32.whl", hash = "sha256:01a73627448b1f2145bddb6e6c2259988bb8aee0fb361776ff8604b99616cd08"}, + {file = "frozenlist-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:772965f773757a6026dea111a15e6e2678fbd6216180f82a48a40b27de1ee2ab"}, + {file = "frozenlist-1.3.0.tar.gz", hash = "sha256:ce6f2ba0edb7b0c1d8976565298ad2deba6f8064d2bebb6ffce2ca896eb35b0b"}, +] +fsspec = [ + {file = "fsspec-2022.2.0-py3-none-any.whl", hash = "sha256:eb9c9d9aee49d23028deefffe53e87c55d3515512c63f57e893710301001449a"}, + {file = "fsspec-2022.2.0.tar.gz", hash = "sha256:20322c659538501f52f6caa73b08b2ff570b7e8ea30a86559721d090e473ad5c"}, +] +future = [ + {file = "future-0.18.2.tar.gz", hash = 
"sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, +] +graphql-core = [ + {file = "graphql-core-3.2.0.tar.gz", hash = "sha256:86e2a0be008bfde19ef78388de8a725a1d942a9190ca431c24a60837973803ce"}, + {file = "graphql_core-3.2.0-py3-none-any.whl", hash = "sha256:0dda7e63676f119bb3d814621190fedad72fda07a8e9ab780bedd9f1957c6dc6"}, +] +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.11.2-py3-none-any.whl", hash = "sha256:d16e8c1deb60de41b8e8ed21c1a7b947b0bc62fab7e1d470bcdf331cea2e6735"}, + {file = "importlib_metadata-4.11.2.tar.gz", hash = "sha256:b36ffa925fe3139b2f6ff11d6925ffd4fa7bc47870165e3ac260ac7b4f91e6ac"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +itsdangerous = [ + {file = "itsdangerous-2.1.0-py3-none-any.whl", hash = "sha256:29285842166554469a56d427addc0843914172343784cb909695fdbe90a3e129"}, + {file = "itsdangerous-2.1.0.tar.gz", hash = "sha256:d848fcb8bc7d507c4546b448574e8a44fc4ea2ba84ebf8d783290d53e81992f5"}, +] +jinja2 = [ + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, +] +jmespath = [ + {file = "jmespath-0.10.0-py2.py3-none-any.whl", hash = "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f"}, + {file = "jmespath-0.10.0.tar.gz", hash = "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9"}, +] +jschema-to-python = [ + {file = "jschema_to_python-1.2.3-py3-none-any.whl", hash = "sha256:8a703ca7604d42d74b2815eecf99a33359a8dccbb80806cce386d5e2dd992b05"}, + {file = "jschema_to_python-1.2.3.tar.gz", hash = "sha256:76ff14fe5d304708ccad1284e4b11f96a658949a31ee7faed9e0995279549b91"}, +] +jsondiff = [ + {file = "jsondiff-1.3.1.tar.gz", hash = "sha256:04cfaebd4a5e5738948ab615710dc3ee98efbdf851255fd3977c4c2ee59e7312"}, +] +jsonpatch = [ + {file = "jsonpatch-1.32-py2.py3-none-any.whl", hash = "sha256:26ac385719ac9f54df8a2f0827bb8253aa3ea8ab7b3368457bcdb8c14595a397"}, + {file = "jsonpatch-1.32.tar.gz", hash = "sha256:b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"}, +] +jsonpickle = [ + {file = "jsonpickle-2.1.0-py2.py3-none-any.whl", hash = "sha256:1dee77ddc5d652dfdabc33d33cff9d7e131d428007007da4fd6f7071ae774b0f"}, + {file = "jsonpickle-2.1.0.tar.gz", hash = "sha256:84684cfc5338a534173c8dd69809e40f2865d0be1f8a2b7af8465e5b968dcfa9"}, +] +jsonpointer = [ + {file = "jsonpointer-2.2-py2.py3-none-any.whl", hash = "sha256:26d9a47a72d4dc3e3ae72c4c6cd432afd73c680164cd2540772eab53cb3823b6"}, + {file = "jsonpointer-2.2.tar.gz", hash = "sha256:f09f8deecaaa5aea65b5eb4f67ca4e54e1a61f7a11c75085e360fe6feb6a48bf"}, +] +jsonschema = [ + {file = "jsonschema-3.2.0-py2.py3-none-any.whl", hash = "sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163"}, + {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, +] +junit-xml = [ + {file = "junit_xml-1.9-py2.py3-none-any.whl", hash = 
"sha256:ec5ca1a55aefdd76d28fcc0b135251d156c7106fa979686a4b48d62b761b4732"}, +] +markupsafe = [ + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3028252424c72b2602a323f70fbf50aa80a5d3aa616ea6add4ba21ae9cc9da4c"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:290b02bab3c9e216da57c1d11d2ba73a9f73a614bbdcc027d299a60cdfabb11a"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6e104c0c2b4cd765b4e83909cde7ec61a1e313f8a75775897db321450e928cce"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:24c3be29abb6b34052fd26fc7a8e0a49b1ee9d282e3665e8ad09a0a68faee5b3"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204730fd5fe2fe3b1e9ccadb2bd18ba8712b111dcabce185af0b3b5285a7c989"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d3b64c65328cb4cd252c94f83e66e3d7acf8891e60ebf588d7b493a55a1dbf26"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:96de1932237abe0a13ba68b63e94113678c379dca45afa040a17b6e1ad7ed076"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75bb36f134883fdbe13d8e63b8675f5f12b80bb6627f7714c7d6c5becf22719f"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win32.whl", hash = "sha256:4056f752015dfa9828dce3140dbadd543b555afb3252507348c493def166d454"}, + {file = "MarkupSafe-2.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:d4e702eea4a2903441f2735799d217f4ac1b55f7d8ad96ab7d4e25417cb0827c"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:f0eddfcabd6936558ec020130f932d479930581171368fd728efcfb6ef0dd357"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ddea4c352a488b5e1069069f2f501006b1a4362cb906bee9a193ef1245a7a61"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:09c86c9643cceb1d87ca08cdc30160d1b7ab49a8a21564868921959bd16441b8"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a0abef2ca47b33fb615b491ce31b055ef2430de52c5b3fb19a4042dbc5cadb"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:736895a020e31b428b3382a7887bfea96102c529530299f426bf2e636aacec9e"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:679cbb78914ab212c49c67ba2c7396dc599a8479de51b9a87b174700abd9ea49"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:84ad5e29bf8bab3ad70fd707d3c05524862bddc54dc040982b0dbcff36481de7"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win32.whl", hash = "sha256:8da5924cb1f9064589767b0f3fc39d03e3d0fb5aa29e0cb21d43106519bd624a"}, + {file = "MarkupSafe-2.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:454ffc1cbb75227d15667c09f164a0099159da0c1f3d2636aa648f12675491ad"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:142119fb14a1ef6d758912b25c4e803c3ff66920635c44078666fe7cc3f8f759"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b2a5a856019d2833c56a3dcac1b80fe795c95f401818ea963594b345929dffa7"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1d1fb9b2eec3c9714dd936860850300b51dbaa37404209c8d4cb66547884b7ed"}, + {file 
= "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62c0285e91414f5c8f621a17b69fc0088394ccdaa961ef469e833dbff64bd5ea"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc3150f85e2dbcf99e65238c842d1cfe69d3e7649b19864c1cc043213d9cd730"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f02cf7221d5cd915d7fa58ab64f7ee6dd0f6cddbb48683debf5d04ae9b1c2cc1"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d5653619b3eb5cbd35bfba3c12d575db2a74d15e0e1c08bf1db788069d410ce8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7d2f5d97fcbd004c03df8d8fe2b973fe2b14e7bfeb2cfa012eaa8759ce9a762f"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win32.whl", hash = "sha256:3cace1837bc84e63b3fd2dfce37f08f8c18aeb81ef5cf6bb9b51f625cb4e6cd8"}, + {file = "MarkupSafe-2.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:fabbe18087c3d33c5824cb145ffca52eccd053061df1d79d4b66dafa5ad2a5ea"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:023af8c54fe63530545f70dd2a2a7eed18d07a9a77b94e8bf1e2ff7f252db9a3"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d66624f04de4af8bbf1c7f21cc06649c1c69a7f84109179add573ce35e46d448"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c532d5ab79be0199fa2658e24a02fce8542df196e60665dd322409a03db6a52c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67ec74fada3841b8c5f4c4f197bea916025cb9aa3fe5abf7d52b655d042f956"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c653fde75a6e5eb814d2a0a89378f83d1d3f502ab710904ee585c38888816c"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:961eb86e5be7d0973789f30ebcf6caab60b844203f4396ece27310295a6082c7"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:598b65d74615c021423bd45c2bc5e9b59539c875a9bdb7e5f2a6b92dfcfc268d"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:599941da468f2cf22bf90a84f6e2a65524e87be2fce844f96f2dd9a6c9d1e635"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win32.whl", hash = "sha256:e6f7f3f41faffaea6596da86ecc2389672fa949bd035251eab26dc6697451d05"}, + {file = "MarkupSafe-2.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:b8811d48078d1cf2a6863dafb896e68406c5f513048451cd2ded0473133473c7"}, + {file = "MarkupSafe-2.1.0.tar.gz", hash = "sha256:80beaf63ddfbc64a0452b841d8036ca0611e049650e20afcb882f5d3c266d65f"}, +] +moto = [ + {file = "moto-2.3.2-py2.py3-none-any.whl", hash = "sha256:0c29f5813d4db69b2f99c5538909a5aba0ba1cb91a74c19eddd9bfdc39ed2ff3"}, + {file = "moto-2.3.2.tar.gz", hash = "sha256:eaaed229742adbd1387383d113350ecd9222fc1e8f5611a9395a058c1eee4377"}, +] +multidict = [ + {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, + {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac0e27844758d7177989ce406acc6a83c16ed4524ebc363c1f748cba184d89d3"}, + {file = "multidict-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:041b81a5f6b38244b34dc18c7b6aba91f9cdaf854d9a39e5ff0b58e2b5773b9c"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:5fdda29a3c7e76a064f2477c9aab1ba96fd94e02e386f1e665bca1807fc5386f"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3368bf2398b0e0fcbf46d85795adc4c259299fec50c1416d0f77c0a843a3eed9"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4f052ee022928d34fe1f4d2bc743f32609fb79ed9c49a1710a5ad6b2198db20"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:225383a6603c086e6cef0f2f05564acb4f4d5f019a4e3e983f572b8530f70c88"}, + {file = "multidict-6.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50bd442726e288e884f7be9071016c15a8742eb689a593a0cac49ea093eef0a7"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:47e6a7e923e9cada7c139531feac59448f1f47727a79076c0b1ee80274cd8eee"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0556a1d4ea2d949efe5fd76a09b4a82e3a4a30700553a6725535098d8d9fb672"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:626fe10ac87851f4cffecee161fc6f8f9853f0f6f1035b59337a51d29ff3b4f9"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:8064b7c6f0af936a741ea1efd18690bacfbae4078c0c385d7c3f611d11f0cf87"}, + {file = "multidict-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2d36e929d7f6a16d4eb11b250719c39560dd70545356365b494249e2186bc389"}, + {file = "multidict-6.0.2-cp310-cp310-win32.whl", hash = "sha256:fcb91630817aa8b9bc4a74023e4198480587269c272c58b3279875ed7235c293"}, + {file = "multidict-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:8cbf0132f3de7cc6c6ce00147cc78e6439ea736cee6bca4f068bcf892b0fd658"}, + {file = "multidict-6.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:05f6949d6169878a03e607a21e3b862eaf8e356590e8bdae4227eedadacf6e51"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2c2e459f7050aeb7c1b1276763364884595d47000c1cddb51764c0d8976e608"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d0509e469d48940147e1235d994cd849a8f8195e0bca65f8f5439c56e17872a3"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:514fe2b8d750d6cdb4712346a2c5084a80220821a3e91f3f71eec11cf8d28fd4"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19adcfc2a7197cdc3987044e3f415168fc5dc1f720c932eb1ef4f71a2067e08b"}, + {file = "multidict-6.0.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9d153e7f1f9ba0b23ad1568b3b9e17301e23b042c23870f9ee0522dc5cc79e8"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:aef9cc3d9c7d63d924adac329c33835e0243b5052a6dfcbf7732a921c6e918ba"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4571f1beddff25f3e925eea34268422622963cd8dc395bb8778eb28418248e43"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:d48b8ee1d4068561ce8033d2c344cf5232cb29ee1a0206a7b828c79cbc5982b8"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:45183c96ddf61bf96d2684d9fbaf6f3564d86b34cb125761f9a0ef9e36c1d55b"}, + {file = "multidict-6.0.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:75bdf08716edde767b09e76829db8c1e5ca9d8bb0a8d4bd94ae1eafe3dac5e15"}, + {file = "multidict-6.0.2-cp37-cp37m-win32.whl", hash = "sha256:a45e1135cb07086833ce969555df39149680e5471c04dfd6a915abd2fc3f6dbc"}, + {file = "multidict-6.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6f3cdef8a247d1eafa649085812f8a310e728bdf3900ff6c434eafb2d443b23a"}, + {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0327292e745a880459ef71be14e709aaea2f783f3537588fb4ed09b6c01bca60"}, + {file = "multidict-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e875b6086e325bab7e680e4316d667fc0e5e174bb5611eb16b3ea121c8951b86"}, + {file = "multidict-6.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:feea820722e69451743a3d56ad74948b68bf456984d63c1a92e8347b7b88452d"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cc57c68cb9139c7cd6fc39f211b02198e69fb90ce4bc4a094cf5fe0d20fd8b0"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:497988d6b6ec6ed6f87030ec03280b696ca47dbf0648045e4e1d28b80346560d"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:89171b2c769e03a953d5969b2f272efa931426355b6c0cb508022976a17fd376"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:684133b1e1fe91eda8fa7447f137c9490a064c6b7f392aa857bba83a28cfb693"}, + {file = "multidict-6.0.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd9fc9c4849a07f3635ccffa895d57abce554b467d611a5009ba4f39b78a8849"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e07c8e79d6e6fd37b42f3250dba122053fddb319e84b55dd3a8d6446e1a7ee49"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4070613ea2227da2bfb2c35a6041e4371b0af6b0be57f424fe2318b42a748516"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:47fbeedbf94bed6547d3aa632075d804867a352d86688c04e606971595460227"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:5774d9218d77befa7b70d836004a768fb9aa4fdb53c97498f4d8d3f67bb9cfa9"}, + {file = "multidict-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2957489cba47c2539a8eb7ab32ff49101439ccf78eab724c828c1a54ff3ff98d"}, + {file = "multidict-6.0.2-cp38-cp38-win32.whl", hash = "sha256:e5b20e9599ba74391ca0cfbd7b328fcc20976823ba19bc573983a25b32e92b57"}, + {file = "multidict-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:8004dca28e15b86d1b1372515f32eb6f814bdf6f00952699bdeb541691091f96"}, + {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2e4a0785b84fb59e43c18a015ffc575ba93f7d1dbd272b4cdad9f5134b8a006c"}, + {file = "multidict-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6701bf8a5d03a43375909ac91b6980aea74b0f5402fbe9428fc3f6edf5d9677e"}, + {file = "multidict-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a007b1638e148c3cfb6bf0bdc4f82776cef0ac487191d093cdc316905e504071"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:07a017cfa00c9890011628eab2503bee5872f27144936a52eaab449be5eaf032"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c207fff63adcdf5a485969131dc70e4b194327666b7e8a87a97fbc4fd80a53b2"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:373ba9d1d061c76462d74e7de1c0c8e267e9791ee8cfefcf6b0b2495762c370c"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfba7c6d5d7c9099ba21f84662b037a0ffd4a5e6b26ac07d19e423e6fdf965a9"}, + {file = "multidict-6.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19d9bad105dfb34eb539c97b132057a4e709919ec4dd883ece5838bcbf262b80"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:de989b195c3d636ba000ee4281cd03bb1234635b124bf4cd89eeee9ca8fcb09d"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7c40b7bbece294ae3a87c1bc2abff0ff9beef41d14188cda94ada7bcea99b0fb"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:d16cce709ebfadc91278a1c005e3c17dd5f71f5098bfae1035149785ea6e9c68"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:a2c34a93e1d2aa35fbf1485e5010337c72c6791407d03aa5f4eed920343dd360"}, + {file = "multidict-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:feba80698173761cddd814fa22e88b0661e98cb810f9f986c54aa34d281e4937"}, + {file = "multidict-6.0.2-cp39-cp39-win32.whl", hash = "sha256:23b616fdc3c74c9fe01d76ce0d1ce872d2d396d8fa8e4899398ad64fb5aa214a"}, + {file = "multidict-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:4bae31803d708f6f15fd98be6a6ac0b6958fcf68fda3c77a048a4f9073704aae"}, + {file = "multidict-6.0.2.tar.gz", hash = "sha256:5ff3bd75f38e4c43f1f470f2df7a4d430b821c4ce22be384e1459cb57d6bb013"}, +] +networkx = [ + {file = "networkx-2.6.3-py3-none-any.whl", hash = "sha256:80b6b89c77d1dfb64a4c7854981b60aeea6360ac02c6d4e4913319e0a313abef"}, + {file = "networkx-2.6.3.tar.gz", hash = "sha256:c0946ed31d71f1b732b5aaa6da5a0388a345019af232ce2f49c766e2d6795c51"}, +] +packaging = [ + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, +] +pbr = [ + {file = "pbr-5.8.1-py2.py3-none-any.whl", hash = "sha256:27108648368782d07bbf1cb468ad2e2eeef29086affd14087a6d04b7de8af4ec"}, + {file = "pbr-5.8.1.tar.gz", hash = "sha256:66bc5a34912f408bb3925bf21231cb6f59206267b7f63f3503ef865c1a292e25"}, +] +pluggy = [ + {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, +] +py = [ + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, +] +pyasn1 = [ + {file = "pyasn1-0.4.8-py2.4.egg", hash = "sha256:fec3e9d8e36808a28efb59b489e4528c10ad0f480e57dcc32b4de5c9d8c9fdf3"}, + {file = "pyasn1-0.4.8-py2.5.egg", hash = "sha256:0458773cfe65b153891ac249bcf1b5f8f320b7c2ce462151f8fa74de8934becf"}, + {file = "pyasn1-0.4.8-py2.6.egg", hash = "sha256:5c9414dcfede6e441f7e8f81b43b34e834731003427e5b09e4e00e3172a10f00"}, + {file = "pyasn1-0.4.8-py2.7.egg", hash = "sha256:6e7545f1a61025a4e58bb336952c5061697da694db1cae97b116e9c46abcf7c8"}, + {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, + {file = "pyasn1-0.4.8-py3.1.egg", hash = 
"sha256:78fa6da68ed2727915c4767bb386ab32cdba863caa7dbe473eaae45f9959da86"}, + {file = "pyasn1-0.4.8-py3.2.egg", hash = "sha256:08c3c53b75eaa48d71cf8c710312316392ed40899cb34710d092e96745a358b7"}, + {file = "pyasn1-0.4.8-py3.3.egg", hash = "sha256:03840c999ba71680a131cfaee6fab142e1ed9bbd9c693e285cc6aca0d555e576"}, + {file = "pyasn1-0.4.8-py3.4.egg", hash = "sha256:7ab8a544af125fb704feadb008c99a88805126fb525280b2270bb25cc1d78a12"}, + {file = "pyasn1-0.4.8-py3.5.egg", hash = "sha256:e89bf84b5437b532b0803ba5c9a5e054d21fec423a89952a74f87fa2c9b7bce2"}, + {file = "pyasn1-0.4.8-py3.6.egg", hash = "sha256:014c0e9976956a08139dc0712ae195324a75e142284d5f87f1a87ee1b068a359"}, + {file = "pyasn1-0.4.8-py3.7.egg", hash = "sha256:99fcc3c8d804d1bc6d9a099921e39d827026409a58f2a720dcdb89374ea0c776"}, + {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, +] +pycparser = [ + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, +] +pyparsing = [ + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, +] +pyrsistent = [ + {file = "pyrsistent-0.18.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:df46c854f490f81210870e509818b729db4488e1f30f2a1ce1698b2295a878d1"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d45866ececf4a5fff8742c25722da6d4c9e180daa7b405dc0a2a2790d668c26"}, + {file = "pyrsistent-0.18.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4ed6784ceac462a7d6fcb7e9b663e93b9a6fb373b7f43594f9ff68875788e01e"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win32.whl", hash = "sha256:e4f3149fd5eb9b285d6bfb54d2e5173f6a116fe19172686797c056672689daf6"}, + {file = "pyrsistent-0.18.1-cp310-cp310-win_amd64.whl", hash = "sha256:636ce2dc235046ccd3d8c56a7ad54e99d5c1cd0ef07d9ae847306c91d11b5fec"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e92a52c166426efbe0d1ec1332ee9119b6d32fc1f0bbfd55d5c1088070e7fc1b"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7a096646eab884bf8bed965bad63ea327e0d0c38989fc83c5ea7b8a87037bfc"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cdfd2c361b8a8e5d9499b9082b501c452ade8bbf42aef97ea04854f4a3f43b22"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win32.whl", hash = "sha256:7ec335fc998faa4febe75cc5268a9eac0478b3f681602c1f27befaf2a1abe1d8"}, + {file = "pyrsistent-0.18.1-cp37-cp37m-win_amd64.whl", hash = "sha256:6455fc599df93d1f60e1c5c4fe471499f08d190d57eca040c0ea182301321286"}, + {file = "pyrsistent-0.18.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fd8da6d0124efa2f67d86fa70c851022f87c98e205f0594e1fae044e7119a5a6"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7bfe2388663fd18bd8ce7db2c91c7400bf3e1a9e8bd7d63bf7e77d39051b85ec"}, + {file = "pyrsistent-0.18.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:0e3e1fcc45199df76053026a51cc59ab2ea3fc7c094c6627e93b7b44cdae2c8c"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win32.whl", hash = "sha256:b568f35ad53a7b07ed9b1b2bae09eb15cdd671a5ba5d2c66caee40dbf91c68ca"}, + {file = "pyrsistent-0.18.1-cp38-cp38-win_amd64.whl", hash = "sha256:d1b96547410f76078eaf66d282ddca2e4baae8964364abb4f4dcdde855cd123a"}, + {file = "pyrsistent-0.18.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f87cc2863ef33c709e237d4b5f4502a62a00fab450c9e020892e8e2ede5847f5"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6bc66318fb7ee012071b2792024564973ecc80e9522842eb4e17743604b5e045"}, + {file = "pyrsistent-0.18.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914474c9f1d93080338ace89cb2acee74f4f666fb0424896fcfb8d86058bf17c"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win32.whl", hash = "sha256:1b34eedd6812bf4d33814fca1b66005805d3640ce53140ab8bbb1e2651b0d9bc"}, + {file = "pyrsistent-0.18.1-cp39-cp39-win_amd64.whl", hash = "sha256:e24a828f57e0c337c8d8bb9f6b12f09dfdf0273da25fda9e314f0b684b415a07"}, + {file = "pyrsistent-0.18.1.tar.gz", hash = "sha256:d4d61f8b993a7255ba714df3aca52700f8125289f84f704cf80916517c46eb96"}, +] +pytest = [ + {file = "pytest-7.0.1-py3-none-any.whl", hash = "sha256:9ce3ff477af913ecf6321fe337b93a2c0dcf2a0a1439c43f5452112c1e4280db"}, + {file = "pytest-7.0.1.tar.gz", hash = "sha256:e30905a0c131d3d94b89624a1cc5afec3e0ba2fbdb151867d8e0ebd49850f171"}, +] +pytest-env = [ + {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, +] +python-dateutil = [ + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, +] +python-jose = [ + {file = "python-jose-3.3.0.tar.gz", hash = "sha256:55779b5e6ad599c6336191246e95eb2293a9ddebd555f796a65f838f07e5d78a"}, + {file = "python_jose-3.3.0-py2.py3-none-any.whl", hash = "sha256:9b1376b023f8b298536eedd47ae1089bcdb848f1535ab30555cd92002d78923a"}, +] +pytz = [ + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, +] +pywin32 = [ + {file = "pywin32-227-cp27-cp27m-win32.whl", hash = "sha256:371fcc39416d736401f0274dd64c2302728c9e034808e37381b5e1b22be4a6b0"}, + {file = "pywin32-227-cp27-cp27m-win_amd64.whl", hash = "sha256:4cdad3e84191194ea6d0dd1b1b9bdda574ff563177d2adf2b4efec2a244fa116"}, + {file = "pywin32-227-cp35-cp35m-win32.whl", hash = "sha256:f4c5be1a293bae0076d93c88f37ee8da68136744588bc5e2be2f299a34ceb7aa"}, + {file = "pywin32-227-cp35-cp35m-win_amd64.whl", hash = "sha256:a929a4af626e530383a579431b70e512e736e9588106715215bf685a3ea508d4"}, + {file = "pywin32-227-cp36-cp36m-win32.whl", hash = "sha256:300a2db938e98c3e7e2093e4491439e62287d0d493fe07cce110db070b54c0be"}, + {file = "pywin32-227-cp36-cp36m-win_amd64.whl", hash = "sha256:9b31e009564fb95db160f154e2aa195ed66bcc4c058ed72850d047141b36f3a2"}, + {file = "pywin32-227-cp37-cp37m-win32.whl", hash = "sha256:47a3c7551376a865dd8d095a98deba954a98f326c6fe3c72d8726ca6e6b15507"}, + {file = "pywin32-227-cp37-cp37m-win_amd64.whl", hash = "sha256:31f88a89139cb2adc40f8f0e65ee56a8c585f629974f9e07622ba80199057511"}, + {file 
= "pywin32-227-cp38-cp38-win32.whl", hash = "sha256:7f18199fbf29ca99dff10e1f09451582ae9e372a892ff03a28528a24d55875bc"}, + {file = "pywin32-227-cp38-cp38-win_amd64.whl", hash = "sha256:7c1ae32c489dc012930787f06244426f8356e129184a02c25aef163917ce158e"}, + {file = "pywin32-227-cp39-cp39-win32.whl", hash = "sha256:c054c52ba46e7eb6b7d7dfae4dbd987a1bb48ee86debe3f245a2884ece46e295"}, + {file = "pywin32-227-cp39-cp39-win_amd64.whl", hash = "sha256:f27cec5e7f588c3d1051651830ecc00294f90728d19c3bf6916e6dba93ea357c"}, +] +pyyaml = [ + {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, + {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b"}, + {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, + {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, + {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, + {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4"}, + {file = "PyYAML-6.0-cp36-cp36m-win32.whl", hash = "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293"}, + {file = "PyYAML-6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57"}, + {file = "PyYAML-6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4"}, + {file = "PyYAML-6.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9"}, + {file = "PyYAML-6.0-cp37-cp37m-win32.whl", hash = "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737"}, + {file = "PyYAML-6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d"}, + {file = "PyYAML-6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34"}, + {file = "PyYAML-6.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287"}, + {file = "PyYAML-6.0-cp38-cp38-win32.whl", hash = "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78"}, + {file = "PyYAML-6.0-cp38-cp38-win_amd64.whl", hash = "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b"}, + {file = "PyYAML-6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3"}, + {file = "PyYAML-6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0"}, + {file = "PyYAML-6.0-cp39-cp39-win32.whl", hash = "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb"}, + {file = "PyYAML-6.0-cp39-cp39-win_amd64.whl", hash = "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c"}, + {file = "PyYAML-6.0.tar.gz", hash = "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2"}, +] +requests = [ + {file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +responses = [ + {file = "responses-0.19.0-py3-none-any.whl", hash = "sha256:53354b5de163aa2074312c71d8ebccb8bd1ab336cff7053abb75e84dc5637abe"}, + {file = "responses-0.19.0.tar.gz", hash = "sha256:3fc29c3117e14136b833a0a6d4e7f1217c6301bf08b6086db468e12f1e3290e2"}, +] +rsa = [ + {file = "rsa-4.8-py3-none-any.whl", hash = "sha256:95c5d300c4e879ee69708c428ba566c59478fd653cc3a22243eeb8ed846950bb"}, + {file = "rsa-4.8.tar.gz", hash = "sha256:5c6bd9dc7a543b7fe4304a631f8a8a3b674e2bbfc49c2ae96200cdbe55df6b17"}, +] +s3transfer = [ + {file = "s3transfer-0.5.2-py3-none-any.whl", hash = "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971"}, + {file = "s3transfer-0.5.2.tar.gz", hash = "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed"}, +] +sarif-om = [ + {file = "sarif_om-1.0.4-py3-none-any.whl", hash = "sha256:539ef47a662329b1c8502388ad92457425e95dc0aaaf995fe46f4984c4771911"}, + {file = "sarif_om-1.0.4.tar.gz", hash = "sha256:cd5f416b3083e00d402a92e449a7ff67af46f11241073eea0461802a3b5aef98"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = 
"sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sshpubkeys = [ + {file = "sshpubkeys-3.3.1-py2.py3-none-any.whl", hash = "sha256:946f76b8fe86704b0e7c56a00d80294e39bc2305999844f079a217885060b1ac"}, + {file = "sshpubkeys-3.3.1.tar.gz", hash = "sha256:3020ed4f8c846849299370fbe98ff4157b0ccc1accec105e07cfa9ae4bb55064"}, +] +tomli = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] +typing-extensions = [ + {file = "typing_extensions-4.1.1-py3-none-any.whl", hash = "sha256:21c85e0fe4b9a155d0799430b0ad741cdce7e359660ccbd8b530613e8df88ce2"}, + {file = "typing_extensions-4.1.1.tar.gz", hash = "sha256:1a9462dcc3347a79b1f1c0271fbe79e844580bb598bafa1ed208b94da3cdcd42"}, +] +urllib3 = [ + {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, + {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, +] +websocket-client = [ + {file = "websocket-client-1.3.1.tar.gz", hash = "sha256:6278a75065395418283f887de7c3beafb3aa68dada5cacbe4b214e8d26da499b"}, + {file = "websocket_client-1.3.1-py3-none-any.whl", hash = "sha256:074e2ed575e7c822fc0940d31c3ac9bb2b1142c303eafcf3e304e6ce035522e8"}, +] +werkzeug = [ + {file = "Werkzeug-2.0.3-py3-none-any.whl", hash = "sha256:1421ebfc7648a39a5c58c601b154165d05cf47a3cd0ccb70857cbdacf6c8f2b8"}, + {file = "Werkzeug-2.0.3.tar.gz", hash = "sha256:b863f8ff057c522164b6067c9e28b041161b4be5ba4d0daceeaa50a163822d3c"}, +] +wrapt = [ + {file = "wrapt-1.13.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:e05e60ff3b2b0342153be4d1b597bbcfd8330890056b9619f4ad6b8d5c96a81a"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:85148f4225287b6a0665eef08a178c15097366d46b210574a658c1ff5b377489"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:2dded5496e8f1592ec27079b28b6ad2a1ef0b9296d270f77b8e4a3a796cf6909"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e94b7d9deaa4cc7bac9198a58a7240aaf87fe56c6277ee25fa5b3aa1edebd229"}, + {file = "wrapt-1.13.3-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:498e6217523111d07cd67e87a791f5e9ee769f9241fcf8a379696e25806965af"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:ec7e20258ecc5174029a0f391e1b948bf2906cd64c198a9b8b281b811cbc04de"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:87883690cae293541e08ba2da22cacaae0a092e0ed56bbba8d018cc486fbafbb"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:f99c0489258086308aad4ae57da9e8ecf9e1f3f30fa35d5e170b4d4896554d80"}, + {file = "wrapt-1.13.3-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:6a03d9917aee887690aa3f1747ce634e610f6db6f6b332b35c2dd89412912bca"}, + {file = "wrapt-1.13.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:936503cb0a6ed28dbfa87e8fcd0a56458822144e9d11a49ccee6d9a8adb2ac44"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f9c51d9af9abb899bd34ace878fbec8bf357b3194a10c4e8e0a25512826ef056"}, + {file = "wrapt-1.13.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:220a869982ea9023e163ba915077816ca439489de6d2c09089b219f4e11b6785"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0877fe981fd76b183711d767500e6b3111378ed2043c145e21816ee589d91096"}, + {file = "wrapt-1.13.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:43e69ffe47e3609a6aec0fe723001c60c65305784d964f5007d5b4fb1bc6bf33"}, + {file = "wrapt-1.13.3-cp310-cp310-win32.whl", hash = "sha256:78dea98c81915bbf510eb6a3c9c24915e4660302937b9ae05a0947164248020f"}, + {file = "wrapt-1.13.3-cp310-cp310-win_amd64.whl", hash = "sha256:ea3e746e29d4000cd98d572f3ee2a6050a4f784bb536f4ac1f035987fc1ed83e"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:8c73c1a2ec7c98d7eaded149f6d225a692caa1bd7b2401a14125446e9e90410d"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:086218a72ec7d986a3eddb7707c8c4526d677c7b35e355875a0fe2918b059179"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:e92d0d4fa68ea0c02d39f1e2f9cb5bc4b4a71e8c442207433d8db47ee79d7aa3"}, + {file = "wrapt-1.13.3-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:d4a5f6146cfa5c7ba0134249665acd322a70d1ea61732723c7d3e8cc0fa80755"}, + {file = "wrapt-1.13.3-cp35-cp35m-win32.whl", hash = "sha256:8aab36778fa9bba1a8f06a4919556f9f8c7b33102bd71b3ab307bb3fecb21851"}, + {file = "wrapt-1.13.3-cp35-cp35m-win_amd64.whl", hash = "sha256:944b180f61f5e36c0634d3202ba8509b986b5fbaf57db3e94df11abee244ba13"}, + {file = "wrapt-1.13.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2ebdde19cd3c8cdf8df3fc165bc7827334bc4e353465048b36f7deeae8ee0918"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:610f5f83dd1e0ad40254c306f4764fcdc846641f120c3cf424ff57a19d5f7ade"}, + {file = "wrapt-1.13.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5601f44a0f38fed36cc07db004f0eedeaadbdcec90e4e90509480e7e6060a5bc"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e6906d6f48437dfd80464f7d7af1740eadc572b9f7a4301e7dd3d65db285cacf"}, + {file = "wrapt-1.13.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:766b32c762e07e26f50d8a3468e3b4228b3736c805018e4b0ec8cc01ecd88125"}, + {file = "wrapt-1.13.3-cp36-cp36m-win32.whl", hash = "sha256:5f223101f21cfd41deec8ce3889dc59f88a59b409db028c469c9b20cfeefbe36"}, + {file = "wrapt-1.13.3-cp36-cp36m-win_amd64.whl", hash = "sha256:f122ccd12fdc69628786d0c947bdd9cb2733be8f800d88b5a37c57f1f1d73c10"}, + {file = "wrapt-1.13.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:46f7f3af321a573fc0c3586612db4decb7eb37172af1bc6173d81f5b66c2e068"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:778fd096ee96890c10ce96187c76b3e99b2da44e08c9e24d5652f356873f6709"}, + {file = "wrapt-1.13.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0cb23d36ed03bf46b894cfec777eec754146d68429c30431c99ef28482b5c1df"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:96b81ae75591a795d8c90edc0bfaab44d3d41ffc1aae4d994c5aa21d9b8e19a2"}, + {file = "wrapt-1.13.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7dd215e4e8514004c8d810a73e342c536547038fb130205ec4bba9f5de35d45b"}, + {file = "wrapt-1.13.3-cp37-cp37m-win32.whl", hash = "sha256:47f0a183743e7f71f29e4e21574ad3fa95676136f45b91afcf83f6a050914829"}, + {file = 
"wrapt-1.13.3-cp37-cp37m-win_amd64.whl", hash = "sha256:fd76c47f20984b43d93de9a82011bb6e5f8325df6c9ed4d8310029a55fa361ea"}, + {file = "wrapt-1.13.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b73d4b78807bd299b38e4598b8e7bd34ed55d480160d2e7fdaabd9931afa65f9"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:ec9465dd69d5657b5d2fa6133b3e1e989ae27d29471a672416fd729b429eb554"}, + {file = "wrapt-1.13.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:dd91006848eb55af2159375134d724032a2d1d13bcc6f81cd8d3ed9f2b8e846c"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ae9de71eb60940e58207f8e71fe113c639da42adb02fb2bcbcaccc1ccecd092b"}, + {file = "wrapt-1.13.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:51799ca950cfee9396a87f4a1240622ac38973b6df5ef7a41e7f0b98797099ce"}, + {file = "wrapt-1.13.3-cp38-cp38-win32.whl", hash = "sha256:4b9c458732450ec42578b5642ac53e312092acf8c0bfce140ada5ca1ac556f79"}, + {file = "wrapt-1.13.3-cp38-cp38-win_amd64.whl", hash = "sha256:7dde79d007cd6dfa65afe404766057c2409316135cb892be4b1c768e3f3a11cb"}, + {file = "wrapt-1.13.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:981da26722bebb9247a0601e2922cedf8bb7a600e89c852d063313102de6f2cb"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:705e2af1f7be4707e49ced9153f8d72131090e52be9278b5dbb1498c749a1e32"}, + {file = "wrapt-1.13.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:25b1b1d5df495d82be1c9d2fad408f7ce5ca8a38085e2da41bb63c914baadff7"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:77416e6b17926d953b5c666a3cb718d5945df63ecf922af0ee576206d7033b5e"}, + {file = "wrapt-1.13.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:865c0b50003616f05858b22174c40ffc27a38e67359fa1495605f96125f76640"}, + {file = "wrapt-1.13.3-cp39-cp39-win32.whl", hash = "sha256:0a017a667d1f7411816e4bf214646d0ad5b1da2c1ea13dec6c162736ff25a374"}, + {file = "wrapt-1.13.3-cp39-cp39-win_amd64.whl", hash = "sha256:81bd7c90d28a4b2e1df135bfbd7c23aee3050078ca6441bead44c42483f9ebfb"}, + {file = "wrapt-1.13.3.tar.gz", hash = "sha256:1fea9cd438686e6682271d36f3481a9f3636195578bab9ca3382e2f5f01fc185"}, +] +xmltodict = [ + {file = "xmltodict-0.12.0-py2.py3-none-any.whl", hash = "sha256:8bbcb45cc982f48b2ca8fe7e7827c5d792f217ecf1792626f808bf41c3b86051"}, + {file = "xmltodict-0.12.0.tar.gz", hash = "sha256:50d8c638ed7ecb88d90561beedbf720c9b4e851a9fa6c47ebd64e99d166d8a21"}, +] +yarl = [ + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f2a8508f7350512434e41065684076f640ecce176d262a7d54f0da41d99c5a95"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:da6df107b9ccfe52d3a48165e48d72db0eca3e3029b5b8cb4fe6ee3cb870ba8b"}, + {file = "yarl-1.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a1d0894f238763717bdcfea74558c94e3bc34aeacd3351d769460c1a586a8b05"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfe4b95b7e00c6635a72e2d00b478e8a28bfb122dc76349a06e20792eb53a523"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c145ab54702334c42237a6c6c4cc08703b6aa9b94e2f227ceb3d477d20c36c63"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", 
hash = "sha256:1ca56f002eaf7998b5fcf73b2421790da9d2586331805f38acd9997743114e98"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1d3d5ad8ea96bd6d643d80c7b8d5977b4e2fb1bab6c9da7322616fd26203d125"}, + {file = "yarl-1.7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:167ab7f64e409e9bdd99333fe8c67b5574a1f0495dcfd905bc7454e766729b9e"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:95a1873b6c0dd1c437fb3bb4a4aaa699a48c218ac7ca1e74b0bee0ab16c7d60d"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6152224d0a1eb254f97df3997d79dadd8bb2c1a02ef283dbb34b97d4f8492d23"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:5bb7d54b8f61ba6eee541fba4b83d22b8a046b4ef4d8eb7f15a7e35db2e1e245"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:9c1f083e7e71b2dd01f7cd7434a5f88c15213194df38bc29b388ccdf1492b739"}, + {file = "yarl-1.7.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f44477ae29025d8ea87ec308539f95963ffdc31a82f42ca9deecf2d505242e72"}, + {file = "yarl-1.7.2-cp310-cp310-win32.whl", hash = "sha256:cff3ba513db55cc6a35076f32c4cdc27032bd075c9faef31fec749e64b45d26c"}, + {file = "yarl-1.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:c9c6d927e098c2d360695f2e9d38870b2e92e0919be07dbe339aefa32a090265"}, + {file = "yarl-1.7.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:9b4c77d92d56a4c5027572752aa35082e40c561eec776048330d2907aead891d"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c01a89a44bb672c38f42b49cdb0ad667b116d731b3f4c896f72302ff77d71656"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c19324a1c5399b602f3b6e7db9478e5b1adf5cf58901996fc973fe4fccd73eed"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3abddf0b8e41445426d29f955b24aeecc83fa1072be1be4e0d194134a7d9baee"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6a1a9fe17621af43e9b9fcea8bd088ba682c8192d744b386ee3c47b56eaabb2c"}, + {file = "yarl-1.7.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8b0915ee85150963a9504c10de4e4729ae700af11df0dc5550e6587ed7891e92"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:29e0656d5497733dcddc21797da5a2ab990c0cb9719f1f969e58a4abac66234d"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bf19725fec28452474d9887a128e98dd67eee7b7d52e932e6949c532d820dc3b"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:d6f3d62e16c10e88d2168ba2d065aa374e3c538998ed04996cd373ff2036d64c"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:ac10bbac36cd89eac19f4e51c032ba6b412b3892b685076f4acd2de18ca990aa"}, + {file = "yarl-1.7.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:aa32aaa97d8b2ed4e54dc65d241a0da1c627454950f7d7b1f95b13985afd6c5d"}, + {file = "yarl-1.7.2-cp36-cp36m-win32.whl", hash = "sha256:87f6e082bce21464857ba58b569370e7b547d239ca22248be68ea5d6b51464a1"}, + {file = "yarl-1.7.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ac35ccde589ab6a1870a484ed136d49a26bcd06b6a1c6397b1967ca13ceb3913"}, + {file = "yarl-1.7.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:a467a431a0817a292121c13cbe637348b546e6ef47ca14a790aa2fa8cc93df63"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ab0c3274d0a846840bf6c27d2c60ba771a12e4d7586bf550eefc2df0b56b3b4"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d260d4dc495c05d6600264a197d9d6f7fc9347f21d2594926202fd08cf89a8ba"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc4dd8b01a8112809e6b636b00f487846956402834a7fd59d46d4f4267181c41"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c1164a2eac148d85bbdd23e07dfcc930f2e633220f3eb3c3e2a25f6148c2819e"}, + {file = "yarl-1.7.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:67e94028817defe5e705079b10a8438b8cb56e7115fa01640e9c0bb3edf67332"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:89ccbf58e6a0ab89d487c92a490cb5660d06c3a47ca08872859672f9c511fc52"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:8cce6f9fa3df25f55521fbb5c7e4a736683148bcc0c75b21863789e5185f9185"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:211fcd65c58bf250fb994b53bc45a442ddc9f441f6fec53e65de8cba48ded986"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c10ea1e80a697cf7d80d1ed414b5cb8f1eec07d618f54637067ae3c0334133c4"}, + {file = "yarl-1.7.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:52690eb521d690ab041c3919666bea13ab9fbff80d615ec16fa81a297131276b"}, + {file = "yarl-1.7.2-cp37-cp37m-win32.whl", hash = "sha256:695ba021a9e04418507fa930d5f0704edbce47076bdcfeeaba1c83683e5649d1"}, + {file = "yarl-1.7.2-cp37-cp37m-win_amd64.whl", hash = "sha256:c17965ff3706beedafd458c452bf15bac693ecd146a60a06a214614dc097a271"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fce78593346c014d0d986b7ebc80d782b7f5e19843ca798ed62f8e3ba8728576"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c2a1ac41a6aa980db03d098a5531f13985edcb451bcd9d00670b03129922cd0d"}, + {file = "yarl-1.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:39d5493c5ecd75c8093fa7700a2fb5c94fe28c839c8e40144b7ab7ccba6938c8"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1eb6480ef366d75b54c68164094a6a560c247370a68c02dddb11f20c4c6d3c9d"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ba63585a89c9885f18331a55d25fe81dc2d82b71311ff8bd378fc8004202ff6"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e39378894ee6ae9f555ae2de332d513a5763276a9265f8e7cbaeb1b1ee74623a"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c0910c6b6c31359d2f6184828888c983d54d09d581a4a23547a35f1d0b9484b1"}, + {file = "yarl-1.7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:6feca8b6bfb9eef6ee057628e71e1734caf520a907b6ec0d62839e8293e945c0"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:8300401dc88cad23f5b4e4c1226f44a5aa696436a4026e456fe0e5d2f7f486e6"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:788713c2896f426a4e166b11f4ec538b5736294ebf7d5f654ae445fd44270832"}, + {file = 
"yarl-1.7.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fd547ec596d90c8676e369dd8a581a21227fe9b4ad37d0dc7feb4ccf544c2d59"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:737e401cd0c493f7e3dd4db72aca11cfe069531c9761b8ea474926936b3c57c8"}, + {file = "yarl-1.7.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf81561f2972fb895e7844882898bda1eef4b07b5b385bcd308d2098f1a767b"}, + {file = "yarl-1.7.2-cp38-cp38-win32.whl", hash = "sha256:ede3b46cdb719c794427dcce9d8beb4abe8b9aa1e97526cc20de9bd6583ad1ef"}, + {file = "yarl-1.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:cc8b7a7254c0fc3187d43d6cb54b5032d2365efd1df0cd1749c0c4df5f0ad45f"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:580c1f15500e137a8c37053e4cbf6058944d4c114701fa59944607505c2fe3a0"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ec1d9a0d7780416e657f1e405ba35ec1ba453a4f1511eb8b9fbab81cb8b3ce1"}, + {file = "yarl-1.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3bf8cfe8856708ede6a73907bf0501f2dc4e104085e070a41f5d88e7faf237f3"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1be4bbb3d27a4e9aa5f3df2ab61e3701ce8fcbd3e9846dbce7c033a7e8136746"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:534b047277a9a19d858cde163aba93f3e1677d5acd92f7d10ace419d478540de"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6ddcd80d79c96eb19c354d9dca95291589c5954099836b7c8d29278a7ec0bda"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9bfcd43c65fbb339dc7086b5315750efa42a34eefad0256ba114cd8ad3896f4b"}, + {file = "yarl-1.7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f64394bd7ceef1237cc604b5a89bf748c95982a84bcd3c4bbeb40f685c810794"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044daf3012e43d4b3538562da94a88fb12a6490652dbc29fb19adfa02cf72eac"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:368bcf400247318382cc150aaa632582d0780b28ee6053cd80268c7e72796dec"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:bab827163113177aee910adb1f48ff7af31ee0289f434f7e22d10baf624a6dfe"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0cba38120db72123db7c58322fa69e3c0efa933040ffb586c3a87c063ec7cae8"}, + {file = "yarl-1.7.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:59218fef177296451b23214c91ea3aba7858b4ae3306dde120224cfe0f7a6ee8"}, + {file = "yarl-1.7.2-cp39-cp39-win32.whl", hash = "sha256:1edc172dcca3f11b38a9d5c7505c83c1913c0addc99cd28e993efeaafdfaa18d"}, + {file = "yarl-1.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:797c2c412b04403d2da075fb93c123df35239cd7b4cc4e0cd9e5839b73f52c58"}, + {file = "yarl-1.7.2.tar.gz", hash = "sha256:45399b46d60c253327a460e99856752009fcee5f5d3c80b2f7c0cae1c38d56dd"}, +] +zipp = [ + {file = "zipp-3.7.0-py3-none-any.whl", hash = "sha256:b47250dd24f92b7dd6a0a8fc5244da14608f3ca90a5efcd37a3b1642fac9a375"}, + {file = "zipp-3.7.0.tar.gz", hash = "sha256:9f50f446828eb9d45b267433fd3e9da8d801f614129124863f9c51ebceafb87d"}, +] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..8b7b082b --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,42 @@ +[tool.poetry] +name = "s3fs" +version = "2022.2.0" +description = "Convenient 
Filesystem interface over S3" +authors = ["Martin Durant <mdurant@continuum.io>"] +homepage = "http://github.com/fsspec/s3fs/" +repository = "http://github.com/fsspec/s3fs/" +keywords = ["s3", "boto"] +readme = "README.rst" +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "License :: OSI Approved :: BSD License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", +] +license = "BSD" + +[tool.poetry.dependencies] +python = ">=3.7,<4" +aiobotocore = "~=2.1.0" +fsspec = "==2022.2.0" +aiohttp = "<=4" + +[tool.poetry.dev-dependencies] +pytest = ">=4.2.0" +pytest-env = "^0.6.2" +bump2version = "^1.0.1" +moto = {version = ">=2,<3", extras = ["server"]} + +[tool.pytest.ini_options] +env = [ + "BOTO_PATH=/dev/null", + "AWS_ACCESS_KEY_ID=dummy_key", + "AWS_SECRET_ACCESS_KEY=dummy_secret" +] + +[build-system] +requires = ["poetry-core>=1.0.0"] +build-backend = "poetry.core.masonry.api" diff --git a/pytest.ini b/pytest.ini deleted file mode 100644 index 653d5ad9..00000000 --- a/pytest.ini +++ /dev/null @@ -1,6 +0,0 @@ -[pytest] -testpaths = s3fs -env = - BOTO_PATH=/dev/null - AWS_ACCESS_KEY_ID=dummy_key - AWS_SECRET_ACCESS_KEY=dummy_secret diff --git a/release-procedure.md b/release-procedure.md index 4332616e..a91ed1ae 100644 --- a/release-procedure.md +++ b/release-procedure.md @@ -1,20 +1,4 @@ -1. Verify tests on Linux, OS-X, and Windows - -2. Update version in setup.py and s3fs/__init__.py and commit - -3. Tag the commit - - git tag 1.2.3 -m "Version 1.2.3" - -4. Push new version bump commit and tag to github - - git push fsspec main --tags - -5. Build source and wheel packages - - rm -rf dist/ - python setup.py sdist bdist_wheel --universal - -6. Upload packages to PyPI - - twine upload dist/* +1. Verify tests on Linux, OS-X, and Windows +2. Run `poetry run bump2version --new-version=YEAR.MONTH.DAY`, replacing `YEAR`, `MONTH` and `DAY` with the current date +3. Run `git push && git push --tags` +4. Run `poetry publish --build` diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 8ac2d6a0..00000000 --- a/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -aiobotocore~=2.1.0 -fsspec==2022.2.0 -aiohttp<=4 diff --git a/s3fs/__init__.py b/s3fs/__init__.py index 2267b817..13a18d9d 100644 --- a/s3fs/__init__.py +++ b/s3fs/__init__.py @@ -1,7 +1,4 @@ from .core import S3FileSystem, S3File from .mapping import S3Map -from ._version import get_versions - -__version__ = get_versions()["version"] -del get_versions +__version__ = "2022.2.0" diff --git a/s3fs/_version.py b/s3fs/_version.py deleted file mode 100644 index 23af68e2..00000000 --- a/s3fs/_version.py +++ /dev/null @@ -1,657 +0,0 @@ -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.20 (https://github.com/python-versioneer/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive.
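
Note on the version-handling change above: the generated `s3fs/_version.py`, whose deletion begins here, computed `__version__` from git metadata at import time. The Poetry setup replaces that with the static string in `s3fs/__init__.py`, which `poetry run bump2version` rewrites at release time per `release-procedure.md` and the `[bumpversion]` config added to `setup.cfg` further down. A minimal sketch of the new behaviour, assuming a build of this branch is installed:

    import s3fs

    # Now a plain attribute kept in sync by bump2version, not a value
    # recomputed from git tags on every import.
    print(s3fs.__version__)  # "2022.2.0"
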
- # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). - git_refnames = "$Format:%d$" - git_full = "$Format:%H$" - git_date = "$Format:%ci$" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: # pylint: disable=too-few-public-methods - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "pep440" - cfg.tag_prefix = "" - cfg.parentdir_prefix = "None" - cfg.versionfile_source = "s3fs/_version.py" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - - return decorate - - -# pylint:disable=too-many-arguments,consider-using-with # noqa -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen( - [command] + args, - cwd=cwd, - env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr else None), - ) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return { - "version": dirname[len(parentdir_prefix) :], - "full-revisionid": None, - "dirty": False, - "error": None, - "date": None, - } - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print( - "Tried directories %s but none started with prefix %s" - % (str(rootdirs), parentdir_prefix) - ) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. 
When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. - keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "main". - tags = {r for r in refs if re.search(r"\d", r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. 
"2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix) :] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r"\d", r): - continue - if verbose: - print("picking %s" % r) - return { - "version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": None, - "date": date, - } - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return { - "version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, - "error": "no suitable tags", - "date": None, - } - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. - - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner( - GITS, - [ - "describe", - "--tags", - "--dirty", - "--always", - "--long", - "--match", - "%s*" % tag_prefix, - ], - cwd=root, - ) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "main" in branches: - branch_name = "main" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. 
- git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[: git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? - pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( - full_tag, - tag_prefix, - ) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix) :] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not main branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the main branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post0.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 
0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post0.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not main branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return { - "version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None, - } - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return { - "version": rendered, - "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], - "error": None, - "date": pieces.get("date"), - } - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. 
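
To make the deleted rendering helpers concrete: each `render_*` function above maps a `pieces` dict onto one version-string style. A worked example with invented values, describing a dirty tree three commits past a tag:

    # Hypothetical pieces, as git_pieces_from_vcs() would build them
    pieces = {
        "closest-tag": "2022.2.0", "distance": 3, "short": "abc1234",
        "dirty": True, "error": None, "branch": "main",
    }
    render_pep440(pieces)      # -> "2022.2.0+3.gabc1234.dirty"
    render_pep440_pre(pieces)  # -> "2022.2.0.post0.dev3" (this style drops the dirty flag)
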
- for _ in cfg.versionfile_source.split("/"): - root = os.path.dirname(root) - except NameError: - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None, - } - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return { - "version": "0+unknown", - "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", - "date": None, - } diff --git a/s3fs/tests/__init__.py b/s3fs/tests/__init__.py deleted file mode 100644 index e69de29b..00000000 diff --git a/setup.cfg b/setup.cfg index ebf6d300..fcc89366 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,15 +1,16 @@ -[metadata] -long_description: file: README.rst +[bumpversion] +current_version = 0.4.0 +commit = True +tag = True -[versioneer] -VCS = git -style = pep440 -versionfile_source = s3fs/_version.py -versionfile_build = s3fs/_version.py -tag_prefix = "" +[bumpversion:file:pyproject.toml] +search = version = "{current_version}" +replace = version = "{new_version}" + +[bumpversion:file:s3fs/__init__.py] [flake8] -exclude = __init__.py,versioneer.py,s3fs/tests/ +exclude = __init__.py,./tests/,docs/ max-line-length = 95 ignore = E20, # Extra space in brackets @@ -31,4 +32,3 @@ ignore = E741 # Ambiguous variable names W504, # line break after binary operator E501, # line too long (leave it to black!) - diff --git a/setup.py b/setup.py deleted file mode 100755 index d2521a61..00000000 --- a/setup.py +++ /dev/null @@ -1,39 +0,0 @@ -#!/usr/bin/env python - -from setuptools import setup -import versioneer - -with open('requirements.txt') as file: - aiobotocore_version_suffix = '' - for line in file: - parts = line.rstrip().split('aiobotocore') - if len(parts) == 2: - aiobotocore_version_suffix = parts[1] - break - -setup(name='s3fs', - version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), - classifiers=[ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - ], - description='Convenient Filesystem interface over S3', - url='http://github.com/fsspec/s3fs/', - maintainer='Martin Durant', - maintainer_email='mdurant@continuum.io', - license='BSD', - keywords='s3, boto', - packages=['s3fs'], - python_requires='>= 3.7', - install_requires=[open('requirements.txt').read().strip().split('\n')], - extras_require={ - 'awscli': [f"aiobotocore[awscli]{aiobotocore_version_suffix}"], - 'boto3': [f"aiobotocore[boto3]{aiobotocore_version_suffix}"], - }, - zip_safe=False) diff --git a/test_requirements.txt b/test_requirements.txt deleted file mode 100644 index 8e67f5e0..00000000 --- a/test_requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -mock; python_version < '3.3' -moto>=2,<3 -flask -pytest>=4.2.0 -pytest-env diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..d02abbf7 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,195 @@ +import json +import os +import time + +import pytest +import requests + +from s3fs import S3FileSystem +import uuid + + +@pytest.fixture() +def file_a(bucket_names): + return bucket_names["test"] + "/tmp/test/a" + + +@pytest.fixture() +def 
file_b(bucket_names): + return bucket_names["test"] + "/tmp/test/b" + + +@pytest.fixture() +def file_c(bucket_names): + return bucket_names["test"] + "/tmp/test/c" + + +@pytest.fixture() +def file_d(bucket_names): + return bucket_names["test"] + "/tmp/test/d" + + +@pytest.fixture() +def s3_files(): + return { + "files": { + "test/accounts.1.json": ( + b'{"amount": 100, "name": "Alice"}\n' + b'{"amount": 200, "name": "Bob"}\n' + b'{"amount": 300, "name": "Charlie"}\n' + b'{"amount": 400, "name": "Dennis"}\n' + ), + "test/accounts.2.json": ( + b'{"amount": 500, "name": "Alice"}\n' + b'{"amount": 600, "name": "Bob"}\n' + b'{"amount": 700, "name": "Charlie"}\n' + b'{"amount": 800, "name": "Dennis"}\n' + ), + }, + "csv": { + "2014-01-01.csv": ( + b"name,amount,id\n" b"Alice,100,1\n" b"Bob,200,2\n" b"Charlie,300,3\n" + ), + "2014-01-02.csv": (b"name,amount,id\n"), + "2014-01-03.csv": ( + b"name,amount,id\n" b"Dennis,400,4\n" b"Edith,500,5\n" b"Frank,600,6\n" + ), + }, + "text": { + "nested/file1": b"hello\n", + "nested/file2": b"world", + "nested/nested2/file1": b"hello\n", + "nested/nested2/file2": b"world", + }, + "glob": {"file.dat": b"", "filexdat": b""}, + } + + +@pytest.fixture() +def bucket_names(): + return { + "test": f"test-{uuid.uuid4()}", + "secure": f"test-secure-{uuid.uuid4()}", + "versioned": f"test-versioned-{uuid.uuid4()}", + } + + +@pytest.fixture(scope="session") +def endpoint_port(): + return 5555 + + +@pytest.fixture(scope="session") +def endpoint_uri(endpoint_port): + return f"http://127.0.0.1:{endpoint_port}" + + +@pytest.fixture(scope="session") +def s3_base(endpoint_uri, endpoint_port): + # writable local S3 system + import shlex + import subprocess + + try: + # should fail since we didn't start server yet + r = requests.get(endpoint_uri) + except Exception: + pass + else: + if r.ok: + raise RuntimeError("moto server already up") + if "AWS_SECRET_ACCESS_KEY" not in os.environ: + os.environ["AWS_SECRET_ACCESS_KEY"] = "foo" + if "AWS_ACCESS_KEY_ID" not in os.environ: + os.environ["AWS_ACCESS_KEY_ID"] = "foo" + proc = subprocess.Popen(shlex.split("moto_server s3 -p %s" % endpoint_port)) + + timeout = 5 + while timeout > 0: + try: + r = requests.get(endpoint_uri) + if r.ok: + break + except Exception: + pass + timeout -= 0.1 + time.sleep(0.1) + yield + proc.terminate() + proc.wait() + + +@pytest.fixture() +def boto3_client(endpoint_uri): + from botocore.session import Session + + # NB: we use the sync botocore client for setup + session = Session() + return session.create_client("s3", endpoint_url=endpoint_uri) + + +@pytest.fixture() +def s3(s3_base, boto3_client, bucket_names, s3_files, endpoint_uri): + boto3_client.create_bucket(Bucket=bucket_names["test"], ACL="public-read") + + boto3_client.create_bucket(Bucket=bucket_names["versioned"], ACL="public-read") + boto3_client.put_bucket_versioning( + Bucket=bucket_names["versioned"], VersioningConfiguration={"Status": "Enabled"} + ) + + # initialize secure bucket + boto3_client.create_bucket(Bucket=bucket_names["secure"], ACL="public-read") + policy = json.dumps( + { + "Version": "2012-10-17", + "Id": "PutObjPolicy", + "Statement": [ + { + "Sid": "DenyUnEncryptedObjectUploads", + "Effect": "Deny", + "Principal": "*", + "Action": "s3:PutObject", + "Resource": "arn:aws:s3:::{bucket_name}/*".format( + bucket_name=bucket_names["secure"] + ), + "Condition": { + "StringNotEquals": { + "s3:x-amz-server-side-encryption": "aws:kms" + } + }, + } + ], + } + ) + boto3_client.put_bucket_policy(Bucket=bucket_names["secure"], Policy=policy) 
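
The `s3` fixture continues below with the test-data upload and cleanup; once it yields, a test receives a live `S3FileSystem` pointed at the local moto server, plus per-run bucket names. A minimal sketch of a consumer, a hypothetical test that is not part of this diff:

    def test_round_trip(s3, bucket_names):
        # Write through the fixture-provided filesystem and read back
        path = bucket_names["test"] + "/tmp/demo"
        with s3.open(path, "wb") as f:
            f.write(b"some data")
        assert s3.cat(path) == b"some data"
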
+ for flist in s3_files.values(): + for f, data in flist.items(): + boto3_client.put_object(Bucket=bucket_names["test"], Key=f, Body=data) + + S3FileSystem.clear_instance_cache() + s3 = S3FileSystem(anon=False, client_kwargs={"endpoint_url": endpoint_uri}) + s3.invalidate_cache() + yield s3 + # Clean up all buckets and all items within those buckets + for bucket in boto3_client.list_buckets()["Buckets"]: + paginator = boto3_client.get_paginator("list_object_versions") + + # Create a PageIterator from the Paginator + page_iterator = paginator.paginate(Bucket=bucket["Name"], Delimiter="") + + for page in page_iterator: + versions = page.get("Versions", []) + delete_markers = page.get("DeleteMarkers", []) + if not versions and not delete_markers: + break + boto3_client.delete_objects( + Bucket=bucket["Name"], + Delete={ + "Objects": [ + {"Key": v["Key"], "VersionId": v["VersionId"]} + for v in versions + delete_markers + ] + }, + ) + + boto3_client.delete_bucket(Bucket=bucket["Name"]) diff --git a/s3fs/tests/test_mapping.py b/tests/test_mapping.py similarity index 80% rename from s3fs/tests/test_mapping.py rename to tests/test_mapping.py index e67c7e73..78934ba2 100644 --- a/s3fs/tests/test_mapping.py +++ b/tests/test_mapping.py @@ -1,11 +1,13 @@ import pytest -from s3fs.tests.test_s3fs import s3_base, s3, test_bucket_name -from s3fs import S3Map, S3FileSystem +from s3fs import S3Map -root = test_bucket_name + "/mapping" +@pytest.fixture() +def root(bucket_names): + return bucket_names["test"] + "/mapping" -def test_simple(s3): + +def test_simple(s3, root): d = s3.get_mapper(root) assert not d @@ -15,12 +17,12 @@ def test_simple(s3): s3.get_mapper(root) -def test_default_s3filesystem(s3): +def test_default_s3filesystem(s3, root): d = s3.get_mapper(root) assert d.fs is s3 -def test_errors(s3): +def test_errors(s3, root): d = s3.get_mapper(root) with pytest.raises(KeyError): d["nonexistent"] @@ -31,7 +33,7 @@ def test_errors(s3): assert "does-not-exist" in str(e) -def test_with_data(s3): +def test_with_data(s3, root, bucket_names): d = s3.get_mapper(root) d["x"] = b"123" assert list(d) == list(d.keys()) == ["x"] @@ -40,7 +42,7 @@ def test_with_data(s3): assert d["x"] == b"123" assert bool(d) - assert s3.find(root) == [test_bucket_name + "/mapping/x"] + assert s3.find(root) == [bucket_names["test"] + "/mapping/x"] d["x"] = b"000" assert d["x"] == b"000" @@ -52,7 +54,7 @@ def test_with_data(s3): assert list(d) == [] -def test_complex_keys(s3): +def test_complex_keys(s3, root): d = s3.get_mapper(root) d[1] = b"hello" assert d[1] == b"hello" @@ -68,7 +70,7 @@ def test_complex_keys(s3): assert ("x", 1, 2) in d -def test_clear_empty(s3): +def test_clear_empty(s3, root): d = s3.get_mapper(root) d.clear() assert list(d) == [] @@ -78,8 +80,7 @@ def test_clear_empty(s3): assert list(d) == [] -def test_no_dircache(s3): - from s3fs.tests.test_s3fs import endpoint_uri +def test_no_dircache(s3, root, endpoint_uri): import fsspec d = fsspec.get_mapper( @@ -96,7 +97,7 @@ def test_no_dircache(s3): assert list(d) == [] -def test_pickle(s3): +def test_pickle(s3, root): d = s3.get_mapper(root) d["x"] = b"1" @@ -107,7 +108,7 @@ def test_pickle(s3): assert d2["x"] == b"1" -def test_array(s3): +def test_array(s3, root): from array import array d = s3.get_mapper(root) @@ -116,14 +117,14 @@ def test_array(s3): assert d["x"] == b"A" * 1000 -def test_bytearray(s3): +def test_bytearray(s3, root): d = s3.get_mapper(root) d["x"] = bytearray(b"123") assert d["x"] == b"123" -def test_new_bucket(s3): +def test_new_bucket(s3, 
root): try: s3.get_mapper("new-bucket", check=True) assert False @@ -137,7 +138,7 @@ def test_new_bucket(s3): assert not d -def test_old_api(s3): +def test_old_api(s3, root): import fsspec.mapping assert isinstance(S3Map(root, s3), fsspec.mapping.FSMap) diff --git a/s3fs/tests/test_s3fs.py b/tests/test_s3fs.py similarity index 65% rename from s3fs/tests/test_s3fs.py rename to tests/test_s3fs.py index 38093156..a0d69d3b 100644 --- a/s3fs/tests/test_s3fs.py +++ b/tests/test_s3fs.py @@ -2,13 +2,12 @@ import asyncio import errno import datetime +import uuid from contextlib import contextmanager -import json from concurrent.futures import ProcessPoolExecutor import io import os import random -import requests import time import sys import pytest @@ -25,135 +24,6 @@ from fsspec.callbacks import Callback from packaging import version -test_bucket_name = "test" -secure_bucket_name = "test-secure" -versioned_bucket_name = "test-versioned" -files = { - "test/accounts.1.json": ( - b'{"amount": 100, "name": "Alice"}\n' - b'{"amount": 200, "name": "Bob"}\n' - b'{"amount": 300, "name": "Charlie"}\n' - b'{"amount": 400, "name": "Dennis"}\n' - ), - "test/accounts.2.json": ( - b'{"amount": 500, "name": "Alice"}\n' - b'{"amount": 600, "name": "Bob"}\n' - b'{"amount": 700, "name": "Charlie"}\n' - b'{"amount": 800, "name": "Dennis"}\n' - ), -} - -csv_files = { - "2014-01-01.csv": ( - b"name,amount,id\n" b"Alice,100,1\n" b"Bob,200,2\n" b"Charlie,300,3\n" - ), - "2014-01-02.csv": (b"name,amount,id\n"), - "2014-01-03.csv": ( - b"name,amount,id\n" b"Dennis,400,4\n" b"Edith,500,5\n" b"Frank,600,6\n" - ), -} -text_files = { - "nested/file1": b"hello\n", - "nested/file2": b"world", - "nested/nested2/file1": b"hello\n", - "nested/nested2/file2": b"world", -} -glob_files = {"file.dat": b"", "filexdat": b""} -a = test_bucket_name + "/tmp/test/a" -b = test_bucket_name + "/tmp/test/b" -c = test_bucket_name + "/tmp/test/c" -d = test_bucket_name + "/tmp/test/d" -port = 5555 -endpoint_uri = "http://127.0.0.1:%s/" % port - - -@pytest.fixture() -def s3_base(): - # writable local S3 system - import shlex - import subprocess - - try: - # should fail since we didn't start server yet - r = requests.get(endpoint_uri) - except: - pass - else: - if r.ok: - raise RuntimeError("moto server already up") - if "AWS_SECRET_ACCESS_KEY" not in os.environ: - os.environ["AWS_SECRET_ACCESS_KEY"] = "foo" - if "AWS_ACCESS_KEY_ID" not in os.environ: - os.environ["AWS_ACCESS_KEY_ID"] = "foo" - proc = subprocess.Popen(shlex.split("moto_server s3 -p %s" % port)) - - timeout = 5 - while timeout > 0: - try: - r = requests.get(endpoint_uri) - if r.ok: - break - except: - pass - timeout -= 0.1 - time.sleep(0.1) - yield - proc.terminate() - proc.wait() - - -def get_boto3_client(): - from botocore.session import Session - - # NB: we use the sync botocore client for setup - session = Session() - return session.create_client("s3", endpoint_url=endpoint_uri) - - -@pytest.fixture() -def s3(s3_base): - client = get_boto3_client() - client.create_bucket(Bucket=test_bucket_name, ACL="public-read") - - client.create_bucket(Bucket=versioned_bucket_name, ACL="public-read") - client.put_bucket_versioning( - Bucket=versioned_bucket_name, VersioningConfiguration={"Status": "Enabled"} - ) - - # initialize secure bucket - client.create_bucket(Bucket=secure_bucket_name, ACL="public-read") - policy = json.dumps( - { - "Version": "2012-10-17", - "Id": "PutObjPolicy", - "Statement": [ - { - "Sid": "DenyUnEncryptedObjectUploads", - "Effect": "Deny", - "Principal": "*", - 
"Action": "s3:PutObject", - "Resource": "arn:aws:s3:::{bucket_name}/*".format( - bucket_name=secure_bucket_name - ), - "Condition": { - "StringNotEquals": { - "s3:x-amz-server-side-encryption": "aws:kms" - } - }, - } - ], - } - ) - client.put_bucket_policy(Bucket=secure_bucket_name, Policy=policy) - for flist in [files, csv_files, text_files, glob_files]: - for f, data in flist.items(): - client.put_object(Bucket=test_bucket_name, Key=f, Body=data) - - S3FileSystem.clear_instance_cache() - s3 = S3FileSystem(anon=False, client_kwargs={"endpoint_url": endpoint_uri}) - s3.invalidate_cache() - yield s3 - @contextmanager def expect_errno(expected_errno): @@ -163,20 +33,20 @@ def expect_errno(expected_errno): assert error.value.errno == expected_errno, "OSError has wrong error code." -def test_simple(s3): +def test_simple(s3, file_a): data = b"a" * (10 * 2**20) - with s3.open(a, "wb") as f: + with s3.open(file_a, "wb") as f: f.write(data) - with s3.open(a, "rb") as f: + with s3.open(file_a, "rb") as f: out = f.read(len(data)) assert len(data) == len(out) assert out == data @pytest.mark.parametrize("default_cache_type", ["none", "bytes", "mmap"]) -def test_default_cache_type(s3, default_cache_type): +def test_default_cache_type(s3, default_cache_type, endpoint_uri, file_a): data = b"a" * (10 * 2**20) s3 = S3FileSystem( anon=False, @@ -184,17 +54,17 @@ def test_default_cache_type(s3, default_cache_type): client_kwargs={"endpoint_url": endpoint_uri}, ) - with s3.open(a, "wb") as f: + with s3.open(file_a, "wb") as f: f.write(data) - with s3.open(a, "rb") as f: + with s3.open(file_a, "rb") as f: assert isinstance(f.cache, fsspec.core.caches[default_cache_type]) out = f.read(len(data)) assert len(data) == len(out) assert out == data -def test_ssl_off(): +def test_ssl_off(endpoint_uri): s3 = S3FileSystem(use_ssl=False, client_kwargs={"endpoint_url": endpoint_uri}) assert s3.s3.meta.endpoint_url.startswith("http://") @@ -204,7 +74,7 @@ def test_client_kwargs(): assert s3.s3.meta.endpoint_url.startswith("http://foo") -def test_config_kwargs(): +def test_config_kwargs(endpoint_uri): s3 = S3FileSystem( config_kwargs={"signature_version": "s3v4"}, client_kwargs={"endpoint_url": endpoint_uri}, @@ -212,13 +82,13 @@ def test_config_kwargs(): assert s3.connect().meta.config.signature_version == "s3v4" -def test_config_kwargs_class_attributes_default(): +def test_config_kwargs_class_attributes_default(endpoint_uri): s3 = S3FileSystem(client_kwargs={"endpoint_url": endpoint_uri}) assert s3.connect().meta.config.connect_timeout == 5 assert s3.connect().meta.config.read_timeout == 15 -def test_config_kwargs_class_attributes_override(): +def test_config_kwargs_class_attributes_override(endpoint_uri): s3 = S3FileSystem( config_kwargs={ "connect_timeout": 60, @@ -244,32 +114,32 @@ def test_idempotent_connect(s3): assert s3.connect(refresh=True) is not first -def test_multiple_objects(s3): +def test_multiple_objects(s3, endpoint_uri, bucket_names): s3.connect() - s3.ls("test") + s3.ls(bucket_names["test"]) s32 = S3FileSystem(anon=False, client_kwargs={"endpoint_url": endpoint_uri}) assert s32.session - assert s3.ls("test") == s32.ls("test") + assert s3.ls(bucket_names["test"]) == s32.ls(bucket_names["test"]) -def test_info(s3): - s3.touch(a) - s3.touch(b) - info = s3.info(a) - linfo = s3.ls(a, detail=True)[0] +def test_info(s3, bucket_names, file_a, file_b): + s3.touch(file_a) + s3.touch(file_b) + info = s3.info(file_a) + linfo = s3.ls(file_a, detail=True)[0] assert abs(info.pop("LastModified") - 
linfo.pop("LastModified")).seconds < 1 info.pop("VersionId") info.pop("ContentType") linfo.pop("Key") linfo.pop("Size") assert info == linfo - parent = a.rsplit("/", 1)[0] + parent = file_a.rsplit("/", 1)[0] s3.invalidate_cache() # remove full path from the cache s3.ls(parent) # fill the cache with parent dir - assert s3.info(a) == s3.dircache[parent][0] # correct value - assert id(s3.info(a)) == id(s3.dircache[parent][0]) # is object from cache + assert s3.info(file_a) == s3.dircache[parent][0] # correct value + assert id(s3.info(file_a)) == id(s3.dircache[parent][0]) # is object from cache - new_parent = test_bucket_name + "/foo" + new_parent = bucket_names["test"] + "/foo" s3.mkdir(new_parent) with pytest.raises(FileNotFoundError): s3.info(new_parent) @@ -278,8 +148,8 @@ def test_info(s3): s3.info(new_parent) -def test_info_cached(s3): - path = test_bucket_name + "/tmp/" +def test_info_cached(s3, bucket_names): + path = bucket_names["test"] + "/tmp/" fqpath = "s3://" + path s3.touch(path + "test") info = s3.info(fqpath) @@ -287,8 +157,8 @@ def test_info_cached(s3): assert info == s3.info(path) -def test_checksum(s3): - bucket = test_bucket_name +def test_checksum(s3, bucket_names): + bucket = bucket_names["test"] d = "checksum" prefix = d + "/e" o1 = prefix + "1" @@ -369,8 +239,8 @@ def test_checksum(s3): test_xattr_sample_metadata = {"test_xattr": "1"} -def test_xattr(s3): - bucket, key = (test_bucket_name, "tmp/test/xattr") +def test_xattr(s3, bucket_names): + bucket, key = (bucket_names["test"], "tmp/test/xattr") filename = bucket + "/" + key body = b"aaaa" public_read_acl = { @@ -421,14 +291,14 @@ def test_xattr(s3): assert s3.info(filename)["ETag"] == etag -def test_xattr_setxattr_in_write_mode(s3): - s3file = s3.open(a, "wb") +def test_xattr_setxattr_in_write_mode(s3, file_a): + s3file = s3.open(file_a, "wb") with pytest.raises(NotImplementedError): s3file.setxattr(test_xattr="1") @pytest.mark.xfail() -def test_delegate(s3): +def test_delegate(s3, endpoint_uri): out = s3.get_delegated_s3pars() assert out assert out["token"] @@ -437,7 +307,7 @@ def test_delegate(s3): assert out == s32.get_delegated_s3pars() -def test_not_delegate(): +def test_not_delegate(endpoint_uri): s3 = S3FileSystem(anon=True, client_kwargs={"endpoint_url": endpoint_uri}) out = s3.get_delegated_s3pars() assert out == {"anon": True} @@ -448,39 +318,39 @@ def test_not_delegate(): assert out == {"anon": False} -def test_ls(s3): +def test_ls(s3, bucket_names): assert set(s3.ls("", detail=False)) == { - test_bucket_name, - secure_bucket_name, - versioned_bucket_name, + bucket_names["test"], + bucket_names["secure"], + bucket_names["versioned"], } with pytest.raises(FileNotFoundError): s3.ls("nonexistent") - fn = test_bucket_name + "/test/accounts.1.json" - assert fn in s3.ls(test_bucket_name + "/test", detail=False) + fn = bucket_names["test"] + "/test/accounts.1.json" + assert fn in s3.ls(bucket_names["test"] + "/test", detail=False) -def test_pickle(s3): +def test_pickle(s3, bucket_names): import pickle s32 = pickle.loads(pickle.dumps(s3)) - assert s3.ls("test") == s32.ls("test") + assert s3.ls(bucket_names["test"]) == s32.ls(bucket_names["test"]) s33 = pickle.loads(pickle.dumps(s32)) - assert s3.ls("test") == s33.ls("test") + assert s3.ls(bucket_names["test"]) == s33.ls(bucket_names["test"]) -def test_ls_touch(s3): - assert not s3.exists(test_bucket_name + "/tmp/test") - s3.touch(a) - s3.touch(b) - L = s3.ls(test_bucket_name + "/tmp/test", True) - assert {d["Key"] for d in L} == {a, b} - L = 
s3.ls(test_bucket_name + "/tmp/test", False) - assert set(L) == {a, b} +def test_ls_touch(s3, bucket_names, file_a, file_b): + assert not s3.exists(bucket_names["test"] + "/tmp/test") + s3.touch(file_a) + s3.touch(file_b) + L = s3.ls(bucket_names["test"] + "/tmp/test", True) + assert {d["Key"] for d in L} == {file_a, file_b} + L = s3.ls(bucket_names["test"] + "/tmp/test", False) + assert set(L) == {file_a, file_b} @pytest.mark.parametrize("version_aware", [True, False]) -def test_exists_versioned(s3, version_aware): +def test_exists_versioned(s3, version_aware, endpoint_uri, bucket_names): """Test to ensure that a prefix exists when using a versioned bucket""" import uuid @@ -490,7 +360,7 @@ def test_exists_versioned(s3, version_aware): version_aware=version_aware, client_kwargs={"endpoint_url": endpoint_uri}, ) - segments = [versioned_bucket_name] + [str(uuid.uuid4()) for _ in range(n)] + segments = [bucket_names["versioned"]] + [str(uuid.uuid4()) for _ in range(n)] path = "/".join(segments) for i in range(2, n + 1): assert not s3.exists("/".join(segments[:i])) @@ -499,101 +369,101 @@ def test_exists_versioned(s3, version_aware): assert s3.exists("/".join(segments[:i])) -def test_isfile(s3): +def test_isfile(s3, bucket_names, file_a, file_b, file_c): assert not s3.isfile("") assert not s3.isfile("/") - assert not s3.isfile(test_bucket_name) - assert not s3.isfile(test_bucket_name + "/test") + assert not s3.isfile(bucket_names["test"]) + assert not s3.isfile(bucket_names["test"] + "/test") - assert not s3.isfile(test_bucket_name + "/test/foo") - assert s3.isfile(test_bucket_name + "/test/accounts.1.json") - assert s3.isfile(test_bucket_name + "/test/accounts.2.json") + assert not s3.isfile(bucket_names["test"] + "/test/foo") + assert s3.isfile(bucket_names["test"] + "/test/accounts.1.json") + assert s3.isfile(bucket_names["test"] + "/test/accounts.2.json") - assert not s3.isfile(a) - s3.touch(a) - assert s3.isfile(a) + assert not s3.isfile(file_a) + s3.touch(file_a) + assert s3.isfile(file_a) - assert not s3.isfile(b) - assert not s3.isfile(b + "/") - s3.mkdir(b) - assert not s3.isfile(b) - assert not s3.isfile(b + "/") + assert not s3.isfile(file_b) + assert not s3.isfile(file_b + "/") + s3.mkdir(file_b) + assert not s3.isfile(file_b) + assert not s3.isfile(file_b + "/") - assert not s3.isfile(c) - assert not s3.isfile(c + "/") - s3.mkdir(c + "/") - assert not s3.isfile(c) - assert not s3.isfile(c + "/") + assert not s3.isfile(file_c) + assert not s3.isfile(file_c + "/") + s3.mkdir(file_c + "/") + assert not s3.isfile(file_c) + assert not s3.isfile(file_c + "/") -def test_isdir(s3): +def test_isdir(s3, bucket_names, file_a, file_b, file_c): assert s3.isdir("") assert s3.isdir("/") - assert s3.isdir(test_bucket_name) - assert s3.isdir(test_bucket_name + "/test") + assert s3.isdir(bucket_names["test"]) + assert s3.isdir(bucket_names["test"] + "/test") - assert not s3.isdir(test_bucket_name + "/test/foo") - assert not s3.isdir(test_bucket_name + "/test/accounts.1.json") - assert not s3.isdir(test_bucket_name + "/test/accounts.2.json") + assert not s3.isdir(bucket_names["test"] + "/test/foo") + assert not s3.isdir(bucket_names["test"] + "/test/accounts.1.json") + assert not s3.isdir(bucket_names["test"] + "/test/accounts.2.json") - assert not s3.isdir(a) - s3.touch(a) - assert not s3.isdir(a) + assert not s3.isdir(file_a) + s3.touch(file_a) + assert not s3.isdir(file_a) - assert not s3.isdir(b) - assert not s3.isdir(b + "/") + assert not s3.isdir(file_b) + assert not s3.isdir(file_b + "/") - 
assert not s3.isdir(c) - assert not s3.isdir(c + "/") + assert not s3.isdir(file_c) + assert not s3.isdir(file_c + "/") # test cache s3.invalidate_cache() assert not s3.dircache - s3.ls(test_bucket_name + "/nested") - assert test_bucket_name + "/nested" in s3.dircache - assert not s3.isdir(test_bucket_name + "/nested/file1") - assert not s3.isdir(test_bucket_name + "/nested/file2") - assert s3.isdir(test_bucket_name + "/nested/nested2") - assert s3.isdir(test_bucket_name + "/nested/nested2/") - - -def test_rm(s3): - assert not s3.exists(a) - s3.touch(a) - assert s3.exists(a) - s3.rm(a) - assert not s3.exists(a) + s3.ls(bucket_names["test"] + "/nested") + assert bucket_names["test"] + "/nested" in s3.dircache + assert not s3.isdir(bucket_names["test"] + "/nested/file1") + assert not s3.isdir(bucket_names["test"] + "/nested/file2") + assert s3.isdir(bucket_names["test"] + "/nested/nested2") + assert s3.isdir(bucket_names["test"] + "/nested/nested2/") + + +def test_rm(s3, bucket_names, file_a): + assert not s3.exists(file_a) + s3.touch(file_a) + assert s3.exists(file_a) + s3.rm(file_a) + assert not s3.exists(file_a) # the API is OK with deleting non-files; maybe this is an effect of using bulk # with pytest.raises(FileNotFoundError): - # s3.rm(test_bucket_name + '/nonexistent') + # s3.rm(bucket_names['test'] + '/nonexistent') with pytest.raises(FileNotFoundError): s3.rm("nonexistent") - s3.rm(test_bucket_name + "/nested", recursive=True) - assert not s3.exists(test_bucket_name + "/nested/nested2/file1") + s3.rm(bucket_names["test"] + "/nested", recursive=True) + assert not s3.exists(bucket_names["test"] + "/nested/nested2/file1") # whole bucket - s3.rm(test_bucket_name, recursive=True) - assert not s3.exists(test_bucket_name + "/2014-01-01.csv") - assert not s3.exists(test_bucket_name) + s3.rm(bucket_names["test"], recursive=True) + assert not s3.exists(bucket_names["test"] + "/2014-01-01.csv") + assert not s3.exists(bucket_names["test"]) def test_rmdir(s3): - bucket = "test1_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" s3.mkdir(bucket) s3.rmdir(bucket) assert bucket not in s3.ls("/") def test_mkdir(s3): - bucket = "test1_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" s3.mkdir(bucket) assert bucket in s3.ls("/") -def test_mkdir_existing_bucket(s3): +def test_mkdir_existing_bucket(s3, bucket_names): # mkdir called on existing bucket should be no-op and not calling create_bucket # creating a s3 bucket - bucket = "test1_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" s3.mkdir(bucket) assert bucket in s3.ls("/") # a second call. 
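
One pattern recurring through these hunks: fixed bucket names like `test1_bucket` become `f"test1_bucket-{uuid.uuid4()}"`, so every test creates its own bucket and runs cannot collide on leftover state. The idiom in isolation:

    import uuid

    # A fresh, effectively unique name per call, e.g. "test1_bucket-4f9c..."
    bucket = f"test1_bucket-{uuid.uuid4()}"
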
@@ -602,14 +472,14 @@ def test_mkdir_existing_bucket(s3): def test_mkdir_bucket_and_key_1(s3): - bucket = "test1_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" file = bucket + "/a/b/c" s3.mkdir(file, create_parents=True) assert bucket in s3.ls("/") def test_mkdir_bucket_and_key_2(s3): - bucket = "test1_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" file = bucket + "/a/b/c" with pytest.raises(FileNotFoundError): s3.mkdir(file, create_parents=False) @@ -617,13 +487,13 @@ def test_mkdir_bucket_and_key_2(s3): def test_mkdir_region_name(s3): - bucket = "test2_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" s3.mkdir(bucket, region_name="eu-central-1") assert bucket in s3.ls("/") -def test_mkdir_client_region_name(s3): - bucket = "test3_bucket" +def test_mkdir_client_region_name(s3, endpoint_uri): + bucket = f"test1_bucket-{uuid.uuid4()}" s3 = S3FileSystem( anon=False, client_kwargs={"region_name": "eu-central-1", "endpoint_url": endpoint_uri}, @@ -633,14 +503,14 @@ def test_mkdir_client_region_name(s3): def test_makedirs(s3): - bucket = "test_makedirs_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" test_file = bucket + "/a/b/c/file" s3.makedirs(test_file) assert bucket in s3.ls("/") def test_makedirs_existing_bucket(s3): - bucket = "test_makedirs_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" s3.mkdir(bucket) assert bucket in s3.ls("/") test_file = bucket + "/a/b/c/file" @@ -649,28 +519,28 @@ def test_makedirs_existing_bucket(s3): def test_makedirs_pure_bucket_exist_ok(s3): - bucket = "test1_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" s3.mkdir(bucket) s3.makedirs(bucket, exist_ok=True) def test_makedirs_pure_bucket_error_on_exist(s3): - bucket = "test1_bucket" + bucket = f"test1_bucket-{uuid.uuid4()}" s3.mkdir(bucket) with pytest.raises(FileExistsError): s3.makedirs(bucket, exist_ok=False) -def test_bulk_delete(s3): +def test_bulk_delete(s3, bucket_names): with pytest.raises(FileNotFoundError): s3.rm(["nonexistent/file"]) - filelist = s3.find(test_bucket_name + "/nested") + filelist = s3.find(bucket_names["test"] + "/nested") s3.rm(filelist) - assert not s3.exists(test_bucket_name + "/nested/nested2/file1") + assert not s3.exists(bucket_names["test"] + "/nested/nested2/file1") @pytest.mark.xfail(reason="anon user is still privileged on moto") -def test_anonymous_access(s3): +def test_anonymous_access(s3, endpoint_uri): with ignoring(NoCredentialsError): s3 = S3FileSystem(anon=True, client_kwargs={"endpoint_url": endpoint_uri}) assert s3.ls("") == [] @@ -680,8 +550,8 @@ def test_anonymous_access(s3): s3.mkdir("newbucket") -def test_s3_file_access(s3): - fn = test_bucket_name + "/nested/file1" +def test_s3_file_access(s3, bucket_names): + fn = bucket_names["test"] + "/nested/file1" data = b"hello\n" assert s3.cat(fn) == data assert s3.head(fn, 3) == data[:3] @@ -689,10 +559,10 @@ def test_s3_file_access(s3): assert s3.tail(fn, 10000) == data -def test_s3_file_info(s3): - fn = test_bucket_name + "/nested/file1" +def test_s3_file_info(s3, bucket_names): + fn = bucket_names["test"] + "/nested/file1" data = b"hello\n" - assert fn in s3.find(test_bucket_name) + assert fn in s3.find(bucket_names["test"]) assert s3.exists(fn) assert not s3.exists(fn + "another") assert s3.info(fn)["Size"] == len(data) @@ -700,115 +570,122 @@ def test_s3_file_info(s3): s3.info(fn + "another") -def test_content_type_is_set(s3, tmpdir): +def test_content_type_is_set(s3, tmpdir, bucket_names): test_file = str(tmpdir) + "/test.json" - destination = test_bucket_name + "/test.json" + destination = 
bucket_names["test"] + "/test.json" open(test_file, "w").write("text") s3.put(test_file, destination) assert s3.info(destination)["ContentType"] == "application/json" -def test_content_type_is_not_overrided(s3, tmpdir): +def test_content_type_is_not_overrided(s3, tmpdir, bucket_names): test_file = os.path.join(str(tmpdir), "test.json") - destination = os.path.join(test_bucket_name, "test.json") + destination = os.path.join(bucket_names["test"], "test.json") open(test_file, "w").write("text") s3.put(test_file, destination, ContentType="text/css") assert s3.info(destination)["ContentType"] == "text/css" -def test_bucket_exists(s3): - assert s3.exists(test_bucket_name) - assert not s3.exists(test_bucket_name + "x") +def test_bucket_exists(s3, endpoint_uri, bucket_names): + assert s3.exists(bucket_names["test"]) + assert not s3.exists(bucket_names["test"] + "x") s3 = S3FileSystem(anon=True, client_kwargs={"endpoint_url": endpoint_uri}) - assert s3.exists(test_bucket_name) - assert not s3.exists(test_bucket_name + "x") + assert s3.exists(bucket_names["test"]) + assert not s3.exists(bucket_names["test"] + "x") -def test_du(s3): - d = s3.du(test_bucket_name, total=False) +def test_du(s3, bucket_names, s3_files): + d = s3.du(bucket_names["test"], total=False) assert all(isinstance(v, int) and v >= 0 for v in d.values()) - assert test_bucket_name + "/nested/file1" in d + assert bucket_names["test"] + "/nested/file1" in d - assert s3.du(test_bucket_name + "/test/", total=True) == sum( - map(len, files.values()) + assert s3.du(bucket_names["test"] + "/test/", total=True) == sum( + map(len, s3_files["files"].values()) ) - assert s3.du(test_bucket_name) == s3.du("s3://" + test_bucket_name) + assert s3.du(bucket_names["test"]) == s3.du("s3://" + bucket_names["test"]) -def test_s3_ls(s3): - fn = test_bucket_name + "/nested/file1" - assert fn not in s3.ls(test_bucket_name + "/") - assert fn in s3.ls(test_bucket_name + "/nested/") - assert fn in s3.ls(test_bucket_name + "/nested") - assert s3.ls("s3://" + test_bucket_name + "/nested/") == s3.ls( - test_bucket_name + "/nested" +def test_s3_ls(s3, bucket_names): + fn = bucket_names["test"] + "/nested/file1" + assert fn not in s3.ls(bucket_names["test"] + "/") + assert fn in s3.ls(bucket_names["test"] + "/nested/") + assert fn in s3.ls(bucket_names["test"] + "/nested") + assert s3.ls("s3://" + bucket_names["test"] + "/nested/") == s3.ls( + bucket_names["test"] + "/nested" ) -def test_s3_big_ls(s3): +def test_s3_big_ls(s3, bucket_names): for x in range(1200): - s3.touch(test_bucket_name + "/thousand/%i.part" % x) - assert len(s3.find(test_bucket_name)) > 1200 - s3.rm(test_bucket_name + "/thousand/", recursive=True) - assert len(s3.find(test_bucket_name + "/thousand/")) == 0 + s3.touch(bucket_names["test"] + "/thousand/%i.part" % x) + assert len(s3.find(bucket_names["test"])) > 1200 + s3.rm(bucket_names["test"] + "/thousand/", recursive=True) + assert len(s3.find(bucket_names["test"] + "/thousand/")) == 0 -def test_s3_ls_detail(s3): - L = s3.ls(test_bucket_name + "/nested", detail=True) +def test_s3_ls_detail(s3, bucket_names): + L = s3.ls(bucket_names["test"] + "/nested", detail=True) assert all(isinstance(item, dict) for item in L) -def test_s3_glob(s3): - fn = test_bucket_name + "/nested/file1" - assert fn not in s3.glob(test_bucket_name + "/") - assert fn not in s3.glob(test_bucket_name + "/*") - assert fn not in s3.glob(test_bucket_name + "/nested") - assert fn in s3.glob(test_bucket_name + "/nested/*") - assert fn in s3.glob(test_bucket_name + 
"/nested/file*") - assert fn in s3.glob(test_bucket_name + "/*/*") +def test_s3_glob(s3, bucket_names): + fn = bucket_names["test"] + "/nested/file1" + assert fn not in s3.glob(bucket_names["test"] + "/") + assert fn not in s3.glob(bucket_names["test"] + "/*") + assert fn not in s3.glob(bucket_names["test"] + "/nested") + assert fn in s3.glob(bucket_names["test"] + "/nested/*") + assert fn in s3.glob(bucket_names["test"] + "/nested/file*") + assert fn in s3.glob(bucket_names["test"] + "/*/*") assert all( - any(p.startswith(f + "/") or p == f for p in s3.find(test_bucket_name)) - for f in s3.glob(test_bucket_name + "/nested/*") + any(p.startswith(f + "/") or p == f for p in s3.find(bucket_names["test"])) + for f in s3.glob(bucket_names["test"] + "/nested/*") ) - assert [test_bucket_name + "/nested/nested2"] == s3.glob( - test_bucket_name + "/nested/nested2" + assert [bucket_names["test"] + "/nested/nested2"] == s3.glob( + bucket_names["test"] + "/nested/nested2" ) - out = s3.glob(test_bucket_name + "/nested/nested2/*") - assert {"test/nested/nested2/file1", "test/nested/nested2/file2"} == set(out) + out = s3.glob(bucket_names["test"] + "/nested/nested2/*") + assert { + bucket_names["test"] + "/nested/nested2/file1", + bucket_names["test"] + "/nested/nested2/file2", + } == set(out) with pytest.raises(ValueError): s3.glob("*") # Make sure glob() deals with the dot character (.) correctly. - assert test_bucket_name + "/file.dat" in s3.glob(test_bucket_name + "/file.*") - assert test_bucket_name + "/filexdat" not in s3.glob(test_bucket_name + "/file.*") + assert bucket_names["test"] + "/file.dat" in s3.glob( + bucket_names["test"] + "/file.*" + ) + assert bucket_names["test"] + "/filexdat" not in s3.glob( + bucket_names["test"] + "/file.*" + ) -def test_get_list_of_summary_objects(s3): - L = s3.ls(test_bucket_name + "/test") +def test_get_list_of_summary_objects(s3, bucket_names, s3_files): + L = s3.ls(bucket_names["test"] + "/test") assert len(L) == 2 - assert [l.lstrip(test_bucket_name).lstrip("/") for l in sorted(L)] == sorted( - list(files) + assert [l.lstrip(bucket_names["test"]).lstrip("/") for l in sorted(L)] == sorted( + list(s3_files["files"]) ) - L2 = s3.ls("s3://" + test_bucket_name + "/test") + L2 = s3.ls("s3://" + bucket_names["test"] + "/test") assert L == L2 -def test_read_keys_from_bucket(s3): - for k, data in files.items(): - file_contents = s3.cat("/".join([test_bucket_name, k])) +def test_read_keys_from_bucket(s3, bucket_names, s3_files): + for k, data in s3_files["files"].items(): + file_contents = s3.cat("/".join([bucket_names["test"], k])) assert file_contents == data - assert s3.cat("/".join([test_bucket_name, k])) == s3.cat( - "s3://" + "/".join([test_bucket_name, k]) + assert s3.cat("/".join([bucket_names["test"], k])) == s3.cat( + "s3://" + "/".join([bucket_names["test"], k]) ) -def test_url(s3): - fn = test_bucket_name + "/nested/file1" +def test_url(s3, bucket_names): + fn = bucket_names["test"] + "/nested/file1" url = s3.url(fn, expires=100) assert "http" in url import urllib.parse @@ -824,11 +701,11 @@ def test_url(s3): assert "http" in f.url() -def test_seek(s3): - with s3.open(a, "wb") as f: +def test_seek(s3, file_a): + with s3.open(file_a, "wb") as f: f.write(b"123") - with s3.open(a) as f: + with s3.open(file_a) as f: f.seek(1000) with pytest.raises(ValueError): f.seek(-1) @@ -856,15 +733,15 @@ def test_bad_open(s3): s3.open("") -def test_copy(s3): - fn = test_bucket_name + "/test/accounts.1.json" +def test_copy(s3, bucket_names): + fn = bucket_names["test"] + 
"/test/accounts.1.json" s3.copy(fn, fn + "2") assert s3.cat(fn) == s3.cat(fn + "2") -def test_copy_managed(s3): +def test_copy_managed(s3, bucket_names): data = b"abc" * 12 * 2**20 - fn = test_bucket_name + "/test/biggerfile" + fn = bucket_names["test"] + "/test/biggerfile" with s3.open(fn, "wb") as f: f.write(data) sync(s3.loop, s3._copy_managed, fn, fn + "2", size=len(data), block=5 * 2**20) @@ -876,39 +753,39 @@ def test_copy_managed(s3): @pytest.mark.parametrize("recursive", [True, False]) -def test_move(s3, recursive): - fn = test_bucket_name + "/test/accounts.1.json" +def test_move(s3, recursive, bucket_names): + fn = bucket_names["test"] + "/test/accounts.1.json" data = s3.cat(fn) s3.mv(fn, fn + "2", recursive=recursive) assert s3.cat(fn + "2") == data assert not s3.exists(fn) -def test_get_put(s3, tmpdir): +def test_get_put(s3, tmpdir, bucket_names, s3_files): test_file = str(tmpdir.join("test.json")) - s3.get(test_bucket_name + "/test/accounts.1.json", test_file) - data = files["test/accounts.1.json"] + s3.get(bucket_names["test"] + "/test/accounts.1.json", test_file) + data = s3_files["files"]["test/accounts.1.json"] assert open(test_file, "rb").read() == data - s3.put(test_file, test_bucket_name + "/temp") - assert s3.du(test_bucket_name + "/temp", total=False)[ - test_bucket_name + "/temp" + s3.put(test_file, bucket_names["test"] + "/temp") + assert s3.du(bucket_names["test"] + "/temp", total=False)[ + bucket_names["test"] + "/temp" ] == len(data) - assert s3.cat(test_bucket_name + "/temp") == data + assert s3.cat(bucket_names["test"] + "/temp") == data -def test_get_put_big(s3, tmpdir): +def test_get_put_big(s3, tmpdir, bucket_names): test_file = str(tmpdir.join("test")) data = b"1234567890A" * 2**20 open(test_file, "wb").write(data) - s3.put(test_file, test_bucket_name + "/bigfile") + s3.put(test_file, bucket_names["test"] + "/bigfile") test_file = str(tmpdir.join("test2")) - s3.get(test_bucket_name + "/bigfile", test_file) + s3.get(bucket_names["test"] + "/bigfile", test_file) assert open(test_file, "rb").read() == data -def test_get_put_with_callback(s3, tmpdir): +def test_get_put_with_callback(s3, tmpdir, bucket_names): test_file = str(tmpdir.join("test.json")) class BranchingCallback(Callback): @@ -916,47 +793,47 @@ def branch(self, path_1, path_2, kwargs): kwargs["callback"] = BranchingCallback() cb = BranchingCallback() - s3.get(test_bucket_name + "/test/accounts.1.json", test_file, callback=cb) + s3.get(bucket_names["test"] + "/test/accounts.1.json", test_file, callback=cb) assert cb.size == 1 assert cb.value == 1 cb = BranchingCallback() - s3.put(test_file, test_bucket_name + "/temp", callback=cb) + s3.put(test_file, bucket_names["test"] + "/temp", callback=cb) assert cb.size == 1 assert cb.value == 1 -def test_get_file_with_callback(s3, tmpdir): +def test_get_file_with_callback(s3, tmpdir, bucket_names): test_file = str(tmpdir.join("test.json")) cb = Callback() - s3.get_file(test_bucket_name + "/test/accounts.1.json", test_file, callback=cb) + s3.get_file(bucket_names["test"] + "/test/accounts.1.json", test_file, callback=cb) assert cb.size == os.stat(test_file).st_size assert cb.value == cb.size @pytest.mark.parametrize("size", [2**10, 10 * 2**20]) -def test_put_file_with_callback(s3, tmpdir, size): +def test_put_file_with_callback(s3, tmpdir, size, bucket_names): test_file = str(tmpdir.join("test.json")) with open(test_file, "wb") as f: f.write(b"1234567890A" * size) cb = Callback() - s3.put_file(test_file, test_bucket_name + "/temp", callback=cb) + 
s3.put_file(test_file, bucket_names["test"] + "/temp", callback=cb) assert cb.size == os.stat(test_file).st_size assert cb.value == cb.size @pytest.mark.parametrize("size", [2**10, 2**20, 10 * 2**20]) -def test_pipe_cat_big(s3, size): +def test_pipe_cat_big(s3, size, bucket_names): data = b"1234567890A" * size - s3.pipe(test_bucket_name + "/bigfile", data) - assert s3.cat(test_bucket_name + "/bigfile") == data + s3.pipe(bucket_names["test"] + "/bigfile", data) + assert s3.cat(bucket_names["test"] + "/bigfile") == data -def test_errors(s3): +def test_errors(s3, bucket_names): with pytest.raises(FileNotFoundError): - s3.open(test_bucket_name + "/tmp/test/shfoshf", "rb") + s3.open(bucket_names["test"] + "/tmp/test/shfoshf", "rb") # This is fine, no need for interleaving directories on S3 # with pytest.raises((IOError, OSError)): @@ -964,10 +841,10 @@ def test_errors(s3): # Deleting nonexistent or zero paths is allowed for now # with pytest.raises(FileNotFoundError): - # s3.rm(test_bucket_name + '/tmp/test/shfoshf/x') + # s3.rm(bucket_names['test'] + '/tmp/test/shfoshf/x') with pytest.raises(FileNotFoundError): - s3.mv(test_bucket_name + "/tmp/test/shfoshf/x", "tmp/test/shfoshf/y") + s3.mv(bucket_names["test"] + "/tmp/test/shfoshf/x", "tmp/test/shfoshf/y") with pytest.raises(ValueError): s3.open("x", "rb") @@ -976,11 +853,11 @@ def test_errors(s3): s3.rm("unknown") with pytest.raises(ValueError): - with s3.open(test_bucket_name + "/temp", "wb") as f: + with s3.open(bucket_names["test"] + "/temp", "wb") as f: f.read() with pytest.raises(ValueError): - f = s3.open(test_bucket_name + "/temp", "rb") + f = s3.open(bucket_names["test"] + "/temp", "rb") f.close() f.read() @@ -1014,8 +891,8 @@ async def head_object(*args, **kwargs): assert exc.value.__cause__ is None -def test_read_small(s3): - fn = test_bucket_name + "/2014-01-01.csv" +def test_read_small(s3, bucket_names): + fn = bucket_names["test"] + "/2014-01-01.csv" with s3.open(fn, "rb", block_size=10) as f: out = [] while True: @@ -1028,10 +905,10 @@ def test_read_small(s3): assert len(f.cache) < len(out) -def test_read_s3_block(s3): - data = files["test/accounts.1.json"] +def test_read_s3_block(s3, bucket_names, s3_files): + data = s3_files["files"]["test/accounts.1.json"] lines = io.BytesIO(data).readlines() - path = test_bucket_name + "/test/accounts.1.json" + path = bucket_names["test"] + "/test/accounts.1.json" assert s3.read_block(path, 1, 35, b"\n") == lines[1] assert s3.read_block(path, 0, 30, b"\n") == lines[0] assert s3.read_block(path, 0, 35, b"\n") == lines[0] + lines[1] @@ -1086,16 +963,16 @@ def test_dynamic_add_rm(s3): assert not s3.exists("one") -def test_write_small(s3): - with s3.open(test_bucket_name + "/test", "wb") as f: +def test_write_small(s3, bucket_names): + with s3.open(bucket_names["test"] + "/test", "wb") as f: f.write(b"hello") - assert s3.cat(test_bucket_name + "/test") == b"hello" - s3.open(test_bucket_name + "/test", "wb").close() - assert s3.info(test_bucket_name + "/test")["size"] == 0 + assert s3.cat(bucket_names["test"] + "/test") == b"hello" + s3.open(bucket_names["test"] + "/test", "wb").close() + assert s3.info(bucket_names["test"] + "/test")["size"] == 0 -def test_write_small_with_acl(s3): - bucket, key = (test_bucket_name, "test-acl") +def test_write_small_with_acl(s3, bucket_names): + bucket, key = (bucket_names["test"], "test-acl") filename = bucket + "/" + key body = b"hello" public_read_acl = { @@ -1116,47 +993,47 @@ def test_write_small_with_acl(s3): ) -def test_write_large(s3): +def 
test_write_large(s3, bucket_names): "flush() chunks buffer when processing large singular payload" mb = 2**20 payload_size = int(2.5 * 5 * mb) payload = b"0" * payload_size - with s3.open(test_bucket_name + "/test", "wb") as fd: + with s3.open(bucket_names["test"] + "/test", "wb") as fd: fd.write(payload) - assert s3.cat(test_bucket_name + "/test") == payload - assert s3.info(test_bucket_name + "/test")["size"] == payload_size + assert s3.cat(bucket_names["test"] + "/test") == payload + assert s3.info(bucket_names["test"] + "/test")["size"] == payload_size -def test_write_limit(s3): +def test_write_limit(s3, bucket_names): "flush() respects part_max when processing large singular payload" mb = 2**20 block_size = 15 * mb payload_size = 44 * mb payload = b"0" * payload_size - with s3.open(test_bucket_name + "/test", "wb", blocksize=block_size) as fd: + with s3.open(bucket_names["test"] + "/test", "wb", blocksize=block_size) as fd: fd.write(payload) - assert s3.cat(test_bucket_name + "/test") == payload + assert s3.cat(bucket_names["test"] + "/test") == payload - assert s3.info(test_bucket_name + "/test")["size"] == payload_size + assert s3.info(bucket_names["test"] + "/test")["size"] == payload_size -def test_write_small_secure(s3): +def test_write_small_secure(s3, bucket_names): # Unfortunately moto does not yet support enforcing SSE policies. It also # does not return the correct objects that can be used to test the results # effectively. # This test is left as a placeholder in case moto eventually supports this. sse_params = SSEParams(server_side_encryption="aws:kms") - with s3.open(secure_bucket_name + "/test", "wb", writer_kwargs=sse_params) as f: + with s3.open(bucket_names["secure"] + "/test", "wb", writer_kwargs=sse_params) as f: f.write(b"hello") - assert s3.cat(secure_bucket_name + "/test") == b"hello" - sync(s3.loop, s3.s3.head_object, Bucket=secure_bucket_name, Key="test") + assert s3.cat(bucket_names["secure"] + "/test") == b"hello" + sync(s3.loop, s3.s3.head_object, Bucket=bucket_names["secure"], Key="test") -def test_write_large_secure(s3): +def test_write_large_secure(s3, endpoint_uri): # build our own s3fs with the relevant additional kwarg s3 = S3FileSystem( s3_additional_kwargs={"ServerSideEncryption": "AES256"}, @@ -1170,13 +1047,13 @@ def test_write_large_secure(s3): assert s3.cat("mybucket/myfile") == b"hello hello" * 10**6 -def test_write_fails(s3): +def test_write_fails(s3, bucket_names): with pytest.raises(ValueError): - s3.touch(test_bucket_name + "/temp") - s3.open(test_bucket_name + "/temp", "rb").write(b"hello") + s3.touch(bucket_names["test"] + "/temp") + s3.open(bucket_names["test"] + "/temp", "rb").write(b"hello") with pytest.raises(ValueError): - s3.open(test_bucket_name + "/temp", "wb", block_size=10) - f = s3.open(test_bucket_name + "/temp", "wb") + s3.open(bucket_names["test"] + "/temp", "wb", block_size=10) + f = s3.open(bucket_names["test"] + "/temp", "wb") f.close() with pytest.raises(ValueError): f.write(b"hello") @@ -1184,8 +1061,8 @@ def test_write_fails(s3): s3.open("nonexistentbucket/temp", "wb").close() -def test_write_blocks(s3): - with s3.open(test_bucket_name + "/temp", "wb") as f: +def test_write_blocks(s3, bucket_names): + with s3.open(bucket_names["test"] + "/temp", "wb") as f: f.write(b"a" * 2 * 2**20) assert f.buffer.tell() == 2 * 2**20 assert not (f.parts) @@ -1196,38 +1073,38 @@ def test_write_blocks(s3): f.write(b"a" * 2 * 2**20) assert f.mpu assert f.parts - assert s3.info(test_bucket_name + "/temp")["size"] == 6 * 2**20 - with 
s3.open(test_bucket_name + "/temp", "wb", block_size=10 * 2**20) as f: + assert s3.info(bucket_names["test"] + "/temp")["size"] == 6 * 2**20 + with s3.open(bucket_names["test"] + "/temp", "wb", block_size=10 * 2**20) as f: f.write(b"a" * 15 * 2**20) assert f.buffer.tell() == 0 - assert s3.info(test_bucket_name + "/temp")["size"] == 15 * 2**20 + assert s3.info(bucket_names["test"] + "/temp")["size"] == 15 * 2**20 -def test_readline(s3): +def test_readline(s3, bucket_names, s3_files): all_items = chain.from_iterable( - [files.items(), csv_files.items(), text_files.items()] + [s3_files["files"].items(), s3_files["csv"].items(), s3_files["text"].items()] ) for k, data in all_items: - with s3.open("/".join([test_bucket_name, k]), "rb") as f: + with s3.open("/".join([bucket_names["test"], k]), "rb") as f: result = f.readline() expected = data.split(b"\n")[0] + (b"\n" if data.count(b"\n") else b"") assert result == expected -def test_readline_empty(s3): +def test_readline_empty(s3, file_a): data = b"" - with s3.open(a, "wb") as f: + with s3.open(file_a, "wb") as f: f.write(data) - with s3.open(a, "rb") as f: + with s3.open(file_a, "rb") as f: result = f.readline() assert result == data -def test_readline_blocksize(s3): +def test_readline_blocksize(s3, file_a): data = b"ab\n" + b"a" * (10 * 2**20) + b"\nab" - with s3.open(a, "wb") as f: + with s3.open(file_a, "wb") as f: f.write(data) - with s3.open(a, "rb") as f: + with s3.open(file_a, "rb") as f: result = f.readline() expected = b"ab\n" assert result == expected @@ -1241,18 +1118,18 @@ def test_readline_blocksize(s3): assert result == expected -def test_next(s3): - expected = csv_files["2014-01-01.csv"].split(b"\n")[0] + b"\n" - with s3.open(test_bucket_name + "/2014-01-01.csv") as f: +def test_next(s3, bucket_names, s3_files): + expected = s3_files["csv"]["2014-01-01.csv"].split(b"\n")[0] + b"\n" + with s3.open(bucket_names["test"] + "/2014-01-01.csv") as f: result = next(f) assert result == expected -def test_iterable(s3): +def test_iterable(s3, file_a): data = b"abc\n123" - with s3.open(a, "wb") as f: + with s3.open(file_a, "wb") as f: f.write(data) - with s3.open(a) as f, io.BytesIO(data) as g: + with s3.open(file_a) as f, io.BytesIO(data) as g: for froms3, fromio in zip(f, g): assert froms3 == fromio f.seek(0) @@ -1261,76 +1138,76 @@ def test_iterable(s3): f.seek(1) assert f.readline() == b"bc\n" - with s3.open(a) as f: + with s3.open(file_a) as f: out = list(f) - with s3.open(a) as f: + with s3.open(file_a) as f: out2 = f.readlines() assert out == out2 assert b"".join(out) == data -def test_readable(s3): - with s3.open(a, "wb") as f: +def test_readable(s3, file_a): + with s3.open(file_a, "wb") as f: assert not f.readable() - with s3.open(a, "rb") as f: + with s3.open(file_a, "rb") as f: assert f.readable() -def test_seekable(s3): - with s3.open(a, "wb") as f: +def test_seekable(s3, file_a): + with s3.open(file_a, "wb") as f: assert not f.seekable() - with s3.open(a, "rb") as f: + with s3.open(file_a, "rb") as f: assert f.seekable() -def test_writable(s3): - with s3.open(a, "wb") as f: +def test_writable(s3, file_a): + with s3.open(file_a, "wb") as f: assert f.writable() - with s3.open(a, "rb") as f: + with s3.open(file_a, "rb") as f: assert not f.writable() -def test_merge(s3): - with s3.open(a, "wb") as f: +def test_merge(s3, bucket_names, file_a, file_b): + with s3.open(file_a, "wb") as f: f.write(b"a" * 10 * 2**20) - with s3.open(b, "wb") as f: + with s3.open(file_b, "wb") as f: f.write(b"a" * 10 * 2**20) - s3.merge(test_bucket_name + 
"/joined", [a, b]) - assert s3.info(test_bucket_name + "/joined")["size"] == 2 * 10 * 2**20 + s3.merge(bucket_names["test"] + "/joined", [file_a, file_b]) + assert s3.info(bucket_names["test"] + "/joined")["size"] == 2 * 10 * 2**20 -def test_append(s3): - data = text_files["nested/file1"] - with s3.open(test_bucket_name + "/nested/file1", "ab") as f: +def test_append(s3, bucket_names, s3_files, file_a): + data = s3_files["text"]["nested/file1"] + with s3.open(bucket_names["test"] + "/nested/file1", "ab") as f: assert f.tell() == len(data) # append, no write, small file - assert s3.cat(test_bucket_name + "/nested/file1") == data - with s3.open(test_bucket_name + "/nested/file1", "ab") as f: + assert s3.cat(bucket_names["test"] + "/nested/file1") == data + with s3.open(bucket_names["test"] + "/nested/file1", "ab") as f: f.write(b"extra") # append, write, small file - assert s3.cat(test_bucket_name + "/nested/file1") == data + b"extra" + assert s3.cat(bucket_names["test"] + "/nested/file1") == data + b"extra" - with s3.open(a, "wb") as f: + with s3.open(file_a, "wb") as f: f.write(b"a" * 10 * 2**20) - with s3.open(a, "ab") as f: + with s3.open(file_a, "ab") as f: pass # append, no write, big file - assert s3.cat(a) == b"a" * 10 * 2**20 + assert s3.cat(file_a) == b"a" * 10 * 2**20 - with s3.open(a, "ab") as f: + with s3.open(file_a, "ab") as f: assert f.parts is None f._initiate_upload() assert f.parts assert f.tell() == 10 * 2**20 f.write(b"extra") # append, small write, big file - assert s3.cat(a) == b"a" * 10 * 2**20 + b"extra" + assert s3.cat(file_a) == b"a" * 10 * 2**20 + b"extra" - with s3.open(a, "ab") as f: + with s3.open(file_a, "ab") as f: assert f.tell() == 10 * 2**20 + 5 f.write(b"b" * 10 * 2**20) # append, big write, big file assert f.tell() == 20 * 2**20 + 5 - assert s3.cat(a) == b"a" * 10 * 2**20 + b"extra" + b"b" * 10 * 2**20 + assert s3.cat(file_a) == b"a" * 10 * 2**20 + b"extra" + b"b" * 10 * 2**20 # Keep Head Metadata head = dict( @@ -1346,13 +1223,13 @@ def test_append(s3): WebsiteRedirectLocation="https://www.example.com/", BucketKeyEnabled=False, ) - with s3.open(a, "wb", **head) as f: + with s3.open(file_a, "wb", **head) as f: f.write(b"data") - with s3.open(a, "ab") as f: + with s3.open(file_a, "ab") as f: f.write(b"other") - with s3.open(a) as f: + with s3.open(file_a) as f: filehead = { k: v for k, v in f._call_s3( @@ -1363,33 +1240,33 @@ def test_append(s3): assert filehead == head -def test_bigger_than_block_read(s3): - with s3.open(test_bucket_name + "/2014-01-01.csv", "rb", block_size=3) as f: +def test_bigger_than_block_read(s3, bucket_names, s3_files): + with s3.open(bucket_names["test"] + "/2014-01-01.csv", "rb", block_size=3) as f: out = [] while True: data = f.read(20) out.append(data) if len(data) == 0: break - assert b"".join(out) == csv_files["2014-01-01.csv"] + assert b"".join(out) == s3_files["csv"]["2014-01-01.csv"] -def test_current(s3): +def test_current(s3, endpoint_uri): s3._cache.clear() s3 = S3FileSystem(client_kwargs={"endpoint_url": endpoint_uri}) assert s3.current() is s3 assert S3FileSystem.current() is s3 -def test_array(s3): +def test_array(s3, file_a): from array import array data = array("B", [65] * 1000) - with s3.open(a, "wb") as f: + with s3.open(file_a, "wb") as f: f.write(data) - with s3.open(a, "rb") as f: + with s3.open(file_a, "rb") as f: out = f.read() assert out == b"A" * 1000 @@ -1421,26 +1298,26 @@ def test_no_connection_sharing_among_processes(s3, method): @pytest.mark.xfail() -def test_public_file(s3): +def 
test_public_file(s3, endpoint_uri, bucket_names): # works on real s3, not on moto - test_bucket_name = "s3fs_public_test" + bucket_names["test"] = "s3fs_public_test" other_bucket_name = "s3fs_private_test" - s3.touch(test_bucket_name) - s3.touch(test_bucket_name + "/afile") + s3.touch(bucket_names["test"]) + s3.touch(bucket_names["test"] + "/afile") s3.touch(other_bucket_name, acl="public-read") s3.touch(other_bucket_name + "/afile", acl="public-read") s = S3FileSystem(anon=True, client_kwargs={"endpoint_url": endpoint_uri}) with pytest.raises(PermissionError): - s.ls(test_bucket_name) + s.ls(bucket_names["test"]) s.ls(other_bucket_name) - s3.chmod(test_bucket_name, acl="public-read") + s3.chmod(bucket_names["test"], acl="public-read") s3.chmod(other_bucket_name, acl="private") with pytest.raises(PermissionError): s.ls(other_bucket_name, refresh=True) - assert s.ls(test_bucket_name, refresh=True) + assert s.ls(bucket_names["test"], refresh=True) # public file in private bucket with s3.open(other_bucket_name + "/see_me", "wb", acl="public-read") as f: @@ -1448,8 +1325,8 @@ def test_public_file(s3): assert s.cat(other_bucket_name + "/see_me") == b"hello" -def test_upload_with_s3fs_prefix(s3): - path = "s3://test/prefix/key" +def test_upload_with_s3fs_prefix(s3, bucket_names): + path = f"s3://{bucket_names['test']}/prefix/key" with s3.open(path, "wb") as f: f.write(b"a" * (10 * 2**20)) @@ -1458,11 +1335,11 @@ def test_upload_with_s3fs_prefix(s3): f.write(b"b" * (10 * 2**20)) -def test_multipart_upload_blocksize(s3): +def test_multipart_upload_blocksize(s3, file_a): blocksize = 5 * (2**20) expected_parts = 3 - s3f = s3.open(a, "wb", block_size=blocksize) + s3f = s3.open(file_a, "wb", block_size=blocksize) for _ in range(3): data = b"b" * blocksize s3f.write(data) @@ -1472,13 +1349,13 @@ def test_multipart_upload_blocksize(s3): s3f.close() -def test_default_pars(s3): +def test_default_pars(s3, endpoint_uri, bucket_names, s3_files): s3 = S3FileSystem( default_block_size=20, default_fill_cache=False, client_kwargs={"endpoint_url": endpoint_uri}, ) - fn = test_bucket_name + "/" + list(files)[0] + fn = bucket_names["test"] + "/" + list(s3_files["files"])[0] with s3.open(fn) as f: assert f.blocksize == 20 assert f.fill_cache is False @@ -1487,9 +1364,9 @@ def test_default_pars(s3): assert f.fill_cache is True -def test_tags(s3): +def test_tags(s3, s3_files, bucket_names): tagset = {"tag1": "value1", "tag2": "value2"} - fname = list(files)[0] + fname = bucket_names["test"] + "/" + list(s3_files["files"])[0] s3.touch(fname) s3.put_tags(fname, tagset) assert s3.get_tags(fname) == tagset @@ -1501,8 +1378,8 @@ def test_tags(s3): assert s3.get_tags(fname) == tagset -def test_versions(s3): - versioned_file = versioned_bucket_name + "/versioned_file" +def test_versions(s3, endpoint_uri, bucket_names): + versioned_file = bucket_names["versioned"] + "/versioned_file" s3 = S3FileSystem( anon=False, version_aware=True, client_kwargs={"endpoint_url": endpoint_uri} ) @@ -1531,13 +1408,13 @@ def test_versions(s3): assert fo.read() == b"1" -def test_list_versions_many(s3): +def test_list_versions_many(s3, endpoint_uri, bucket_names): # moto doesn't actually behave in the same way that s3 does here so this doesn't test # anything really in moto 1.2 s3 = S3FileSystem( anon=False, version_aware=True, client_kwargs={"endpoint_url": endpoint_uri} ) - versioned_file = versioned_bucket_name + "/versioned_file2" + versioned_file = bucket_names["versioned"] + "/versioned_file2" for i in range(1200): with 
s3.open(versioned_file, "wb") as fo: fo.write(b"1") @@ -1545,12 +1422,12 @@ def test_list_versions_many(s3): assert len(versions) == 1200 -def test_fsspec_versions_multiple(s3): +def test_fsspec_versions_multiple(s3, endpoint_uri, bucket_names): """Test that the standard fsspec.core.get_fs_token_paths behaves as expected for versionId urls""" s3 = S3FileSystem( anon=False, version_aware=True, client_kwargs={"endpoint_url": endpoint_uri} ) - versioned_file = versioned_bucket_name + "/versioned_file3" + versioned_file = bucket_names["versioned"] + "/versioned_file3" version_lookup = {} for i in range(20): contents = str(i).encode() @@ -1572,8 +1449,8 @@ def test_fsspec_versions_multiple(s3): assert contents == version_lookup[fo.version_id] -def test_versioned_file_fullpath(s3): - versioned_file = versioned_bucket_name + "/versioned_file_fullpath" +def test_versioned_file_fullpath(s3, endpoint_uri, bucket_names): + versioned_file = bucket_names["versioned"] + "/versioned_file_fullpath" s3 = S3FileSystem( anon=False, version_aware=True, client_kwargs={"endpoint_url": endpoint_uri} ) @@ -1595,8 +1472,8 @@ def test_versioned_file_fullpath(s3): assert fo.read() == b"1" -def test_versions_unaware(s3): - versioned_file = versioned_bucket_name + "/versioned_file3" +def test_versions_unaware(s3, endpoint_uri, bucket_names): + versioned_file = bucket_names["versioned"] + "/versioned_file3" s3 = S3FileSystem( anon=False, version_aware=False, client_kwargs={"endpoint_url": endpoint_uri} ) @@ -1662,7 +1539,7 @@ def test_readinto(s3): assert contents.startswith(b"Hello, World!") -def test_change_defaults_only_subsequent(): +def test_change_defaults_only_subsequent(endpoint_uri): """Test for Issue #135 Ensure that changing the default block size doesn't affect existing file @@ -1696,18 +1573,22 @@ def test_change_defaults_only_subsequent(): S3FileSystem.cachable = True -def test_cache_after_copy(s3): +def test_cache_after_copy(s3, bucket_names): # https://github.com/dask/dask/issues/5134 - s3.touch("test/afile") - assert "test/afile" in s3.ls("s3://test", False) - s3.cp("test/afile", "test/bfile") - assert "test/bfile" in s3.ls("s3://test", False) + s3.touch(bucket_names["test"] + "/afile") + assert bucket_names["test"] + "/afile" in s3.ls( + "s3://" + bucket_names["test"], False + ) + s3.cp(bucket_names["test"] + "/afile", bucket_names["test"] + "/bfile") + assert bucket_names["test"] + "/bfile" in s3.ls( + "s3://" + bucket_names["test"], False + ) -def test_autocommit(s3): - auto_file = test_bucket_name + "/auto_file" - committed_file = test_bucket_name + "/commit_file" - aborted_file = test_bucket_name + "/aborted_file" +def test_autocommit(s3, endpoint_uri, bucket_names): + auto_file = bucket_names["test"] + "/auto_file" + committed_file = bucket_names["test"] + "/commit_file" + aborted_file = bucket_names["test"] + "/aborted_file" s3 = S3FileSystem( anon=False, version_aware=True, client_kwargs={"endpoint_url": endpoint_uri} ) @@ -1737,18 +1618,18 @@ def write_and_flush(path, autocommit): fo.commit() -def test_autocommit_mpu(s3): +def test_autocommit_mpu(s3, bucket_names): """When not autocommitting we always want to use multipart uploads""" - path = test_bucket_name + "/auto_commit_with_mpu" + path = bucket_names["test"] + "/auto_commit_with_mpu" with s3.open(path, "wb", autocommit=False) as fo: fo.write(b"1") assert fo.mpu is not None assert len(fo.parts) == 1 -def test_touch(s3): +def test_touch(s3, bucket_names): # create - fn = test_bucket_name + "/touched" + fn = bucket_names["test"] + 
"/touched" assert not s3.exists(fn) s3.touch(fn) assert s3.exists(fn) @@ -1770,8 +1651,8 @@ def test_touch(s3): assert s3.size(fn) == 4 -def test_touch_versions(s3): - versioned_file = versioned_bucket_name + "/versioned_file" +def test_touch_versions(s3, endpoint_uri, bucket_names): + versioned_file = bucket_names["versioned"] + "/versioned_file" s3 = S3FileSystem( anon=False, version_aware=True, client_kwargs={"endpoint_url": endpoint_uri} ) @@ -1800,9 +1681,9 @@ def test_touch_versions(s3): assert fo.read() == b"1" -def test_cat_missing(s3): - fn0 = test_bucket_name + "/file0" - fn1 = test_bucket_name + "/file1" +def test_cat_missing(s3, bucket_names): + fn0 = bucket_names["test"] + "/file0" + fn1 = bucket_names["test"] + "/file1" s3.touch(fn0) with pytest.raises(FileNotFoundError): s3.cat([fn0, fn1], on_error="raise") @@ -1813,20 +1694,20 @@ def test_cat_missing(s3): assert isinstance(out[fn1], FileNotFoundError) -def test_get_directories(s3, tmpdir): - s3.touch(test_bucket_name + "/dir/dirkey/key0") - s3.touch(test_bucket_name + "/dir/dirkey/key1") - s3.touch(test_bucket_name + "/dir/dirkey") - s3.touch(test_bucket_name + "/dir/dir/key") +def test_get_directories(s3, tmpdir, bucket_names): + s3.touch(bucket_names["test"] + "/dir/dirkey/key0") + s3.touch(bucket_names["test"] + "/dir/dirkey/key1") + s3.touch(bucket_names["test"] + "/dir/dirkey") + s3.touch(bucket_names["test"] + "/dir/dir/key") d = str(tmpdir) - s3.get(test_bucket_name + "/dir", d, recursive=True) + s3.get(bucket_names["test"] + "/dir", d, recursive=True) assert {"dirkey", "dir"} == set(os.listdir(d)) assert ["key"] == os.listdir(os.path.join(d, "dir")) assert {"key0", "key1"} == set(os.listdir(os.path.join(d, "dirkey"))) -def test_seek_reads(s3): - fn = test_bucket_name + "/myfile" +def test_seek_reads(s3, bucket_names): + fn = bucket_names["test"] + "/myfile" with s3.open(fn, "wb") as f: f.write(b"a" * 175627146) with s3.open(fn, "rb", blocksize=100) as f: @@ -1844,7 +1725,7 @@ def test_seek_reads(s3): assert len(d3) == size -def test_connect_many(s3): +def test_connect_many(s3, endpoint_uri): from multiprocessing.pool import ThreadPool def task(i): @@ -1858,8 +1739,8 @@ def task(i): pool.join() -def test_requester_pays(s3): - fn = test_bucket_name + "/myfile" +def test_requester_pays(s3, endpoint_uri, bucket_names): + fn = bucket_names["test"] + "/myfile" s3 = S3FileSystem(requester_pays=True, client_kwargs={"endpoint_url": endpoint_uri}) assert s3.req_kw["RequestPayer"] == "requester" s3.touch(fn) @@ -1867,7 +1748,7 @@ def test_requester_pays(s3): assert f.req_kw["RequestPayer"] == "requester" -def test_credentials(): +def test_credentials(endpoint_uri): s3 = S3FileSystem( key="foo", secret="foo", client_kwargs={"endpoint_url": endpoint_uri} ) @@ -1912,8 +1793,8 @@ def test_credentials(): ).s3 -def test_modified(s3): - dir_path = test_bucket_name + "/modified" +def test_modified(s3, bucket_names): + dir_path = bucket_names["test"] + "/modified" file_path = dir_path + "/file" # Test file @@ -1927,10 +1808,10 @@ def test_modified(s3): # Test bucket with pytest.raises(IsADirectoryError): - s3.modified(path=test_bucket_name) + s3.modified(path=bucket_names["test"]) -def test_async_s3(s3): +def test_async_s3(s3, endpoint_uri, bucket_names): async def _(): s3 = S3FileSystem( anon=False, @@ -1939,7 +1820,7 @@ async def _(): client_kwargs={"region_name": "eu-central-1", "endpoint_url": endpoint_uri}, ) - fn = test_bucket_name + "/nested/file1" + fn = bucket_names["test"] + "/nested/file1" data = b"hello\n" # Is good with or 
without connect() @@ -1964,9 +1845,9 @@ async def _(): asyncio.run(_()) -def test_cat_ranges(s3): +def test_cat_ranges(s3, bucket_names): data = b"a string to select from" - fn = test_bucket_name + "/parts" + fn = bucket_names["test"] + "/parts" s3.pipe(fn, data) assert s3.cat_file(fn) == data @@ -1976,7 +1857,7 @@ def test_cat_ranges(s3): assert s3.cat_file(fn, start=-5) == data[-5:] -def test_async_s3_old(s3): +def test_async_s3_old(s3, endpoint_uri, bucket_names): async def _(): s3 = S3FileSystem( anon=False, @@ -1985,7 +1866,7 @@ async def _(): client_kwargs={"region_name": "eu-central-1", "endpoint_url": endpoint_uri}, ) - fn = test_bucket_name + "/nested/file1" + fn = bucket_names["test"] + "/nested/file1" data = b"hello\n" # Check old API @@ -2010,35 +1891,35 @@ def test_via_fsspec(s3): assert f.read() == b"hello" -def test_repeat_exists(s3): - fn = "s3://" + test_bucket_name + "/file1" +def test_repeat_exists(s3, bucket_names): + fn = "s3://" + bucket_names["test"] + "/file1" s3.touch(fn) assert s3.exists(fn) assert s3.exists(fn) -def test_with_xzarr(s3): +def test_with_xzarr(s3, bucket_names): da = pytest.importorskip("dask.array") xr = pytest.importorskip("xarray") name = "sample" nana = xr.DataArray(da.random.random((1024, 1024, 10, 9, 1))) - s3_path = f"{test_bucket_name}/{name}" + s3_path = f"{bucket_names['test']}/{name}" s3store = s3.get_mapper(s3_path) s3.ls("") nana.to_dataset().to_zarr(store=s3store, mode="w", consolidated=True, compute=True) -def test_async_close(): +def test_async_close(bucket_names): async def _(): loop = asyncio.get_event_loop() s3 = S3FileSystem(anon=False, asynchronous=True, loop=loop) await s3._connect() - fn = test_bucket_name + "/afile" + fn = bucket_names["test"] + "/afile" async def async_wrapper(): coros = [ @@ -2060,18 +1941,18 @@ async def async_wrapper(): asyncio.run(_()) -def test_put_single(s3, tmpdir): +def test_put_single(s3, tmpdir, bucket_names): fn = os.path.join(str(tmpdir), "dir") os.mkdir(fn) open(os.path.join(fn, "abc"), "w").write("text") - s3.put(fn + "/", test_bucket_name) # no-op, no files - assert not s3.exists(test_bucket_name + "/abc") - assert not s3.exists(test_bucket_name + "/dir") - s3.put(fn + "/", test_bucket_name, recursive=True) # no-op, no files - assert s3.cat(test_bucket_name + "/dir/abc") == b"text" + s3.put(fn + "/", bucket_names["test"]) # no-op, no files + assert not s3.exists(bucket_names["test"] + "/abc") + assert not s3.exists(bucket_names["test"] + "/dir") + s3.put(fn + "/", bucket_names["test"], recursive=True) # no-op, no files + assert s3.cat(bucket_names["test"] + "/dir/abc") == b"text" -def test_shallow_find(s3): +def test_shallow_find(s3, bucket_names): """Test that find method respects maxdepth. Verify that the ``find`` method respects the ``maxdepth`` parameter. With @@ -2079,20 +1960,20 @@ def test_shallow_find(s3): ``ls``, without returning subdirectories. See also issue 378. 
""" - assert s3.ls(test_bucket_name) == s3.find( - test_bucket_name, maxdepth=1, withdirs=True + assert s3.ls(bucket_names["test"]) == s3.find( + bucket_names["test"], maxdepth=1, withdirs=True ) - assert s3.ls(test_bucket_name) == s3.glob(test_bucket_name + "/*") + assert s3.ls(bucket_names["test"]) == s3.glob(bucket_names["test"] + "/*") -def test_version_sizes(s3): +def test_version_sizes(s3, endpoint_uri, bucket_names): # protect against caching of incorrect version details s3 = S3FileSystem( anon=False, version_aware=True, client_kwargs={"endpoint_url": endpoint_uri} ) import gzip - path = f"s3://{versioned_bucket_name}/test.txt.gz" + path = f"s3://{bucket_names['versioned']}/test.txt.gz" versions = [ s3.pipe_file(path, gzip.compress(text)) for text in ( @@ -2109,10 +1990,10 @@ def test_version_sizes(s3): zfp.read() -def test_find_no_side_effect(s3): - infos1 = s3.find(test_bucket_name, maxdepth=1, withdirs=True, detail=True) - s3.find(test_bucket_name, maxdepth=None, withdirs=True, detail=True) - infos3 = s3.find(test_bucket_name, maxdepth=1, withdirs=True, detail=True) +def test_find_no_side_effect(s3, bucket_names): + infos1 = s3.find(bucket_names["test"], maxdepth=1, withdirs=True, detail=True) + s3.find(bucket_names["test"], maxdepth=None, withdirs=True, detail=True) + infos3 = s3.find(bucket_names["test"], maxdepth=1, withdirs=True, detail=True) assert infos1.keys() == infos3.keys() @@ -2154,17 +2035,17 @@ def test_get_file_info_with_selector(s3): condition=version.parse(moto.__version__) <= version.parse("1.3.16"), reason="Moto 1.3.16 is not supporting pre-conditions.", ) -def test_raise_exception_when_file_has_changed_during_reading(s3): +def test_raise_exception_when_file_has_changed_during_reading( + s3, bucket_names, boto3_client +): test_file_name = "file1" - test_file = "s3://" + test_bucket_name + "/" + test_file_name + test_file = "s3://" + bucket_names["test"] + "/" + test_file_name content1 = b"123" content2 = b"ABCDEFG" - boto3_client = get_boto3_client() - def create_file(content: bytes): boto3_client.put_object( - Bucket=test_bucket_name, Key=test_file_name, Body=content + Bucket=bucket_names["test"], Key=test_file_name, Body=content ) create_file(b"123") @@ -2179,13 +2060,13 @@ def create_file(content: bytes): f.read() -def test_s3fs_etag_preserving_multipart_copy(monkeypatch, s3): +def test_s3fs_etag_preserving_multipart_copy(monkeypatch, s3, bucket_names): # Set this to a lower value so that we can actually # test this without creating giant objects in memory monkeypatch.setattr(s3fs.core, "MANAGED_COPY_THRESHOLD", 5 * 2**20) - test_file1 = test_bucket_name + "/test/multipart-upload.txt" - test_file2 = test_bucket_name + "/test/multipart-upload-copy.txt" + test_file1 = bucket_names["test"] + "/test/multipart-upload.txt" + test_file2 = bucket_names["test"] + "/test/multipart-upload-copy.txt" with s3.open(test_file1, "wb", block_size=5 * 2**21) as stream: for _ in range(5): @@ -2211,116 +2092,118 @@ def test_s3fs_etag_preserving_multipart_copy(monkeypatch, s3): s3.rm(test_file1) -def test_sync_from_wihin_async(s3): +def test_sync_from_wihin_async(s3, endpoint_uri, bucket_names): # if treating as sync but within an even loop, e.g., calling from jupyter; # IO happens on dedicated thread. 
async def f(): S3FileSystem.clear_instance_cache() s3 = S3FileSystem(anon=False, client_kwargs={"endpoint_url": endpoint_uri}) - assert s3.ls(test_bucket_name) + assert s3.ls(bucket_names["test"]) asyncio.run(f()) -def test_token_paths(s3): +def test_token_paths(s3, endpoint_uri, bucket_names): fs, tok, files = fsspec.get_fs_token_paths( - "s3://" + test_bucket_name + "/*.csv", + "s3://" + bucket_names["test"] + "/*.csv", storage_options={"client_kwargs": {"endpoint_url": endpoint_uri}}, ) assert files -def test_same_name_but_no_exact(s3): - s3.touch(test_bucket_name + "/very/similar/prefix1") - s3.touch(test_bucket_name + "/very/similar/prefix2") - s3.touch(test_bucket_name + "/very/similar/prefix3/something") - assert not s3.exists(test_bucket_name + "/very/similar/prefix") - assert not s3.exists(test_bucket_name + "/very/similar/prefi") - assert not s3.exists(test_bucket_name + "/very/similar/pref") +def test_same_name_but_no_exact(s3, bucket_names): + s3.touch(bucket_names["test"] + "/very/similar/prefix1") + s3.touch(bucket_names["test"] + "/very/similar/prefix2") + s3.touch(bucket_names["test"] + "/very/similar/prefix3/something") + assert not s3.exists(bucket_names["test"] + "/very/similar/prefix") + assert not s3.exists(bucket_names["test"] + "/very/similar/prefi") + assert not s3.exists(bucket_names["test"] + "/very/similar/pref") - assert s3.exists(test_bucket_name + "/very/similar/") - assert s3.exists(test_bucket_name + "/very/similar/prefix1") - assert s3.exists(test_bucket_name + "/very/similar/prefix2") - assert s3.exists(test_bucket_name + "/very/similar/prefix3") - assert s3.exists(test_bucket_name + "/very/similar/prefix3/") - assert s3.exists(test_bucket_name + "/very/similar/prefix3/something") + assert s3.exists(bucket_names["test"] + "/very/similar/") + assert s3.exists(bucket_names["test"] + "/very/similar/prefix1") + assert s3.exists(bucket_names["test"] + "/very/similar/prefix2") + assert s3.exists(bucket_names["test"] + "/very/similar/prefix3") + assert s3.exists(bucket_names["test"] + "/very/similar/prefix3/") + assert s3.exists(bucket_names["test"] + "/very/similar/prefix3/something") - assert not s3.exists(test_bucket_name + "/very/similar/prefix3/some") + assert not s3.exists(bucket_names["test"] + "/very/similar/prefix3/some") - s3.touch(test_bucket_name + "/starting/very/similar/prefix") + s3.touch(bucket_names["test"] + "/starting/very/similar/prefix") - assert not s3.exists(test_bucket_name + "/starting/very/similar/prefix1") - assert not s3.exists(test_bucket_name + "/starting/very/similar/prefix2") - assert not s3.exists(test_bucket_name + "/starting/very/similar/prefix3") - assert not s3.exists(test_bucket_name + "/starting/very/similar/prefix3/") - assert not s3.exists(test_bucket_name + "/starting/very/similar/prefix3/something") + assert not s3.exists(bucket_names["test"] + "/starting/very/similar/prefix1") + assert not s3.exists(bucket_names["test"] + "/starting/very/similar/prefix2") + assert not s3.exists(bucket_names["test"] + "/starting/very/similar/prefix3") + assert not s3.exists(bucket_names["test"] + "/starting/very/similar/prefix3/") + assert not s3.exists( + bucket_names["test"] + "/starting/very/similar/prefix3/something" + ) - assert s3.exists(test_bucket_name + "/starting/very/similar/prefix") - assert s3.exists(test_bucket_name + "/starting/very/similar/prefix/") + assert s3.exists(bucket_names["test"] + "/starting/very/similar/prefix") + assert s3.exists(bucket_names["test"] + "/starting/very/similar/prefix/") -def 
test_leading_forward_slash(s3): - s3.touch(test_bucket_name + "/some/file") - assert s3.ls(test_bucket_name + "/some/") - assert s3.exists(test_bucket_name + "/some/file") - assert s3.exists("s3://" + test_bucket_name + "/some/file") +def test_leading_forward_slash(s3, bucket_names): + s3.touch(bucket_names["test"] + "/some/file") + assert s3.ls(bucket_names["test"] + "/some/") + assert s3.exists(bucket_names["test"] + "/some/file") + assert s3.exists("s3://" + bucket_names["test"] + "/some/file") -def test_lsdir(s3): +def test_lsdir(s3, bucket_names): # https://github.com/fsspec/s3fs/issues/475 - s3.find(test_bucket_name) + s3.find(bucket_names["test"]) - d = test_bucket_name + "/test" - assert d in s3.ls(test_bucket_name) + d = bucket_names["test"] + "/test" + assert d in s3.ls(bucket_names["test"]) -def test_copy_file_without_etag(s3, monkeypatch): +def test_copy_file_without_etag(s3, monkeypatch, bucket_names): - s3.touch(test_bucket_name + "/copy_tests/file") - s3.ls(test_bucket_name + "/copy_tests/") + s3.touch(bucket_names["test"] + "/copy_tests/file") + s3.ls(bucket_names["test"] + "/copy_tests/") - [file] = s3.dircache[test_bucket_name + "/copy_tests"] + [file] = s3.dircache[bucket_names["test"] + "/copy_tests"] - assert file["name"] == test_bucket_name + "/copy_tests/file" + assert file["name"] == bucket_names["test"] + "/copy_tests/file" file.pop("ETag") assert s3.info(file["name"]).get("ETag", None) is None - s3.cp_file(file["name"], test_bucket_name + "/copy_tests/file2") - assert s3.info(test_bucket_name + "/copy_tests/file2")["ETag"] is not None + s3.cp_file(file["name"], bucket_names["test"] + "/copy_tests/file2") + assert s3.info(bucket_names["test"] + "/copy_tests/file2")["ETag"] is not None -def test_find_with_prefix(s3): +def test_find_with_prefix(s3, bucket_names): for cursor in range(100): - s3.touch(test_bucket_name + f"/prefixes/test_{cursor}") + s3.touch(bucket_names["test"] + f"/prefixes/test_{cursor}") - s3.touch(test_bucket_name + "/prefixes2") - assert len(s3.find(test_bucket_name + "/prefixes")) == 100 - assert len(s3.find(test_bucket_name, prefix="prefixes")) == 101 + s3.touch(bucket_names["test"] + "/prefixes2") + assert len(s3.find(bucket_names["test"] + "/prefixes")) == 100 + assert len(s3.find(bucket_names["test"], prefix="prefixes")) == 101 - assert len(s3.find(test_bucket_name + "/prefixes/test_")) == 0 - assert len(s3.find(test_bucket_name + "/prefixes", prefix="test_")) == 100 - assert len(s3.find(test_bucket_name + "/prefixes/", prefix="test_")) == 100 + assert len(s3.find(bucket_names["test"] + "/prefixes/test_")) == 0 + assert len(s3.find(bucket_names["test"] + "/prefixes", prefix="test_")) == 100 + assert len(s3.find(bucket_names["test"] + "/prefixes/", prefix="test_")) == 100 - test_1s = s3.find(test_bucket_name + "/prefixes/test_1") + test_1s = s3.find(bucket_names["test"] + "/prefixes/test_1") assert len(test_1s) == 1 - assert test_1s[0] == test_bucket_name + "/prefixes/test_1" + assert test_1s[0] == bucket_names["test"] + "/prefixes/test_1" - test_1s = s3.find(test_bucket_name + "/prefixes/", prefix="test_1") + test_1s = s3.find(bucket_names["test"] + "/prefixes/", prefix="test_1") assert len(test_1s) == 11 - assert test_1s == [test_bucket_name + "/prefixes/test_1"] + [ - test_bucket_name + f"/prefixes/test_{cursor}" for cursor in range(10, 20) + assert test_1s == [bucket_names["test"] + "/prefixes/test_1"] + [ + bucket_names["test"] + f"/prefixes/test_{cursor}" for cursor in range(10, 20) ] - assert s3.find(test_bucket_name + "/prefixes/") == 
s3.find( - test_bucket_name + "/prefixes/", prefix=None + assert s3.find(bucket_names["test"] + "/prefixes/") == s3.find( + bucket_names["test"] + "/prefixes/", prefix=None ) -def test_list_after_find(s3): - before = s3.ls("s3://test") - s3.invalidate_cache("s3://test/2014-01-01.csv") - s3.find("s3://test/2014-01-01.csv") - after = s3.ls("s3://test") +def test_list_after_find(s3, bucket_names): + before = s3.ls(f"s3://{bucket_names['test']}") + s3.invalidate_cache(f"s3://{bucket_names['test']}/2014-01-01.csv") + s3.find(f"s3://{bucket_names['test']}/2014-01-01.csv") + after = s3.ls(f"s3://{bucket_names['test']}") assert before == after @@ -2335,12 +2218,12 @@ def test_upload_recursive_to_bucket(s3, tmpdir): s3.put(folders[0], "newbucket", recursive=True) -def test_rm_file(s3): - target = test_bucket_name + "/to_be_removed/file" +def test_rm_file(s3, bucket_names): + target = bucket_names["test"] + "/to_be_removed/file" s3.touch(target) s3.rm_file(target) assert not s3.exists(target) - assert not s3.exists(test_bucket_name + "/to_be_removed") + assert not s3.exists(bucket_names["test"] + "/to_be_removed") def test_exists_isdir(s3): diff --git a/s3fs/tests/test_utils.py b/tests/test_utils.py similarity index 100% rename from s3fs/tests/test_utils.py rename to tests/test_utils.py diff --git a/versioneer.py b/versioneer.py deleted file mode 100644 index d183df02..00000000 --- a/versioneer.py +++ /dev/null @@ -1,2064 +0,0 @@ - -# Version: 0.20 - -"""The Versioneer - like a rocketeer, but for versions. - -The Versioneer -============== - -* like a rocketeer, but for versions! -* https://github.com/python-versioneer/python-versioneer -* Brian Warner -* License: Public Domain -* Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 -* [![Latest Version][pypi-image]][pypi-url] -* [![Build Status][travis-image]][travis-url] - -This is a tool for managing a recorded version number in distutils-based -python projects. The goal is to remove the tedious and error-prone "update -the embedded version string" step from your release process. Making a new -release should be as easy as recording a new tag in your version-control -system, and maybe making new tarballs. - - -## Quick Install - -* `pip install versioneer` to somewhere in your $PATH -* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) -* run `versioneer install` in your source tree, commit the results -* Verify version information with `python setup.py version` - -## Version Identifiers - -Source trees come from a variety of places: - -* a version-control system checkout (mostly used by developers) -* a nightly tarball, produced by build automation -* a snapshot tarball, produced by a web-based VCS browser, like github's - "tarball from tag" feature -* a release tarball, produced by "setup.py sdist", distributed through PyPI - -Within each source tree, the version identifier (either a string or a number, -this tool is format-agnostic) can come from a variety of places: - -* ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows - about recent "tags" and an absolute revision-id -* the name of the directory into which the tarball was unpacked -* an expanded VCS keyword ($Id$, etc) -* a `_version.py` created by some earlier build step - -For released software, the version identifier is closely related to a VCS -tag. Some projects use tag names that include more than just the version -string (e.g. 
"myproject-1.2" instead of just "1.2"), in which case the tool -needs to strip the tag prefix to extract the version identifier. For -unreleased software (between tags), the version identifier should provide -enough information to help developers recreate the same tree, while also -giving them an idea of roughly how old the tree is (after version 1.2, before -version 1.3). Many VCS systems can report a description that captures this, -for example `git describe --tags --dirty --always` reports things like -"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the -0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has -uncommitted changes). - -The version identifier is used for multiple purposes: - -* to allow the module to self-identify its version: `myproject.__version__` -* to choose a name and prefix for a 'setup.py sdist' tarball - -## Theory of Operation - -Versioneer works by adding a special `_version.py` file into your source -tree, where your `__init__.py` can import it. This `_version.py` knows how to -dynamically ask the VCS tool for version information at import time. - -`_version.py` also contains `$Revision$` markers, and the installation -process marks `_version.py` to have this marker rewritten with a tag name -during the `git archive` command. As a result, generated tarballs will -contain enough information to get the proper version. - -To allow `setup.py` to compute a version too, a `versioneer.py` is added to -the top level of your source tree, next to `setup.py` and the `setup.cfg` -that configures it. This overrides several distutils/setuptools commands to -compute the version when invoked, and changes `setup.py build` and `setup.py -sdist` to replace `_version.py` with a small static file that contains just -the generated version data. - -## Installation - -See [INSTALL.md](./INSTALL.md) for detailed installation instructions. - -## Version-String Flavors - -Code which uses Versioneer can learn about its version string at runtime by -importing `_version` from your main `__init__.py` file and running the -`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can -import the top-level `versioneer.py` and run `get_versions()`. - -Both functions return a dictionary with different flavors of version -information: - -* `['version']`: A condensed version string, rendered using the selected - style. This is the most commonly used value for the project's version - string. The default "pep440" style yields strings like `0.11`, - `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section - below for alternative styles. - -* `['full-revisionid']`: detailed revision identifier. For Git, this is the - full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". - -* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the - commit date in ISO 8601 format. This will be None if the date is not - available. - -* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that - this is only accurate if run in a VCS checkout, otherwise it is likely to - be False or None - -* `['error']`: if the version string could not be computed, this will be set - to a string describing the problem, otherwise it will be None. It may be - useful to throw an exception in setup.py if this is set, to avoid e.g. - creating tarballs with a version string of "unknown". - -Some variants are more useful than others. 
Including `full-revisionid` in a -bug report should allow developers to reconstruct the exact code being tested -(or indicate the presence of local changes that should be shared with the -developers). `version` is suitable for display in an "about" box or a CLI -`--version` output: it can be easily compared against release notes and lists -of bugs fixed in various releases. - -The installer adds the following text to your `__init__.py` to place a basic -version in `YOURPROJECT.__version__`: - - from ._version import get_versions - __version__ = get_versions()['version'] - del get_versions - -## Styles - -The setup.cfg `style=` configuration controls how the VCS information is -rendered into a version string. - -The default style, "pep440", produces a PEP440-compliant string, equal to the -un-prefixed tag name for actual releases, and containing an additional "local -version" section with more detail for in-between builds. For Git, this is -TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags ---dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the -tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and -that this commit is two revisions ("+2") beyond the "0.11" tag. For released -software (exactly equal to a known tag), the identifier will only contain the -stripped tag, e.g. "0.11". - -Other styles are available. See [details.md](details.md) in the Versioneer -source tree for descriptions. - -## Debugging - -Versioneer tries to avoid fatal errors: if something goes wrong, it will tend -to return a version of "0+unknown". To investigate the problem, run `setup.py -version`, which will run the version-lookup code in a verbose mode, and will -display the full contents of `get_versions()` (including the `error` string, -which may help identify what went wrong). - -## Known Limitations - -Some situations are known to cause problems for Versioneer. This details the -most significant ones. More can be found on Github -[issues page](https://github.com/python-versioneer/python-versioneer/issues). - -### Subprojects - -Versioneer has limited support for source trees in which `setup.py` is not in -the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are -two common reasons why `setup.py` might not be in the root: - -* Source trees which contain multiple subprojects, such as - [Buildbot](https://github.com/buildbot/buildbot), which contains both - "main" and "slave" subprojects, each with their own `setup.py`, - `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI - distributions (and upload multiple independently-installable tarballs). -* Source trees whose main purpose is to contain a C library, but which also - provide bindings to Python (and perhaps other languages) in subdirectories. - -Versioneer will look for `.git` in parent directories, and most operations -should get the right version string. However `pip` and `setuptools` have bugs -and implementation details which frequently cause `pip install .` from a -subproject directory to fail to find a correct version string (so it usually -defaults to `0+unknown`). - -`pip install --editable .` should work correctly. `setup.py install` might -work too. - -Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in -some later version. - -[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking -this issue. 
The discussion in -[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the -issue from the Versioneer side in more detail. -[pip PR#3176](https://github.com/pypa/pip/pull/3176) and -[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve -pip to let Versioneer work correctly. - -Versioneer-0.16 and earlier only looked for a `.git` directory next to the -`setup.cfg`, so subprojects were completely unsupported with those releases. - -### Editable installs with setuptools <= 18.5 - -`setup.py develop` and `pip install --editable .` allow you to install a -project into a virtualenv once, then continue editing the source code (and -test) without re-installing after every change. - -"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a -convenient way to specify executable scripts that should be installed along -with the Python package. - -These both work as expected when using modern setuptools. When using -setuptools-18.5 or earlier, however, certain operations will cause -`pkg_resources.DistributionNotFound` errors when running the entrypoint -script, which must be resolved by re-installing the package. This happens -when the install happens with one version, then the egg_info data is -regenerated while a different version is checked out. Many setup.py commands -cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into -a different virtualenv), so this can be surprising. - -[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes -this one, but upgrading to a newer version of setuptools should probably -resolve it. - - -## Updating Versioneer - -To upgrade your project to a new release of Versioneer, do the following: - -* install the new Versioneer (`pip install -U versioneer` or equivalent) -* edit `setup.cfg`, if necessary, to include any new configuration settings - indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. -* re-run `versioneer install` in your source tree, to replace - `SRC/_version.py` -* commit any changed files - -## Future Directions - -This tool is designed to be easily extended to other version-control -systems: all VCS-specific components are in separate directories like -src/git/ . The top-level `versioneer.py` script is assembled from these -components by running make-versioneer.py . In the future, make-versioneer.py -will take a VCS name as an argument, and will construct a version of -`versioneer.py` that is specific to the given VCS. It might also take the -configuration arguments that are currently provided manually during -installation by editing setup.py . Alternatively, it might go the other -direction and include code from all supported VCS systems, reducing the -number of intermediate scripts. - -## Similar projects - -* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time - dependency -* [miniver](https://github.com/jbweston/miniver) - a lightweight reimplementation of - versioneer -* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools - plugin - -## License - -To make Versioneer easier to embed, all its code is dedicated to the public -domain. The `_version.py` that it creates is also in the public domain. -Specifically, both are released under the Creative Commons "Public Domain -Dedication" license (CC0-1.0), as described in -https://creativecommons.org/publicdomain/zero/1.0/ .
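As a concrete illustration of the version-string flavors described above, here is a minimal usage sketch (the dict values are illustrative, assuming a checkout two commits past a "0.11" tag with uncommitted changes):

    import versioneer
    info = versioneer.get_versions()
    # info would resemble (illustrative values):
    # {'version': '0.11+2.g1076c97.dirty',
    #  'full-revisionid': '1076c978a8d3cfc70f408fe5974aa6c092c949ac',
    #  'dirty': True,
    #  'error': None,
    #  'date': '2021-03-01T12:00:00+0000'}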
- -[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg -[pypi-url]: https://pypi.python.org/pypi/versioneer/ -[travis-image]: -https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg -[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer - -""" - -import configparser -import errno -import json -import os -import re -import subprocess -import sys - - -class VersioneerConfig: # pylint: disable=too-few-public-methods # noqa - """Container for Versioneer configuration parameters.""" - - -def get_root(): - """Get the project root directory. - - We require that all commands are run from the project root, i.e. the - directory that contains setup.py, setup.cfg, and versioneer.py . - """ - root = os.path.realpath(os.path.abspath(os.getcwd())) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - # allow 'python path/to/setup.py COMMAND' - root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) - setup_py = os.path.join(root, "setup.py") - versioneer_py = os.path.join(root, "versioneer.py") - if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): - err = ("Versioneer was unable to find the project root directory. " - "Versioneer requires setup.py to be executed from " - "its immediate directory (like 'python setup.py COMMAND'), " - "or in a way that lets it use sys.argv[0] to find the root " - "(like 'python path/to/setup.py COMMAND').") - raise VersioneerBadRootError(err) - try: - # Certain runtime workflows (setup.py install/develop in a setuptools - # tree) execute all dependencies in a single python process, so - # "versioneer" may be imported multiple times, and python's shared - # module-import table will cache the first one. So we can't use - # os.path.dirname(__file__), as that will find whichever - # versioneer.py was first imported, even in later projects. - my_path = os.path.realpath(os.path.abspath(__file__)) - me_dir = os.path.normcase(os.path.splitext(my_path)[0]) - vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) - if me_dir != vsr_dir: - print("Warning: build in %s is using versioneer.py from %s" - % (os.path.dirname(my_path), versioneer_py)) - except NameError: - pass - return root - - -def get_config_from_root(root): - """Read the project setup.cfg file to determine Versioneer config.""" - # This might raise EnvironmentError (if setup.cfg is missing), or - # configparser.NoSectionError (if it lacks a [versioneer] section), or - # configparser.NoOptionError (if it lacks "VCS="). See the docstring at - # the top of versioneer.py for instructions on writing your setup.cfg .
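    # For illustration, a minimal setup.cfg section that this function would
    # parse (values assumed, mirroring the sample configuration shown in
    # CONFIG_ERROR further below):
    #
    #   [versioneer]
    #   VCS = git
    #   style = pep440
    #   versionfile_source = src/myproject/_version.py
    #   versionfile_build = myproject/_version.py
    #   tag_prefix =
    #   parentdir_prefix = myproject-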
- setup_cfg = os.path.join(root, "setup.cfg") - parser = configparser.ConfigParser() - with open(setup_cfg, "r") as cfg_file: - parser.read_file(cfg_file) - VCS = parser.get("versioneer", "VCS") # mandatory - - # Dict-like interface for non-mandatory entries - section = parser["versioneer"] - - # pylint:disable=attribute-defined-outside-init # noqa - cfg = VersioneerConfig() - cfg.VCS = VCS - cfg.style = section.get("style", "") - cfg.versionfile_source = section.get("versionfile_source") - cfg.versionfile_build = section.get("versionfile_build") - cfg.tag_prefix = section.get("tag_prefix") - if cfg.tag_prefix in ("''", '""'): - cfg.tag_prefix = "" - cfg.parentdir_prefix = section.get("parentdir_prefix") - cfg.verbose = section.get("verbose") - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -# these dictionaries contain VCS-specific tools -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - HANDLERS.setdefault(vcs, {})[method] = f - return f - return decorate - - -# pylint:disable=too-many-arguments,consider-using-with # noqa -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %s" % dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %s" % (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %s (error)" % dispcmd) - print("stdout was %s" % stdout) - return None, process.returncode - return stdout, process.returncode - - -LONG_VERSION_PY['git'] = r''' -# This file helps to compute a version number in source trees obtained from -# git-archive tarball (such as those provided by githubs download-from-tag -# feature). Distribution tarballs (built by setup.py sdist) and build -# directories (produced by setup.py build) will contain a much shorter file -# that just contains the computed version number. - -# This file is released into the public domain. Generated by -# versioneer-0.20 (https://github.com/python-versioneer/python-versioneer) - -"""Git implementation of _version.py.""" - -import errno -import os -import re -import subprocess -import sys - - -def get_keywords(): - """Get the keywords needed to look up the version information.""" - # these strings will be replaced by git during git-archive. - # setup.py/versioneer.py will grep for the variable names, so they must - # each be defined on a line of their own. _version.py will just call - # get_keywords(). 
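    # Illustrative note (assumed values): 'git archive' expands the $Format$
    # placeholders below inside the exported tarball, e.g.
    #   git_refnames = " (HEAD -> main, tag: v0.11)"
    #   git_full = "1076c978a8d3cfc70f408fe5974aa6c092c949ac"
    # so get_keywords() in an archive returns real values instead of the
    # unexpanded templates.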
- git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" - git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" - git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" - keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} - return keywords - - -class VersioneerConfig: # pylint: disable=too-few-public-methods - """Container for Versioneer configuration parameters.""" - - -def get_config(): - """Create, populate and return the VersioneerConfig() object.""" - # these strings are filled in when 'setup.py versioneer' creates - # _version.py - cfg = VersioneerConfig() - cfg.VCS = "git" - cfg.style = "%(STYLE)s" - cfg.tag_prefix = "%(TAG_PREFIX)s" - cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" - cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" - cfg.verbose = False - return cfg - - -class NotThisMethod(Exception): - """Exception raised if a method is not valid for the current scenario.""" - - -LONG_VERSION_PY = {} -HANDLERS = {} - - -def register_vcs_handler(vcs, method): # decorator - """Create decorator to mark a method as the handler of a VCS.""" - def decorate(f): - """Store f in HANDLERS[vcs][method].""" - if vcs not in HANDLERS: - HANDLERS[vcs] = {} - HANDLERS[vcs][method] = f - return f - return decorate - - -# pylint:disable=too-many-arguments,consider-using-with # noqa -def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, - env=None): - """Call the given command(s).""" - assert isinstance(commands, list) - process = None - for command in commands: - try: - dispcmd = str([command] + args) - # remember shell=False, so use git.cmd on windows, not just git - process = subprocess.Popen([command] + args, cwd=cwd, env=env, - stdout=subprocess.PIPE, - stderr=(subprocess.PIPE if hide_stderr - else None)) - break - except EnvironmentError: - e = sys.exc_info()[1] - if e.errno == errno.ENOENT: - continue - if verbose: - print("unable to run %%s" %% dispcmd) - print(e) - return None, None - else: - if verbose: - print("unable to find command, tried %%s" %% (commands,)) - return None, None - stdout = process.communicate()[0].strip().decode() - if process.returncode != 0: - if verbose: - print("unable to run %%s (error)" %% dispcmd) - print("stdout was %%s" %% stdout) - return None, process.returncode - return stdout, process.returncode - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %%s but none started with prefix %%s" %% - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
- keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %%d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "main". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%%s', no digits" %% ",".join(refs - tags)) - if verbose: - print("likely tags: %%s" %% ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %%s" %% r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. 
- - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %%s not under git control" %% root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%%s*" %% tag_prefix], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "main" in branches: - branch_name = "main" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%%s'" - %% describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%%s' doesn't start with prefix '%%s'" - print(fmt %% (full_tag, tag_prefix)) - pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" - %% (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not main branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the main branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post0.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post0.dev%%d" %% pieces["distance"] - else: - # exception #1 - rendered = "0.post0.dev%%d" %% pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. 
- - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not main branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += "+g%%s" %% pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%%d" %% pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%%s'" %% style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -def get_versions(): - """Get version information or return default if unable to do so.""" - # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have - # __file__, we can work backwards from there to the root. Some - # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which - # case we can only use expanded keywords. - - cfg = get_config() - verbose = cfg.verbose - - try: - return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, - verbose) - except NotThisMethod: - pass - - try: - root = os.path.realpath(__file__) - # versionfile_source is the relative path from the top of the source - # tree (where the .git directory might live) to this file. Invert - # this to find the root from __file__. - for _ in cfg.versionfile_source.split('/'): - root = os.path.dirname(root) - except NameError: - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to find root of source tree", - "date": None} - - try: - pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) - return render(pieces, cfg.style) - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - except NotThisMethod: - pass - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, - "error": "unable to compute version", "date": None} -''' - - -@register_vcs_handler("git", "get_keywords") -def git_get_keywords(versionfile_abs): - """Extract version information from the given file.""" - # the code embedded in _version.py can just fetch the value of these - # keywords. When used from setup.py, we don't want to import _version.py, - # so we do it with a regexp instead. This function is not used from - # _version.py. 
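    # Sketch of the expected behavior (assumed values): given an expanded
    # _version.py containing
    #   git_refnames = " (tag: v0.11)"
    #   git_full = "1076c978a8d3cfc70f408fe5974aa6c092c949ac"
    # this function returns a dict like
    #   {"refnames": " (tag: v0.11)",
    #    "full": "1076c978a8d3cfc70f408fe5974aa6c092c949ac"}
    # plus a "date" entry when git_date is present.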
- keywords = {} - try: - with open(versionfile_abs, "r") as fobj: - for line in fobj: - if line.strip().startswith("git_refnames ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["refnames"] = mo.group(1) - if line.strip().startswith("git_full ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["full"] = mo.group(1) - if line.strip().startswith("git_date ="): - mo = re.search(r'=\s*"(.*)"', line) - if mo: - keywords["date"] = mo.group(1) - except EnvironmentError: - pass - return keywords - - -@register_vcs_handler("git", "keywords") -def git_versions_from_keywords(keywords, tag_prefix, verbose): - """Get version information from git keywords.""" - if "refnames" not in keywords: - raise NotThisMethod("Short version file found") - date = keywords.get("date") - if date is not None: - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - - # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant - # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 - # -like" string, which we must then edit to make compliant), because - # it's been around since git-1.5.3, and it's too difficult to - # discover which version we're using, or to work around using an - # older one. - date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - refnames = keywords["refnames"].strip() - if refnames.startswith("$Format"): - if verbose: - print("keywords are unexpanded, not using") - raise NotThisMethod("unexpanded keywords, not a git-archive tarball") - refs = {r.strip() for r in refnames.strip("()").split(",")} - # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of - # just "foo-1.0". If we see a "tag: " prefix, prefer those. - TAG = "tag: " - tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} - if not tags: - # Either we're using git < 1.8.3, or there really are no tags. We use - # a heuristic: assume all version tags have a digit. The old git %d - # expansion behaves like git log --decorate=short and strips out the - # refs/heads/ and refs/tags/ prefixes that would let us distinguish - # between branches and tags. By ignoring refnames without digits, we - # filter out many common branch names like "release" and - # "stabilization", as well as "HEAD" and "main". - tags = {r for r in refs if re.search(r'\d', r)} - if verbose: - print("discarding '%s', no digits" % ",".join(refs - tags)) - if verbose: - print("likely tags: %s" % ",".join(sorted(tags))) - for ref in sorted(tags): - # sorting will prefer e.g. "2.0" over "2.0rc1" - if ref.startswith(tag_prefix): - r = ref[len(tag_prefix):] - # Filter out refs that exactly match prefix or that don't start - # with a number once the prefix is stripped (mostly a concern - # when prefix is '') - if not re.match(r'\d', r): - continue - if verbose: - print("picking %s" % r) - return {"version": r, - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": None, - "date": date} - # no suitable tags, so version is "0+unknown", but full hex is still there - if verbose: - print("no suitable tags, using unknown + full revision id") - return {"version": "0+unknown", - "full-revisionid": keywords["full"].strip(), - "dirty": False, "error": "no suitable tags", "date": None} - - -@register_vcs_handler("git", "pieces_from_vcs") -def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): - """Get version from 'git describe' in the root of the source tree. 
- - This only gets called if the git-archive 'subst' keywords were *not* - expanded, and _version.py hasn't already been rewritten with a short - version string, meaning we're inside a checked out source tree. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - - _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, - hide_stderr=True) - if rc != 0: - if verbose: - print("Directory %s not under git control" % root) - raise NotThisMethod("'git rev-parse --git-dir' returned error") - - # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] - # if there isn't one, this yields HEX[-dirty] (no NUM) - describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", - "--always", "--long", - "--match", "%s*" % tag_prefix], - cwd=root) - # --long was added in git-1.5.5 - if describe_out is None: - raise NotThisMethod("'git describe' failed") - describe_out = describe_out.strip() - full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) - if full_out is None: - raise NotThisMethod("'git rev-parse' failed") - full_out = full_out.strip() - - pieces = {} - pieces["long"] = full_out - pieces["short"] = full_out[:7] # maybe improved later - pieces["error"] = None - - branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], - cwd=root) - # --abbrev-ref was added in git-1.6.3 - if rc != 0 or branch_name is None: - raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") - branch_name = branch_name.strip() - - if branch_name == "HEAD": - # If we aren't exactly on a branch, pick a branch which represents - # the current commit. If all else fails, we are on a branchless - # commit. - branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) - # --contains was added in git-1.5.4 - if rc != 0 or branches is None: - raise NotThisMethod("'git branch --contains' returned error") - branches = branches.split("\n") - - # Remove the first line if we're running detached - if "(" in branches[0]: - branches.pop(0) - - # Strip off the leading "* " from the list of branches. - branches = [branch[2:] for branch in branches] - if "main" in branches: - branch_name = "main" - elif not branches: - branch_name = None - else: - # Pick the first branch that is returned. Good or bad. - branch_name = branches[0] - - pieces["branch"] = branch_name - - # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] - # TAG might have hyphens. - git_describe = describe_out - - # look for -dirty suffix - dirty = git_describe.endswith("-dirty") - pieces["dirty"] = dirty - if dirty: - git_describe = git_describe[:git_describe.rindex("-dirty")] - - # now we have TAG-NUM-gHEX or HEX - - if "-" in git_describe: - # TAG-NUM-gHEX - mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) - if not mo: - # unparsable. Maybe git-describe is misbehaving? 
- pieces["error"] = ("unable to parse git-describe output: '%s'" - % describe_out) - return pieces - - # tag - full_tag = mo.group(1) - if not full_tag.startswith(tag_prefix): - if verbose: - fmt = "tag '%s' doesn't start with prefix '%s'" - print(fmt % (full_tag, tag_prefix)) - pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" - % (full_tag, tag_prefix)) - return pieces - pieces["closest-tag"] = full_tag[len(tag_prefix):] - - # distance: number of commits since tag - pieces["distance"] = int(mo.group(2)) - - # commit: short hex revision ID - pieces["short"] = mo.group(3) - - else: - # HEX: no tags - pieces["closest-tag"] = None - count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) - pieces["distance"] = int(count_out) # total number of commits - - # commit date: see ISO-8601 comment in git_versions_from_keywords() - date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() - # Use only the last line. Previous lines may contain GPG signature - # information. - date = date.splitlines()[-1] - pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) - - return pieces - - -def do_vcs_install(manifest_in, versionfile_source, ipy): - """Git-specific installation logic for Versioneer. - - For Git, this means creating/changing .gitattributes to mark _version.py - for export-subst keyword substitution. - """ - GITS = ["git"] - if sys.platform == "win32": - GITS = ["git.cmd", "git.exe"] - files = [manifest_in, versionfile_source] - if ipy: - files.append(ipy) - try: - my_path = __file__ - if my_path.endswith(".pyc") or my_path.endswith(".pyo"): - my_path = os.path.splitext(my_path)[0] + ".py" - versioneer_file = os.path.relpath(my_path) - except NameError: - versioneer_file = "versioneer.py" - files.append(versioneer_file) - present = False - try: - with open(".gitattributes", "r") as fobj: - for line in fobj: - if line.strip().startswith(versionfile_source): - if "export-subst" in line.strip().split()[1:]: - present = True - break - except EnvironmentError: - pass - if not present: - with open(".gitattributes", "a+") as fobj: - fobj.write(f"{versionfile_source} export-subst\n") - files.append(".gitattributes") - run_command(GITS, ["add", "--"] + files) - - -def versions_from_parentdir(parentdir_prefix, root, verbose): - """Try to determine the version from the parent directory name. - - Source tarballs conventionally unpack into a directory that includes both - the project name and a version string. We will also support searching up - two directory levels for an appropriately named parent directory - """ - rootdirs = [] - - for _ in range(3): - dirname = os.path.basename(root) - if dirname.startswith(parentdir_prefix): - return {"version": dirname[len(parentdir_prefix):], - "full-revisionid": None, - "dirty": False, "error": None, "date": None} - rootdirs.append(root) - root = os.path.dirname(root) # up a level - - if verbose: - print("Tried directories %s but none started with prefix %s" % - (str(rootdirs), parentdir_prefix)) - raise NotThisMethod("rootdir doesn't start with parentdir_prefix") - - -SHORT_VERSION_PY = """ -# This file was generated by 'versioneer.py' (0.20) from -# revision-control system data, or from the parent directory name of an -# unpacked source archive. Distribution tarballs contain a pre-generated copy -# of this file. 
- -import json - -version_json = ''' -%s -''' # END VERSION_JSON - - -def get_versions(): - return json.loads(version_json) -""" - - -def versions_from_file(filename): - """Try to determine the version from _version.py if present.""" - try: - with open(filename) as f: - contents = f.read() - except EnvironmentError: - raise NotThisMethod("unable to read _version.py") - mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", - contents, re.M | re.S) - if not mo: - raise NotThisMethod("no version_json in _version.py") - return json.loads(mo.group(1)) - - -def write_to_version_file(filename, versions): - """Write the given version number to the given _version.py file.""" - os.unlink(filename) - contents = json.dumps(versions, sort_keys=True, - indent=1, separators=(",", ": ")) - with open(filename, "w") as f: - f.write(SHORT_VERSION_PY % contents) - - print("set %s to '%s'" % (filename, versions["version"])) - - -def plus_or_dot(pieces): - """Return a + if we don't already have one, else return a .""" - if "+" in pieces.get("closest-tag", ""): - return "." - return "+" - - -def render_pep440(pieces): - """Build up version string, with post-release "local version identifier". - - Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you - get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty - - Exceptions: - 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_branch(pieces): - """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . - - The ".dev0" means not main branch. Note that .dev0 sorts backwards - (a feature branch will appear "older" than the main branch). - - Exceptions: - 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0" - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += "+untagged.%d.g%s" % (pieces["distance"], - pieces["short"]) - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_pre(pieces): - """TAG[.post0.devDISTANCE] -- No -dirty. - - Exceptions: - 1: no tags. 0.post0.devDISTANCE - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += ".post0.dev%d" % pieces["distance"] - else: - # exception #1 - rendered = "0.post0.dev%d" % pieces["distance"] - return rendered - - -def render_pep440_post(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX] . - - The ".dev0" means dirty. Note that .dev0 sorts backwards - (a dirty tree will appear "older" than the corresponding clean one), - but you shouldn't be releasing software with -dirty anyways. - - Exceptions: - 1: no tags. 
0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - return rendered - - -def render_pep440_post_branch(pieces): - """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . - - The ".dev0" means not main branch. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += plus_or_dot(pieces) - rendered += "g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["branch"] != "main": - rendered += ".dev0" - rendered += "+g%s" % pieces["short"] - if pieces["dirty"]: - rendered += ".dirty" - return rendered - - -def render_pep440_old(pieces): - """TAG[.postDISTANCE[.dev0]] . - - The ".dev0" means dirty. - - Exceptions: - 1: no tags. 0.postDISTANCE[.dev0] - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"] or pieces["dirty"]: - rendered += ".post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - else: - # exception #1 - rendered = "0.post%d" % pieces["distance"] - if pieces["dirty"]: - rendered += ".dev0" - return rendered - - -def render_git_describe(pieces): - """TAG[-DISTANCE-gHEX][-dirty]. - - Like 'git describe --tags --dirty --always'. - - Exceptions: - 1: no tags. HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - if pieces["distance"]: - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render_git_describe_long(pieces): - """TAG-DISTANCE-gHEX[-dirty]. - - Like 'git describe --tags --dirty --always -long'. - The distance/hash is unconditional. - - Exceptions: - 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) - """ - if pieces["closest-tag"]: - rendered = pieces["closest-tag"] - rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) - else: - # exception #1 - rendered = pieces["short"] - if pieces["dirty"]: - rendered += "-dirty" - return rendered - - -def render(pieces, style): - """Render the given version pieces into the requested style.""" - if pieces["error"]: - return {"version": "unknown", - "full-revisionid": pieces.get("long"), - "dirty": None, - "error": pieces["error"], - "date": None} - - if not style or style == "default": - style = "pep440" # the default - - if style == "pep440": - rendered = render_pep440(pieces) - elif style == "pep440-branch": - rendered = render_pep440_branch(pieces) - elif style == "pep440-pre": - rendered = render_pep440_pre(pieces) - elif style == "pep440-post": - rendered = render_pep440_post(pieces) - elif style == "pep440-post-branch": - rendered = render_pep440_post_branch(pieces) - elif style == "pep440-old": - rendered = render_pep440_old(pieces) - elif style == "git-describe": - rendered = render_git_describe(pieces) - elif style == "git-describe-long": - rendered = render_git_describe_long(pieces) - else: - raise ValueError("unknown style '%s'" % style) - - return {"version": rendered, "full-revisionid": pieces["long"], - "dirty": pieces["dirty"], "error": None, - "date": pieces.get("date")} - - -class VersioneerBadRootError(Exception): - """The project root directory is unknown or missing key files.""" - - -def get_versions(verbose=False): - """Get the project version from whatever source is available. - - Returns dict with two keys: 'version' and 'full'. - """ - if "versioneer" in sys.modules: - # see the discussion in cmdclass.py:get_cmdclass() - del sys.modules["versioneer"] - - root = get_root() - cfg = get_config_from_root(root) - - assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" - handlers = HANDLERS.get(cfg.VCS) - assert handlers, "unrecognized VCS '%s'" % cfg.VCS - verbose = verbose or cfg.verbose - assert cfg.versionfile_source is not None, \ - "please set versioneer.versionfile_source" - assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" - - versionfile_abs = os.path.join(root, cfg.versionfile_source) - - # extract version from first of: _version.py, VCS command (e.g. 'git - # describe'), parentdir. This is meant to work for developers using a - # source checkout, for users of a tarball created by 'setup.py sdist', - # and for users of a tarball/zipball created by 'git archive' or github's - # download-from-tag feature or the equivalent in other VCSes. 
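    # Worked example (assumed values, consistent with the docstring above):
    # with tag_prefix "" and style "pep440", a development checkout where
    # 'git describe' prints "0.11-2-g1076c97-dirty" produces pieces including
    #   {"closest-tag": "0.11", "distance": 2,
    #    "short": "1076c97", "dirty": True, ...}
    # and render(pieces, "pep440") yields "0.11+2.g1076c97.dirty".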
- - get_keywords_f = handlers.get("get_keywords") - from_keywords_f = handlers.get("keywords") - if get_keywords_f and from_keywords_f: - try: - keywords = get_keywords_f(versionfile_abs) - ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) - if verbose: - print("got version from expanded keyword %s" % ver) - return ver - except NotThisMethod: - pass - - try: - ver = versions_from_file(versionfile_abs) - if verbose: - print("got version from file %s %s" % (versionfile_abs, ver)) - return ver - except NotThisMethod: - pass - - from_vcs_f = handlers.get("pieces_from_vcs") - if from_vcs_f: - try: - pieces = from_vcs_f(cfg.tag_prefix, root, verbose) - ver = render(pieces, cfg.style) - if verbose: - print("got version from VCS %s" % ver) - return ver - except NotThisMethod: - pass - - try: - if cfg.parentdir_prefix: - ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) - if verbose: - print("got version from parentdir %s" % ver) - return ver - except NotThisMethod: - pass - - if verbose: - print("unable to compute version") - - return {"version": "0+unknown", "full-revisionid": None, - "dirty": None, "error": "unable to compute version", - "date": None} - - -def get_version(): - """Get the short version string for this project.""" - return get_versions()["version"] - - -def get_cmdclass(cmdclass=None): - """Get the custom setuptools/distutils subclasses used by Versioneer. - - If the package uses a different cmdclass (e.g. one from numpy), it - should be provided as an argument. - """ - if "versioneer" in sys.modules: - del sys.modules["versioneer"] - # this fixes the "python setup.py develop" case (also 'install' and - # 'easy_install .'), in which subdependencies of the main project are - # built (using setup.py bdist_egg) in the same python process. Assume - # a main project A and a dependency B, which use different versions - # of Versioneer. A's setup.py imports A's Versioneer, leaving it in - # sys.modules by the time B's setup.py is executed, causing B to run - # with the wrong versioneer. Setuptools wraps the sub-dep builds in a - # sandbox that restores sys.modules to its pre-build state, so the - # parent is protected against the child's "import versioneer". By - # removing ourselves from sys.modules here, before the child build - # happens, we protect the child from the parent's versioneer too. - # Also see https://github.com/python-versioneer/python-versioneer/issues/52 - - cmds = {} if cmdclass is None else cmdclass.copy() - - # we add "version" to both distutils and setuptools - from distutils.core import Command - - class cmd_version(Command): - description = "report generated version string" - user_options = [] - boolean_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - vers = get_versions(verbose=True) - print("Version: %s" % vers["version"]) - print(" full-revisionid: %s" % vers.get("full-revisionid")) - print(" dirty: %s" % vers.get("dirty")) - print(" date: %s" % vers.get("date")) - if vers["error"]: - print(" error: %s" % vers["error"]) - cmds["version"] = cmd_version - - # we override "build_py" in both distutils and setuptools - # - # most invocation pathways end up running build_py: - # distutils/build -> build_py - # distutils/install -> distutils/build ->.. - # setuptools/bdist_wheel -> distutils/install ->.. - # setuptools/bdist_egg -> distutils/install_lib -> build_py - # setuptools/install -> bdist_egg ->.. - # setuptools/develop -> ?
- # pip install: - # copies source tree to a tempdir before running egg_info/etc - # if .git isn't copied too, 'git describe' will fail - # then does setup.py bdist_wheel, or sometimes setup.py install - # setup.py egg_info -> ? - - # we override different "build_py" commands for both environments - if 'build_py' in cmds: - _build_py = cmds['build_py'] - elif "setuptools" in sys.modules: - from setuptools.command.build_py import build_py as _build_py - else: - from distutils.command.build_py import build_py as _build_py - - class cmd_build_py(_build_py): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_py.run(self) - # now locate _version.py in the new build/ directory and replace - # it with an updated value - if cfg.versionfile_build: - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_py"] = cmd_build_py - - if 'build_ext' in cmds: - _build_ext = cmds['build_ext'] - elif "setuptools" in sys.modules: - from setuptools.command.build_ext import build_ext as _build_ext - else: - from distutils.command.build_ext import build_ext as _build_ext - - class cmd_build_ext(_build_ext): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - _build_ext.run(self) - if self.inplace: - # build_ext --inplace will only build extensions in - # build/lib<..> dir with no _version.py to write to. - # As in place builds will already have a _version.py - # in the module dir, we do not need to write one. - return - # now locate _version.py in the new build/ directory and replace - # it with an updated value - target_versionfile = os.path.join(self.build_lib, - cfg.versionfile_build) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - cmds["build_ext"] = cmd_build_ext - - if "cx_Freeze" in sys.modules: # cx_freeze enabled? - from cx_Freeze.dist import build_exe as _build_exe - # nczeczulin reports that py2exe won't like the pep440-style string - # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. - # setup(console=[{ - # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION - # "product_version": versioneer.get_version(), - # ... - - class cmd_build_exe(_build_exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _build_exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["build_exe"] = cmd_build_exe - del cmds["build_py"] - - if 'py2exe' in sys.modules: # py2exe enabled? 
- from py2exe.distutils_buildexe import py2exe as _py2exe - - class cmd_py2exe(_py2exe): - def run(self): - root = get_root() - cfg = get_config_from_root(root) - versions = get_versions() - target_versionfile = cfg.versionfile_source - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, versions) - - _py2exe.run(self) - os.unlink(target_versionfile) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % - {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - cmds["py2exe"] = cmd_py2exe - - # we override different "sdist" commands for both environments - if 'sdist' in cmds: - _sdist = cmds['sdist'] - elif "setuptools" in sys.modules: - from setuptools.command.sdist import sdist as _sdist - else: - from distutils.command.sdist import sdist as _sdist - - class cmd_sdist(_sdist): - def run(self): - versions = get_versions() - # pylint:disable=attribute-defined-outside-init # noqa - self._versioneer_generated_versions = versions - # unless we update this, the command will keep using the old - # version - self.distribution.metadata.version = versions["version"] - return _sdist.run(self) - - def make_release_tree(self, base_dir, files): - root = get_root() - cfg = get_config_from_root(root) - _sdist.make_release_tree(self, base_dir, files) - # now locate _version.py in the new base_dir directory - # (remembering that it may be a hardlink) and replace it with an - # updated value - target_versionfile = os.path.join(base_dir, cfg.versionfile_source) - print("UPDATING %s" % target_versionfile) - write_to_version_file(target_versionfile, - self._versioneer_generated_versions) - cmds["sdist"] = cmd_sdist - - return cmds - - -CONFIG_ERROR = """ -setup.cfg is missing the necessary Versioneer configuration. You need -a section like: - - [versioneer] - VCS = git - style = pep440 - versionfile_source = src/myproject/_version.py - versionfile_build = myproject/_version.py - tag_prefix = - parentdir_prefix = myproject- - -You will also need to edit your setup.py to use the results: - - import versioneer - setup(version=versioneer.get_version(), - cmdclass=versioneer.get_cmdclass(), ...) - -Please read the docstring in ./versioneer.py for configuration instructions, -edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. -""" - -SAMPLE_CONFIG = """ -# See the docstring in versioneer.py for instructions. Note that you must -# re-run 'versioneer.py setup' after changing this section, and commit the -# resulting files. - -[versioneer] -#VCS = git -#style = pep440 -#versionfile_source = -#versionfile_build = -#tag_prefix = -#parentdir_prefix = - -""" - -OLD_SNIPPET = """ -from ._version import get_versions -__version__ = get_versions()['version'] -del get_versions -""" - -INIT_PY_SNIPPET = """ -from . 
import {0} -__version__ = {0}.get_versions()['version'] -""" - - -def do_setup(): - """Do main VCS-independent setup function for installing Versioneer.""" - root = get_root() - try: - cfg = get_config_from_root(root) - except (EnvironmentError, configparser.NoSectionError, - configparser.NoOptionError) as e: - if isinstance(e, (EnvironmentError, configparser.NoSectionError)): - print("Adding sample versioneer config to setup.cfg", - file=sys.stderr) - with open(os.path.join(root, "setup.cfg"), "a") as f: - f.write(SAMPLE_CONFIG) - print(CONFIG_ERROR, file=sys.stderr) - return 1 - - print(" creating %s" % cfg.versionfile_source) - with open(cfg.versionfile_source, "w") as f: - LONG = LONG_VERSION_PY[cfg.VCS] - f.write(LONG % {"DOLLAR": "$", - "STYLE": cfg.style, - "TAG_PREFIX": cfg.tag_prefix, - "PARENTDIR_PREFIX": cfg.parentdir_prefix, - "VERSIONFILE_SOURCE": cfg.versionfile_source, - }) - - ipy = os.path.join(os.path.dirname(cfg.versionfile_source), - "__init__.py") - if os.path.exists(ipy): - try: - with open(ipy, "r") as f: - old = f.read() - except EnvironmentError: - old = "" - module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] - snippet = INIT_PY_SNIPPET.format(module) - if OLD_SNIPPET in old: - print(" replacing boilerplate in %s" % ipy) - with open(ipy, "w") as f: - f.write(old.replace(OLD_SNIPPET, snippet)) - elif snippet not in old: - print(" appending to %s" % ipy) - with open(ipy, "a") as f: - f.write(snippet) - else: - print(" %s unmodified" % ipy) - else: - print(" %s doesn't exist, ok" % ipy) - ipy = None - - # Make sure both the top-level "versioneer.py" and versionfile_source - # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so - # they'll be copied into source distributions. Pip won't be able to - # install the package without this. - manifest_in = os.path.join(root, "MANIFEST.in") - simple_includes = set() - try: - with open(manifest_in, "r") as f: - for line in f: - if line.startswith("include "): - for include in line.split()[1:]: - simple_includes.add(include) - except EnvironmentError: - pass - # That doesn't cover everything MANIFEST.in can do - # (http://docs.python.org/2/distutils/sourcedist.html#commands), so - # it might give some false negatives. Appending redundant 'include' - # lines is safe, though. - if "versioneer.py" not in simple_includes: - print(" appending 'versioneer.py' to MANIFEST.in") - with open(manifest_in, "a") as f: - f.write("include versioneer.py\n") - else: - print(" 'versioneer.py' already in MANIFEST.in") - if cfg.versionfile_source not in simple_includes: - print(" appending versionfile_source ('%s') to MANIFEST.in" % - cfg.versionfile_source) - with open(manifest_in, "a") as f: - f.write("include %s\n" % cfg.versionfile_source) - else: - print(" versionfile_source already in MANIFEST.in") - - # Make VCS-specific changes. For git, this means creating/changing - # .gitattributes to mark _version.py for export-subst keyword - # substitution. 
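    # For example (assumed versionfile_source value): with
    #   versionfile_source = src/myproject/_version.py
    # do_vcs_install() ensures .gitattributes contains the line
    #   src/myproject/_version.py export-subst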
- do_vcs_install(manifest_in, cfg.versionfile_source, ipy) - return 0 - - -def scan_setup_py(): - """Validate the contents of setup.py against Versioneer's expectations.""" - found = set() - setters = False - errors = 0 - with open("setup.py", "r") as f: - for line in f.readlines(): - if "import versioneer" in line: - found.add("import") - if "versioneer.get_cmdclass()" in line: - found.add("cmdclass") - if "versioneer.get_version()" in line: - found.add("get_version") - if "versioneer.VCS" in line: - setters = True - if "versioneer.versionfile_source" in line: - setters = True - if len(found) != 3: - print("") - print("Your setup.py appears to be missing some important items") - print("(but I might be wrong). Please make sure it has something") - print("roughly like the following:") - print("") - print(" import versioneer") - print(" setup( version=versioneer.get_version(),") - print(" cmdclass=versioneer.get_cmdclass(), ...)") - print("") - errors += 1 - if setters: - print("You should remove lines like 'versioneer.VCS = ' and") - print("'versioneer.versionfile_source = ' . This configuration") - print("now lives in setup.cfg, and should be removed from setup.py") - print("") - errors += 1 - return errors - - -if __name__ == "__main__": - cmd = sys.argv[1] - if cmd == "setup": - errors = do_setup() - errors += scan_setup_py() - if errors: - sys.exit(1)