From b9ccbf279f98567187e0e9b92edc7229b75682e4 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Fri, 23 Sep 2022 13:04:19 +0200 Subject: [PATCH 01/30] added jupyterlab dependency (for easier testing) --- .gitignore | 172 +++++++++++++-- .pre-commit-config.yaml | 2 +- CODE_OF_CONDUCT.md | 4 +- README.md | 7 +- poetry.lock | 453 ++++++++++++++++++++++++++++++---------- pyproject.toml | 1 + 6 files changed, 512 insertions(+), 127 deletions(-) diff --git a/.gitignore b/.gitignore index 233bef3b..a4d8c752 100644 --- a/.gitignore +++ b/.gitignore @@ -1,6 +1,19 @@ +# Created by https://www.toptal.com/developers/gitignore/api/osx,python,pycharm,windows,visualstudio,visualstudiocode,jupyternotebooks +# Edit at https://www.toptal.com/developers/gitignore?templates=osx,python,pycharm,windows,visualstudio,visualstudiocode,jupyternotebooks -# Created by https://www.gitignore.io/api/osx,python,pycharm,windows,visualstudio,visualstudiocode -# Edit at https://www.gitignore.io/?templates=osx,python,pycharm,windows,visualstudio,visualstudiocode +### JupyterNotebooks ### +# gitignore template for Jupyter Notebooks +# website: http://jupyter.org/ + +.ipynb_checkpoints +*/.ipynb_checkpoints/* + +# IPython +profile_default/ +ipython_config.py + +# Remove previous ipynb_checkpoints +# git rm -r .ipynb_checkpoints/ ### OSX ### # General @@ -31,7 +44,7 @@ Temporary Items .apdisk ### PyCharm ### -# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm +# Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 # User-specific stuff @@ -41,6 +54,9 @@ Temporary Items .idea/**/dictionaries .idea/**/shelf +# AWS User-specific +.idea/**/aws.xml + # Generated files .idea/**/contentModel.xml @@ -61,6 +77,9 @@ Temporary Items # When using Gradle or Maven with auto-import, you should exclude module files, # since they will be recreated, and may cause churn. Uncomment if using # auto-import. 
+# .idea/artifacts +# .idea/compiler.xml +# .idea/jarRepositories.xml # .idea/modules.xml # .idea/*.iml # .idea/modules @@ -88,6 +107,9 @@ atlassian-ide-plugin.xml # Cursive Clojure plugin .idea/replstate.xml +# SonarLint plugin +.idea/sonarlint/ + # Crashlytics plugin (for Android Studio and IntelliJ) com_crashlytics_export_strings.xml crashlytics.properties @@ -109,15 +131,31 @@ fabric.properties # *.ipr # Sonarlint plugin +# https://plugins.jetbrains.com/plugin/7973-sonarlint .idea/**/sonarlint/ # SonarQube Plugin +# https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin .idea/**/sonarIssues.xml # Markdown Navigator plugin +# https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced .idea/**/markdown-navigator.xml +.idea/**/markdown-navigator-enh.xml .idea/**/markdown-navigator/ +# Cache file creation bug +# See https://youtrack.jetbrains.com/issue/JBR-2257 +.idea/$CACHE_FILE$ + +# CodeStream plugin +# https://plugins.jetbrains.com/plugin/12206-codestream +.idea/codestream.xml + +# Azure Toolkit for IntelliJ plugin +# https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij +.idea/**/azureSettings.xml + ### Python ### # Byte-compiled / optimized / DLL files __pycache__/ @@ -168,13 +206,25 @@ htmlcov/ nosetests.xml coverage.xml *.cover +*.py,cover .hypothesis/ .pytest_cache/ +cover/ # Translations *.mo *.pot +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + # Scrapy stuff: .scrapy @@ -182,14 +232,18 @@ coverage.xml docs/_build/ # PyBuilder +.pybuilder/ target/ +# Jupyter Notebook + +# IPython + # pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: .python-version -# poetry -.venv - # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies @@ -197,12 +251,40 @@ target/ # install all needed dependencies. #Pipfile.lock -# celery beat schedule file +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff celerybeat-schedule +celerybeat.pid # SageMath parsed files *.sage.py +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + # Spyder project settings .spyderproject .spyproject @@ -226,18 +308,42 @@ dmypy.json # Pyre type checker .pyre/ -# Plugins -.secrets.baseline +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. 
For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ ### VisualStudioCode ### .vscode/* !.vscode/tasks.json !.vscode/launch.json !.vscode/extensions.json +!.vscode/*.code-snippets + +# Local History for Visual Studio Code +.history/ + +# Built Visual Studio Code Extensions +*.vsix ### VisualStudioCode Patch ### # Ignore all local history of files .history +.ionide + +# Support for Project snippet scope +.vscode/*.code-snippets + +# Ignore code-workspaces +*.code-workspace ### Windows ### # Windows thumbnail cache files @@ -269,7 +375,7 @@ $RECYCLE.BIN/ ## Ignore Visual Studio temporary files, build results, and ## files generated by popular Visual Studio add-ons. ## -## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore +## Get latest from https://github.com/github/gitignore/blob/main/VisualStudio.gitignore # User-specific files *.rsuser @@ -291,12 +397,14 @@ mono_crash.* [Rr]eleases/ x64/ x86/ +[Ww][Ii][Nn]32/ [Aa][Rr][Mm]/ [Aa][Rr][Mm]64/ bld/ [Bb]in/ [Oo]bj/ [Ll]og/ +[Ll]ogs/ # Visual Studio 2015/2017 cache/options directory .vs/ @@ -328,6 +436,9 @@ project.lock.json project.fragment.lock.json artifacts/ +# ASP.NET Scaffolding +ScaffoldingReadMe.txt + # StyleCop StyleCopReport.xml @@ -336,6 +447,7 @@ StyleCopReport.xml *_p.c *_h.h *.ilk +*.meta *.obj *.iobj *.pch @@ -351,7 +463,7 @@ StyleCopReport.xml *.tmp *.tmp_proj *_wpftmp.csproj -*.log +*.tlog *.vspscc *.vssscc .builds @@ -406,6 +518,11 @@ _TeamCity* .axoCover/* !.axoCover/settings.json +# Coverlet is a free, cross platform Code Coverage Tool +coverage*.json +coverage*.xml +coverage*.info + # Visual Studio code coverage results *.coverage *.coveragexml @@ -553,6 +670,15 @@ node_modules/ # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) *.vbw +# Visual Studio 6 auto-generated project file (contains which files were open etc.) 
+*.vbp
+
+# Visual Studio 6 workspace and project file (working project files containing files to include in project)
+*.dsw
+*.dsp
+
+# Visual Studio 6 technical files
+
 # Visual Studio LightSwitch build output
 **/*.HTMLClient/GeneratedArtifacts
 **/*.DesktopClient/GeneratedArtifacts
@@ -608,12 +734,34 @@ ASALocalRun/
 # Local History for Visual Studio
 .localhistory/
 
+# Visual Studio History (VSHistory) files
+.vshistory/
+
 # BeatPulse healthcheck temp database
 healthchecksdb
 
 # Backup folder for Package Reference Convert tool in Visual Studio 2017
 MigrationBackup/
 
-# End of https://www.gitignore.io/api/osx,python,pycharm,windows,visualstudio,visualstudiocode
+# Ionide (cross platform F# VS Code tools) working folder
+.ionide/
+
+# Fody - auto-generated XML schema
+FodyWeavers.xsd
+
+# VS Code files for those working on multiple tools
+
+# Local History for Visual Studio Code
+
+# Windows Installer files from build outputs
+
+# JetBrains Rider
+*.sln.iml
+
+### VisualStudio Patch ###
+# Additional files built by Visual Studio
+
+# End of https://www.toptal.com/developers/gitignore/api/osx,python,pycharm,windows,visualstudio,visualstudiocode,jupyternotebooks
+
 /examples
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c881fdcc..d1717e4f 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,5 +1,5 @@
 default_language_version:
-  python: python3.8
+  python: python3.9
 
 default_stages: [commit, push]
 
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 2fb12fc9..6fd1310f 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -68,9 +68,9 @@ members of the project's leadership.
 ## Attribution
 
 This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
-available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
+available at <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>
 
 [homepage]: https://www.contributor-covenant.org
 
 For answers to common questions about this code of conduct, see
-https://www.contributor-covenant.org/faq
+<https://www.contributor-covenant.org/faq>
diff --git a/README.md b/README.md
index 58fab699..0a3cede7 100644
--- a/README.md
+++ b/README.md
@@ -31,9 +31,10 @@ After installation, you should be able to run `make fast-test` and `make lint` w
 FAQ Installation
 
-  - Installing the `tokenizers` package requires a Rust compiler installation. You can install Rust from [https://rustup.rs](https://rustup.rs) and add `$HOME/.cargo/env` to your PATH.
+- Installing the `tokenizers` package requires a Rust compiler. You can install Rust from [https://rustup.rs](https://rustup.rs) and add `$HOME/.cargo/env` to your PATH.
+
+- Installing `sentencepiece` requires several build tools; install them with `sudo apt-get install cmake build-essential pkg-config` (via APT) or `brew install cmake gperftools pkg-config` (via Homebrew).
 
-  - Installing `sentencepiece` requires various packages, install with `sudo apt-get install cmake build-essential pkg-config` or `brew install cmake gperftools pkg-config`.
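Both FAQ items above concern natively compiled dependencies. As a quick post-install sanity check (a minimal editorial sketch, not part of this patch), a clean import of the two packages confirms that the Rust and CMake toolchains produced working extensions:

```python
# Sanity check for the compiled dependencies mentioned in the FAQ above.
# If the native build failed, these imports raise ImportError at load time.
import sentencepiece
import tokenizers

print("tokenizers:", tokenizers.__version__)
print("sentencepiece:", sentencepiece.__version__)
```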
## Example usage in Python @@ -54,7 +55,6 @@ out.show() ![WinoMT Attribution Map](docs/source/images/heatmap_winomt.png) - ## Using the Inseq client See the available options by typing `inseq -h` in the terminal after installing the package. @@ -85,7 +85,6 @@ inseq attribute-dataset \ Inseq support minimal pair analysis via the `PairAggregator` component. Here is an example of using `PairAggregator` to produce a heatmap containing the score difference between two `FeatureAttributionSequenceOutput` objects: - ```python import inseq from inseq.data.aggregator import AggregatorPipeline, ContiguousSpanAggregator, SequenceAttributionAggregator, PairAggregator diff --git a/poetry.lock b/poetry.lock index 8994dbb1..aac6f9ae 100644 --- a/poetry.lock +++ b/poetry.lock @@ -16,7 +16,7 @@ multidict = ">=4.5,<7.0" yarl = ">=1.0,<2.0" [package.extras] -speedups = ["aiodns", "brotli", "cchardet"] +speedups = ["Brotli", "aiodns", "cchardet"] [[package]] name = "aiosignal" @@ -37,6 +37,23 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "anyio" +version = "3.6.1" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16)"] + [[package]] name = "appnope" version = "0.1.3" @@ -57,8 +74,8 @@ python-versions = ">=3.6" argon2-cffi-bindings = "*" [package.extras] -dev = ["pre-commit", "cogapp", "tomli", "coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "sphinx-notfound-page", "furo"] -docs = ["sphinx", "sphinx-notfound-page", "furo"] +dev = ["cogapp", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "pre-commit", "pytest", "sphinx", "sphinx-notfound-page", "tomli"] +docs = ["furo", "sphinx", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] @@ -73,7 +90,7 @@ python-versions = ">=3.6" cffi = ">=1.0.1" [package.extras] -dev = ["pytest", "cogapp", "pre-commit", "wheel"] +dev = ["cogapp", "pre-commit", "pytest", "wheel"] tests = ["pytest"] [[package]] @@ -86,6 +103,7 @@ python-versions = ">=3.6.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" +setuptools = ">=20.0" typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} wrapt = ">=1.11,<2" @@ -128,10 +146,10 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] +dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "sphinx", "sphinx-notfound-page", "zope.interface"] +docs 
= ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] +tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six", "zope.interface"] +tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "six"] [[package]] name = "babel" @@ -167,9 +185,9 @@ PyYAML = ">=5.3.1" stevedore = ">=1.20.0" [package.extras] -test = ["coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml", "beautifulsoup4 (>=4.8.0)", "pylint (==1.9.4)"] +test = ["beautifulsoup4 (>=4.8.0)", "coverage (>=4.5.4)", "fixtures (>=3.0.0)", "flake8 (>=4.0.0)", "pylint (==1.9.4)", "stestr (>=2.5.0)", "testscenarios (>=0.5.0)", "testtools (>=2.3.0)", "toml"] toml = ["toml"] -yaml = ["pyyaml"] +yaml = ["PyYAML"] [[package]] name = "beautifulsoup4" @@ -222,7 +240,7 @@ webencodings = "*" [package.extras] css = ["tinycss2 (>=1.1.0,<1.2)"] -dev = ["build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "Sphinx (==4.3.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)", "black (==22.3.0)", "mypy (==0.961)"] +dev = ["Sphinx (==4.3.2)", "black (==22.3.0)", "build (==0.8.0)", "flake8 (==4.0.1)", "hashin (==0.17.0)", "mypy (==0.961)", "pip-tools (==6.6.2)", "pytest (==7.1.2)", "tox (==3.25.0)", "twine (==4.0.1)", "wheel (==0.37.1)"] [[package]] name = "captum" @@ -238,10 +256,10 @@ numpy = "*" torch = ">=1.6" [package.extras] -dev = ["flask", "ipython", "ipywidgets", "jupyter", "flask-compress", "pytest", "pytest-cov", "black (==21.4b2)", "flake8", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-katex", "mypy (>=0.760)", "usort (==0.6.4)", "ufmt", "scikit-learn"] -insights = ["flask", "ipython", "ipywidgets", "jupyter", "flask-compress"] +dev = ["black (==21.4b2)", "flake8", "flask", "flask-compress", "ipython", "ipywidgets", "jupyter", "mypy (>=0.760)", "pytest", "pytest-cov", "scikit-learn", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-katex", "ufmt", "usort (==0.6.4)"] +insights = ["flask", "flask-compress", "ipython", "ipywidgets", "jupyter"] test = ["pytest", "pytest-cov"] -tutorials = ["flask", "ipython", "ipywidgets", "jupyter", "flask-compress", "torchtext", "torchvision"] +tutorials = ["flask", "flask-compress", "ipython", "ipywidgets", "jupyter", "torchtext", "torchvision"] [[package]] name = "certifi" @@ -355,13 +373,13 @@ xxhash = "*" apache-beam = ["apache-beam (>=2.26.0)"] audio = ["librosa"] benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.6.0)", "transformers (==3.0.2)"] -dev = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (==1.4.2)", "boto3 (==1.17.106)", "botocore (==1.20.106)", "faiss-cpu (>=1.6.4)", "fsspec", "moto[s3,server] (==2.0.4)", "rarfile (>=4.0)", "s3fs (==2021.08.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio", "soundfile", "transformers", "bs4", "conllu", "h5py", "langdetect", "lxml", "lz4", "mwparserfromhell", "nltk", "openpyxl", "py7zr", "tldextract", "zstandard", "sentencepiece", "sacremoses", "bert-score (>=0.3.6)", "jiwer", "mauve-text", "rouge-score", "sacrebleu", "scikit-learn", "scipy", "seqeval", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "black 
(>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)", "importlib-resources"] +dev = ["Pillow (>=6.2.1)", "Werkzeug (>=1.0.1)", "absl-py", "aiobotocore (==1.4.2)", "apache-beam (>=2.26.0)", "bert-score (>=0.3.6)", "black (>=22.0,<23.0)", "boto3 (==1.17.106)", "botocore (==1.20.106)", "bs4", "conllu", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "flake8 (>=3.8.3)", "fsspec[s3]", "h5py", "importlib-resources", "isort (>=5.0.0)", "jiwer", "langdetect", "librosa", "lxml", "lz4", "mauve-text", "moto[s3,server] (==2.0.4)", "mwparserfromhell", "nltk", "openpyxl", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "requests-file (>=1.5.1)", "rouge-score", "s3fs (==2021.08.1)", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "torchaudio", "transformers", "zstandard"] docs = ["s3fs"] quality = ["black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] -s3 = ["fsspec", "boto3", "botocore", "s3fs"] +s3 = ["boto3", "botocore", "fsspec", "s3fs"] tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)"] tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["absl-py", "pytest", "pytest-datadir", "pytest-xdist", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "aiobotocore (==1.4.2)", "boto3 (==1.17.106)", "botocore (==1.20.106)", "faiss-cpu (>=1.6.4)", "fsspec", "moto[s3,server] (==2.0.4)", "rarfile (>=4.0)", "s3fs (==2021.08.1)", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "torch", "torchaudio", "soundfile", "transformers", "bs4", "conllu", "h5py", "langdetect", "lxml", "lz4", "mwparserfromhell", "nltk", "openpyxl", "py7zr", "tldextract", "zstandard", "sentencepiece", "sacremoses", "bert-score (>=0.3.6)", "jiwer", "mauve-text", "rouge-score", "sacrebleu", "scikit-learn", "scipy", "seqeval", "toml (>=0.10.1)", "requests-file (>=1.5.1)", "tldextract (>=3.1.0)", "texttable (>=1.6.3)", "Werkzeug (>=1.0.1)", "six (>=1.15.0,<1.16.0)", "Pillow (>=6.2.1)", "librosa", "importlib-resources"] +tests = ["Pillow (>=6.2.1)", "Werkzeug (>=1.0.1)", "absl-py", "aiobotocore (==1.4.2)", "apache-beam (>=2.26.0)", "bert-score (>=0.3.6)", "boto3 (==1.17.106)", "botocore (==1.20.106)", "bs4", "conllu", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "fsspec[s3]", "h5py", "importlib-resources", "jiwer", "langdetect", "librosa", "lxml", "lz4", "mauve-text", "moto[s3,server] (==2.0.4)", "mwparserfromhell", "nltk", "openpyxl", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "requests-file (>=1.5.1)", "rouge-score", "s3fs (==2021.08.1)", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "torchaudio", "transformers", "zstandard"] torch = ["torch"] vision = ["Pillow (>=6.2.1)"] @@ -457,7 +475,7 @@ optional = false python-versions = "*" [package.extras] -devel = ["colorama", "jsonschema", "json-spec", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] +devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] [[package]] name = "filelock" @@ -493,9 +511,9 @@ optional = false python-versions = ">=3.7" [package.extras] -all = ["fs (>=2.2.0,<3)", 
"lxml (>=4.0,<5)", "zopfli (>=0.1.4)", "lz4 (>=1.7.4.2)", "matplotlib", "sympy", "skia-pathops (>=0.5.0)", "uharfbuzz (>=0.23.0)", "brotlicffi (>=0.8.0)", "scipy", "brotli (>=1.0.1)", "munkres", "unicodedata2 (>=14.0.0)", "xattr"] +all = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "lz4 (>=1.7.4.2)", "matplotlib", "munkres", "scipy", "skia-pathops (>=0.5.0)", "sympy", "uharfbuzz (>=0.23.0)", "unicodedata2 (>=14.0.0)", "xattr", "zopfli (>=0.1.4)"] graphite = ["lz4 (>=1.7.4.2)"] -interpolatable = ["scipy", "munkres"] +interpolatable = ["munkres", "scipy"] lxml = ["lxml (>=4.0,<5)"] pathops = ["skia-pathops (>=0.5.0)"] plot = ["matplotlib"] @@ -504,7 +522,7 @@ symfont = ["sympy"] type1 = ["xattr"] ufo = ["fs (>=2.2.0,<3)"] unicode = ["unicodedata2 (>=14.0.0)"] -woff = ["zopfli (>=0.1.4)", "brotlicffi (>=0.8.0)", "brotli (>=1.0.1)"] +woff = ["brotli (>=1.0.1)", "brotlicffi (>=0.8.0)", "zopfli (>=0.1.4)"] [[package]] name = "frozenlist" @@ -531,7 +549,7 @@ abfs = ["adlfs"] adl = ["adlfs"] arrow = ["pyarrow (>=1)"] dask = ["dask", "distributed"] -dropbox = ["dropboxdrivefs", "requests", "dropbox"] +dropbox = ["dropbox", "dropboxdrivefs", "requests"] entrypoints = ["importlib-metadata"] fuse = ["fusepy"] gcs = ["gcsfs"] @@ -540,7 +558,7 @@ github = ["requests"] gs = ["gcsfs"] gui = ["panel"] hdfs = ["pyarrow (>=1)"] -http = ["requests", "aiohttp"] +http = ["aiohttp", "requests"] libarchive = ["libarchive-c"] oci = ["ocifs"] s3 = ["s3fs"] @@ -588,12 +606,12 @@ tqdm = "*" typing-extensions = ">=3.7.4.3" [package.extras] -all = ["pytest", "pytest-cov", "datasets", "soundfile", "black (>=22.0,<23.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)"] -dev = ["pytest", "pytest-cov", "datasets", "soundfile", "black (>=22.0,<23.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)"] -fastai = ["toml", "fastai (>=2.4)", "fastcore (>=1.3.27)"] -quality = ["black (>=22.0,<23.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)"] -tensorflow = ["tensorflow", "pydot", "graphviz"] -testing = ["pytest", "pytest-cov", "datasets", "soundfile"] +all = ["black (>=22.0,<23.0)", "datasets", "flake8 (>=3.8.3)", "isort (>=5.5.4)", "pytest", "pytest-cov", "soundfile"] +dev = ["black (>=22.0,<23.0)", "datasets", "flake8 (>=3.8.3)", "isort (>=5.5.4)", "pytest", "pytest-cov", "soundfile"] +fastai = ["fastai (>=2.4)", "fastcore (>=1.3.27)", "toml"] +quality = ["black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.5.4)"] +tensorflow = ["graphviz", "pydot", "tensorflow"] +testing = ["datasets", "pytest", "pytest-cov", "soundfile"] torch = ["torch"] [[package]] @@ -635,9 +653,9 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] perf = ["ipython"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.3)", "packaging", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)", "importlib-resources (>=1.3)"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] [[package]] name = "importlib-resources" @@ -651,8 +669,8 @@ python-versions = ">=3.7" zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", 
"rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] +testing = ["pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [[package]] name = "iniconfig" @@ -684,7 +702,7 @@ tornado = ">=6.1" traitlets = ">=5.1.0" [package.extras] -test = ["flaky", "ipyparallel", "pre-commit", "pytest-cov", "pytest-timeout", "pytest (>=6.0)"] +test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=6.0)", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" @@ -705,11 +723,12 @@ pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" pygments = ">=2.4.0" +setuptools = ">=18.5" stack-data = "*" traitlets = ">=5" [package.extras] -all = ["black", "Sphinx (>=1.3)", "ipykernel", "nbconvert", "nbformat", "ipywidgets", "notebook", "ipyparallel", "qtconsole", "pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "numpy (>=1.19)", "pandas", "trio"] +all = ["Sphinx (>=1.3)", "black", "curio", "ipykernel", "ipyparallel", "ipywidgets", "matplotlib (!=3.2.0)", "nbconvert", "nbformat", "notebook", "numpy (>=1.19)", "pandas", "pytest (<7.1)", "pytest-asyncio", "qtconsole", "testpath", "trio"] black = ["black"] doc = ["Sphinx (>=1.3)"] kernel = ["ipykernel"] @@ -719,7 +738,7 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test_extra = ["pytest (<7.1)", "pytest-asyncio", "testpath", "curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.19)", "pandas", "trio"] +test_extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.19)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] [[package]] name = "ipython-genutils" @@ -746,7 +765,7 @@ traitlets = ">=4.3.1" widgetsnbextension = ">=3.6.0,<3.7.0" [package.extras] -test = ["pytest (>=3.6.0)", "pytest-cov", "mock"] +test = ["mock", "pytest (>=3.6.0)", "pytest-cov"] [[package]] name = "isort" @@ -760,10 +779,10 @@ python-versions = ">=3.6.1,<4.0" colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"colors\""} [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] colors = ["colorama (>=0.4.3,<0.5.0)"] +pipfile_deprecated_finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] +requirements_deprecated_finder = ["pip-api", "pipreqs"] [[package]] name = "jedi" @@ -810,6 +829,17 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "json5" +version = "0.9.10" +description = "A Python implementation of the JSON5 data format." 
+category = "dev" +optional = false +python-versions = "*" + +[package.extras] +dev = ["hypothesis"] + [[package]] name = "jsonschema" version = "4.6.1" @@ -845,7 +875,7 @@ tornado = ">=6.0" traitlets = "*" [package.extras] -doc = ["ipykernel", "myst-parser", "sphinx-rtd-theme", "sphinx (>=1.3.6)", "sphinxcontrib-github-alt"] +doc = ["ipykernel", "myst-parser", "sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] test = ["codecov", "coverage", "ipykernel (>=6.5)", "ipython", "mypy", "pre-commit", "pytest", "pytest-asyncio (>=0.18)", "pytest-cov", "pytest-timeout"] [[package]] @@ -863,6 +893,59 @@ traitlets = "*" [package.extras] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] +[[package]] +name = "jupyter-server" +version = "1.18.1" +description = "The backend—i.e. core services, APIs, and REST endpoints—to Jupyter web applications." +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +anyio = ">=3.1.0,<4" +argon2-cffi = "*" +jinja2 = "*" +jupyter-client = ">=6.1.12" +jupyter-core = ">=4.7.0" +nbconvert = ">=6.4.4" +nbformat = ">=5.2.0" +packaging = "*" +prometheus-client = "*" +pywinpty = {version = "*", markers = "os_name == \"nt\""} +pyzmq = ">=17" +Send2Trash = "*" +terminado = ">=0.8.3" +tornado = ">=6.1.0" +traitlets = ">=5.1" +websocket-client = "*" + +[package.extras] +test = ["coverage", "ipykernel", "pre-commit", "pytest (>=6.0)", "pytest-console-scripts", "pytest-cov", "pytest-mock", "pytest-timeout", "pytest-tornasync", "requests"] + +[[package]] +name = "jupyterlab" +version = "3.4.7" +description = "JupyterLab computational environment" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +ipython = "*" +jinja2 = ">=2.1" +jupyter-core = "*" +jupyter-server = ">=1.16,<2.0" +jupyterlab-server = ">=2.10,<3.0" +nbclassic = "*" +notebook = "<7" +packaging = "*" +tomli = "*" +tornado = ">=6.1.0" + +[package.extras] +test = ["check-manifest", "coverage", "jupyterlab-server[test]", "pre-commit", "pytest (>=6.0)", "pytest-check-links (>=0.5)", "pytest-console-scripts", "pytest-cov", "requests", "requests-cache", "virtualenv"] +ui-tests = ["build"] + [[package]] name = "jupyterlab-pygments" version = "0.2.2" @@ -871,6 +954,28 @@ category = "main" optional = false python-versions = ">=3.7" +[[package]] +name = "jupyterlab-server" +version = "2.15.1" +description = "A set of server components for JupyterLab and JupyterLab like applications." 
+category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +babel = "*" +importlib-metadata = {version = ">=3.6", markers = "python_version < \"3.10\""} +jinja2 = ">=3.0.3" +json5 = "*" +jsonschema = ">=3.0.1" +jupyter-server = ">=1.8,<2" +packaging = "*" +requests = "*" + +[package.extras] +openapi = ["openapi-core (>=0.14.2)", "ruamel-yaml"] +test = ["codecov", "ipykernel", "jupyter-server[test]", "openapi-core (>=0.14.2)", "openapi-spec-validator (<0.5)", "pytest (>=5.3.2)", "pytest-console-scripts", "pytest-cov", "ruamel-yaml", "strict-rfc3339"] + [[package]] name = "jupyterlab-widgets" version = "1.1.1" @@ -1007,6 +1112,38 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "nbclassic" +version = "0.4.3" +description = "A web-based notebook environment for interactive computing" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +argon2-cffi = "*" +ipykernel = "*" +ipython-genutils = "*" +jinja2 = "*" +jupyter-client = ">=6.1.1" +jupyter-core = ">=4.6.1" +jupyter-server = ">=1.8" +nbconvert = ">=5" +nbformat = "*" +nest-asyncio = ">=1.5" +notebook-shim = ">=0.1.0" +prometheus-client = "*" +pyzmq = ">=17" +Send2Trash = ">=1.8.0" +terminado = ">=0.8.3" +tornado = ">=6.1" +traitlets = ">=4.2.1" + +[package.extras] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +json-logging = ["json-logging"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "pytest-tornasync", "requests", "requests-unixsocket", "selenium (==4.1.5)", "testpath"] + [[package]] name = "nbclient" version = "0.6.6" @@ -1022,7 +1159,7 @@ nest-asyncio = "*" traitlets = ">=5.2.2" [package.extras] -sphinx = ["autodoc-traits", "mock", "moto", "myst-parser", "Sphinx (>=1.7)", "sphinx-book-theme"] +sphinx = ["Sphinx (>=1.7)", "autodoc-traits", "mock", "moto", "myst-parser", "sphinx-book-theme"] test = ["black", "check-manifest", "flake8", "ipykernel", "ipython (<8.0.0)", "ipywidgets (<8.0.0)", "mypy", "pip (>=18.1)", "pre-commit", "pytest (>=4.1)", "pytest-asyncio", "pytest-cov (>=2.6.1)", "setuptools (>=60.0)", "testpath", "twine (>=1.11.0)", "xmltodict"] [[package]] @@ -1052,10 +1189,10 @@ tinycss2 = "*" traitlets = ">=5.0" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "tornado (>=6.1)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] -docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["ipykernel", "ipython", "ipywidgets (>=7)", "nbsphinx (>=0.2.12)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-cov", "pytest-dependency", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "tornado (>=6.1)"] +docs = ["ipython", "nbsphinx (>=0.2.12)", "sphinx (>=1.5.1)", "sphinx-rtd-theme"] serve = ["tornado (>=6.1)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)"] +test = ["ipykernel", "ipywidgets (>=7)", "pre-commit", "pyppeteer (>=1,<1.1)", "pytest", "pytest-cov", "pytest-dependency"] webpdf = ["pyppeteer (>=1,<1.1)"] [[package]] @@ -1073,7 +1210,7 @@ jupyter-core = "*" traitlets = ">=5.1" [package.extras] -test = ["check-manifest", "testpath", "pytest", "pre-commit"] +test = ["check-manifest", "pre-commit", "pytest", "testpath"] [[package]] name = "nest-asyncio" @@ -1091,6 +1228,9 @@ category = "dev" optional = false python-versions = 
">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +[package.dependencies] +setuptools = "*" + [[package]] name = "notebook" version = "6.4.12" @@ -1117,9 +1257,23 @@ tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme", "myst-parser"] +docs = ["myst-parser", "nbsphinx", "sphinx", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] json-logging = ["json-logging"] -test = ["pytest", "coverage", "requests", "testpath", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] +test = ["coverage", "nbval", "pytest", "pytest-cov", "requests", "requests-unixsocket", "selenium", "testpath"] + +[[package]] +name = "notebook-shim" +version = "0.1.0" +description = "A shim layer for notebook traits and config" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +jupyter-server = ">=1.8,<2.0" + +[package.extras] +test = ["pytest", "pytest-console-scripts", "pytest-tornasync"] [[package]] name = "numpy" @@ -1245,8 +1399,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)", "sphinx (>=4)"] -test = ["appdirs (==1.4.4)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)", "pytest (>=6)"] +docs = ["furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx (>=4)", "sphinx-autodoc-typehints (>=1.12)"] +test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" @@ -1330,7 +1484,7 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -test = ["ipaddress", "mock", "enum34", "pywin32", "wmi"] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] [[package]] name = "ptyprocess" @@ -1448,7 +1602,7 @@ optional = false python-versions = ">=3.6.8" [package.extras] -diagrams = ["railroad-diagrams", "jinja2"] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pyrsistent" @@ -1597,7 +1751,7 @@ requests = ">=2.0,<3.0" urllib3 = ">=1.25.10" [package.extras] -tests = ["pytest (>=4.6)", "coverage (>=6.0.0)", "pytest-cov", "pytest-localserver", "flake8", "types-mock", "types-requests", "mypy"] +tests = ["coverage (>=6.0.0)", "flake8", "mypy", "pytest (>=4.6)", "pytest-cov", "pytest-localserver", "types-mock", "types-requests"] [[package]] name = "rich" @@ -1628,6 +1782,7 @@ Click = ">=6.0" dparse = ">=0.5.1" packaging = "*" requests = "*" +setuptools = "*" [[package]] name = "scikit-learn" @@ -1644,10 +1799,10 @@ scipy = ">=1.3.2" threadpoolctl = ">=2.0.0" [package.extras] -benchmark = ["matplotlib (>=3.1.2)", "pandas (>=1.0.5)", "memory-profiler (>=0.57.0)"] -docs = ["matplotlib (>=3.1.2)", "scikit-image (>=0.14.5)", "pandas (>=1.0.5)", "seaborn (>=0.9.0)", "memory-profiler (>=0.57.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "numpydoc (>=1.2.0)", "Pillow (>=7.1.2)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] -examples = ["matplotlib (>=3.1.2)", "scikit-image (>=0.14.5)", "pandas (>=1.0.5)", "seaborn (>=0.9.0)"] -tests = ["matplotlib (>=3.1.2)", "scikit-image (>=0.14.5)", "pandas (>=1.0.5)", "pytest (>=5.0.1)", "pytest-cov (>=2.9.0)", "flake8 (>=3.8.2)", "black (>=22.3.0)", "mypy (>=0.770)", "pyamg (>=4.0.0)", "numpydoc (>=1.2.0)"] +benchmark = ["matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "pandas (>=1.0.5)"] +docs = ["Pillow (>=7.1.2)", "matplotlib (>=3.1.2)", "memory-profiler (>=0.57.0)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "scikit-image 
(>=0.14.5)", "seaborn (>=0.9.0)", "sphinx (>=4.0.1)", "sphinx-gallery (>=0.7.0)", "sphinx-prompt (>=1.3.0)", "sphinxext-opengraph (>=0.4.2)"] +examples = ["matplotlib (>=3.1.2)", "pandas (>=1.0.5)", "scikit-image (>=0.14.5)", "seaborn (>=0.9.0)"] +tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.2)", "mypy (>=0.770)", "numpydoc (>=1.2.0)", "pandas (>=1.0.5)", "pyamg (>=4.0.0)", "pytest (>=5.0.1)", "pytest-cov (>=2.9.0)", "scikit-image (>=0.14.5)"] [[package]] name = "scipy" @@ -1669,8 +1824,8 @@ optional = false python-versions = "*" [package.extras] -nativelib = ["pyobjc-framework-cocoa", "pywin32"] -objc = ["pyobjc-framework-cocoa"] +nativelib = ["pyobjc-framework-Cocoa", "pywin32"] +objc = ["pyobjc-framework-Cocoa"] win32 = ["pywin32"] [[package]] @@ -1681,6 +1836,19 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "setuptools" +version = "65.3.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mock", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + [[package]] name = "setuptools-scm" version = "7.0.4" @@ -1691,6 +1859,7 @@ python-versions = ">=3.7" [package.dependencies] packaging = ">=20.0" +setuptools = "*" tomli = ">=1.0.0" typing-extensions = "*" @@ -1714,6 +1883,14 @@ category = "dev" optional = false python-versions = ">=3.6" +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "dev" +optional = false +python-versions = ">=3.7" + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -1759,8 +1936,8 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "docutils-stubs", "types-typed-ast", "types-requests"] -test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "isort", "mypy (>=0.931)", "types-requests", "types-typed-ast"] +test = ["cython", "html5lib", "pytest", "pytest-cov", "typed-ast"] [[package]] name = "sphinx-copybutton" @@ -1775,7 +1952,7 @@ sphinx = ">=1.8" [package.extras] code_style = ["pre-commit (==2.12.1)"] -rtd = ["sphinx", "ipython", "sphinx-book-theme"] +rtd = ["ipython", "sphinx", "sphinx-book-theme"] [[package]] name = "sphinx-gitstamp" @@ -1814,7 +1991,7 @@ docutils = "<0.18" sphinx = ">=1.6" [package.extras] -dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] +dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client"] [[package]] name = 
"sphinxcontrib-applehelp" @@ -1825,7 +2002,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1837,7 +2014,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1849,8 +2026,8 @@ optional = false python-versions = ">=3.6" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest", "html5lib"] +lint = ["docutils-stubs", "flake8", "mypy"] +test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" @@ -1861,7 +2038,7 @@ optional = false python-versions = ">=3.5" [package.extras] -test = ["pytest", "flake8", "mypy"] +test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" @@ -1872,7 +2049,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1884,7 +2061,7 @@ optional = false python-versions = ">=3.5" [package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] +lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] @@ -1923,7 +2100,7 @@ executing = "*" pure-eval = "*" [package.extras] -tests = ["pytest", "typeguard", "pygments", "littleutils", "cython"] +tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"] [[package]] name = "stevedore" @@ -1950,7 +2127,7 @@ pywinpty = {version = ">=1.1.0", markers = "os_name == \"nt\""} tornado = ">=6.1.0" [package.extras] -test = ["pre-commit", "pytest-timeout", "pytest (>=6.0)"] +test = ["pre-commit", "pytest (>=6.0)", "pytest-timeout"] [[package]] name = "threadpoolctl" @@ -1972,8 +2149,8 @@ python-versions = ">=3.6" webencodings = ">=0.4" [package.extras] -doc = ["sphinx", "sphinx-rtd-theme"] -test = ["pytest", "pytest-cov", "pytest-flake8", "pytest-isort", "coverage"] +doc = ["sphinx", "sphinx_rtd_theme"] +test = ["coverage[toml]", "pytest", "pytest-cov", "pytest-flake8", "pytest-isort"] [[package]] name = "tokenize-rt" @@ -1992,8 +2169,8 @@ optional = false python-versions = "*" [package.extras] -docs = ["sphinx", "sphinx-rtd-theme", "setuptools-rust"] -testing = ["pytest", "requests", "numpy", "datasets"] +docs = ["setuptools-rust", "sphinx", "sphinx-rtd-theme"] +testing = ["datasets", "numpy", "pytest", "requests"] [[package]] name = "toml" @@ -2102,45 +2279,45 @@ torch = {version = ">=1.0", optional = true, markers = "extra == \"torch\""} tqdm = ">=4.27" [package.extras] -all = ["tensorflow (>=2.3)", "onnxconverter-common", "tf2onnx", "torch (>=1.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.3.5)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.1)", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)"] -audio = ["librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +all = ["Pillow", "codecarbon (==1.2.0)", "flax (>=0.3.5)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.1)", "pyctcdecode (>=0.3.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.3)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torch (>=1.0)", "torchaudio"] +audio = 
["librosa", "phonemizer", "pyctcdecode (>=0.3.0)"] codecarbon = ["codecarbon (==1.2.0)"] deepspeed = ["deepspeed (>=0.6.5)"] -deepspeed-testing = ["deepspeed (>=0.6.5)", "pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets", "dill (<0.3.5)", "pytest-timeout", "black (>=22.3,<23.0)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.1)", "sacremoses", "rjieba", "faiss-cpu", "cookiecutter (==1.7.3)", "optuna"] -dev = ["tensorflow (>=2.3)", "onnxconverter-common", "tf2onnx", "torch (>=1.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.3.5)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.1)", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets", "dill (<0.3.5)", "pytest-timeout", "black (>=22.3,<23.0)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "sacremoses", "rjieba", "faiss-cpu", "cookiecutter (==1.7.3)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "hf-doc-builder", "scikit-learn"] -dev-tensorflow = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets", "dill (<0.3.5)", "pytest-timeout", "black (>=22.3,<23.0)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.1)", "sacremoses", "rjieba", "faiss-cpu", "cookiecutter (==1.7.3)", "tensorflow (>=2.3)", "onnxconverter-common", "tf2onnx", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "pillow", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] -dev-torch = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets", "dill (<0.3.5)", "pytest-timeout", "black (>=22.3,<23.0)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.1)", "sacremoses", "rjieba", "faiss-cpu", "cookiecutter (==1.7.3)", "torch (>=1.0)", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)", "hf-doc-builder", "scikit-learn", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] -docs = ["tensorflow (>=2.3)", "onnxconverter-common", "tf2onnx", "torch (>=1.0)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.3.5)", "optax (>=0.0.8)", "sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.1)", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer", "pillow", "optuna", "ray", "sigopt", "timm", "codecarbon (==1.2.0)", "hf-doc-builder"] +deepspeed-testing = ["GitPython (<3.1.19)", "black (>=22.3,<23.0)", "cookiecutter (==1.7.3)", "datasets", "deepspeed (>=0.6.5)", "dill (<0.3.5)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "optuna", "parameterized", "protobuf 
(<=3.20.1)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"] +dev = ["GitPython (<3.1.19)", "Pillow", "black (>=22.3,<23.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets", "dill (<0.3.5)", "faiss-cpu", "flake8 (>=3.8.3)", "flax (>=0.3.5)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "librosa", "nltk", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.1)", "psutil", "pyctcdecode (>=0.3.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.3)", "tf2onnx", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torch (>=1.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "black (>=22.3,<23.0)", "cookiecutter (==1.7.3)", "datasets", "dill (<0.3.5)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.1)", "psutil", "pyctcdecode (>=0.3.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.3)", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.13)"] +dev-torch = ["GitPython (<3.1.19)", "Pillow", "black (>=22.3,<23.0)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets", "dill (<0.3.5)", "faiss-cpu", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.1)", "psutil", "pyctcdecode (>=0.3.0)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torch (>=1.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] +docs = ["Pillow", "codecarbon (==1.2.0)", "flax (>=0.3.5)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.1)", "pyctcdecode (>=0.3.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.3)", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torch (>=1.0)", "torchaudio"] docs_specific = ["hf-doc-builder"] fairscale = ["fairscale (>0.3)"] -flax = ["jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "flax (>=0.3.5)", "optax (>=0.0.8)"] -flax-speech = ["librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +flax = ["flax (>=0.3.5)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8)"] +flax-speech = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)"] ftfy = ["ftfy"] -integrations = ["optuna", "ray", "sigopt"] -ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic-lite (>=1.0.7)", "unidic (>=1.0.2)"] 
+integrations = ["optuna", "ray[tune]", "sigopt"] +ja = ["fugashi (>=1.0)", "ipadic (>=1.0.0,<2.0)", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] modelcreation = ["cookiecutter (==1.7.3)"] -onnx = ["onnxconverter-common", "tf2onnx", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] +onnx = ["onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "tf2onnx"] onnxruntime = ["onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)"] optuna = ["optuna"] -quality = ["black (>=22.3,<23.0)", "isort (>=5.5.4)", "flake8 (>=3.8.3)", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)"] -ray = ["ray"] -retrieval = ["faiss-cpu", "datasets"] +quality = ["GitPython (<3.1.19)", "black (>=22.3,<23.0)", "flake8 (>=3.8.3)", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)"] +ray = ["ray[tune]"] +retrieval = ["datasets", "faiss-cpu"] sagemaker = ["sagemaker (>=2.31.0)"] -sentencepiece = ["sentencepiece (>=0.1.91,!=0.1.92)", "protobuf (<=3.20.1)"] -serving = ["pydantic", "uvicorn", "fastapi", "starlette"] +sentencepiece = ["protobuf (<=3.20.1)", "sentencepiece (>=0.1.91,!=0.1.92)"] +serving = ["fastapi", "pydantic", "starlette", "uvicorn"] sigopt = ["sigopt"] sklearn = ["scikit-learn"] -speech = ["torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] -testing = ["pytest", "pytest-xdist", "timeout-decorator", "parameterized", "psutil", "datasets", "dill (<0.3.5)", "pytest-timeout", "black (>=22.3,<23.0)", "sacrebleu (>=1.4.12,<2.0.0)", "rouge-score", "nltk", "GitPython (<3.1.19)", "hf-doc-builder (>=0.3.0)", "protobuf (<=3.20.1)", "sacremoses", "rjieba", "faiss-cpu", "cookiecutter (==1.7.3)"] -tf = ["tensorflow (>=2.3)", "onnxconverter-common", "tf2onnx"] -tf-cpu = ["tensorflow-cpu (>=2.3)", "onnxconverter-common", "tf2onnx"] -tf-speech = ["librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] +speech = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)", "torchaudio"] +testing = ["GitPython (<3.1.19)", "black (>=22.3,<23.0)", "cookiecutter (==1.7.3)", "datasets", "dill (<0.3.5)", "faiss-cpu", "hf-doc-builder (>=0.3.0)", "nltk", "parameterized", "protobuf (<=3.20.1)", "psutil", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "timeout-decorator"] +tf = ["onnxconverter-common", "tensorflow (>=2.3)", "tf2onnx"] +tf-cpu = ["onnxconverter-common", "tensorflow-cpu (>=2.3)", "tf2onnx"] +tf-speech = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)"] timm = ["timm"] tokenizers = ["tokenizers (>=0.11.1,!=0.11.3,<0.13)"] torch = ["torch (>=1.0)"] -torch-speech = ["torchaudio", "librosa", "pyctcdecode (>=0.3.0)", "phonemizer"] -torchhub = ["filelock", "huggingface-hub (>=0.1.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.1)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "torch (>=1.0)", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "tqdm (>=4.27)"] -vision = ["pillow"] +torch-speech = ["librosa", "phonemizer", "pyctcdecode (>=0.3.0)", "torchaudio"] +torchhub = ["filelock", "huggingface-hub (>=0.1.0,<1.0)", "importlib-metadata", "numpy (>=1.17)", "packaging (>=20.0)", "protobuf (<=3.20.1)", "regex (!=2019.12.17)", "requests", "sentencepiece (>=0.1.91,!=0.1.92)", "tokenizers (>=0.11.1,!=0.11.3,<0.13)", "torch (>=1.0)", "tqdm (>=4.27)"] +vision = ["Pillow"] [[package]] name = "typeguard" @@ -2151,8 +2328,8 @@ optional = false python-versions = ">=3.5.3" [package.extras] -doc = ["sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["pytest", "typing-extensions", 
"mypy"] +doc = ["sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["mypy", "pytest", "typing-extensions"] [[package]] name = "typing-extensions" @@ -2171,8 +2348,8 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] -brotli = ["brotlicffi (>=0.8.0)", "brotli (>=1.0.9)", "brotlipy (>=0.6.0)"] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] +secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "ipaddress", "pyOpenSSL (>=0.14)"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] @@ -2191,7 +2368,7 @@ six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] [[package]] name = "wcwidth" @@ -2209,6 +2386,19 @@ category = "main" optional = false python-versions = "*" +[[package]] +name = "websocket-client" +version = "1.4.1" +description = "WebSocket client for Python with low level API options" +category = "dev" +optional = false +python-versions = ">=3.7" + +[package.extras] +docs = ["Sphinx (>=3.4)", "sphinx-rtd-theme (>=0.5)"] +optional = ["python-socks", "wsaccel"] +test = ["websockets"] + [[package]] name = "widgetsnbextension" version = "3.6.1" @@ -2257,8 +2447,8 @@ optional = false python-versions = ">=3.7" [package.extras] -docs = ["sphinx", "jaraco.packaging (>=9)", "rst.linker (>=1.9)"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy (>=0.9.1)"] +docs = ["jaraco.packaging (>=9)", "rst.linker (>=1.9)", "sphinx"] +testing = ["func-timeout", "jaraco.itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [extras] all = ["scikit-learn", "datasets"] @@ -2268,7 +2458,7 @@ scikit-learn = ["scikit-learn"] [metadata] lock-version = "1.1" python-versions = ">=3.8,<3.11" -content-hash = "545a83d4f0ccc3dc78dcec090fffb1381a8c2fec3df565b3a6811717f029e4b4" +content-hash = "71cc4fb50df9f949a992bba499b21cdea00ee3b18b7074cbb55f560fd8566318" [metadata.files] aiohttp = [ @@ -2353,6 +2543,10 @@ alabaster = [ {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] +anyio = [ + {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, + {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, +] appnope = [ {file = "appnope-0.1.3-py2.py3-none-any.whl", hash = "sha256:265a455292d0bd8a72453494fa24df5a11eb18373a60c7c0430889f22548605e"}, {file = 
"appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, @@ -2760,6 +2954,10 @@ json-tricks = [ {file = "json_tricks-3.15.5-py2.py3-none-any.whl", hash = "sha256:3432a602773b36ff0fe5b94a74f5de8612c843a256724e15c32f9f669844b6ef"}, {file = "json_tricks-3.15.5.tar.gz", hash = "sha256:bdf7d8677bccea722984be7f68946a981e4f50c21901e292d71b9c0c60a4ace3"}, ] +json5 = [ + {file = "json5-0.9.10-py2.py3-none-any.whl", hash = "sha256:993189671e7412e9cdd8be8dc61cf402e8e579b35f1d1bb20ae6b09baa78bbce"}, + {file = "json5-0.9.10.tar.gz", hash = "sha256:ad9f048c5b5a4c3802524474ce40a622fae789860a86f10cc4f7e5f9cf9b46ab"}, +] jsonschema = [ {file = "jsonschema-4.6.1-py3-none-any.whl", hash = "sha256:5eb781753403847fb320f05e9ab2191725b58c5e7f97f1bed63285ca423159bc"}, {file = "jsonschema-4.6.1.tar.gz", hash = "sha256:ec2802e6a37517f09d47d9ba107947589ae1d25ff557b925d83a321fc2aa5d3b"}, @@ -2772,10 +2970,22 @@ jupyter-core = [ {file = "jupyter_core-4.10.0-py3-none-any.whl", hash = "sha256:e7f5212177af7ab34179690140f188aa9bf3d322d8155ed972cbded19f55b6f3"}, {file = "jupyter_core-4.10.0.tar.gz", hash = "sha256:a6de44b16b7b31d7271130c71a6792c4040f077011961138afed5e5e73181aec"}, ] +jupyter-server = [ + {file = "jupyter_server-1.18.1-py3-none-any.whl", hash = "sha256:022759b09c96a4e2feb95de59ce4283e04e17782efe197b91d23a47521609b77"}, + {file = "jupyter_server-1.18.1.tar.gz", hash = "sha256:2b72fc595bccae292260aad8157a0ead8da2c703ec6ae1bb7b36dbad0e267ea7"}, +] +jupyterlab = [ + {file = "jupyterlab-3.4.7-py3-none-any.whl", hash = "sha256:30c64bc0aa0ba09959ab6fd5c74f08a6ae64656b46a29e2522142a5fda0dc486"}, + {file = "jupyterlab-3.4.7.tar.gz", hash = "sha256:4dc48ab0980e3af2e921dff26e0013dd03b104b1b67f0d85b67448e16e25311e"}, +] jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.2.2-py2.py3-none-any.whl", hash = "sha256:2405800db07c9f770863bcf8049a529c3dd4d3e28536638bd7c1c01d2748309f"}, {file = "jupyterlab_pygments-0.2.2.tar.gz", hash = "sha256:7405d7fde60819d905a9fa8ce89e4cd830e318cdad22a0030f7a901da705585d"}, ] +jupyterlab-server = [ + {file = "jupyterlab_server-2.15.1-py3-none-any.whl", hash = "sha256:5e04008a98bfb510471b8b8a7059f7cdbb1797e1f255657f39ea3d838ba00bf6"}, + {file = "jupyterlab_server-2.15.1.tar.gz", hash = "sha256:305313970e131c590cf77bb6b8ca7e98591bc304111e8d103bc91d212e94796f"}, +] jupyterlab-widgets = [ {file = "jupyterlab_widgets-1.1.1-py3-none-any.whl", hash = "sha256:90ab47d99da03a3697074acb23b2975ead1d6171aa41cb2812041a7f2a08177a"}, {file = "jupyterlab_widgets-1.1.1.tar.gz", hash = "sha256:67d0ef1e407e0c42c8ab60b9d901cd7a4c68923650763f75bf17fb06c1943b79"}, @@ -3072,6 +3282,10 @@ mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, ] +nbclassic = [ + {file = "nbclassic-0.4.3-py3-none-any.whl", hash = "sha256:4b01076effdac53e775cd1b6a4e891663568b32621468e205b502a23b2921899"}, + {file = "nbclassic-0.4.3.tar.gz", hash = "sha256:f03111b2cebaa69b88370a7b23b19b2b37c9bb71767f1828cdfd7a047eae8edd"}, +] nbclient = [ {file = "nbclient-0.6.6-py3-none-any.whl", hash = "sha256:09bae4ea2df79fa6bc50aeb8278d8b79d2036792824337fa6eee834afae17312"}, {file = "nbclient-0.6.6.tar.gz", hash = "sha256:0df76a7961d99a681b4796c74a1f2553b9f998851acc01896dce064ad19a9027"}, @@ -3096,6 +3310,10 @@ notebook = [ {file = "notebook-6.4.12-py3-none-any.whl", hash = 
"sha256:8c07a3bb7640e371f8a609bdbb2366a1976c6a2589da8ef917f761a61e3ad8b1"}, {file = "notebook-6.4.12.tar.gz", hash = "sha256:6268c9ec9048cff7a45405c990c29ac9ca40b0bc3ec29263d218c5e01f2b4e86"}, ] +notebook-shim = [ + {file = "notebook_shim-0.1.0-py3-none-any.whl", hash = "sha256:02432d55a01139ac16e2100888aa2b56c614720cec73a27e71f40a5387e45324"}, + {file = "notebook_shim-0.1.0.tar.gz", hash = "sha256:7897e47a36d92248925a2143e3596f19c60597708f7bef50d81fcd31d7263e85"}, +] numpy = [ {file = "numpy-1.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58bfd40eb478f54ff7a5710dd61c8097e169bc36cc68333d00a9bcd8def53b38"}, {file = "numpy-1.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:196cd074c3f97c4121601790955f915187736f9cf458d3ee1f1b46aff2b1ade0"}, @@ -3441,6 +3659,13 @@ pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5"}, {file = "PyYAML-6.0-cp310-cp310-win32.whl", hash = "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513"}, {file = "PyYAML-6.0-cp310-cp310-win_amd64.whl", hash = "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358"}, + {file = "PyYAML-6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f"}, + {file = "PyYAML-6.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782"}, + {file = "PyYAML-6.0-cp311-cp311-win32.whl", hash = "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7"}, + {file = "PyYAML-6.0-cp311-cp311-win_amd64.whl", hash = "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf"}, {file = "PyYAML-6.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f"}, {file = "PyYAML-6.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92"}, @@ -3718,6 +3943,10 @@ sentencepiece = [ {file = "sentencepiece-0.1.96-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48c6d13b3bfff08060c138248e85df60f6fad11135ad7a8fc2ef6005aacca839"}, {file = "sentencepiece-0.1.96.tar.gz", hash = "sha256:9bdf097d5bd1d8ce42dfee51f6ff05f5578b96e48c6f6006aa4eff69edfa3639"}, ] +setuptools = [ + {file = "setuptools-65.3.0-py3-none-any.whl", hash = "sha256:2e24e0bec025f035a2e72cdd1961119f557d78ad331bb00ff82efb2ab8da8e82"}, + {file = "setuptools-65.3.0.tar.gz", hash = "sha256:7732871f4f7fa58fb6bdcaeadb0161b2bd046c85905dbaa066bdcbcc81953b57"}, +] setuptools-scm = [ {file = "setuptools_scm-7.0.4-py3-none-any.whl", hash = "sha256:53a6f51451a84d891ca485cec700a802413bbc5e76ee65da134e54c733a6e44d"}, {file = 
"setuptools_scm-7.0.4.tar.gz", hash = "sha256:c27bc1f48593cfc9527251f1f0fc41ce282ea57bbc7fd5a1ea3acb99325fab4c"}, @@ -3730,6 +3959,10 @@ smmap = [ {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] +sniffio = [ + {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, @@ -3964,6 +4197,10 @@ webencodings = [ {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, ] +websocket-client = [ + {file = "websocket-client-1.4.1.tar.gz", hash = "sha256:f9611eb65c8241a67fb373bef040b3cf8ad377a9f6546a12b620b6511e8ea9ef"}, + {file = "websocket_client-1.4.1-py3-none-any.whl", hash = "sha256:398909eb7e261f44b8f4bd474785b6ec5f5b499d4953342fe9755e01ef624090"}, +] widgetsnbextension = [ {file = "widgetsnbextension-3.6.1-py2.py3-none-any.whl", hash = "sha256:954e0faefdd414e4e013f17dbc7fd86f24cf1d243a3ac85d5f0fc2c2d2b50c66"}, {file = "widgetsnbextension-3.6.1.tar.gz", hash = "sha256:9c84ae64c2893c7cbe2eaafc7505221a795c27d68938454034ac487319a75b10"}, diff --git a/pyproject.toml b/pyproject.toml index d8b7f447..9bcdfe24 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -69,6 +69,7 @@ darglint = "^1.8.1" flake8 = "^4.0.1" ipykernel = "^6.15.0" isort = {extras = ["colors"], version = "^5.10.1"} +jupyterlab = "^3.4.7" mypy = "^0.910" mypy-extensions = "^0.4.3" pre-commit = "^2.19.0" From 08d5dbd588a481dccf4a7cd6e2b4b9a21be5f341 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Tue, 18 Oct 2022 00:36:23 +0200 Subject: [PATCH 02/30] initial commit attention methods, added output_attentions parameter to model initialization --- docs/source/conf.py | 2 +- inseq/attr/feat/attention_attribution.py | 62 ++++++++++++++++++++++++ inseq/models/huggingface_model.py | 6 ++- inseq/utils/misc.py | 2 +- 4 files changed, 69 insertions(+), 3 deletions(-) create mode 100644 inseq/attr/feat/attention_attribution.py diff --git a/docs/source/conf.py b/docs/source/conf.py index 3e3f5172..43159e4b 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -79,7 +79,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py new file mode 100644 index 00000000..e51f7a42 --- /dev/null +++ b/inseq/attr/feat/attention_attribution.py @@ -0,0 +1,62 @@ +# Copyright 2021 The Inseq Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+""" Attention-based feature attribution methods. """
+
+import logging
+
+from ...utils import Registry
+from ...utils.typing import ModelIdentifier
+from .feature_attribution import FeatureAttribution
+
+
+logger = logging.getLogger(__name__)
+
+
+class AttentionAtribution(FeatureAttribution, Registry):
+    r"""Attention-based attribution method registry."""
+
+    @classmethod
+    def load(
+        cls,
+        method_name: str,
+        attribution_model=None,
+        model_name_or_path: ModelIdentifier = None,
+        **kwargs,
+    ) -> "FeatureAttribution":
+        from inseq import AttributionModel
+
+        if (model_name_or_path is None) == (attribution_model is None):
+            raise RuntimeError(
+                "Only one among an initialized model and a model identifier "
+                "must be defined when loading the attribution method."
+            )
+        if model_name_or_path:
+            attribution_model = AttributionModel.load(model_name_or_path)
+            model_name_or_path = None
+
+        if not attribution_model.model.config.output_attentions:
+            raise RuntimeError(
+                "Attention-based attribution methods require the `output_attentions` parameter to be set."
+            )
+        return super().load(method_name, attribution_model, model_name_or_path, **kwargs)
+
+
+class AggregatedAttentionAtribution(AttentionAtribution):
+
+    method_name = "aggregated_attention"
+
+
+class LastLayerAttentionAttribution(AttentionAtribution):
+
+    method_name = "last_layer_attention"
diff --git a/inseq/models/huggingface_model.py b/inseq/models/huggingface_model.py
index a6286542..a31abe98 100644
--- a/inseq/models/huggingface_model.py
+++ b/inseq/models/huggingface_model.py
@@ -37,7 +37,7 @@
 
 
 class HuggingfaceModel(AttributionModel):
-    """Performs attribution for any seq2seq model in the HuggingFace Hub.
+    """Performs attribution for any seq2seq model in the HuggingFace Hub. 
 
     Attributes:
         model (AutoModelForSeq2SeqLM): the seq2seq model on which
@@ -85,6 +85,10 @@ def __init__(
         model_kwargs = kwargs.pop("model_kwargs", {})
         tokenizer_inputs = kwargs.pop("tokenizer_inputs", {})
         tokenizer_kwargs = kwargs.pop("tokenizer_kwargs", {})
+
+        if "output_attentions" not in model_kwargs:
+            model_kwargs["output_attentions"] = True
+
         self.model = AutoModelForSeq2SeqLM.from_pretrained(model_name_or_path, *model_args, **model_kwargs)
         self.tokenizer = AutoTokenizer.from_pretrained(tokenizer_name_or_path, *tokenizer_inputs, **tokenizer_kwargs)
         self.model_name = self.model.config.name_or_path
diff --git a/inseq/utils/misc.py b/inseq/utils/misc.py
index 1a3bbcfe..dc1475b4 100644
--- a/inseq/utils/misc.py
+++ b/inseq/utils/misc.py
@@ -118,7 +118,7 @@ def extract_signature_args(
 
 
 def ordinal_str(n: int):
-    """Converts a number to and ordinal string."""
+    """Converts a number to an ordinal string."""
     return str(n) + {1: "st", 2: "nd", 3: "rd"}.get(4 if 10 <= n % 100 < 20 else n % 10, "th")
 

From 544321cdc85044bb14a8dc7404cea161f4f6c1e6 Mon Sep 17 00:00:00 2001
From: Ludwig Sickert
Date: Tue, 25 Oct 2022 00:19:07 +0200
Subject: [PATCH 03/30] added basic attention method stubs; added attention
 method registry

---
 inseq/attr/feat/__init__.py              |  4 ++
 inseq/attr/feat/attention_attribution.py | 67 ++++++++++++++++++++++--
 inseq/attr/feat/attribution_utils.py     | 14 +++++
 inseq/attr/feat/feature_attribution.py   |  2 +-
 inseq/attr/feat/ops/__init__.py          |  3 +-
 inseq/attr/feat/ops/basic_attention.py   | 31 +++++++++++
 inseq/models/attribution_model.py        | 14 +++--
 7 files changed, 125 insertions(+), 10 deletions(-)
 create mode 100644 inseq/attr/feat/ops/basic_attention.py

diff --git a/inseq/attr/feat/__init__.py b/inseq/attr/feat/__init__.py
index 642ee61b..5c47e30a 100644
--- a/inseq/attr/feat/__init__.py
+++ b/inseq/attr/feat/__init__.py
@@ -1,3 +1,4 @@
+from .attention_attribution import AggregatedAttentionAtribution, AttentionAtribution, LastLayerAttentionAttribution
 from .attribution_utils import STEP_SCORES_MAP, list_step_scores, register_step_score
 from .feature_attribution import FeatureAttribution, list_feature_attribution_methods
 from .gradient_attribution import (
@@ -28,4 +29,7 @@
     "LayerIntegratedGradientsAttribution",
     "LayerGradientXActivationAttribution",
     "LayerDeepLiftAttribution",
+    "AttentionAtribution",
+    "AggregatedAttentionAtribution",
+    "LastLayerAttentionAttribution",
 ]
diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py
index e51f7a42..e09ba640 100644
--- a/inseq/attr/feat/attention_attribution.py
+++ b/inseq/attr/feat/attention_attribution.py
@@ -13,11 +13,16 @@
 # limitations under the License.
 """ Attention-based feature attribution methods. """
+from typing import Any, Callable, Dict, Union
+
 import logging
 
+from ...data import EncoderDecoderBatch, FeatureAttributionStepOutput
 from ...utils import Registry
-from ...utils.typing import ModelIdentifier
+from ...utils.typing import ModelIdentifier, SingleScorePerStepTensor, TargetIdsTensor
+from ..attribution_decorators import set_hook, unset_hook
 from .feature_attribution import FeatureAttribution
+from .ops import AggregatedAttention, LastLayerAttention
 
 
 logger = logging.getLogger(__name__)
@@ -26,12 +31,60 @@ class AttentionAtribution(FeatureAttribution, Registry):
 
     r"""Attention-based attribution method registry."""
 
+    @set_hook
+    def hook(self, **kwargs):
+        pass
+
+    @unset_hook
+    def unhook(self, **kwargs):
+        pass
+
+    def attribute_step(
+        self,
+        batch: EncoderDecoderBatch,
+        target_ids: TargetIdsTensor,
+        attributed_fn: Callable[..., SingleScorePerStepTensor],
+        attribute_target: bool = False,
+        attribution_args: Dict[str, Any] = {},
+        attributed_fn_args: Dict[str, Any] = {},
+    ) -> FeatureAttributionStepOutput:
+        r"""
+        Performs a single attribution step for the specified target_ids,
+        given sources and targets in the batch.
+
+        Abstract method, must be implemented by subclasses.
+
+        Args:
+            batch (:class:`~inseq.data.EncoderDecoderBatch`): The batch of sequences on which attribution is performed.
+            target_ids (:obj:`torch.Tensor`): Target token ids of size `(batch_size)` corresponding to tokens
+                for which the attribution step must be performed.
+            attributed_fn (:obj:`Callable[..., SingleScorePerStepTensor]`): The function of model outputs
+                representing what should be attributed (e.g. the output probability of the model's best prediction after softmax).
+                The parameter must be a function taking multiple keyword arguments and returning a :obj:`tensor`
+                of size (batch_size,). If not provided, the default attributed function for the model will be used
+                (change attribution_model.default_attributed_fn_id).
+            attribute_target (:obj:`bool`, optional): Whether to attribute the target prefix or not. Defaults to False.
+            attribution_args (:obj:`dict`, `optional`): Additional arguments to pass to the attribution method.
+                Defaults to {}.
+            attributed_fn_args (:obj:`dict`, `optional`): Additional arguments to pass to the attributed function.
+                Defaults to {}.
+        Returns:
+            :class:`~inseq.data.FeatureAttributionStepOutput`: A dataclass containing attribution tensors for source
+            and target attributions of size `(batch_size, source_length)` and `(batch_size, prefix length)`.
+            (target optional if attribute_target=True), plus batch information and any step score present.
+        """
+        return FeatureAttributionStepOutput(
+            source_attributions=None,
+            target_attributions=None,
+            step_scores=None,
+        )
+
     @classmethod
     def load(
         cls,
         method_name: str,
         attribution_model=None,
-        model_name_or_path: ModelIdentifier = None,
+        model_name_or_path: Union[ModelIdentifier, None] = None,
         **kwargs,
     ) -> "FeatureAttribution":
         from inseq import AttributionModel
@@ -47,7 +100,7 @@ def load(
 
         if not attribution_model.model.config.output_attentions:
             raise RuntimeError(
-                "Attention-based attribution methods require the `output_attentions` parameter to be set."
+                "Attention-based attribution methods require the `output_attentions` parameter to be set on the model."
) return super().load(method_name, attribution_model, model_name_or_path, **kwargs) @@ -56,7 +109,15 @@ class AggregatedAttentionAtribution(AttentionAtribution): method_name = "aggregated_attention" + def __init__(self, attribution_model, **kwargs): + super().__init__(attribution_model) + self.method = AggregatedAttention(attribution_model) + class LastLayerAttentionAttribution(AttentionAtribution): method_name = "last_layer_attention" + + def __init__(self, attribution_model, **kwargs): + super().__init__(attribution_model) + self.method = LastLayerAttention(attribution_model) diff --git a/inseq/attr/feat/attribution_utils.py b/inseq/attr/feat/attribution_utils.py index 677feb46..7bc4a2b5 100644 --- a/inseq/attr/feat/attribution_utils.py +++ b/inseq/attr/feat/attribution_utils.py @@ -231,3 +231,17 @@ def register_step_score( if agg_name not in DEFAULT_ATTRIBUTION_AGGREGATE_DICT["step_scores"]: DEFAULT_ATTRIBUTION_AGGREGATE_DICT["step_scores"][agg_name] = {} DEFAULT_ATTRIBUTION_AGGREGATE_DICT["step_scores"][agg_name][identifier] = agg_fn + + +def num_attention_layers(attention: tuple) -> int: + """ + Returns the number of layers an attention tuple for a given model has. + """ + return len(attention) + + +def num_attention_heads(attention: tuple) -> int: + """ + Returns the number of heads an attention tuple has. + """ + return attention[0][0].size(0) diff --git a/inseq/attr/feat/feature_attribution.py b/inseq/attr/feat/feature_attribution.py index 2f09580c..48d46b3f 100644 --- a/inseq/attr/feat/feature_attribution.py +++ b/inseq/attr/feat/feature_attribution.py @@ -109,7 +109,7 @@ def load( cls, method_name: str, attribution_model=None, - model_name_or_path: ModelIdentifier = None, + model_name_or_path: Union[ModelIdentifier, None] = None, **kwargs, ) -> "FeatureAttribution": r""" diff --git a/inseq/attr/feat/ops/__init__.py b/inseq/attr/feat/ops/__init__.py index 88719e21..5b70d9be 100644 --- a/inseq/attr/feat/ops/__init__.py +++ b/inseq/attr/feat/ops/__init__.py @@ -1,5 +1,6 @@ +from .basic_attention import AggregatedAttention, LastLayerAttention from .discretized_integrated_gradients import DiscretetizedIntegratedGradients from .monotonic_path_builder import MonotonicPathBuilder -__all__ = ["DiscretetizedIntegratedGradients", "MonotonicPathBuilder"] +__all__ = ["DiscretetizedIntegratedGradients", "MonotonicPathBuilder", "AggregatedAttention", "LastLayerAttention"] diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py new file mode 100644 index 00000000..502f87c4 --- /dev/null +++ b/inseq/attr/feat/ops/basic_attention.py @@ -0,0 +1,31 @@ +# Copyright 2021 The Inseq Team. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from captum.attr._utils.attribution import Attribution + + +class AggregatedAttention(Attribution): + def has_convergence_delta(self) -> bool: + return False + + def attribute(): + pass + + +class LastLayerAttention(Attribution): + def has_convergence_delta(self) -> bool: + return False + + def attribute(): + pass diff --git a/inseq/models/attribution_model.py b/inseq/models/attribution_model.py index 8788e95d..b2359253 100644 --- a/inseq/models/attribution_model.py +++ b/inseq/models/attribution_model.py @@ -1,7 +1,7 @@ from typing import Any, Callable, Dict, List, Optional, Tuple, Union import logging -from abc import ABC, abstractmethod, abstractstaticmethod +from abc import ABC, abstractmethod import torch from rich.status import Status @@ -175,6 +175,7 @@ def attribute( self, input_texts: TextInput, generated_texts: Optional[TextInput] = None, + generation_outputs: Optional[Dict] = {}, method: Optional[str] = None, override_default_attribution: Optional[bool] = False, attr_pos_start: Optional[int] = 1, @@ -208,12 +209,14 @@ def attribute( generation_args = kwargs.pop("generation_args", {}) if not constrained_decoding: input_texts = self.encode(input_texts, return_baseline=True, include_eos_baseline=include_eos_baseline) - generated_texts = self.generate(input_texts, return_generation_output=False, **generation_args) + generated_texts, generation_outputs = self.generate( + input_texts, return_generation_output=True, **generation_args + ) logger.debug(f"reference_texts={generated_texts}") attribution_method = self.get_attribution_method(method, override_default_attribution) attributed_fn = self.get_attributed_fn(attributed_fn) attribution_args, attributed_fn_args, step_scores_args = self.extract_args( - attribution_method, attributed_fn, step_scores, **kwargs + attribution_method, attributed_fn, step_scores, **generation_outputs, **kwargs ) if isnotebook(): logger.debug("Pretty progress currently not supported in notebooks, falling back to tqdm.") @@ -267,12 +270,13 @@ def generate( ) -> Union[List[str], Tuple[List[str], Any]]: pass - @abstractstaticmethod + @staticmethod + @abstractmethod def output2logits(forward_output) -> FullLogitsTensor: pass @abstractmethod - def encode(self, texts: TextInput, as_targets: Optional[bool] = False, *args) -> BatchEncoding: + def encode(self, texts: TextInput, as_targets: Optional[bool] = False, *args, **kwargs) -> BatchEncoding: pass @abstractmethod From d0e859fdacd18aac0facbffa19d9915b60203b0f Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 21 Nov 2022 12:55:12 +0100 Subject: [PATCH 04/30] reverted changes to output generation (forward pass done inside attribution function now) --- inseq/models/attribution_model.py | 9 +++------ 1 file changed, 3 insertions(+), 6 deletions(-) diff --git a/inseq/models/attribution_model.py b/inseq/models/attribution_model.py index b2359253..d1d2a1af 100644 --- a/inseq/models/attribution_model.py +++ b/inseq/models/attribution_model.py @@ -165,7 +165,7 @@ def extract_args( if k in attribution_unused_args.keys() & attributed_fn_unused_args.keys() & step_scores_unused_args.keys() } if unused_args: - logger.warning(f"Unused arguments during attribution: {unused_args}") + logger.warning(f"Unused arguments during attribution: {list(unused_args.keys())}") attribution_args.update(extra_attribution_args) attributed_fn_args.update(extra_attributed_fn_args) step_scores_args.update(extra_step_scores_args) @@ -175,7 +175,6 @@ def attribute( self, input_texts: TextInput, generated_texts: Optional[TextInput] = None, - 
generation_outputs: Optional[Dict] = {}, method: Optional[str] = None, override_default_attribution: Optional[bool] = False, attr_pos_start: Optional[int] = 1, @@ -209,14 +208,12 @@ def attribute( generation_args = kwargs.pop("generation_args", {}) if not constrained_decoding: input_texts = self.encode(input_texts, return_baseline=True, include_eos_baseline=include_eos_baseline) - generated_texts, generation_outputs = self.generate( - input_texts, return_generation_output=True, **generation_args - ) + generated_texts = self.generate(input_texts, return_generation_output=False, **generation_args) logger.debug(f"reference_texts={generated_texts}") attribution_method = self.get_attribution_method(method, override_default_attribution) attributed_fn = self.get_attributed_fn(attributed_fn) attribution_args, attributed_fn_args, step_scores_args = self.extract_args( - attribution_method, attributed_fn, step_scores, **generation_outputs, **kwargs + attribution_method, attributed_fn, step_scores, **kwargs ) if isnotebook(): logger.debug("Pretty progress currently not supported in notebooks, falling back to tqdm.") From a2a2021f0661258c159f85116097415debfbf5b7 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 21 Nov 2022 14:08:56 +0100 Subject: [PATCH 05/30] first working version of basic attention methods --- inseq/attr/feat/attention_attribution.py | 22 +++- inseq/attr/feat/attribution_utils.py | 15 +-- inseq/attr/feat/ops/basic_attention.py | 161 +++++++++++++++++++++-- 3 files changed, 178 insertions(+), 20 deletions(-) diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index e09ba640..dbad75d4 100644 --- a/inseq/attr/feat/attention_attribution.py +++ b/inseq/attr/feat/attention_attribution.py @@ -18,7 +18,7 @@ import logging from ...data import EncoderDecoderBatch, FeatureAttributionStepOutput -from ...utils import Registry +from ...utils import Registry, pretty_tensor from ...utils.typing import ModelIdentifier, SingleScorePerStepTensor, TargetIdsTensor from ..attribution_decorators import set_hook, unset_hook from .feature_attribution import FeatureAttribution @@ -73,10 +73,24 @@ def attribute_step( and target attributions of size `(batch_size, source_length)` and `(batch_size, prefix length)`. (target optional if attribute_target=True), plus batch information and any step score present. 
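+
+        Example (illustrative sketch only, not part of the public API: ``method``,
+        ``batch`` and ``target_ids`` stand for an initialized attention attribution
+        method, an encoded batch and a tensor of target token ids):
+
+        .. code-block:: python
+
+            step_output = method.attribute_step(batch, target_ids, attributed_fn)
+            # step_output.source_attributions has shape (batch_size, source_length)
+            # step_output.target_attributions is None unless attribute_target=True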
""" + logger.debug(f"batch: {batch},\ntarget_ids: {pretty_tensor(target_ids, lpad=4)}") + attribute_fn_args = self.format_attribute_args( + batch, target_ids, attributed_fn, attribute_target, attributed_fn_args, **attribution_args + ) + attr = self.method.attribute(**attribute_fn_args, **attribution_args) + deltas = None + if ( + attribution_args.get("return_convergence_delta", False) + and hasattr(self.method, "has_convergence_delta") + and self.method.has_convergence_delta() + ): + attr, deltas = attr return FeatureAttributionStepOutput( - source_attributions=None, - target_attributions=None, - step_scores=None, + source_attributions=attr if not isinstance(attr, tuple) else attr[0], + target_attributions=None + if not isinstance(attr, tuple) or (isinstance(attr, tuple) and len(attr) == 1) + else attr[1], + step_scores={"deltas": deltas} if deltas is not None else {}, ) @classmethod diff --git a/inseq/attr/feat/attribution_utils.py b/inseq/attr/feat/attribution_utils.py index 7bc4a2b5..d60766db 100644 --- a/inseq/attr/feat/attribution_utils.py +++ b/inseq/attr/feat/attribution_utils.py @@ -233,15 +233,14 @@ def register_step_score( DEFAULT_ATTRIBUTION_AGGREGATE_DICT["step_scores"][agg_name][identifier] = agg_fn -def num_attention_layers(attention: tuple) -> int: +def num_attention_heads(attention: torch.Tensor) -> int: """ - Returns the number of layers an attention tuple for a given model has. - """ - return len(attention) + Returns the number of heads an attention tensor has. + Args: + attention: an attention tensor of shape `(batch_size, num_heads, sequence_length, sequence_length)` -def num_attention_heads(attention: tuple) -> int: - """ - Returns the number of heads an attention tuple has. + Returns: + `int`: The number of attention heads """ - return attention[0][0].size(0) + return attention.size(1) diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 502f87c4..b9e90c62 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -12,20 +12,165 @@ # See the License for the specific language governing permissions and # limitations under the License. +from typing import Any, Tuple, Union + +import logging + +import torch +from captum._utils.common import _format_output, _is_tuple +from captum._utils.typing import TargetType, TensorOrTupleOfTensorsGeneric from captum.attr._utils.attribution import Attribution +from captum.log import log_usage +from ....utils.typing import MultiStepEmbeddingsTensor +from ..attribution_utils import num_attention_heads -class AggregatedAttention(Attribution): - def has_convergence_delta(self) -> bool: - return False - def attribute(): - pass +logger = logging.getLogger(__name__) + +class AttentionAttribution(Attribution): + """ + All attention based attribution algorithms extend this class. It requires a + forward function, which most commonly is the forward function of the model + that we want to interpret or the model itself. 
+ """ + + merge_head_options = ["average", "max", "single"] -class LastLayerAttention(Attribution): def has_convergence_delta(self) -> bool: return False - def attribute(): - pass + def _merge_attention_heads(self, attention: torch.Tensor, option: str = "average", head: int = None): + + num_heads = num_attention_heads(attention[0]) + + if option == "single" and head is None: + raise RuntimeError("An attention head has to be specified when choosing single-head attention attribution") + + if head is not None: + if head > num_heads: + raise RuntimeError( + "Attention head index for attribution too high. " f"The model only has {num_heads} heads." + ) + + if option != "single": + logger.warning( + "Only single-head attention is possible if an attention head is specified.\n" + "Switching to single-head attention" + ) + + return attention.select(1, head) + + if option == "average": + return attention.mean(1, keepdim=True) + + # TODO: test this, I feel like this method is not doing what we want here + elif option == "max": + return attention.max(1, keepdim=True) + + else: + raise RuntimeError( + "Invalid merge method for attention heads specified. " + "Valid methods are: `average`, `max` and `single`" + ) + + def _get_batch_size(self, attention: torch.Tensor): + """returns the batch size of a tensor of shape `batch_size, heads, seq, seq`""" + return attention.size(0) + + +class AggregatedAttention(AttentionAttribution): + """ + A basic attention attribution approach. + It will return the attention values averaged across all layers. + """ + + @log_usage() + def attribute( + self, + inputs: MultiStepEmbeddingsTensor, + target: TargetType = None, + merge_head_option: str = "average", + use_head: int = None, + additional_forward_args: Any = None, + ) -> Union[TensorOrTupleOfTensorsGeneric, Tuple[TensorOrTupleOfTensorsGeneric, torch.Tensor]]: + + is_inputs_tuple = _is_tuple(inputs) + + is_target_attribution = True if len(inputs) > 1 else False + + input_ids = additional_forward_args[0] if is_target_attribution else additional_forward_args[1] + decoder_input_ids = additional_forward_args[1] if is_target_attribution else additional_forward_args[2] + attention_mask = additional_forward_args[4] if is_target_attribution else additional_forward_args[5] + decoder_attention_mask = additional_forward_args[5] if is_target_attribution else additional_forward_args[6] + + outputs = self.forward_func.model( + input_ids=input_ids, + decoder_input_ids=decoder_input_ids, + attention_mask=attention_mask, + decoder_attention_mask=decoder_attention_mask, + ) + + cross_aggregation = torch.stack(outputs.cross_attentions).mean(0) + cross_aggregation = self._merge_attention_heads(cross_aggregation, merge_head_option, use_head) + cross_aggregation = torch.squeeze(cross_aggregation, 1).select(1, -1) + + attributions = (cross_aggregation,) + + if is_target_attribution: + decoder_aggregation = torch.stack(outputs.decoder_attentions).mean(0) + decoder_aggregation = self._merge_attention_heads(decoder_aggregation, merge_head_option, use_head) + decoder_aggregation = torch.squeeze(decoder_aggregation, 1).select(1, -1) + + attributions = attributions + (decoder_aggregation,) + + return _format_output(is_inputs_tuple, attributions) + + +class LastLayerAttention(AttentionAttribution): + """ + A basic attention attribution approach. + It will simply return the attention values of the last layer. 
+ """ + + @log_usage() + def attribute( + self, + inputs: MultiStepEmbeddingsTensor, + target: TargetType = None, + merge_head_option: str = "average", + use_head: int = None, + additional_forward_args: Any = None, + ) -> Union[TensorOrTupleOfTensorsGeneric, Tuple[TensorOrTupleOfTensorsGeneric, torch.Tensor]]: + + is_inputs_tuple = _is_tuple(inputs) + + is_target_attribution = True if len(inputs) > 1 else False + + input_ids = additional_forward_args[0] if is_target_attribution else additional_forward_args[1] + decoder_input_ids = additional_forward_args[1] if is_target_attribution else additional_forward_args[2] + attention_mask = additional_forward_args[4] if is_target_attribution else additional_forward_args[5] + decoder_attention_mask = additional_forward_args[5] if is_target_attribution else additional_forward_args[6] + + outputs = self.forward_func.model( + input_ids=input_ids, + decoder_input_ids=decoder_input_ids, + attention_mask=attention_mask, + decoder_attention_mask=decoder_attention_mask, + ) + + last_layer_cross = outputs.cross_attentions[-1] + last_layer_cross = self._merge_attention_heads(last_layer_cross, merge_head_option, use_head) + last_layer_cross = torch.squeeze(last_layer_cross, 1).select(1, -1) + + attributions = (last_layer_cross,) + + if is_target_attribution: + last_layer_decoder = outputs.decoder_attentions[-1] + last_layer_decoder = self._merge_attention_heads(last_layer_decoder, merge_head_option, use_head) + last_layer_decoder = torch.squeeze(last_layer_decoder, 1).select(1, -1) + + attributions = attributions + (last_layer_decoder,) + + return _format_output(is_inputs_tuple, attributions) From 7b3c4fdf040e35a53cd10a1f1c1740484229b6db Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Tue, 22 Nov 2022 22:50:22 +0100 Subject: [PATCH 06/30] fixed rounding of values in cli output --- inseq/data/viz.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/inseq/data/viz.py b/inseq/data/viz.py index 843f0187..efe6bbe2 100644 --- a/inseq/data/viz.py +++ b/inseq/data/viz.py @@ -236,7 +236,7 @@ def get_saliency_heatmap_rich( color = Color.from_rgb(*input_colors[row_index][col_index]) score = "" if not np.isnan(scores[row_index][col_index]): - score = round(scores[row_index][col_index], 2) + score = round(float(scores[row_index][col_index]), 2) row.append(Text(f"{score}", justify="center", style=Style(color=color))) table.add_row(*row, end_section=row_index == scores.shape[0] - 1) if step_scores is not None: @@ -248,7 +248,7 @@ def get_saliency_heatmap_rich( style = lambda val: "bold" if abs(val) >= threshold else "" score_row = [Text(step_score_name, style="bold")] for score in step_score_values: - score_row.append(Text(f"{score:.2f}", justify="center", style=style(score))) + score_row.append(Text(f"{score:.2f}", justify="center", style=style(round(float(score), 2)))) table.add_row(*score_row, end_section=True) return table From 13fc9f3ca93604743dba9ad98af8c2c542385fa2 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Tue, 22 Nov 2022 23:51:52 +0100 Subject: [PATCH 07/30] added documentation to most methods and generalized functions --- inseq/attr/feat/attention_attribution.py | 8 ++ inseq/attr/feat/attribution_utils.py | 13 --- inseq/attr/feat/ops/basic_attention.py | 100 +++++++++++++++-------- 3 files changed, 76 insertions(+), 45 deletions(-) diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index dbad75d4..25c2069c 100644 --- a/inseq/attr/feat/attention_attribution.py +++ 
b/inseq/attr/feat/attention_attribution.py @@ -120,6 +120,10 @@ def load( class AggregatedAttentionAtribution(AttentionAtribution): + """ + Aggregated attention attribution method. + Attention values of all layers are averaged. + """ method_name = "aggregated_attention" @@ -129,6 +133,10 @@ def __init__(self, attribution_model, **kwargs): class LastLayerAttentionAttribution(AttentionAtribution): + """ + Last-Layer attention attribution method. + Only the raw attention of the last hidden layer is retrieved. + """ method_name = "last_layer_attention" diff --git a/inseq/attr/feat/attribution_utils.py b/inseq/attr/feat/attribution_utils.py index d60766db..677feb46 100644 --- a/inseq/attr/feat/attribution_utils.py +++ b/inseq/attr/feat/attribution_utils.py @@ -231,16 +231,3 @@ def register_step_score( if agg_name not in DEFAULT_ATTRIBUTION_AGGREGATE_DICT["step_scores"]: DEFAULT_ATTRIBUTION_AGGREGATE_DICT["step_scores"][agg_name] = {} DEFAULT_ATTRIBUTION_AGGREGATE_DICT["step_scores"][agg_name][identifier] = agg_fn - - -def num_attention_heads(attention: torch.Tensor) -> int: - """ - Returns the number of heads an attention tensor has. - - Args: - attention: an attention tensor of shape `(batch_size, num_heads, sequence_length, sequence_length)` - - Returns: - `int`: The number of attention heads - """ - return attention.size(1) diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index b9e90c62..b775c072 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Tuple, Union +from typing import Any, Optional, Tuple, Union import logging @@ -21,9 +21,9 @@ from captum._utils.typing import TargetType, TensorOrTupleOfTensorsGeneric from captum.attr._utils.attribution import Attribution from captum.log import log_usage +from transformers.modeling_outputs import Seq2SeqLMOutput from ....utils.typing import MultiStepEmbeddingsTensor -from ..attribution_utils import num_attention_heads logger = logging.getLogger(__name__) @@ -41,9 +41,35 @@ class AttentionAttribution(Attribution): def has_convergence_delta(self) -> bool: return False - def _merge_attention_heads(self, attention: torch.Tensor, option: str = "average", head: int = None): + def _num_attention_heads(self, attention: torch.Tensor) -> int: + """ + Returns the number of heads an attention tensor has. - num_heads = num_attention_heads(attention[0]) + Args: + attention: an attention tensor of shape `(batch_size, num_heads, sequence_length, sequence_length)` + + Returns: + `int`: The number of attention heads + """ + return attention.size(1) + + def _merge_attention_heads( + self, attention: torch.Tensor, option: str = "average", head: int = None + ) -> torch.Tensor: + + """ + Merges the attention values of the different heads together by either averaging across them, + selecting the head with the maximal values or selecting a specific attention head. + + Args: + attention: an attention tensor of shape `(batch_size, num_heads, sequence_length, sequence_length)` + option: The method to use for merging. Should be one of `average` (default), `max`, or `single` + head: The index of the head to use, when option is set to `single` + + Returns: + `torch.Tensor`: The attention tensor with its attention heads merged. 
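+
+        Example (illustrative toy shapes, not a doctest):
+
+        .. code-block:: python
+
+            attn = torch.rand(1, 2, 4, 4)  # (batch_size, num_heads, seq, seq)
+            self._merge_attention_heads(attn, "average").shape         # (1, 4, 4)
+            self._merge_attention_heads(attn, "single", head=0).shape  # (1, 4, 4)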
+ """ + num_heads = self._num_attention_heads(attention[0]) if option == "single" and head is None: raise RuntimeError("An attention head has to be specified when choosing single-head attention attribution") @@ -51,7 +77,7 @@ def _merge_attention_heads(self, attention: torch.Tensor, option: str = "average if head is not None: if head > num_heads: raise RuntimeError( - "Attention head index for attribution too high. " f"The model only has {num_heads} heads." + f"Attention head index for attribution too high. The model only has {num_heads} heads." ) if option != "single": @@ -63,11 +89,11 @@ def _merge_attention_heads(self, attention: torch.Tensor, option: str = "average return attention.select(1, head) if option == "average": - return attention.mean(1, keepdim=True) + return attention.mean(1) # TODO: test this, I feel like this method is not doing what we want here elif option == "max": - return attention.max(1, keepdim=True) + return attention.max(1) else: raise RuntimeError( @@ -75,10 +101,36 @@ def _merge_attention_heads(self, attention: torch.Tensor, option: str = "average "Valid methods are: `average`, `max` and `single`" ) - def _get_batch_size(self, attention: torch.Tensor): - """returns the batch size of a tensor of shape `batch_size, heads, seq, seq`""" + def _get_batch_size(self, attention: torch.Tensor) -> int: + """returns the batch size of a tensor of shape `(batch_size, heads, seq, seq)`""" return attention.size(0) + def _extract_forward_pass_args( + self, inputs: MultiStepEmbeddingsTensor, forward_args: Optional[Tuple], is_target_attr: bool + ) -> dict: + """extracts the arguments needed for a standard forward pass + from the `inputs` and `additional_forward_args` parameters used by Captum""" + + use_embeddings = forward_args[6] if is_target_attr else forward_args[7] + + forward_pass_args = { + "attention_mask": forward_args[4] if is_target_attr else forward_args[5], + "decoder_attention_mask": forward_args[5] if is_target_attr else forward_args[6], + } + + if use_embeddings: + forward_pass_args["inputs_embeds"] = inputs[0] + forward_pass_args["decoder_inputs_embeds"] = inputs[1] if is_target_attr else forward_args[0] + else: + forward_pass_args["input_ids"] = forward_args[0] if is_target_attr else forward_args[1] + forward_pass_args["decoder_input_ids"] = forward_args[1] if is_target_attr else forward_args[2] + + return forward_pass_args + + def _run_forward_pass(self, **forward_args: dict) -> Seq2SeqLMOutput: + + pass + class AggregatedAttention(AttentionAttribution): """ @@ -100,28 +152,20 @@ def attribute( is_target_attribution = True if len(inputs) > 1 else False - input_ids = additional_forward_args[0] if is_target_attribution else additional_forward_args[1] - decoder_input_ids = additional_forward_args[1] if is_target_attribution else additional_forward_args[2] - attention_mask = additional_forward_args[4] if is_target_attribution else additional_forward_args[5] - decoder_attention_mask = additional_forward_args[5] if is_target_attribution else additional_forward_args[6] + forward_pass_args = self._extract_forward_pass_args(inputs, additional_forward_args, is_target_attribution) - outputs = self.forward_func.model( - input_ids=input_ids, - decoder_input_ids=decoder_input_ids, - attention_mask=attention_mask, - decoder_attention_mask=decoder_attention_mask, - ) + outputs = self.forward_func.model(**forward_pass_args) cross_aggregation = torch.stack(outputs.cross_attentions).mean(0) cross_aggregation = self._merge_attention_heads(cross_aggregation, merge_head_option, 
use_head) - cross_aggregation = torch.squeeze(cross_aggregation, 1).select(1, -1) + cross_aggregation = cross_aggregation.select(1, -1) attributions = (cross_aggregation,) if is_target_attribution: decoder_aggregation = torch.stack(outputs.decoder_attentions).mean(0) decoder_aggregation = self._merge_attention_heads(decoder_aggregation, merge_head_option, use_head) - decoder_aggregation = torch.squeeze(decoder_aggregation, 1).select(1, -1) + decoder_aggregation = decoder_aggregation.select(1, -1) attributions = attributions + (decoder_aggregation,) @@ -148,17 +192,9 @@ def attribute( is_target_attribution = True if len(inputs) > 1 else False - input_ids = additional_forward_args[0] if is_target_attribution else additional_forward_args[1] - decoder_input_ids = additional_forward_args[1] if is_target_attribution else additional_forward_args[2] - attention_mask = additional_forward_args[4] if is_target_attribution else additional_forward_args[5] - decoder_attention_mask = additional_forward_args[5] if is_target_attribution else additional_forward_args[6] - - outputs = self.forward_func.model( - input_ids=input_ids, - decoder_input_ids=decoder_input_ids, - attention_mask=attention_mask, - decoder_attention_mask=decoder_attention_mask, - ) + forward_pass_args = self._extract_forward_pass_args(inputs, additional_forward_args, is_target_attribution) + + outputs = self.forward_func.model(**forward_pass_args) last_layer_cross = outputs.cross_attentions[-1] last_layer_cross = self._merge_attention_heads(last_layer_cross, merge_head_option, use_head) From 9340343957798b7e36b806f5f1af82318026eec1 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Thu, 24 Nov 2022 22:34:11 +0100 Subject: [PATCH 08/30] removed python 3.11 build target --- .github/workflows/build.yml | 2 +- pyproject.toml | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 46c77a89..d2ef27a8 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,7 +12,7 @@ jobs: if: github.actor != 'dependabot[bot]' && github.actor != 'dependabot-preview[bot]' strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11"] + python-version: ["3.8", "3.9", "3.10"] steps: - uses: actions/checkout@v3 diff --git a/pyproject.toml b/pyproject.toml index df4862ad..5c77d3b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,8 +35,7 @@ classifiers = [ #! 
Update me "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.10" ] [tool.poetry.scripts] From 3cfd70688578b26c94c8e1e14498a69244ec70c2 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Thu, 24 Nov 2022 23:07:14 +0100 Subject: [PATCH 09/30] fix safety warnings --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index d060e1bb..d848a7ff 100644 --- a/Makefile +++ b/Makefile @@ -90,7 +90,7 @@ fix-style: .PHONY: check-safety check-safety: poetry check - poetry run safety check --full-report + poetry run safety check --full-report -i 51457 -i 51358 poetry run bandit -ll --recursive inseq tests .PHONY: lint From 2765d636e1dcc12ac7b7831a5df15b263a037828 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Fri, 25 Nov 2022 17:40:02 +0100 Subject: [PATCH 10/30] set correct python version in pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5c77d3b7..ae402ff7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -43,7 +43,7 @@ classifiers = [ #! Update me "inseq" = "inseq.commands.cli:main" [tool.poetry.dependencies] -python = ">=3.8,<3.12" +python = ">=3.8,<3.11" rich = "^10.13.0" transformers = { version = "^4.20.1", extras = ["sentencepiece", "tokenizers", "torch"] } From 4dd442fbc3cc98d6fdc5372c8389ede4629f7f2f Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Fri, 25 Nov 2022 18:18:44 +0100 Subject: [PATCH 11/30] regenerated requirements without 3.11 --- poetry.lock | 38 ++++++------ requirements.txt | 152 +++++++++++++++++++++++------------------------ 2 files changed, 93 insertions(+), 97 deletions(-) diff --git a/poetry.lock b/poetry.lock index 4ed0d8c9..a5f26865 100644 --- a/poetry.lock +++ b/poetry.lock @@ -56,10 +56,7 @@ python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} -wrapt = [ - {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, - {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, -] +wrapt = {version = ">=1.11,<2", markers = "python_version < \"3.11\""} [[package]] name = "asttokens" @@ -495,11 +492,11 @@ tqdm = ["tqdm"] [[package]] name = "gitdb" -version = "4.0.9" +version = "4.0.10" description = "Git Object Database" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] smmap = ">=3.0.1,<6" @@ -571,7 +568,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "5.0.0" +version = "5.1.0" description = "Read metadata from Python packages" category = "dev" optional = false @@ -933,7 +930,6 @@ python-versions = ">=3.8" numpy = [ {version = ">=1.20.3", markers = "python_version < \"3.10\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.1" pytz = ">=2020.1" @@ -1410,7 +1406,7 @@ python-versions = "*" [[package]] name = "setuptools" -version = "65.6.2" +version = "65.6.3" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false @@ -1868,11 +1864,11 @@ python-versions = ">=3.7" [[package]] name = "urllib3" -version = "1.26.12" +version 
= "1.26.13" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*, <4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)", "brotlipy (>=0.6.0)"] @@ -1958,8 +1954,8 @@ scikit-learn = ["scikit-learn"] [metadata] lock-version = "1.1" -python-versions = ">=3.8,<3.12" -content-hash = "3f8e9e686633141081a2ae9848b704efbc0d4b2710b18d7056e3bb62018fd445" +python-versions = ">=3.8,<3.11" +content-hash = "086bbc8eb0e9f2e983767d4679a41ef051e8710d33f7f6e22e9e33e77826ee96" [metadata.files] aiohttp = [ @@ -2435,8 +2431,8 @@ fsspec = [ {file = "fsspec-2022.11.0.tar.gz", hash = "sha256:259d5fd5c8e756ff2ea72f42e7613c32667dc2049a4ac3d84364a7ca034acb8b"}, ] gitdb = [ - {file = "gitdb-4.0.9-py3-none-any.whl", hash = "sha256:8033ad4e853066ba6ca92050b9df2f89301b8fc8bf7e9324d412a63f8bf1a8fd"}, - {file = "gitdb-4.0.9.tar.gz", hash = "sha256:bac2fd45c0a1c9cf619e63a90d62bdc63892ef92387424b855792a6cabe789aa"}, + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] GitPython = [ {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, @@ -2459,8 +2455,8 @@ imagesize = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] importlib-metadata = [ - {file = "importlib_metadata-5.0.0-py3-none-any.whl", hash = "sha256:ddb0e35065e8938f867ed4928d0ae5bf2a53b7773871bfe6bcc7e4fcdc7dea43"}, - {file = "importlib_metadata-5.0.0.tar.gz", hash = "sha256:da31db32b304314d044d3c12c79bd59e307889b287ad12ff387b3500835fc2ab"}, + {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, + {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, @@ -3406,8 +3402,8 @@ sentencepiece = [ {file = "sentencepiece-0.1.97.tar.gz", hash = "sha256:c901305e0a710bbcd296f66d79e96f744e6e175b29812bd5178318437d4e1f6c"}, ] setuptools = [ - {file = "setuptools-65.6.2-py3-none-any.whl", hash = "sha256:97a4a824325146ebc8dc29b0aa5f3b1eaa590a0f00cacbfdf81831670f07862d"}, - {file = "setuptools-65.6.2.tar.gz", hash = "sha256:41fa68ecac9e099122990d7437bc10683b966c32a591caa2824dffcffd5dea7a"}, + {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, + {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, ] setuptools-scm = [ {file = "setuptools_scm-7.0.5-py3-none-any.whl", hash = "sha256:7930f720905e03ccd1e1d821db521bff7ec2ac9cf0ceb6552dd73d24a45d3b02"}, @@ -3601,8 +3597,8 @@ typing-extensions = [ {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, ] urllib3 = [ - {file = "urllib3-1.26.12-py2.py3-none-any.whl", hash = "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997"}, - {file = 
"urllib3-1.26.12.tar.gz", hash = "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e"}, + {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, + {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, ] virtualenv = [ {file = "virtualenv-20.16.7-py3-none-any.whl", hash = "sha256:efd66b00386fdb7dbe4822d172303f40cd05e50e01740b19ea42425cbe653e29"}, diff --git a/requirements.txt b/requirements.txt index 00698e87..865fea6a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,76 +1,76 @@ -appnope==0.1.3 ; python_version >= "3.8" and python_version < "3.12" and platform_system == "Darwin" or python_version >= "3.8" and python_version < "3.12" and sys_platform == "darwin" -asttokens==2.1.0 ; python_version >= "3.8" and python_version < "3.12" -backcall==0.2.0 ; python_version >= "3.8" and python_version < "3.12" -captum==0.5.0 ; python_version >= "3.8" and python_version < "3.12" -certifi==2022.9.24 ; python_version >= "3.8" and python_version < "3.12" -cffi==1.15.1 ; python_version >= "3.8" and python_version < "3.12" and implementation_name == "pypy" -charset-normalizer==2.1.1 ; python_version >= "3.8" and python_version < "3.12" -colorama==0.4.6 ; python_version >= "3.8" and python_version < "3.12" -commonmark==0.9.1 ; python_version >= "3.8" and python_version < "3.12" -contourpy==1.0.6 ; python_version >= "3.8" and python_version < "3.12" -cycler==0.11.0 ; python_version >= "3.8" and python_version < "3.12" -debugpy==1.6.3 ; python_version >= "3.8" and python_version < "3.12" -decorator==5.1.1 ; python_version >= "3.8" and python_version < "3.12" -entrypoints==0.4 ; python_version >= "3.8" and python_version < "3.12" -executing==1.2.0 ; python_version >= "3.8" and python_version < "3.12" -filelock==3.8.0 ; python_version >= "3.8" and python_version < "3.12" -fonttools==4.38.0 ; python_version >= "3.8" and python_version < "3.12" -huggingface-hub==0.11.0 ; python_version >= "3.8" and python_version < "3.12" -idna==3.4 ; python_version >= "3.8" and python_version < "3.12" -ipykernel==6.17.1 ; python_version >= "3.8" and python_version < "3.12" -ipython==8.6.0 ; python_version >= "3.8" and python_version < "3.12" -ipywidgets==8.0.2 ; python_version >= "3.8" and python_version < "3.12" -jedi==0.18.2 ; python_version >= "3.8" and python_version < "3.12" -joblib==1.2.0 ; python_version >= "3.8" and python_version < "3.12" -json-tricks==3.16.1 ; python_version >= "3.8" and python_version < "3.12" -jupyter-client==7.4.7 ; python_version >= "3.8" and python_version < "3.12" -jupyter-core==5.0.0 ; python_version >= "3.8" and python_version < "3.12" -jupyterlab-widgets==3.0.3 ; python_version >= "3.8" and python_version < "3.12" -kiwisolver==1.4.4 ; python_version >= "3.8" and python_version < "3.12" -matplotlib-inline==0.1.6 ; python_version >= "3.8" and python_version < "3.12" -matplotlib==3.6.2 ; python_version >= "3.8" and python_version < "3.12" -nest-asyncio==1.5.6 ; python_version >= "3.8" and python_version < "3.12" -numpy==1.23.5 ; python_version >= "3.8" and python_version < "3.12" -packaging==21.3 ; python_version >= "3.8" and python_version < "3.12" -parso==0.8.3 ; python_version >= "3.8" and python_version < "3.12" -pastel==0.2.1 ; python_version >= "3.8" and python_version < "3.12" -pexpect==4.8.0 ; python_version >= "3.8" and python_version < "3.12" and sys_platform != "win32" -pickleshare==0.7.5 ; python_version >= "3.8" and 
python_version < "3.12" -pillow==9.3.0 ; python_version >= "3.8" and python_version < "3.12" -platformdirs==2.5.4 ; python_version >= "3.8" and python_version < "3.12" -poethepoet==0.13.1 ; python_version >= "3.8" and python_version < "3.12" -prompt-toolkit==3.0.33 ; python_version >= "3.8" and python_version < "3.12" -protobuf==3.20.2 ; python_version >= "3.8" and python_version < "3.12" -psutil==5.9.4 ; python_version >= "3.8" and python_version < "3.12" -ptyprocess==0.7.0 ; python_version >= "3.8" and python_version < "3.12" and sys_platform != "win32" -pure-eval==0.2.2 ; python_version >= "3.8" and python_version < "3.12" -py==1.11.0 ; python_version >= "3.8" and python_version < "3.12" and implementation_name == "pypy" -pycparser==2.21 ; python_version >= "3.8" and python_version < "3.12" and implementation_name == "pypy" -pygments==2.13.0 ; python_version >= "3.8" and python_version < "3.12" -pyparsing==3.0.9 ; python_version >= "3.8" and python_version < "3.12" -python-dateutil==2.8.2 ; python_version >= "3.8" and python_version < "3.12" -pywin32==305 ; sys_platform == "win32" and platform_python_implementation != "PyPy" and python_version >= "3.8" and python_version < "3.12" -pyyaml==6.0 ; python_version >= "3.8" and python_version < "3.12" -pyzmq==24.0.1 ; python_version >= "3.8" and python_version < "3.12" -regex==2022.10.31 ; python_version >= "3.8" and python_version < "3.12" -requests==2.28.1 ; python_version >= "3.8" and python_version < "3.12" -rich==10.16.2 ; python_version >= "3.8" and python_version < "3.12" -scipy==1.9.3 ; python_version >= "3.8" and python_version < "3.12" -sentencepiece==0.1.97 ; python_version >= "3.8" and python_version < "3.12" -setuptools-scm==7.0.5 ; python_version >= "3.8" and python_version < "3.12" -setuptools==65.6.2 ; python_version >= "3.8" and python_version < "3.12" -six==1.16.0 ; python_version >= "3.8" and python_version < "3.12" -stack-data==0.6.1 ; python_version >= "3.8" and python_version < "3.12" -tokenizers==0.13.2 ; python_version >= "3.8" and python_version < "3.12" -tomli==2.0.1 ; python_version >= "3.8" and python_version < "3.12" -torch==1.12.1 ; python_version >= "3.8" and python_version < "3.12" -torchtyping==0.1.4 ; python_version >= "3.8" and python_version < "3.12" -tornado==6.2 ; python_version >= "3.8" and python_version < "3.12" -tqdm==4.64.1 ; python_version >= "3.8" and python_version < "3.12" -traitlets==5.5.0 ; python_version >= "3.8" and python_version < "3.12" -transformers[sentencepiece,tokenizers,torch]==4.24.0 ; python_version >= "3.8" and python_version < "3.12" -typeguard==2.13.3 ; python_version >= "3.8" and python_version < "3.12" -typing-extensions==4.4.0 ; python_version >= "3.8" and python_version < "3.12" -urllib3==1.26.12 ; python_version >= "3.8" and python_version < "3.12" -wcwidth==0.2.5 ; python_version >= "3.8" and python_version < "3.12" -widgetsnbextension==4.0.3 ; python_version >= "3.8" and python_version < "3.12" +appnope==0.1.3 ; python_version >= "3.8" and python_version < "3.11" and platform_system == "Darwin" or python_version >= "3.8" and python_version < "3.11" and sys_platform == "darwin" +asttokens==2.1.0 ; python_version >= "3.8" and python_version < "3.11" +backcall==0.2.0 ; python_version >= "3.8" and python_version < "3.11" +captum==0.5.0 ; python_version >= "3.8" and python_version < "3.11" +certifi==2022.9.24 ; python_version >= "3.8" and python_version < "3.11" +cffi==1.15.1 ; python_version >= "3.8" and python_version < "3.11" and implementation_name == "pypy" 
+charset-normalizer==2.1.1 ; python_version >= "3.8" and python_version < "3.11" +colorama==0.4.6 ; python_version >= "3.8" and python_version < "3.11" +commonmark==0.9.1 ; python_version >= "3.8" and python_version < "3.11" +contourpy==1.0.6 ; python_version >= "3.8" and python_version < "3.11" +cycler==0.11.0 ; python_version >= "3.8" and python_version < "3.11" +debugpy==1.6.3 ; python_version >= "3.8" and python_version < "3.11" +decorator==5.1.1 ; python_version >= "3.8" and python_version < "3.11" +entrypoints==0.4 ; python_version >= "3.8" and python_version < "3.11" +executing==1.2.0 ; python_version >= "3.8" and python_version < "3.11" +filelock==3.8.0 ; python_version >= "3.8" and python_version < "3.11" +fonttools==4.38.0 ; python_version >= "3.8" and python_version < "3.11" +huggingface-hub==0.11.0 ; python_version >= "3.8" and python_version < "3.11" +idna==3.4 ; python_version >= "3.8" and python_version < "3.11" +ipykernel==6.17.1 ; python_version >= "3.8" and python_version < "3.11" +ipython==8.6.0 ; python_version >= "3.8" and python_version < "3.11" +ipywidgets==8.0.2 ; python_version >= "3.8" and python_version < "3.11" +jedi==0.18.2 ; python_version >= "3.8" and python_version < "3.11" +joblib==1.2.0 ; python_version >= "3.8" and python_version < "3.11" +json-tricks==3.16.1 ; python_version >= "3.8" and python_version < "3.11" +jupyter-client==7.4.7 ; python_version >= "3.8" and python_version < "3.11" +jupyter-core==5.0.0 ; python_version >= "3.8" and python_version < "3.11" +jupyterlab-widgets==3.0.3 ; python_version >= "3.8" and python_version < "3.11" +kiwisolver==1.4.4 ; python_version >= "3.8" and python_version < "3.11" +matplotlib-inline==0.1.6 ; python_version >= "3.8" and python_version < "3.11" +matplotlib==3.6.2 ; python_version >= "3.8" and python_version < "3.11" +nest-asyncio==1.5.6 ; python_version >= "3.8" and python_version < "3.11" +numpy==1.23.5 ; python_version >= "3.8" and python_version < "3.11" +packaging==21.3 ; python_version >= "3.8" and python_version < "3.11" +parso==0.8.3 ; python_version >= "3.8" and python_version < "3.11" +pastel==0.2.1 ; python_version >= "3.8" and python_version < "3.11" +pexpect==4.8.0 ; python_version >= "3.8" and python_version < "3.11" and sys_platform != "win32" +pickleshare==0.7.5 ; python_version >= "3.8" and python_version < "3.11" +pillow==9.3.0 ; python_version >= "3.8" and python_version < "3.11" +platformdirs==2.5.4 ; python_version >= "3.8" and python_version < "3.11" +poethepoet==0.13.1 ; python_version >= "3.8" and python_version < "3.11" +prompt-toolkit==3.0.33 ; python_version >= "3.8" and python_version < "3.11" +protobuf==3.20.2 ; python_version >= "3.8" and python_version < "3.11" +psutil==5.9.4 ; python_version >= "3.8" and python_version < "3.11" +ptyprocess==0.7.0 ; python_version >= "3.8" and python_version < "3.11" and sys_platform != "win32" +pure-eval==0.2.2 ; python_version >= "3.8" and python_version < "3.11" +py==1.11.0 ; python_version >= "3.8" and python_version < "3.11" and implementation_name == "pypy" +pycparser==2.21 ; python_version >= "3.8" and python_version < "3.11" and implementation_name == "pypy" +pygments==2.13.0 ; python_version >= "3.8" and python_version < "3.11" +pyparsing==3.0.9 ; python_version >= "3.8" and python_version < "3.11" +python-dateutil==2.8.2 ; python_version >= "3.8" and python_version < "3.11" +pywin32==305 ; sys_platform == "win32" and platform_python_implementation != "PyPy" and python_version >= "3.8" and python_version < "3.11" +pyyaml==6.0 ; 
python_version >= "3.8" and python_version < "3.11" +pyzmq==24.0.1 ; python_version >= "3.8" and python_version < "3.11" +regex==2022.10.31 ; python_version >= "3.8" and python_version < "3.11" +requests==2.28.1 ; python_version >= "3.8" and python_version < "3.11" +rich==10.16.2 ; python_version >= "3.8" and python_version < "3.11" +scipy==1.9.3 ; python_version >= "3.8" and python_version < "3.11" +sentencepiece==0.1.97 ; python_version >= "3.8" and python_version < "3.11" +setuptools-scm==7.0.5 ; python_version >= "3.8" and python_version < "3.11" +setuptools==65.6.3 ; python_version >= "3.8" and python_version < "3.11" +six==1.16.0 ; python_version >= "3.8" and python_version < "3.11" +stack-data==0.6.1 ; python_version >= "3.8" and python_version < "3.11" +tokenizers==0.13.2 ; python_version >= "3.8" and python_version < "3.11" +tomli==2.0.1 ; python_version >= "3.8" and python_version < "3.11" +torch==1.12.1 ; python_version >= "3.8" and python_version < "3.11" +torchtyping==0.1.4 ; python_version >= "3.8" and python_version < "3.11" +tornado==6.2 ; python_version >= "3.8" and python_version < "3.11" +tqdm==4.64.1 ; python_version >= "3.8" and python_version < "3.11" +traitlets==5.5.0 ; python_version >= "3.8" and python_version < "3.11" +transformers[sentencepiece,tokenizers,torch]==4.24.0 ; python_version >= "3.8" and python_version < "3.11" +typeguard==2.13.3 ; python_version >= "3.8" and python_version < "3.11" +typing-extensions==4.4.0 ; python_version >= "3.8" and python_version < "3.11" +urllib3==1.26.13 ; python_version >= "3.8" and python_version < "3.11" +wcwidth==0.2.5 ; python_version >= "3.8" and python_version < "3.11" +widgetsnbextension==4.0.3 ; python_version >= "3.8" and python_version < "3.11" From 624435e7118003648dc393ef5e99733f73a286fc Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 2 Jan 2023 15:10:28 +0100 Subject: [PATCH 12/30] update deps after merge --- poetry.lock | 872 ++++++++++++++++++++++++------------------- requirements-dev.txt | 56 +-- requirements.txt | 10 +- 3 files changed, 522 insertions(+), 416 deletions(-) diff --git a/poetry.lock b/poetry.lock index 2245b343..3bc3b766 100644 --- a/poetry.lock +++ b/poetry.lock @@ -85,17 +85,19 @@ python-versions = ">=3.6" [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "main" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests_no_zope = ["cloudpickle", 
"hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] [[package]] name = "Babel" @@ -278,7 +280,7 @@ test-no-codebase = ["Pillow", "matplotlib", "pytest"] [[package]] name = "coverage" -version = "6.5.0" +version = "7.0.1" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -308,7 +310,7 @@ python-versions = ">=3.6,<4.0" [[package]] name = "datasets" -version = "2.7.1" +version = "2.8.0" description = "HuggingFace community-driven open-source library of datasets" category = "main" optional = true @@ -334,13 +336,14 @@ xxhash = "*" apache-beam = ["apache-beam (>=2.26.0)"] audio = ["librosa"] benchmarks = ["numpy (==1.18.5)", "tensorflow (==2.3.0)", "torch (==1.7.1)", "transformers (==3.0.2)"] -dev = ["Pillow (>=6.2.1)", "Werkzeug (>=1.0.1)", "absl-py", "aiobotocore (>=2.0.1)", "apache-beam (>=2.26.0)", "bert-score (>=0.3.6)", "black (>=22.0,<23.0)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "flake8 (>=3.8.3)", "fsspec[s3]", "isort (>=5.0.0)", "jiwer", "langdetect", "librosa", "lz4", "mauve-text", "moto[s3,server] (==2.0.4)", "nltk", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "requests-file (>=1.5.1)", "rouge-score", "s3fs (>=2021.11.1)", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "soundfile", "spacy (>=3.0.0)", "sqlalchemy", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "torchaudio (<0.12.0)", "transformers", "typer (<0.5.0)", "zstandard"] +dev = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "black (>=22.0,<23.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "pyyaml (>=5.3.1)", "rarfile (>=4.0)", "s3fs", "s3fs (>=2021.11.1)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] docs = ["s3fs"] +metrics-tests = ["Werkzeug (>=1.0.1)", "bert-score (>=0.3.6)", "jiwer", "langdetect", "mauve-text", "nltk", "requests-file (>=1.5.1)", "rouge-score", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "spacy (>=3.0.0)", "sqlalchemy", "texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "typer (<0.5.0)"] quality = ["black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] -s3 = ["boto3", "botocore", "fsspec", "s3fs"] +s3 = ["s3fs"] tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] -tests = ["Pillow (>=6.2.1)", "Werkzeug (>=1.0.1)", "absl-py", "aiobotocore (>=2.0.1)", "apache-beam (>=2.26.0)", "bert-score (>=0.3.6)", "boto3 (>=1.19.8)", "botocore (>=1.22.8)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "fsspec[s3]", "jiwer", "langdetect", "librosa", "lz4", "mauve-text", "moto[s3,server] (==2.0.4)", "nltk", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "requests-file (>=1.5.1)", "rouge-score", "s3fs (>=2021.11.1)", "sacrebleu", "sacremoses", "scikit-learn", "scipy", "sentencepiece", "seqeval", "six (>=1.15.0,<1.16.0)", "soundfile", "spacy (>=3.0.0)", "sqlalchemy", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", 
"texttable (>=1.6.3)", "tldextract", "tldextract (>=3.1.0)", "toml (>=0.10.1)", "torch", "torchaudio (<0.12.0)", "transformers", "typer (<0.5.0)", "zstandard"] +tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile", "tensorflow (>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] torch = ["torch"] vision = ["Pillow (>=6.2.1)"] @@ -413,7 +416,7 @@ python-versions = ">=3.6" [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false @@ -435,15 +438,15 @@ tests = ["asttokens", "littleutils", "pytest", "rich"] [[package]] name = "filelock" -version = "3.8.2" +version = "3.9.0" description = "A platform independent file lock." category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] -testing = ["covdefaults (>=2.2.2)", "coverage (>=6.5)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-timeout (>=2.1)"] [[package]] name = "flake8" @@ -536,7 +539,7 @@ smmap = ">=3.0.1,<6" [[package]] name = "GitPython" -version = "3.1.29" +version = "3.1.30" description = "GitPython is a python library used to interact with Git repositories" category = "dev" optional = false @@ -574,7 +577,7 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t [[package]] name = "identify" -version = "2.5.10" +version = "2.5.11" description = "File identification library for Python" category = "dev" optional = false @@ -601,7 +604,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "5.1.0" +version = "6.0.0" description = "Read metadata from Python packages" category = "dev" optional = false @@ -611,7 +614,7 @@ python-versions = ">=3.7" zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] @@ -625,7 +628,7 @@ python-versions = "*" [[package]] name = "ipykernel" -version = "6.19.2" +version = "6.19.4" description = "IPython Kernel for Jupyter" category = "main" optional = true @@ -648,9 +651,7 @@ traitlets = ">=5.4.0" [package.extras] cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] -typing = ["mypy (>=0.990)"] [[package]] name = "ipython" @@ -689,7 +690,7 @@ test_extra = ["curio", "matplotlib (!=3.2.0)", 
"nbformat", "numpy (>=1.20)", "pa [[package]] name = "ipywidgets" -version = "8.0.3" +version = "8.0.4" description = "Jupyter interactive widgets" category = "main" optional = true @@ -707,7 +708,7 @@ test = ["jsonschema", "pytest (>=3.6.0)", "pytest-cov", "pytz"] [[package]] name = "isort" -version = "5.11.2" +version = "5.11.4" description = "A Python utility / library to sort Python imports." category = "dev" optional = false @@ -718,9 +719,9 @@ colorama = {version = ">=0.4.3,<0.5.0", optional = true, markers = "extra == \"c [package.extras] colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] +pipfile-deprecated-finder = ["pipreqs", "requirementslib"] plugins = ["setuptools"] -requirements_deprecated_finder = ["pip-api", "pipreqs"] +requirements-deprecated-finder = ["pip-api", "pipreqs"] [[package]] name = "jedi" @@ -791,7 +792,7 @@ test = ["codecov", "coverage", "ipykernel (>=6.12)", "ipython", "mypy", "pre-com [[package]] name = "jupyter-core" -version = "5.1.0" +version = "5.1.1" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "main" optional = true @@ -808,7 +809,7 @@ test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "jupyterlab-widgets" -version = "3.0.4" +version = "3.0.5" description = "Jupyter interactive widgets for JupyterLab" category = "main" optional = true @@ -893,7 +894,7 @@ python-versions = ">=3.6" [[package]] name = "multidict" -version = "6.0.3" +version = "6.0.4" description = "multidict implementation" category = "main" optional = true @@ -939,12 +940,60 @@ setuptools = "*" [[package]] name = "numpy" -version = "1.23.5" -description = "NumPy is the fundamental package for array computing with Python." 
+version = "1.24.1" +description = "Fundamental package for array computing in Python" category = "main" optional = false python-versions = ">=3.8" +[[package]] +name = "nvidia-cublas-cu11" +version = "11.10.3.66" +description = "CUBLAS native runtime libraries" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-nvrtc-cu11" +version = "11.7.99" +description = "NVRTC native runtime libraries" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cuda-runtime-cu11" +version = "11.7.99" +description = "CUDA Runtime native Libraries" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + +[[package]] +name = "nvidia-cudnn-cu11" +version = "8.5.0.96" +description = "cuDNN runtime libraries" +category = "main" +optional = false +python-versions = ">=3" + +[package.dependencies] +setuptools = "*" +wheel = "*" + [[package]] name = "packaging" version = "22.0" @@ -1030,27 +1079,27 @@ python-versions = "*" [[package]] name = "Pillow" -version = "9.3.0" +version = "9.4.0" description = "Python Imaging Library (Fork)" category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] [[package]] name = "platformdirs" -version = "2.6.0" +version = "2.6.2" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "main" optional = false python-versions = ">=3.7" [package.extras] -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -1081,7 +1130,7 @@ poetry_plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "pre-commit" -version = "2.20.0" +version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -1092,8 +1141,7 @@ cfgv = ">=2.0.0" identify = ">=1.0.0" nodeenv = ">=0.11.1" pyyaml = ">=5.1" -toml = "*" -virtualenv = ">=20.0.8" +virtualenv = ">=20.10.0" [[package]] name = "prompt-toolkit" @@ -1189,7 +1237,7 @@ python-versions = ">=3.6" [[package]] name = "Pygments" -version = "2.13.0" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false @@ -1200,7 +1248,7 @@ plugins = ["importlib-metadata"] [[package]] name = "pylint" -version = "2.15.8" +version = "2.15.9" description = "python code static checker" category = "dev" optional = false @@ -1209,7 +1257,10 @@ python-versions = ">=3.7.2" [package.dependencies] astroid = ">=2.12.13,<=2.14.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = ">=0.2" +dill = [ + {version = ">=0.2", markers = "python_version < \"3.11\""}, + {version = ">=0.3.6", markers = "python_version >= \"3.11\""}, +] isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.8" platformdirs = ">=2.2.0" @@ -1280,7 +1331,7 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2022.6" +version = "2022.7" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -1497,7 +1548,7 @@ testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs ( [[package]] name = "setuptools-scm" -version = "7.0.5" +version = "7.1.0" description = "the blessed package to manage your versions by scm tags" category = "main" optional = false @@ -1506,7 +1557,7 @@ python-versions = ">=3.7" [package.dependencies] packaging = ">=20.0" setuptools = "*" -tomli = ">=1.0.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} typing-extensions = "*" [package.extras] @@ -1811,6 +1862,9 @@ nvidia-cuda-runtime-cu11 = {version = "11.7.99", markers = "platform_system == \ nvidia-cudnn-cu11 = {version = "8.5.0.96", markers = "platform_system == \"Linux\""} typing-extensions = "*" +[package.extras] +opt-einsum = ["opt-einsum (>=3.3)"] + [[package]] name = "torchtyping" version = "0.1.4" @@ -1850,7 +1904,7 @@ telegram = ["requests"] [[package]] name = "traitlets" -version = "5.7.1" +version = "5.8.0" description = "Traitlets Python configuration system" category = "main" optional = true @@ -1858,9 +1912,7 @@ python-versions = ">=3.7" [package.extras] docs = ["myst-parser", "pydata-sphinx-theme", "sphinx"] -lint = ["black (>=22.6.0)", "mdformat (>0.7)", "ruff (>=0.0.156)"] -test = ["pre-commit", "pytest"] -typing = ["mypy (>=0.990)"] +test = ["argcomplete (>=2.0)", "pre-commit", "pytest", "pytest-mock"] [[package]] name = "transformers" @@ -1985,9 +2037,20 @@ category = "main" optional = true python-versions = "*" +[[package]] +name = "wheel" +version = "0.38.4" +description = "A built-package format for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.extras] +test = ["pytest (>=3.0.0)"] + [[package]] name = "widgetsnbextension" -version = "4.0.4" +version = "4.0.5" description = "Jupyter interactive widgets for Jupyter Notebook" category = "main" optional = true @@ -2003,7 +2066,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [[package]] name = "xxhash" -version = "3.1.0" +version = "3.2.0" description = "Python binding for xxHash" category = "main" optional = true @@ -2158,8 +2221,8 @@ async-timeout = [ {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, ] attrs = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = 
"sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] Babel = [ {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, @@ -2357,56 +2420,57 @@ contourpy = [ {file = "contourpy-1.0.6.tar.gz", hash = "sha256:6e459ebb8bb5ee4c22c19cc000174f8059981971a33ce11e17dddf6aca97a142"}, ] coverage = [ - {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, - {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, - {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, - {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, - {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, - {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, - {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, - {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, - {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, - {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, - {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, - {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, 
- {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, - {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, - {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, - {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, - {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, - {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, - {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, - {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, - {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, - {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, - {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, - {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, - {file = 
"coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, - {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, - {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, - {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, - {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, - {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, + {file = "coverage-7.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3695c4f4750bca943b3e1f74ad4be8d29e4aeab927d50772c41359107bd5d5c"}, + {file = "coverage-7.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa6a5a224b7f4cfb226f4fc55a57e8537fcc096f42219128c2c74c0e7d0953e1"}, + {file = "coverage-7.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74f70cd92669394eaf8d7756d1b195c8032cf7bbbdfce3bc489d4e15b3b8cf73"}, + {file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b66bb21a23680dee0be66557dc6b02a3152ddb55edf9f6723fa4a93368f7158d"}, + {file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d87717959d4d0ee9db08a0f1d80d21eb585aafe30f9b0a54ecf779a69cb015f6"}, + {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:854f22fa361d1ff914c7efa347398374cc7d567bdafa48ac3aa22334650dfba2"}, + {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e414dc32ee5c3f36544ea466b6f52f28a7af788653744b8570d0bf12ff34bc0"}, + {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6c5ad996c6fa4d8ed669cfa1e8551348729d008a2caf81489ab9ea67cfbc7498"}, + {file = "coverage-7.0.1-cp310-cp310-win32.whl", hash = "sha256:691571f31ace1837838b7e421d3a09a8c00b4aac32efacb4fc9bd0a5c647d25a"}, + {file = "coverage-7.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:89caf4425fe88889e2973a8e9a3f6f5f9bbe5dd411d7d521e86428c08a873a4a"}, + {file = "coverage-7.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:63d56165a7c76265468d7e0c5548215a5ba515fc2cba5232d17df97bffa10f6c"}, + {file = "coverage-7.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f943a3b2bc520102dd3e0bb465e1286e12c9a54f58accd71b9e65324d9c7c01"}, + {file = "coverage-7.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:830525361249dc4cd013652b0efad645a385707a5ae49350c894b67d23fbb07c"}, + {file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd1b9c5adc066db699ccf7fa839189a649afcdd9e02cb5dc9d24e67e7922737d"}, + {file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e00c14720b8b3b6c23b487e70bd406abafc976ddc50490f645166f111c419c39"}, + {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d55d840e1b8c0002fce66443e124e8581f30f9ead2e54fbf6709fb593181f2c"}, + {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66b18c3cf8bbab0cce0d7b9e4262dc830e93588986865a8c78ab2ae324b3ed56"}, + {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:12a5aa77783d49e05439fbe6e6b427484f8a0f9f456b46a51d8aac022cfd024d"}, + {file = "coverage-7.0.1-cp311-cp311-win32.whl", hash = "sha256:b77015d1cb8fe941be1222a5a8b4e3fbca88180cfa7e2d4a4e58aeabadef0ab7"}, + {file = "coverage-7.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb992c47cb1e5bd6a01e97182400bcc2ba2077080a17fcd7be23aaa6e572e390"}, + {file = "coverage-7.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e78e9dcbf4f3853d3ae18a8f9272111242531535ec9e1009fa8ec4a2b74557dc"}, + {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60bef2e2416f15fdc05772bf87db06c6a6f9870d1db08fdd019fbec98ae24a9"}, + {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9823e4789ab70f3ec88724bba1a203f2856331986cd893dedbe3e23a6cfc1e4e"}, + {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9158f8fb06747ac17bd237930c4372336edc85b6e13bdc778e60f9d685c3ca37"}, + {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:486ee81fa694b4b796fc5617e376326a088f7b9729c74d9defa211813f3861e4"}, + {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1285648428a6101b5f41a18991c84f1c3959cee359e51b8375c5882fc364a13f"}, + {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2c44fcfb3781b41409d0f060a4ed748537557de9362a8a9282182fafb7a76ab4"}, + {file = "coverage-7.0.1-cp37-cp37m-win32.whl", hash = "sha256:d6814854c02cbcd9c873c0f3286a02e3ac1250625cca822ca6bc1018c5b19f1c"}, + {file = "coverage-7.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66460f17c9319ea4f91c165d46840314f0a7c004720b20be58594d162a441d8"}, + {file = "coverage-7.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b373c9345c584bb4b5f5b8840df7f4ab48c4cbb7934b58d52c57020d911b856"}, + {file = "coverage-7.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d3022c3007d3267a880b5adcf18c2a9bf1fc64469b394a804886b401959b8742"}, + {file = "coverage-7.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92651580bd46519067e36493acb394ea0607b55b45bd81dd4e26379ed1871f55"}, + {file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3cfc595d2af13856505631be072835c59f1acf30028d1c860b435c5fc9c15b69"}, + {file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b4b3a4d9915b2be879aff6299c0a6129f3d08a775d5a061f503cf79571f73e4"}, + {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b6f22bb64cc39bcb883e5910f99a27b200fdc14cdd79df8696fa96b0005c9444"}, + {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72d1507f152abacea81f65fee38e4ef3ac3c02ff8bc16f21d935fd3a8a4ad910"}, + {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a79137fc99815fff6a852c233628e735ec15903cfd16da0f229d9c4d45926ab"}, + {file = 
"coverage-7.0.1-cp38-cp38-win32.whl", hash = "sha256:b3763e7fcade2ff6c8e62340af9277f54336920489ceb6a8cd6cc96da52fcc62"}, + {file = "coverage-7.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:09f6b5a8415b6b3e136d5fec62b552972187265cb705097bf030eb9d4ffb9b60"}, + {file = "coverage-7.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:978258fec36c154b5e250d356c59af7d4c3ba02bef4b99cda90b6029441d797d"}, + {file = "coverage-7.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:19ec666533f0f70a0993f88b8273057b96c07b9d26457b41863ccd021a043b9a"}, + {file = "coverage-7.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfded268092a84605f1cc19e5c737f9ce630a8900a3589e9289622db161967e9"}, + {file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bcfb1d8ac94af886b54e18a88b393f6a73d5959bb31e46644a02453c36e475"}, + {file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b4a923cc7566bbc7ae2dfd0ba5a039b61d19c740f1373791f2ebd11caea59"}, + {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aec2d1515d9d39ff270059fd3afbb3b44e6ec5758af73caf18991807138c7118"}, + {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c20cfebcc149a4c212f6491a5f9ff56f41829cd4f607b5be71bb2d530ef243b1"}, + {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fd556ff16a57a070ce4f31c635953cc44e25244f91a0378c6e9bdfd40fdb249f"}, + {file = "coverage-7.0.1-cp39-cp39-win32.whl", hash = "sha256:b9ea158775c7c2d3e54530a92da79496fb3fb577c876eec761c23e028f1e216c"}, + {file = "coverage-7.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:d1991f1dd95eba69d2cd7708ff6c2bbd2426160ffc73c2b81f617a053ebcb1a8"}, + {file = "coverage-7.0.1-pp37.pp38.pp39-none-any.whl", hash = "sha256:3dd4ee135e08037f458425b8842d24a95a0961831a33f89685ff86b77d378f89"}, + {file = "coverage-7.0.1.tar.gz", hash = "sha256:a4a574a19eeb67575a5328a5760bbbb737faa685616586a9f9da4281f940109c"}, ] cycler = [ {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"}, @@ -2417,8 +2481,8 @@ darglint = [ {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, ] datasets = [ - {file = "datasets-2.7.1-py3-none-any.whl", hash = "sha256:3d0d2e860cec7c4e77c40de64533d46853f939b6e2311cba4f483f000afae868"}, - {file = "datasets-2.7.1.tar.gz", hash = "sha256:1c79a982d9d9c75fbbaea5b177c2b4c56894289b647fa2845ae2ebd8ac638a0f"}, + {file = "datasets-2.8.0-py3-none-any.whl", hash = "sha256:f36cb362bb5587659bab18e594b6d25d9d28486d735a571319c82efeb5a4e5df"}, + {file = "datasets-2.8.0.tar.gz", hash = "sha256:a843b69593914071f921fc1086fde939f30a63415a34cdda5db3c0acdd58aff2"}, ] debugpy = [ {file = "debugpy-1.6.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:6ae238943482c78867ac707c09122688efb700372b617ffd364261e5e41f7a2f"}, @@ -2465,16 +2529,16 @@ entrypoints = [ {file = "entrypoints-0.4.tar.gz", hash = "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4"}, ] exceptiongroup = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = 
"sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, ] executing = [ {file = "executing-1.2.0-py2.py3-none-any.whl", hash = "sha256:0314a69e37426e3608aada02473b4161d4caf5a4b244d1d0c48072b8fee7bacc"}, {file = "executing-1.2.0.tar.gz", hash = "sha256:19da64c18d2d851112f09c287f8d3dbbdf725ab0e569077efb6cdcbd3497c107"}, ] filelock = [ - {file = "filelock-3.8.2-py3-none-any.whl", hash = "sha256:8df285554452285f79c035efb0c861eb33a4bcfa5b7a137016e32e6a90f9792c"}, - {file = "filelock-3.8.2.tar.gz", hash = "sha256:7565f628ea56bfcd8e54e42bdc55da899c85c1abfe1b5bcfd147e9188cebb3b2"}, + {file = "filelock-3.9.0-py3-none-any.whl", hash = "sha256:f58d535af89bb9ad5cd4df046f741f8553a418c01a7856bf0d173bbc9f6bd16d"}, + {file = "filelock-3.9.0.tar.gz", hash = "sha256:7b319f24340b51f55a2bf7a12ac0755a9b03e718311dac567a0f4f7fabd2f5de"}, ] flake8 = [ {file = "flake8-6.0.0-py2.py3-none-any.whl", hash = "sha256:3833794e27ff64ea4e9cf5d410082a8b97ff1a06c16aa3d2027339cd0f1195c7"}, @@ -2569,16 +2633,16 @@ gitdb = [ {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] GitPython = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, + {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, ] huggingface-hub = [ {file = "huggingface_hub-0.11.1-py3-none-any.whl", hash = "sha256:11eed7aab4fa4d1fb532f2aea3379ef4998d9f6bc24a330834dfedd3dac7f441"}, {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, ] identify = [ - {file = "identify-2.5.10-py2.py3-none-any.whl", hash = "sha256:fb7c2feaeca6976a3ffa31ec3236a6911fbc51aec9acc111de2aed99f244ade2"}, - {file = "identify-2.5.10.tar.gz", hash = "sha256:dce9e31fee7dbc45fea36a9e855c316b8fbf807e65a862f160840bb5a2bf5dfd"}, + {file = "identify-2.5.11-py2.py3-none-any.whl", hash = "sha256:e7db36b772b188099616aaf2accbee122949d1c6a1bac4f38196720d6f9f06db"}, + {file = "identify-2.5.11.tar.gz", hash = "sha256:14b7076b29c99b1b0b8b08e96d448c7b877a9b07683cd8cfda2ea06af85ffa1c"}, ] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, @@ -2589,28 +2653,28 @@ imagesize = [ {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] importlib-metadata = [ - {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, - {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = 
"sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] ipykernel = [ - {file = "ipykernel-6.19.2-py3-none-any.whl", hash = "sha256:1374a55c57ca7a7286c3d8b15799cd76e1a2381b6b1fea99c494b955988926b6"}, - {file = "ipykernel-6.19.2.tar.gz", hash = "sha256:1ab68d3d3654196266baa93990055413e167263ffbe4cfe834f871bcd3d3506d"}, + {file = "ipykernel-6.19.4-py3-none-any.whl", hash = "sha256:0ecdae0060da61c5222ad221681f3b99b5bef739e11a3b1eb5778aa47f056f1f"}, + {file = "ipykernel-6.19.4.tar.gz", hash = "sha256:4140c282a6c71cdde59abe5eae2c71bf1eeb4a69316ab76e1c4c25150a49722b"}, ] ipython = [ {file = "ipython-8.7.0-py3-none-any.whl", hash = "sha256:352042ddcb019f7c04e48171b4dd78e4c4bb67bf97030d170e154aac42b656d9"}, {file = "ipython-8.7.0.tar.gz", hash = "sha256:882899fe78d5417a0aa07f995db298fa28b58faeba2112d2e3a4c95fe14bb738"}, ] ipywidgets = [ - {file = "ipywidgets-8.0.3-py3-none-any.whl", hash = "sha256:db7dd35fb1217636cbdbe0ba0bd2216d91a7695cb28b5c1dca17e62cd51378de"}, - {file = "ipywidgets-8.0.3.tar.gz", hash = "sha256:2ec50df8538a1d4ddd5d454830d010922ad1015e81ac23efb27c0908bbc1eece"}, + {file = "ipywidgets-8.0.4-py3-none-any.whl", hash = "sha256:ebb195e743b16c3947fe8827190fb87b4d00979c0fbf685afe4d2c4927059fa1"}, + {file = "ipywidgets-8.0.4.tar.gz", hash = "sha256:c0005a77a47d77889cafed892b58e33b4a2a96712154404c6548ec22272811ea"}, ] isort = [ - {file = "isort-5.11.2-py3-none-any.whl", hash = "sha256:e486966fba83f25b8045f8dd7455b0a0d1e4de481e1d7ce4669902d9fb85e622"}, - {file = "isort-5.11.2.tar.gz", hash = "sha256:dd8bbc5c0990f2a095d754e50360915f73b4c26fc82733eb5bfc6b48396af4d2"}, + {file = "isort-5.11.4-py3-none-any.whl", hash = "sha256:c033fd0edb91000a7f09527fe5c75321878f98322a77ddcc81adbd83724afb7b"}, + {file = "isort-5.11.4.tar.gz", hash = "sha256:6db30c5ded9815d813932c04c2f85a360bcdd35fed496f4d8f35495ef0a261b6"}, ] jedi = [ {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, @@ -2633,12 +2697,12 @@ jupyter-client = [ {file = "jupyter_client-7.4.8.tar.gz", hash = "sha256:109a3c33b62a9cf65aa8325850a0999a795fac155d9de4f7555aef5f310ee35a"}, ] jupyter-core = [ - {file = "jupyter_core-5.1.0-py3-none-any.whl", hash = "sha256:f5740d99606958544396914b08e67b668f45e7eff99ab47a7f4bcead419c02f4"}, - {file = "jupyter_core-5.1.0.tar.gz", hash = "sha256:a5ae7c09c55c0b26f692ec69323ba2b62e8d7295354d20f6cd57b749de4a05bf"}, + {file = "jupyter_core-5.1.1-py3-none-any.whl", hash = "sha256:f1038179d0f179b0e92c8fa2289c012b29dafdc9484b41821079f1a496f5a0f2"}, + {file = "jupyter_core-5.1.1.tar.gz", hash = "sha256:f342d29eb6edb06f8dffa69adea987b3a9ee2b6702338a8cb6911516ea0b432d"}, ] jupyterlab-widgets = [ - {file = "jupyterlab_widgets-3.0.4-py3-none-any.whl", hash = "sha256:4c9275daa6d20fc96c3aea45756ece7110850d035b0b93a6a40e918016b927da"}, - {file = "jupyterlab_widgets-3.0.4.tar.gz", hash = "sha256:9a568e022b8bb53ab23291f6ddb52f8002b789c2c5763378cbc882be1d619be8"}, + {file = "jupyterlab_widgets-3.0.5-py3-none-any.whl", hash = "sha256:a04a42e50231b355b7087e16a818f541e53589f7647144ea0344c4bf16f300e5"}, + {file = "jupyterlab_widgets-3.0.5.tar.gz", hash = "sha256:eeaecdeaf6c03afc960ddae201ced88d5979b4ca9c3891bcb8f6631af705f5ef"}, ] kiwisolver = [ {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f5e60fabb7343a836360c4f0919b8cd0d6dbf08ad2ca6b9cf90bf0c76a3c4f6"}, @@ -2829,80 +2893,80 @@ 
mccabe = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] multidict = [ - {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:73009ea04205966d47e16d98686ac5c438af23a1bb30b48a2c5da3423ec9ce37"}, - {file = "multidict-6.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:8b92a9f3ab904397a33b193000dc4de7318ea175c4c460a1e154c415f9008e3d"}, - {file = "multidict-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:578bfcb16f4b8675ef71b960c00f174b0426e0eeb796bab6737389d8288eb827"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1650ea41c408755da5eed52ac6ccbc8938ccc3e698d81e6f6a1be02ff2a0945"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d52442e7c951e4c9ee591d6047706e66923d248d83958bbf99b8b19515fffaef"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad7d66422b9cc51125509229693d27e18c08f2dea3ac9de408d821932b1b3759"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6cd14e61f0da2a2cfb9fe05bfced2a1ed7063ce46a7a8cd473be4973de9a7f91"}, - {file = "multidict-6.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:190626ced82d4cc567a09e7346340d380154a493bac6905e0095d8158cdf1e38"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:791458a1f7d1b4ab3bd9e93e0dcd1d59ef7ee9aa051dcd1ea030e62e49b923fd"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b46e79a9f4db53897d17bc64a39d1c7c2be3e3d4f8dba6d6730a2b13ddf0f986"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e4a095e18847c12ec20e55326ab8782d9c2d599400a3a2f174fab4796875d0e2"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:fb6c3dc3d65014d2c782f5acf0b3ba14e639c6c33d3ed8932ead76b9080b3544"}, - {file = "multidict-6.0.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3541882266247c7cd3dba78d6ef28dbe704774df60c9e4231edaa4493522e614"}, - {file = "multidict-6.0.3-cp310-cp310-win32.whl", hash = "sha256:67090b17a0a5be5704fd109f231ee73cefb1b3802d41288d6378b5df46ae89ba"}, - {file = "multidict-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:36df958b15639e40472adaa4f0c2c7828fe680f894a6b48c4ce229f59a6a798b"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5b51969503709415a35754954c2763f536a70b8bf7360322b2edb0c0a44391f6"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:24e8d513bfcaadc1f8b0ebece3ff50961951c54b07d5a775008a882966102418"}, - {file = "multidict-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d325d61cac602976a5d47b19eaa7d04e3daf4efce2164c630219885087234102"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbbe17f8a7211b623502d2bf41022a51da3025142401417c765bf9a56fed4c"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4fb3fe591956d8841882c463f934c9f7485cfd5f763a08c0d467b513dc18ef89"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e1925f78a543b94c3d46274c66a366fee8a263747060220ed0188e5f3eeea1c0"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:21e1ce0b187c4e93112304dcde2aa18922fdbe8fb4f13d8aa72a5657bce0563a"}, - {file = "multidict-6.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e07c24018986fb00d6e7eafca8fcd6e05095649e17fcf0e33a592caaa62a78b9"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:114a4ab3e5cfbc56c4b6697686ecb92376c7e8c56893ef20547921552f8bdf57"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4ccf55f28066b4f08666764a957c2b7c241c7547b0921d69c7ceab5f74fe1a45"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:9d359b0a962e052b713647ac1f13eabf2263167b149ed1e27d5c579f5c8c7d2c"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:df7b4cee3ff31b3335aba602f8d70dbc641e5b7164b1e9565570c9d3c536a438"}, - {file = "multidict-6.0.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ee9b1cae9a6c5d023e5a150f6f6b9dbb3c3bbc7887d6ee07d4c0ecb49a473734"}, - {file = "multidict-6.0.3-cp311-cp311-win32.whl", hash = "sha256:960ce1b790952916e682093788696ef7e33ac6a97482f9b983abdc293091b531"}, - {file = "multidict-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:2b66d61966b12e6bba500e5cbb2c721a35e119c30ee02495c5629bd0e91eea30"}, - {file = "multidict-6.0.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:526f8397fc124674b8f39748680a0ff673bd6a715fecb4866716d36e380f015f"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f5d5129a937af4e3c4a1d6c139f4051b7d17d43276cefdd8d442a7031f7eef2"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38d394814b39be1c36ac709006d39d50d72a884f9551acd9c8cc1ffae3fc8c4e"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:99341ca1f1db9e7f47914cb2461305665a662383765ced6f843712564766956d"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5790cc603456b6dcf8a9a4765f666895a6afddc88b3d3ba7b53dea2b6e23116"}, - {file = "multidict-6.0.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce8e51774eb03844588d3c279adb94efcd0edeccd2f97516623292445bcc01f9"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:baa96a3418e27d723064854143b2f414a422c84cc87285a71558722049bebc5a"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:cb4a08f0aaaa869f189ffea0e17b86ad0237b51116d494da15ef7991ee6ad2d7"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:62db44727d0befea68e8ad2881bb87a9cfb6b87d45dd78609009627167f37b69"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:4cc5c8cd205a9810d16a5cd428cd81bac554ad1477cb87f4ad722b10992e794d"}, - {file = "multidict-6.0.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:f76109387e1ec8d8e2137c94c437b89fe002f29e0881aae8ae45529bdff92000"}, - {file = "multidict-6.0.3-cp37-cp37m-win32.whl", hash = "sha256:f8a728511c977df6f3d8af388fcb157e49f11db4a6637dd60131b8b6e40b0253"}, - {file = "multidict-6.0.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c2a1168e5aa7c72499fb03c850e0f03f624fa4a5c8d2e215c518d0a73872eb64"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:eddf604a3de2ace3d9a4e4d491be7562a1ac095a0a1c95a9ec5781ef0273ef11"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:d09daf5c6ce7fc6ed444c9339bbde5ea84e2534d1ca1cd37b60f365c77f00dea"}, - {file = "multidict-6.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:12e0d396faa6dc55ff5379eee54d1df3b508243ff15bfc8295a6ec7a4483a335"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:70740c2bc9ab1c99f7cdcb104f27d16c63860c56d51c5bf0ef82fc1d892a2131"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e322c94596054352f5a02771eec71563c018b15699b961aba14d6dd943367022"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4159fc1ec9ede8ab93382e0d6ba9b1b3d23c72da39a834db7a116986605c7ab4"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47defc0218682281a52fb1f6346ebb8b68b17538163a89ea24dfe4da37a8a9a3"}, - {file = "multidict-6.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f9511e48bde6b995825e8d35e434fc96296cf07a25f4aae24ff9162be7eaa46"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e0bce9f7c30e7e3a9e683f670314c0144e8d34be6b7019e40604763bd278d84f"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:01b456046a05ff7cceefb0e1d2a9d32f05efcb1c7e0d152446304e11557639ce"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:8230a39bae6c2e8a09e4da6bace5064693b00590a4a213e38f9a9366da10e7dd"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:445c0851a1cbc1f2ec3b40bc22f9c4a235edb3c9a0906122a9df6ea8d51f886c"}, - {file = "multidict-6.0.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:9aac6881454a750554ed4b280a839dcf9e2133a9d12ab4d417d673fb102289b7"}, - {file = "multidict-6.0.3-cp38-cp38-win32.whl", hash = "sha256:81c3d597591b0940e04949e4e4f79359b2d2e542a686ba0da5e25de33fec13e0"}, - {file = "multidict-6.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:dc4cfef5d899f5f1a15f3d2ac49f71107a01a5a2745b4dd53fa0cede1419385a"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d408172519049e36fb6d29672f060dc8461fc7174eba9883c7026041ef9bfb38"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e068dfeadbce63072b2d8096486713d04db4946aad0a0f849bd4fc300799d0d3"}, - {file = "multidict-6.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8b817d4ed68fd568ec5e45dd75ddf30cc72a47a6b41b74d5bb211374c296f5e"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cf5d19e12eff855aa198259c0b02fd3f5d07e1291fbd20279c37b3b0e6c9852"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e5a811aab1b4aea0b4be669363c19847a8c547510f0e18fb632956369fdbdf67"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2cfda34b7cb99eacada2072e0f69c0ad3285cb6f8e480b11f2b6d6c1c6f92718"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beeca903e4270b4afcd114f371a9602240dc143f9e944edfea00f8d4ad56c40d"}, - {file = "multidict-6.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd5771e8ea325f85cbb361ddbdeb9ae424a68e5dfb6eea786afdcd22e68a7d5d"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:9dbab2a7e9c073bc9538824a01f5ed689194db7f55f2b8102766873e906a6c1a"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f2c0957b3e8c66c10d27272709a5299ab3670a0f187c9428f3b90d267119aedb"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:94cbe5535ef150546b8321aebea22862a3284da51e7b55f6f95b7d73e96d90ee"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:d0e798b072cf2aab9daceb43d97c9c527a0c7593e67a7846ad4cc6051de1e303"}, - {file = "multidict-6.0.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a27b029caa3b555a4f3da54bc1e718eb55fcf1a11fda8bf0132147b476cf4c08"}, - {file = "multidict-6.0.3-cp39-cp39-win32.whl", hash = "sha256:018c8e3be7f161a12b3e41741b6721f9baeb2210f4ab25a6359b7d76c1017dce"}, - {file = "multidict-6.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5e58ec0375803526d395f6f7e730ecc45d06e15f68f7b9cdbf644a2918324e51"}, - {file = "multidict-6.0.3.tar.gz", hash = "sha256:2523a29006c034687eccd3ee70093a697129a3ffe8732535d3b2df6a4ecc279d"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + 
{file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = 
"multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, ] multiprocess = [ {file = "multiprocess-0.70.14-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:560a27540daef4ce8b24ed3cc2496a3c670df66c96d02461a4da67473685adf3"}, @@ -2933,34 +2997,51 @@ nodeenv = [ {file = "nodeenv-1.7.0.tar.gz", hash = "sha256:e0e7f7dfb85fc5394c6fe1e8fa98131a2473e04311a45afb6508f7cf1836fa2b"}, ] numpy = [ - {file = "numpy-1.23.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c88793f78fca17da0145455f0d7826bcb9f37da4764af27ac945488116efe63"}, - {file = "numpy-1.23.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e9f4c4e51567b616be64e05d517c79a8a22f3606499941d97bb76f2ca59f982d"}, - {file = "numpy-1.23.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7903ba8ab592b82014713c491f6c5d3a1cde5b4a3bf116404e08f5b52f6daf43"}, - {file = "numpy-1.23.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e05b1c973a9f858c74367553e236f287e749465f773328c8ef31abe18f691e1"}, - {file = "numpy-1.23.5-cp310-cp310-win32.whl", hash = "sha256:522e26bbf6377e4d76403826ed689c295b0b238f46c28a7251ab94716da0b280"}, - {file = "numpy-1.23.5-cp310-cp310-win_amd64.whl", hash = "sha256:dbee87b469018961d1ad79b1a5d50c0ae850000b639bcb1b694e9981083243b6"}, - {file = "numpy-1.23.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ce571367b6dfe60af04e04a1834ca2dc5f46004ac1cc756fb95319f64c095a96"}, - {file = "numpy-1.23.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56e454c7833e94ec9769fa0f86e6ff8e42ee38ce0ce1fa4cbb747ea7e06d56aa"}, - {file = "numpy-1.23.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5039f55555e1eab31124a5768898c9e22c25a65c1e0037f4d7c495a45778c9f2"}, - {file = "numpy-1.23.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f545efd1108e647604a1b5aa809591ccd2540f468a880bedb97247e72db387"}, - {file = "numpy-1.23.5-cp311-cp311-win32.whl", hash = "sha256:b2a9ab7c279c91974f756c84c365a669a887efa287365a8e2c418f8b3ba73fb0"}, - {file = "numpy-1.23.5-cp311-cp311-win_amd64.whl", hash = "sha256:0cbe9848fad08baf71de1a39e12d1b6310f1d5b2d0ea4de051058e6e1076852d"}, - {file = "numpy-1.23.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f063b69b090c9d918f9df0a12116029e274daf0181df392839661c4c7ec9018a"}, - {file = "numpy-1.23.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0aaee12d8883552fadfc41e96b4c82ee7d794949e2a7c3b3a7201e968c7ecab9"}, - {file = "numpy-1.23.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92c8c1e89a1f5028a4c6d9e3ccbe311b6ba53694811269b992c0b224269e2398"}, - {file = "numpy-1.23.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d208a0f8729f3fb790ed18a003f3a57895b989b40ea4dce4717e9cf4af62c6bb"}, - {file = "numpy-1.23.5-cp38-cp38-win32.whl", hash = "sha256:06005a2ef6014e9956c09ba07654f9837d9e26696a0470e42beedadb78c11b07"}, - {file = "numpy-1.23.5-cp38-cp38-win_amd64.whl", hash = "sha256:ca51fcfcc5f9354c45f400059e88bc09215fb71a48d3768fb80e357f3b457e1e"}, - {file = "numpy-1.23.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8969bfd28e85c81f3f94eb4a66bc2cf1dbdc5c18efc320af34bffc54d6b1e38f"}, - {file = "numpy-1.23.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7ac231a08bb37f852849bbb387a20a57574a97cfc7b6cabb488a4fc8be176de"}, - {file = "numpy-1.23.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf837dc63ba5c06dc8797c398db1e223a466c7ece27a1f7b5232ba3466aafe3d"}, - {file = "numpy-1.23.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33161613d2269025873025b33e879825ec7b1d831317e68f4f2f0f84ed14c719"}, - {file = "numpy-1.23.5-cp39-cp39-win32.whl", hash = "sha256:af1da88f6bc3d2338ebbf0e22fe487821ea4d8e89053e25fa59d1d79786e7481"}, - {file = "numpy-1.23.5-cp39-cp39-win_amd64.whl", hash = "sha256:09b7847f7e83ca37c6e627682f145856de331049013853f344f37b0c9690e3df"}, - {file = "numpy-1.23.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:abdde9f795cf292fb9651ed48185503a2ff29be87770c3b8e2a14b0cd7aa16f8"}, - {file = "numpy-1.23.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9a909a8bae284d46bbfdefbdd4a262ba19d3bc9921b1e76126b1d21c3c34135"}, - {file = "numpy-1.23.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d"}, - {file = "numpy-1.23.5.tar.gz", hash = "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a"}, + {file = "numpy-1.24.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:179a7ef0889ab769cc03573b6217f54c8bd8e16cef80aad369e1e8185f994cd7"}, + {file = "numpy-1.24.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b09804ff570b907da323b3d762e74432fb07955701b17b08ff1b5ebaa8cfe6a9"}, + {file = "numpy-1.24.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1b739841821968798947d3afcefd386fa56da0caf97722a5de53e07c4ccedc7"}, + {file = "numpy-1.24.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e3463e6ac25313462e04aea3fb8a0a30fb906d5d300f58b3bc2c23da6a15398"}, + {file = "numpy-1.24.1-cp310-cp310-win32.whl", hash = 
"sha256:b31da69ed0c18be8b77bfce48d234e55d040793cebb25398e2a7d84199fbc7e2"}, + {file = "numpy-1.24.1-cp310-cp310-win_amd64.whl", hash = "sha256:b07b40f5fb4fa034120a5796288f24c1fe0e0580bbfff99897ba6267af42def2"}, + {file = "numpy-1.24.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7094891dcf79ccc6bc2a1f30428fa5edb1e6fb955411ffff3401fb4ea93780a8"}, + {file = "numpy-1.24.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28e418681372520c992805bb723e29d69d6b7aa411065f48216d8329d02ba032"}, + {file = "numpy-1.24.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e274f0f6c7efd0d577744f52032fdd24344f11c5ae668fe8d01aac0422611df1"}, + {file = "numpy-1.24.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0044f7d944ee882400890f9ae955220d29b33d809a038923d88e4e01d652acd9"}, + {file = "numpy-1.24.1-cp311-cp311-win32.whl", hash = "sha256:442feb5e5bada8408e8fcd43f3360b78683ff12a4444670a7d9e9824c1817d36"}, + {file = "numpy-1.24.1-cp311-cp311-win_amd64.whl", hash = "sha256:de92efa737875329b052982e37bd4371d52cabf469f83e7b8be9bb7752d67e51"}, + {file = "numpy-1.24.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b162ac10ca38850510caf8ea33f89edcb7b0bb0dfa5592d59909419986b72407"}, + {file = "numpy-1.24.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:26089487086f2648944f17adaa1a97ca6aee57f513ba5f1c0b7ebdabbe2b9954"}, + {file = "numpy-1.24.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:caf65a396c0d1f9809596be2e444e3bd4190d86d5c1ce21f5fc4be60a3bc5b36"}, + {file = "numpy-1.24.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0677a52f5d896e84414761531947c7a330d1adc07c3a4372262f25d84af7bf7"}, + {file = "numpy-1.24.1-cp38-cp38-win32.whl", hash = "sha256:dae46bed2cb79a58d6496ff6d8da1e3b95ba09afeca2e277628171ca99b99db1"}, + {file = "numpy-1.24.1-cp38-cp38-win_amd64.whl", hash = "sha256:6ec0c021cd9fe732e5bab6401adea5a409214ca5592cd92a114f7067febcba0c"}, + {file = "numpy-1.24.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:28bc9750ae1f75264ee0f10561709b1462d450a4808cd97c013046073ae64ab6"}, + {file = "numpy-1.24.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:84e789a085aabef2f36c0515f45e459f02f570c4b4c4c108ac1179c34d475ed7"}, + {file = "numpy-1.24.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e669fbdcdd1e945691079c2cae335f3e3a56554e06bbd45d7609a6cf568c700"}, + {file = "numpy-1.24.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef85cf1f693c88c1fd229ccd1055570cb41cdf4875873b7728b6301f12cd05bf"}, + {file = "numpy-1.24.1-cp39-cp39-win32.whl", hash = "sha256:87a118968fba001b248aac90e502c0b13606721b1343cdaddbc6e552e8dfb56f"}, + {file = "numpy-1.24.1-cp39-cp39-win_amd64.whl", hash = "sha256:ddc7ab52b322eb1e40521eb422c4e0a20716c271a306860979d450decbb51b8e"}, + {file = "numpy-1.24.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ed5fb71d79e771ec930566fae9c02626b939e37271ec285e9efaf1b5d4370e7d"}, + {file = "numpy-1.24.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad2925567f43643f51255220424c23d204024ed428afc5aad0f86f3ffc080086"}, + {file = "numpy-1.24.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:cfa1161c6ac8f92dea03d625c2d0c05e084668f4a06568b77a25a89111621566"}, + {file = "numpy-1.24.1.tar.gz", hash = "sha256:2386da9a471cc00a1f47845e27d916d5ec5346ae9696e01a8a34760858fe9dd2"}, +] +nvidia-cublas-cu11 = [ + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-manylinux1_x86_64.whl", hash = 
"sha256:d32e4d75f94ddfb93ea0a5dda08389bcc65d8916a25cb9f37ac89edaeed3bded"}, + {file = "nvidia_cublas_cu11-11.10.3.66-py3-none-win_amd64.whl", hash = "sha256:8ac17ba6ade3ed56ab898a036f9ae0756f1e81052a317bf98f8c6d18dc3ae49e"}, +] +nvidia-cuda-nvrtc-cu11 = [ + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:9f1562822ea264b7e34ed5930567e89242d266448e936b85bc97a3370feabb03"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:f7d9610d9b7c331fa0da2d1b2858a4a8315e6d49765091d28711c8946e7425e7"}, + {file = "nvidia_cuda_nvrtc_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:f2effeb1309bdd1b3854fc9b17eaf997808f8b25968ce0c7070945c4265d64a3"}, +] +nvidia-cuda-runtime-cu11 = [ + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-manylinux1_x86_64.whl", hash = "sha256:cc768314ae58d2641f07eac350f40f99dcb35719c4faff4bc458a7cd2b119e31"}, + {file = "nvidia_cuda_runtime_cu11-11.7.99-py3-none-win_amd64.whl", hash = "sha256:bc77fa59a7679310df9d5c70ab13c4e34c64ae2124dd1efd7e5474b71be125c7"}, +] +nvidia-cudnn-cu11 = [ + {file = "nvidia_cudnn_cu11-8.5.0.96-2-py3-none-manylinux1_x86_64.whl", hash = "sha256:402f40adfc6f418f9dae9ab402e773cfed9beae52333f6d86ae3107a1b9527e7"}, + {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"}, ] packaging = [ {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, @@ -3020,71 +3101,73 @@ pickleshare = [ {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] Pillow = [ - {file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"}, - {file = "Pillow-9.3.0-1-cp37-cp37m-win_amd64.whl", hash = "sha256:32a44128c4bdca7f31de5be641187367fe2a450ad83b833ef78910397db491aa"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:0b7257127d646ff8676ec8a15520013a698d1fdc48bc2a79ba4e53df792526f2"}, - {file = "Pillow-9.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b90f7616ea170e92820775ed47e136208e04c967271c9ef615b6fbd08d9af0e3"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68943d632f1f9e3dce98908e873b3a090f6cba1cbb1b892a9e8d97c938871fbe"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be55f8457cd1eac957af0c3f5ece7bc3f033f89b114ef30f710882717670b2a8"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d77adcd56a42d00cc1be30843d3426aa4e660cab4a61021dc84467123f7a00c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:829f97c8e258593b9daa80638aee3789b7df9da5cf1336035016d76f03b8860c"}, - {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:801ec82e4188e935c7f5e22e006d01611d6b41661bba9fe45b60e7ac1a8f84de"}, - {file = "Pillow-9.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:871b72c3643e516db4ecf20efe735deb27fe30ca17800e661d769faab45a18d7"}, - {file = "Pillow-9.3.0-cp310-cp310-win32.whl", hash = "sha256:655a83b0058ba47c7c52e4e2df5ecf484c1b0b0349805896dd350cbc416bdd91"}, - {file = "Pillow-9.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:9f47eabcd2ded7698106b05c2c338672d16a6f2a485e74481f524e2a23c2794b"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = 
"sha256:57751894f6618fd4308ed8e0c36c333e2f5469744c34729a27532b3db106ee20"}, - {file = "Pillow-9.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7db8b751ad307d7cf238f02101e8e36a128a6cb199326e867d1398067381bff4"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3033fbe1feb1b59394615a1cafaee85e49d01b51d54de0cbf6aa8e64182518a1"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b012ea2d065fd163ca096f4e37e47cd8b59cf4b0fd47bfca6abb93df70b34c"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a65733d103311331875c1dca05cb4606997fd33d6acfed695b1232ba1df193"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:502526a2cbfa431d9fc2a079bdd9061a2397b842bb6bc4239bb176da00993812"}, - {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90fb88843d3902fe7c9586d439d1e8c05258f41da473952aa8b328d8b907498c"}, - {file = "Pillow-9.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:89dca0ce00a2b49024df6325925555d406b14aa3efc2f752dbb5940c52c56b11"}, - {file = "Pillow-9.3.0-cp311-cp311-win32.whl", hash = "sha256:3168434d303babf495d4ba58fc22d6604f6e2afb97adc6a423e917dab828939c"}, - {file = "Pillow-9.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:18498994b29e1cf86d505edcb7edbe814d133d2232d256db8c7a8ceb34d18cef"}, - {file = "Pillow-9.3.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:772a91fc0e03eaf922c63badeca75e91baa80fe2f5f87bdaed4280662aad25c9"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa4107d1b306cdf8953edde0534562607fe8811b6c4d9a486298ad31de733b2"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4012d06c846dc2b80651b120e2cdd787b013deb39c09f407727ba90015c684f"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77ec3e7be99629898c9a6d24a09de089fa5356ee408cdffffe62d67bb75fdd72"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:6c738585d7a9961d8c2821a1eb3dcb978d14e238be3d70f0a706f7fa9316946b"}, - {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:828989c45c245518065a110434246c44a56a8b2b2f6347d1409c787e6e4651ee"}, - {file = "Pillow-9.3.0-cp37-cp37m-win32.whl", hash = "sha256:82409ffe29d70fd733ff3c1025a602abb3e67405d41b9403b00b01debc4c9a29"}, - {file = "Pillow-9.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:41e0051336807468be450d52b8edd12ac60bebaa97fe10c8b660f116e50b30e4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b03ae6f1a1878233ac620c98f3459f79fd77c7e3c2b20d460284e1fb370557d4"}, - {file = "Pillow-9.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4390e9ce199fc1951fcfa65795f239a8a4944117b5935a9317fb320e7767b40f"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40e1ce476a7804b0fb74bcfa80b0a2206ea6a882938eaba917f7a0f004b42502"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a06a052c5f37b4ed81c613a455a81f9a3a69429b4fd7bb913c3fa98abefc20"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03150abd92771742d4a8cd6f2fa6246d847dcd2e332a18d0c15cc75bf6703040"}, - {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:15c42fb9dea42465dfd902fb0ecf584b8848ceb28b41ee2b58f866411be33f07"}, 
- {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:51e0e543a33ed92db9f5ef69a0356e0b1a7a6b6a71b80df99f1d181ae5875636"}, - {file = "Pillow-9.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3dd6caf940756101205dffc5367babf288a30043d35f80936f9bfb37f8355b32"}, - {file = "Pillow-9.3.0-cp38-cp38-win32.whl", hash = "sha256:f1ff2ee69f10f13a9596480335f406dd1f70c3650349e2be67ca3139280cade0"}, - {file = "Pillow-9.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:276a5ca930c913f714e372b2591a22c4bd3b81a418c0f6635ba832daec1cbcfc"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:73bd195e43f3fadecfc50c682f5055ec32ee2c933243cafbfdec69ab1aa87cad"}, - {file = "Pillow-9.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c7c8ae3864846fc95f4611c78129301e203aaa2af813b703c55d10cc1628535"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e0918e03aa0c72ea56edbb00d4d664294815aa11291a11504a377ea018330d3"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0915e734b33a474d76c28e07292f196cdf2a590a0d25bcc06e64e545f2d146c"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0372acb5d3598f36ec0914deed2a63f6bcdb7b606da04dc19a88d31bf0c05b"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ad58d27a5b0262c0c19b47d54c5802db9b34d38bbf886665b626aff83c74bacd"}, - {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:97aabc5c50312afa5e0a2b07c17d4ac5e865b250986f8afe2b02d772567a380c"}, - {file = "Pillow-9.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9aaa107275d8527e9d6e7670b64aabaaa36e5b6bd71a1015ddd21da0d4e06448"}, - {file = "Pillow-9.3.0-cp39-cp39-win32.whl", hash = "sha256:bac18ab8d2d1e6b4ce25e3424f709aceef668347db8637c2296bcf41acb7cf48"}, - {file = "Pillow-9.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b472b5ea442148d1c3e2209f20f1e0bb0eb556538690fa70b5e1f79fa0ba8dc2"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ab388aaa3f6ce52ac1cb8e122c4bd46657c15905904b3120a6248b5b8b0bc228"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb8e7f2abee51cef77673be97760abff1674ed32847ce04b4af90f610144c7b"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca31dd6014cb8b0b2db1e46081b0ca7d936f856da3b39744aef499db5d84d02"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c7025dce65566eb6e89f56c9509d4f628fddcedb131d9465cacd3d8bac337e7e"}, - {file = "Pillow-9.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ebf2029c1f464c59b8bdbe5143c79fa2045a581ac53679733d3a91d400ff9efb"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b59430236b8e58840a0dfb4099a0e8717ffb779c952426a69ae435ca1f57210c"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12ce4932caf2ddf3e41d17fc9c02d67126935a44b86df6a206cf0d7161548627"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae5331c23ce118c53b172fa64a4c037eb83c9165aba3a7ba9ddd3ec9fa64a699"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0b07fffc13f474264c336298d1b4ce01d9c5a011415b79d4ee5527bb69ae6f65"}, - {file = "Pillow-9.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:073adb2ae23431d3b9bcbcff3fe698b62ed47211d0716b067385538a1b0f28b8"}, - {file = "Pillow-9.3.0.tar.gz", hash = "sha256:c935a22a557a560108d780f9a0fc426dd7459940dc54faa49d83249c8d3e760f"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, + {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3049a10261d7f2b6514d35bbb7a4dfc3ece4c4de14ef5876c4b7a23a0e566d"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a8df99701f9095bea8a6c4b3197da105df6f74e6176c5b410bc2df2fd29a57"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:94cdff45173b1919350601f82d61365e792895e3c3a3443cf99819e6fbf717a5"}, + {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:ed3e4b4e1e6de75fdc16d3259098de7c6571b1a6cc863b1a49e7d3d53e036070"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d5b2f8a31bd43e0f18172d8ac82347c8f37ef3e0b414431157718aa234991b28"}, + {file = "Pillow-9.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:09b89ddc95c248ee788328528e6a2996e09eaccddeeb82a5356e92645733be35"}, + {file = "Pillow-9.4.0-cp310-cp310-win32.whl", hash = "sha256:f09598b416ba39a8f489c124447b007fe865f786a89dbfa48bb5cf395693132a"}, + {file = "Pillow-9.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:f6e78171be3fb7941f9910ea15b4b14ec27725865a73c15277bc39f5ca4f8391"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:3fa1284762aacca6dc97474ee9c16f83990b8eeb6697f2ba17140d54b453e133"}, + {file = "Pillow-9.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:eaef5d2de3c7e9b21f1e762f289d17b726c2239a42b11e25446abf82b26ac132"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4dfdae195335abb4e89cc9762b2edc524f3c6e80d647a9a81bf81e17e3fb6f0"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6abfb51a82e919e3933eb137e17c4ae9c0475a25508ea88993bb59faf82f3b35"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:451f10ef963918e65b8869e17d67db5e2f4ab40e716ee6ce7129b0cde2876eab"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:6663977496d616b618b6cfa43ec86e479ee62b942e1da76a2c3daa1c75933ef4"}, + {file = "Pillow-9.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:60e7da3a3ad1812c128750fc1bc14a7ceeb8d29f77e0a2356a8fb2aa8925287d"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:19005a8e58b7c1796bc0167862b1f54a64d3b44ee5d48152b06bb861458bc0f8"}, + {file = "Pillow-9.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f715c32e774a60a337b2bb8ad9839b4abf75b267a0f18806f6f4f5f1688c4b5a"}, + {file = "Pillow-9.4.0-cp311-cp311-win32.whl", hash = "sha256:b222090c455d6d1a64e6b7bb5f4035c4dff479e22455c9eaa1bdd4c75b52c80c"}, + {file = "Pillow-9.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:ba6612b6548220ff5e9df85261bddc811a057b0b465a1226b39bfb8550616aee"}, + {file = "Pillow-9.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = 
"sha256:5f532a2ad4d174eb73494e7397988e22bf427f91acc8e6ebf5bb10597b49c493"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dd5a9c3091a0f414a963d427f920368e2b6a4c2f7527fdd82cde8ef0bc7a327"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef21af928e807f10bf4141cad4746eee692a0dd3ff56cfb25fce076ec3cc8abe"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:847b114580c5cc9ebaf216dd8c8dbc6b00a3b7ab0131e173d7120e6deade1f57"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:653d7fb2df65efefbcbf81ef5fe5e5be931f1ee4332c2893ca638c9b11a409c4"}, + {file = "Pillow-9.4.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:46f39cab8bbf4a384ba7cb0bc8bae7b7062b6a11cfac1ca4bc144dea90d4a9f5"}, + {file = "Pillow-9.4.0-cp37-cp37m-win32.whl", hash = "sha256:7ac7594397698f77bce84382929747130765f66406dc2cd8b4ab4da68ade4c6e"}, + {file = "Pillow-9.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:46c259e87199041583658457372a183636ae8cd56dbf3f0755e0f376a7f9d0e6"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:0e51f608da093e5d9038c592b5b575cadc12fd748af1479b5e858045fff955a9"}, + {file = "Pillow-9.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:765cb54c0b8724a7c12c55146ae4647e0274a839fb6de7bcba841e04298e1011"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:519e14e2c49fcf7616d6d2cfc5c70adae95682ae20f0395e9280db85e8d6c4df"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d197df5489004db87d90b918033edbeee0bd6df3848a204bca3ff0a903bef837"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0845adc64fe9886db00f5ab68c4a8cd933ab749a87747555cec1c95acea64b0b"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:e1339790c083c5a4de48f688b4841f18df839eb3c9584a770cbd818b33e26d5d"}, + {file = "Pillow-9.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:a96e6e23f2b79433390273eaf8cc94fec9c6370842e577ab10dabdcc7ea0a66b"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:7cfc287da09f9d2a7ec146ee4d72d6ea1342e770d975e49a8621bf54eaa8f30f"}, + {file = "Pillow-9.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d7081c084ceb58278dd3cf81f836bc818978c0ccc770cbbb202125ddabec6628"}, + {file = "Pillow-9.4.0-cp38-cp38-win32.whl", hash = "sha256:df41112ccce5d47770a0c13651479fbcd8793f34232a2dd9faeccb75eb5d0d0d"}, + {file = "Pillow-9.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:7a21222644ab69ddd9967cfe6f2bb420b460dae4289c9d40ff9a4896e7c35c9a"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0f3269304c1a7ce82f1759c12ce731ef9b6e95b6df829dccd9fe42912cc48569"}, + {file = "Pillow-9.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cb362e3b0976dc994857391b776ddaa8c13c28a16f80ac6522c23d5257156bed"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a2e0f87144fcbbe54297cae708c5e7f9da21a4646523456b00cc956bd4c65815"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28676836c7796805914b76b1837a40f76827ee0d5398f72f7dcc634bae7c6264"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0884ba7b515163a1a05440a138adeb722b8a6ae2c2b33aea93ea3118dd3a899e"}, + {file = 
"Pillow-9.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:53dcb50fbdc3fb2c55431a9b30caeb2f7027fcd2aeb501459464f0214200a503"}, + {file = "Pillow-9.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:e8c5cf126889a4de385c02a2c3d3aba4b00f70234bfddae82a5eaa3ee6d5e3e6"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6c6b1389ed66cdd174d040105123a5a1bc91d0aa7059c7261d20e583b6d8cbd2"}, + {file = "Pillow-9.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0dd4c681b82214b36273c18ca7ee87065a50e013112eea7d78c7a1b89a739153"}, + {file = "Pillow-9.4.0-cp39-cp39-win32.whl", hash = "sha256:6d9dfb9959a3b0039ee06c1a1a90dc23bac3b430842dcb97908ddde05870601c"}, + {file = "Pillow-9.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:54614444887e0d3043557d9dbc697dbb16cfb5a35d672b7a0fcc1ed0cf1c600b"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b9b752ab91e78234941e44abdecc07f1f0d8f51fb62941d32995b8161f68cfe5"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d3b56206244dc8711f7e8b7d6cad4663917cd5b2d950799425076681e8766286"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aabdab8ec1e7ca7f1434d042bf8b1e92056245fb179790dc97ed040361f16bfd"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:db74f5562c09953b2c5f8ec4b7dfd3f5421f31811e97d1dbc0a7c93d6e3a24df"}, + {file = "Pillow-9.4.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e9d7747847c53a16a729b6ee5e737cf170f7a16611c143d95aa60a109a59c336"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b52ff4f4e002f828ea6483faf4c4e8deea8d743cf801b74910243c58acc6eda3"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:575d8912dca808edd9acd6f7795199332696d3469665ef26163cd090fa1f8bfa"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c3c4ed2ff6760e98d262e0cc9c9a7f7b8a9f61aa4d47c58835cdaf7b0b8811bb"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e621b0246192d3b9cb1dc62c78cfa4c6f6d2ddc0ec207d43c0dedecb914f152a"}, + {file = "Pillow-9.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8f127e7b028900421cad64f51f75c051b628db17fb00e099eb148761eed598c9"}, + {file = "Pillow-9.4.0.tar.gz", hash = "sha256:a1c2d7780448eb93fbcc3789bf3916aa5720d942e37945f4056680317f1cd23e"}, ] platformdirs = [ - {file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"}, - {file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"}, + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, @@ -3095,8 +3178,8 @@ poethepoet = [ {file = "poethepoet-0.13.1.tar.gz", hash = "sha256:4f6962f17f5d5a453fd7fa66e3e7897e9191d4289148433efe441c81f2451a46"}, ] pre-commit = [ - {file = "pre_commit-2.20.0-py2.py3-none-any.whl", hash = "sha256:51a5ba7c480ae8072ecdb6933df22d2f812dc897d5fe848778116129a681aac7"}, - {file = "pre_commit-2.20.0.tar.gz", hash = 
"sha256:a978dac7bc9ec0bcee55c18a277d553b0f419d259dadb4b9418ff2d00eb43959"}, + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, ] prompt-toolkit = [ {file = "prompt_toolkit-3.0.36-py3-none-any.whl", hash = "sha256:aa64ad242a462c5ff0363a7b9cfe696c20d55d9fc60c11fd8e632d064804d305"}, @@ -3194,12 +3277,12 @@ pyflakes = [ {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, ] Pygments = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] pylint = [ - {file = "pylint-2.15.8-py3-none-any.whl", hash = "sha256:ea82cd6a1e11062dc86d555d07c021b0fb65afe39becbe6fe692efd6c4a67443"}, - {file = "pylint-2.15.8.tar.gz", hash = "sha256:ec4a87c33da054ab86a6c79afa6771dc8765cb5631620053e727fcf3ef8cbed7"}, + {file = "pylint-2.15.9-py3-none-any.whl", hash = "sha256:349c8cd36aede4d50a0754a8c0218b43323d13d5d88f4b2952ddfe3e169681eb"}, + {file = "pylint-2.15.9.tar.gz", hash = "sha256:18783cca3cfee5b83c6c5d10b3cdb66c6594520ffae61890858fe8d932e1c6b4"}, ] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, @@ -3218,8 +3301,8 @@ python-dateutil = [ {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] pytz = [ - {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, - {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, + {file = "pytz-2022.7-py2.py3-none-any.whl", hash = "sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd"}, + {file = "pytz-2022.7.tar.gz", hash = "sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a"}, ] pyupgrade = [ {file = "pyupgrade-2.38.4-py2.py3-none-any.whl", hash = "sha256:944ff993c396ddc2b9012eb3de4cda138eb4c149b22c6c560d4c8bfd0e180982"}, @@ -3599,8 +3682,8 @@ setuptools = [ {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, ] setuptools-scm = [ - {file = "setuptools_scm-7.0.5-py3-none-any.whl", hash = "sha256:7930f720905e03ccd1e1d821db521bff7ec2ac9cf0ceb6552dd73d24a45d3b02"}, - {file = "setuptools_scm-7.0.5.tar.gz", hash = "sha256:031e13af771d6f892b941adb6ea04545bbf91ebc5ce68c78aaf3fff6e1fb4844"}, + {file = "setuptools_scm-7.1.0-py3-none-any.whl", hash = "sha256:73988b6d848709e2af142aa48c986ea29592bbcfca5375678064708205253d8e"}, + {file = "setuptools_scm-7.1.0.tar.gz", hash = "sha256:6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, @@ -3775,8 +3858,8 @@ tqdm = [ {file = "tqdm-4.64.1.tar.gz", hash = 
"sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"}, ] traitlets = [ - {file = "traitlets-5.7.1-py3-none-any.whl", hash = "sha256:57ba2ba951632eeab9388fa45f342a5402060a5cc9f0bb942f760fafb6641581"}, - {file = "traitlets-5.7.1.tar.gz", hash = "sha256:fde8f62c05204ead43c2c1b9389cfc85befa7f54acb5da28529d671175bb4108"}, + {file = "traitlets-5.8.0-py3-none-any.whl", hash = "sha256:c864831efa0ba6576d09b44884b34e41defc18c0d7e720b4a2d6698c842cab3e"}, + {file = "traitlets-5.8.0.tar.gz", hash = "sha256:6cc57d6dc28c85d5365961726ffd19b538739347749e13ebe34e03323a0e8f84"}, ] transformers = [ {file = "transformers-4.25.1-py3-none-any.whl", hash = "sha256:60f1be15e17e4a54373c787c713ec149dabcc63464131ac45611618fe7c2016e"}, @@ -3802,9 +3885,13 @@ wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, ] +wheel = [ + {file = "wheel-0.38.4-py3-none-any.whl", hash = "sha256:b60533f3f5d530e971d6737ca6d58681ee434818fab630c83a734bb10c083ce8"}, + {file = "wheel-0.38.4.tar.gz", hash = "sha256:965f5259b566725405b05e7cf774052044b1ed30119b5d586b2703aafe8719ac"}, +] widgetsnbextension = [ - {file = "widgetsnbextension-4.0.4-py3-none-any.whl", hash = "sha256:fa0e840719ec95dd2ec85c3a48913f1a0c29d323eacbcdb0b29bfed0cc6da678"}, - {file = "widgetsnbextension-4.0.4.tar.gz", hash = "sha256:44c69f18237af0f610557d6c1c7ef76853f5856a0e604c0a517f2320566bb775"}, + {file = "widgetsnbextension-4.0.5-py3-none-any.whl", hash = "sha256:eaaaf434fb9b08bd197b2a14ffe45ddb5ac3897593d43c69287091e5f3147bf7"}, + {file = "widgetsnbextension-4.0.5.tar.gz", hash = "sha256:003f716d930d385be3fd9de42dd9bf008e30053f73bddde235d14fbeaeff19af"}, ] wrapt = [ {file = "wrapt-1.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1b376b3f4896e7930f1f772ac4b064ac12598d1c38d04907e696cc4d794b43d3"}, @@ -3873,85 +3960,104 @@ wrapt = [ {file = "wrapt-1.14.1.tar.gz", hash = "sha256:380a85cf89e0e69b7cfbe2ea9f765f004ff419f34194018a6827ac0e3edfed4d"}, ] xxhash = [ - {file = "xxhash-3.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5e9141f224a4dc984ea016744aa40a8a040054ef91933b2f9c81ba18e5b9d06e"}, - {file = "xxhash-3.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b8810fa72d361262168c2b215e3cee223eb19b74806c08713b943f57f0c91fd6"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb07bb4125c432f500a76a84ef51c0eafc09afbd1479308c6e1e2bbb73a33bb4"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4582e12d9aa25530449a8cad4e9e8e973e0b2f28e77ef6504fc6f216f8f07406"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d72b5d4eee98ecd25a2c647f7547a024585400ab13aa7ec837ebb8a25151bbef"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f50cfc3e80fe241d25e557f7ca5a145d3d557bdf738cd2d355bfe1324c28d21"}, - {file = "xxhash-3.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:90d03b46fc06f9ca3c0a4db685df3efffeb880ebcef2ffee707057e09fb8cba2"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:540966f42ccd0d3d09539a7236fbfdce6b15d7be49ee5d5adaef0aa0d020cd1e"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_i686.whl", hash = 
"sha256:2d65d773c4d9c8d1a88af8e0e2169910cfc3d425006e2eb18cd13a6391543ed1"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:3a9e2052ac6520e1f56630ff689b2b85ccd24799d362493435cf46defe163cc1"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:77c6d1e0993e8a314f4a6aec911c12fbb4caf4f58223381d3d41fa153ae6924f"}, - {file = "xxhash-3.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:aa4e22665290f2dfa608901c38b1a5f0d367280fd8adc5996356d7f4993f41f7"}, - {file = "xxhash-3.1.0-cp310-cp310-win32.whl", hash = "sha256:5c65cfb8fd3efd3d574a0cd4abbe59741f720334fa1d79e5366b34b0f4584b66"}, - {file = "xxhash-3.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:8f32770a4b39ffe6a5785f87b66435b2e4048ba4a36334108ac5d375447ce667"}, - {file = "xxhash-3.1.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:effd26669365a71e1337977ba789c95029c9cb0ac26e7455255922d3c9ff8fff"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2324c31095708ceb7ee8c15b31bd1bea7376ca477748f9a20aba2e94d01fab1"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1980f79c8ab4536c01048eb6398db0ac2049292150064bef665fa4c89918f86c"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:11d4af8d50b8b08835f653a96d58bb3658454144e5e4d28e369f4b3ad2bff4ea"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83a7b89e0b8b26cb86369ca0a7395299e0046930664ce96cbc07702504af9a26"}, - {file = "xxhash-3.1.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca24dd052adf23e1fd8fb5839d9046328e60222a866fa3c2761e90ddab1fc2b8"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f7f1b27db4798f7ebe599107c75b2a0648fc1f9d9226fa2771fc296c5593dc7e"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e24bab9aecdfba23c7feb3b173488ca6b3168a50095ff544dedc7caa0c05ac3c"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:cb3032588cd46bc6d16b2b35cd7ff5041fcc90423ae7c8f62263a029ff8f1e5d"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:b9c56f45fd18879212b480dc44dc1da44a22d453e3b4038c4b686f6307124220"}, - {file = "xxhash-3.1.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:00f2603b91fbb6fd1c3b83b263a405834e2df393fd2bac6a86e2e0ecc511076c"}, - {file = "xxhash-3.1.0-cp36-cp36m-win32.whl", hash = "sha256:33f865b6eb9126a60345cf3106925a5039ef582b840d2df96f7777a160d0ef17"}, - {file = "xxhash-3.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:95175636d28943eaf3da331aa57c7d02756017880151e11f8476a2ef49dd35de"}, - {file = "xxhash-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b43b55e0d2d81f4ec8caaf165a0a96325d7dd4317770b663701700f9aee855ed"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:694fd42161959382b70ea3442ea017675071dafe8b960849d5a599c4538737d8"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a3d7cd6412a1d0d5e43be188a67f95326e5a8c5d2ae1ad10adf8f896e630091"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1ab8f652ffaed3855d25f7666f63bf1ee81ead4d9d30cc9e592571a3959d2964"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:f2259f13de36b4e675840e50b16bcd5c6f7aec6f5e833af47b3a0186c19e92dd"}, - {file = "xxhash-3.1.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e86290486768b51478f320abca9fe61805578298b6e60719ec23bca85c60eec"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d08ebd5313f6811ef76cde0f97179374b047442b918089a09019fed53b9f9cef"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2e798287d6efcd51df12ac67f65ba7d78937be80c2c91bff2d17bf5404c63a24"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:91bf72f009114320d9cbb452d5995286f2c6f70b3f53041f72654c4c1a8b79bd"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:d30df80c8bc56aa55f545b1840d84ad6f773a3623b3e1462f17ebbd93c4c69ae"}, - {file = "xxhash-3.1.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9d4f9c8570e9adecae7d480090bcbf856b1d28f462c30c5cbe9f23b6418d6423"}, - {file = "xxhash-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:0bce4ce403129c6cee69860cf2597d04f29c714797c11e8ec3b2b7b3677c4586"}, - {file = "xxhash-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cf20bca461ae45273599be8635b60b517d2212b51d6d5d85fc8c441078eb02ab"}, - {file = "xxhash-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a9efbc1c780ef3b578486eb250f5e93b2934c918386d981d96b7a06bae90c4d4"}, - {file = "xxhash-3.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b16e7fc7991118c0b6cd9f5e141911110388f39df58b2996834619d2b956b4a8"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0f1e298f80e302fd1b859e0b27f997eae82e9e9592843a1df2ca79122365ac1"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4645b106732885fc488656657a5790dee4d8ffd123d2134647028f6575f2c05e"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:02f9a6176152a64955b3dff89dfb2d3c9a7c93e862cbc37c0858e8e25d1f3f3c"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edd65c4850231324af7a613e5647c1c484f3dcbcde4a0e608d099050c684ae79"}, - {file = "xxhash-3.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80b6cd0f781c198b0b53f78124658d0f407fbba7450e79d537505608bf4125ba"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3ae86b9672ad4ef431b0e1d284530289382575e2569078071c7adcf5827b4995"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:3cb1c759c2a863dd963bdca8504c7ae39388dd1ef189fca91b94f18acb7bde26"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:e5d54d8e01c34acf3361bb58c5022a92abc9d5054b919a1d483679d59989bbff"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:362520c908887c85af832e16448bad58cb182d165a16241e3251bdd17bd427be"}, - {file = "xxhash-3.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6502a60ced52547e9c8e3eba3e5818ac0adca3e9abb5e32d2ee73a060f3dc362"}, - {file = "xxhash-3.1.0-cp38-cp38-win32.whl", hash = "sha256:4143a1ad5f436550fcc091c80e7af23ec31cca1991750391067b24b051dcd0d7"}, - {file = "xxhash-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:04812deabfdba3fa9cae57abb908a3f980bccbe9a4178f3e56afca2f1f625874"}, - {file = "xxhash-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:53e587e6625bc9a7cfa5e067837590a626ff4150000ae31be2af73a67d08ea8c"}, - {file = 
"xxhash-3.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:778f3c6007d280b6fff12f95c81d9c4ad6907632a0bfecf23aca18afb54319c0"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc93e4bd34fd14459672345ca4a070b0f28d906bea4b178373b4271498e38ec9"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65a9aac88cc960b3a21a52922d5846f1b15af7a5b937a26c7edee1d3fe80800c"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b74a787be31b8493797d3e7dfac2b240ed443bcd1b42dfbb406629538f103667"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21e030031f13455c9bfc10ed58301fbee8fad0e179cc6a1b15e899d71af2958f"}, - {file = "xxhash-3.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c627840400b90a90d989ebef431b146e89e44377de42cd2e75996bbed394e3c5"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:62cc09f3024ef1a0153e32ac6593025f20fae13b1bc5d08b639891ec110cacec"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:de9758872881d50d0946dfc6067b4782986de6af5ec74c266d47d85c699aa0de"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:216b90f30351fe434903bb039ae88df4c5ae43eb4320a012f6c73bec1d630213"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:84e9d14baccdf31e3c59ed34b9d00df52ad4db376dbbbaad936ea02b9be4a534"}, - {file = "xxhash-3.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8f4089c92b7aac2ead222345b0368e7d69e7a61e7a56762ae2f5e8d67fb67349"}, - {file = "xxhash-3.1.0-cp39-cp39-win32.whl", hash = "sha256:dfa73020bc696a46dab2dddd28c76d1abcd0643dc8a2dd06a037392bda5cc5ec"}, - {file = "xxhash-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0602b45447934fd5b81b387e76512a7c7c946b571b3f9a7d7b2cd9d3a09f9041"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a06311c247f2f45619e754249ca6f868c349fbfb63979ce291c83151840952a2"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f109a872aba254ffebe3c749a6b7148463e5d3168ac5afb515f1a929e73feb8f"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4521fed12d111bb2691ca0dc01fa8b36f6c626f53d9ee54befcea957e1b4dbaa"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c60ff7946e46beaa873509c1ca41937b40fc3048620cbd8441bfe03aa053f33"}, - {file = "xxhash-3.1.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:379c37f2d46a441bdb71af79443990e21943ef644ffeed5662157a9a682d55be"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:daa1a37685759003680bd2775053bbf772c4f71ad3c729810ea4901535635d5e"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c58b2bc7240966f54db9ef7dcfcc988362b0a315c12ed13a778917457c8dfe9d"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efafa866662f6ab50f1ffb808424ca9373d2f3b4a73e6ea66432dce1779f501c"}, - {file = "xxhash-3.1.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea074722fa953a1a3bece979620e2f0b43f2dfca841de84aca32a477c2fdb658"}, - {file = 
"xxhash-3.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:369af334d58f6d2f716bd1557d3580c4c1235077090769abf1d54daec2b301a7"}, - {file = "xxhash-3.1.0.tar.gz", hash = "sha256:ac21b1e21dc6fdfee9a57b53f4777539d53a84f2e1546a3f802f159f9966bdc1"}, + {file = "xxhash-3.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:af44b9e59c4b2926a4e3c7f9d29949ff42fcea28637ff6b8182e654461932be8"}, + {file = "xxhash-3.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1bdd57973e2b802ef32553d7bebf9402dac1557874dbe5c908b499ea917662cd"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b7c9aa77bbce61a5e681bd39cb6a804338474dcc90abe3c543592aa5d6c9a9b"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:11bf87dc7bb8c3b0b5e24b7b941a9a19d8c1f88120b6a03a17264086bc8bb023"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2783d41487ce6d379fdfaa7332fca5187bf7010b9bddcf20cafba923bc1dc665"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:561076ca0dcef2fbc20b2bc2765bff099e002e96041ae9dbe910a863ca6ee3ea"}, + {file = "xxhash-3.2.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3a26eeb4625a6e61cedc8c1b39b89327c9c7e1a8c2c4d786fe3f178eb839ede6"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d93a44d0104d1b9b10de4e7aadf747f6efc1d7ec5ed0aa3f233a720725dd31bd"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:89585adc73395a10306d2e2036e50d6c4ac0cf8dd47edf914c25488871b64f6d"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:a892b4b139126a86bfdcb97cd912a2f8c4e8623869c3ef7b50871451dd7afeb0"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e998efb190653f70e0f30d92b39fc645145369a4823bee46af8ddfc244aa969d"}, + {file = "xxhash-3.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e8ed3bd2b8bb3277710843ca63e4f5c3ee6f8f80b083be5b19a7a9905420d11e"}, + {file = "xxhash-3.2.0-cp310-cp310-win32.whl", hash = "sha256:20181cbaed033c72cb881b2a1d13c629cd1228f113046133469c9a48cfcbcd36"}, + {file = "xxhash-3.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:a0f7a16138279d707db778a63264d1d6016ac13ffd3f1e99f54b2855d6c0d8e1"}, + {file = "xxhash-3.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5daff3fb5bfef30bc5a2cb143810d376d43461445aa17aece7210de52adbe151"}, + {file = "xxhash-3.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:75bb5be3c5de702a547715f320ecf5c8014aeca750ed5147ca75389bd22e7343"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01f36b671ff55cb1d5c2f6058b799b697fd0ae4b4582bba6ed0999678068172a"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d4d4519123aac73c93159eb8f61db9682393862dd669e7eae034ecd0a35eadac"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:994e4741d5ed70fc2a335a91ef79343c6b1089d7dfe6e955dd06f8ffe82bede6"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:919bc1b010aa6ff0eb918838ff73a435aed9e9a19c3202b91acecd296bf75607"}, + {file = "xxhash-3.2.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:17b65454c5accbb079c45eca546c27c4782f5175aa320758fafac896b1549d27"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b0c094d5e65a46dbf3fe0928ff20873a747e6abfd2ed4b675beeb2750624bc2e"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f94163ebe2d5546e6a5977e96d83621f4689c1054053428cf8d4c28b10f92f69"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:cead7c0307977a00b3f784cff676e72c147adbcada19a2e6fc2ddf54f37cf387"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:a0e1bd0260c1da35c1883321ce2707ceea07127816ab625e1226ec95177b561a"}, + {file = "xxhash-3.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc8878935671490efe9275fb4190a6062b73277bd273237179b9b5a2aa436153"}, + {file = "xxhash-3.2.0-cp311-cp311-win32.whl", hash = "sha256:a433f6162b18d52f7068175d00bd5b1563b7405f926a48d888a97b90a160c40d"}, + {file = "xxhash-3.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:a32d546a1752e4ee7805d6db57944f7224afa7428d22867006b6486e4195c1f3"}, + {file = "xxhash-3.2.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:82daaab720866bf690b20b49de5640b0c27e3b8eea2d08aa75bdca2b0f0cfb63"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3126df6520cbdbaddd87ce74794b2b6c45dd2cf6ac2b600a374b8cdb76a2548c"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e172c1ee40507ae3b8d220f4048aaca204f203e1e4197e8e652f5c814f61d1aa"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5384f1d9f30876f5d5b618464fb19ff7ce6c0fe4c690fbaafd1c52adc3aae807"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26cb52174a7e96a17acad27a3ca65b24713610ac479c99ac9640843822d3bebf"}, + {file = "xxhash-3.2.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fbcd613a5e76b1495fc24db9c37a6b7ee5f214fd85979187ec4e032abfc12ded"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:f988daf25f31726d5b9d0be6af636ca9000898f9ea43a57eac594daea25b0948"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:bbc30c98ab006ab9fc47e5ed439c00f706bc9d4441ff52693b8b6fea335163e0"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:2408d49260b0a4a7cc6ba445aebf38e073aeaf482f8e32767ca477e32ccbbf9e"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:3f4152fd0bf8b03b79f2f900fd6087a66866537e94b5a11fd0fd99ef7efe5c42"}, + {file = "xxhash-3.2.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:0eea848758e4823a01abdbcccb021a03c1ee4100411cbeeb7a5c36a202a0c13c"}, + {file = "xxhash-3.2.0-cp36-cp36m-win32.whl", hash = "sha256:77709139af5123c578ab06cf999429cdb9ab211047acd0c787e098dcb3f1cb4d"}, + {file = "xxhash-3.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:91687671fd9d484a4e201ad266d366b695a45a1f2b41be93d116ba60f1b8f3b3"}, + {file = "xxhash-3.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e4af8bc5c3fcc2192c266421c6aa2daab1a18e002cb8e66ef672030e46ae25cf"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8be562e2ce3e481d9209b6f254c3d7c5ff920eb256aba2380d2fb5ba75d4f87"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9eba0c7c12126b12f7fcbea5513f28c950d28f33d2a227f74b50b77789e478e8"}, + 
{file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2198c4901a0223c48f6ec0a978b60bca4f4f7229a11ca4dc96ca325dd6a29115"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50ce82a71b22a3069c02e914bf842118a53065e2ec1c6fb54786e03608ab89cc"}, + {file = "xxhash-3.2.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b5019fb33711c30e54e4e57ae0ca70af9d35b589d385ac04acd6954452fa73bb"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0d54ac023eef7e3ac9f0b8841ae8a376b933043bc2ad428121346c6fa61c491c"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c55fa832fc3fe64e0d29da5dc9b50ba66ca93312107cec2709300ea3d3bab5c7"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:f4ce006215497993ae77c612c1883ca4f3973899573ce0c52fee91f0d39c4561"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1afb9b9d27fd675b436cb110c15979976d92d761ad6e66799b83756402f3a974"}, + {file = "xxhash-3.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:baa99cebf95c1885db21e119395f222a706a2bb75a545f0672880a442137725e"}, + {file = "xxhash-3.2.0-cp37-cp37m-win32.whl", hash = "sha256:75aa692936942ccb2e8fd6a386c81c61630ac1b6d6e921698122db8a930579c3"}, + {file = "xxhash-3.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:0a2cdfb5cae9fafb9f7b65fd52ecd60cf7d72c13bb2591ea59aaefa03d5a8827"}, + {file = "xxhash-3.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3a68d1e8a390b660d94b9360ae5baa8c21a101bd9c4790a8b30781bada9f1fc6"}, + {file = "xxhash-3.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ce7c3ce28f94302df95eaea7c9c1e2c974b6d15d78a0c82142a97939d7b6c082"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0dcb419bf7b0bc77d366e5005c25682249c5521a63fd36c51f584bd91bb13bd5"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae521ed9287f86aac979eeac43af762f03d9d9797b2272185fb9ddd810391216"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b0d16775094423088ffa357d09fbbb9ab48d2fb721d42c0856b801c86f616eec"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe454aeab348c42f56d6f7434ff758a3ef90787ac81b9ad5a363cd61b90a1b0b"}, + {file = "xxhash-3.2.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:052fd0efdd5525c2dbc61bebb423d92aa619c4905bba605afbf1e985a562a231"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:02badf3754e2133de254a4688798c4d80f0060635087abcb461415cb3eb82115"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:66b8a90b28c13c2aae7a71b32638ceb14cefc2a1c8cf23d8d50dfb64dfac7aaf"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:649cdf19df175925ad87289ead6f760cd840730ee85abc5eb43be326a0a24d97"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:4b948a03f89f5c72d69d40975af8af241111f0643228796558dc1cae8f5560b0"}, + {file = "xxhash-3.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49f51fab7b762da7c2cee0a3d575184d3b9be5e2f64f26cae2dd286258ac9b3c"}, + {file = "xxhash-3.2.0-cp38-cp38-win32.whl", hash = "sha256:1a42994f0d42b55514785356722d9031f064fd34e495b3a589e96db68ee0179d"}, + {file = 
"xxhash-3.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:0a6d58ba5865475e53d6c2c4fa6a62e2721e7875e146e2681e5337a6948f12e7"}, + {file = "xxhash-3.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aabdbc082030f8df613e2d2ea1f974e7ad36a539bdfc40d36f34e55c7e4b8e94"}, + {file = "xxhash-3.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:498843b66b9ca416e9d03037e5875c8d0c0ab9037527e22df3b39aa5163214cd"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a910b1193cd90af17228f5d6069816646df0148f14f53eefa6b2b11a1dedfcd0"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb6d8ce31dc25faf4da92991320e211fa7f42de010ef51937b1dc565a4926501"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:883dc3d3942620f4c7dbc3fd6162f50a67f050b714e47da77444e3bcea7d91cc"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59dc8bfacf89b8f5be54d55bc3b4bd6d74d0c5320c8a63d2538ac7df5b96f1d5"}, + {file = "xxhash-3.2.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61e6aa1d30c2af692aa88c4dd48709426e8b37bff6a574ee2de677579c34a3d6"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:314ec0bd21f0ee8d30f2bd82ed3759314bd317ddbbd8555668f3d20ab7a8899a"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:dad638cde3a5357ad3163b80b3127df61fb5b5e34e9e05a87697144400ba03c7"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:eaa3ea15025b56076d806b248948612289b093e8dcda8d013776b3848dffff15"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:7deae3a312feb5c17c97cbf18129f83cbd3f1f9ec25b0f50e2bd9697befb22e7"}, + {file = "xxhash-3.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:add774341c09853b1612c64a526032d95ab1683053325403e1afbe3ad2f374c5"}, + {file = "xxhash-3.2.0-cp39-cp39-win32.whl", hash = "sha256:9b94749130ef3119375c599bfce82142c2500ef9ed3280089157ee37662a7137"}, + {file = "xxhash-3.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:e57d94a1552af67f67b27db5dba0b03783ea69d5ca2af2f40e098f0ba3ce3f5f"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:92fd765591c83e5c5f409b33eac1d3266c03d3d11c71a7dbade36d5cdee4fbc0"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8970f6a411a9839a02b23b7e90bbbba4a6de52ace009274998566dc43f36ca18"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c5f3e33fe6cbab481727f9aeb136a213aed7e33cd1ca27bd75e916ffacc18411"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:368265392cb696dd53907e2328b5a8c1bee81cf2142d0cc743caf1c1047abb36"}, + {file = "xxhash-3.2.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:3b1f3c6d67fa9f49c4ff6b25ce0e7143bab88a5bc0f4116dd290c92337d0ecc7"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c5e8db6e1ee7267b7c412ad0afd5863bf7a95286b8333a5958c8097c69f94cf5"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:761df3c7e2c5270088b691c5a8121004f84318177da1ca1db64222ec83c44871"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d2d15a707e7f689531eb4134eccb0f8bf3844bb8255ad50823aa39708d9e6755"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e6b2ba4ff53dd5f57d728095e3def7375eb19c90621ce3b41b256de84ec61cfd"}, + {file = "xxhash-3.2.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:61b0bcf946fdfd8ab5f09179dc2b5c74d1ef47cedfc6ed0ec01fdf0ee8682dd3"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f7b79f0f302396d8e0d444826ceb3d07b61977793886ebae04e82796c02e42dc"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e0773cd5c438ffcd5dbff91cdd503574f88a4b960e70cedeb67736583a17a918"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ec1f57127879b419a2c8d2db9d9978eb26c61ae17e5972197830430ae78d25b"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d4b15c00e807b1d3d0b612338c814739dec310b80fb069bd732b98ddc709ad7"}, + {file = "xxhash-3.2.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9d3f686e3d1c8900c5459eee02b60c7399e20ec5c6402364068a343c83a61d90"}, + {file = "xxhash-3.2.0.tar.gz", hash = "sha256:1afd47af8955c5db730f630ad53ae798cf7fae0acb64cebb3cf94d35c47dd088"}, ] yarl = [ {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, diff --git a/requirements-dev.txt b/requirements-dev.txt index 16e434ae..e146fbef 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -5,7 +5,7 @@ appnope==0.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" and astroid==2.12.13 ; python_full_version >= "3.8.1" and python_version < "3.12" asttokens==2.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" async-timeout==4.0.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -attrs==22.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +attrs==22.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" babel==2.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" backcall==0.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" bandit==1.7.4 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -20,46 +20,46 @@ colorama==0.4.6 ; python_full_version >= "3.8.1" and python_version < "3.12" comm==0.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" commonmark==0.9.1 ; python_full_version >= "3.8.1" and python_version < "3.12" contourpy==1.0.6 ; python_full_version >= "3.8.1" and python_version < "3.12" -coverage[toml]==6.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +coverage[toml]==7.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" cycler==0.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" darglint==1.8.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -datasets[datasets]==2.7.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +datasets[datasets]==2.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" debugpy==1.6.4 ; python_full_version >= "3.8.1" and python_version < "3.12" decorator==5.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -dill==0.3.6 ; python_full_version >= "3.8.1" and python_version < "3.12" +dill==0.3.6 ; python_version < "3.12" and python_full_version >= "3.8.1" distlib==0.3.6 ; python_full_version >= 
"3.8.1" and python_version < "3.12" docutils==0.17.1 ; python_full_version >= "3.8.1" and python_version < "3.12" dparse==0.6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" entrypoints==0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -exceptiongroup==1.0.4 ; python_full_version >= "3.8.1" and python_version < "3.11" +exceptiongroup==1.1.0 ; python_full_version >= "3.8.1" and python_version < "3.11" executing==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -filelock==3.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +filelock==3.9.0 ; python_full_version >= "3.8.1" and python_version < "3.12" flake8==6.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" fonttools==4.38.0 ; python_full_version >= "3.8.1" and python_version < "3.12" frozenlist==1.3.3 ; python_full_version >= "3.8.1" and python_version < "3.12" fsspec[http]==2022.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" gitdb==4.0.10 ; python_full_version >= "3.8.1" and python_version < "3.12" -gitpython==3.1.29 ; python_full_version >= "3.8.1" and python_version < "3.12" +gitpython==3.1.30 ; python_full_version >= "3.8.1" and python_version < "3.12" huggingface-hub==0.11.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -identify==2.5.10 ; python_full_version >= "3.8.1" and python_version < "3.12" +identify==2.5.11 ; python_full_version >= "3.8.1" and python_version < "3.12" idna==3.4 ; python_full_version >= "3.8.1" and python_version < "3.12" imagesize==1.4.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -importlib-metadata==5.1.0 ; python_full_version >= "3.8.1" and python_version < "3.10" +importlib-metadata==6.0.0 ; python_full_version >= "3.8.1" and python_version < "3.10" iniconfig==1.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -ipykernel==6.19.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -ipykernel[notebook]==6.19.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +ipykernel==6.19.4 ; python_full_version >= "3.8.1" and python_version < "3.12" +ipykernel[notebook]==6.19.4 ; python_full_version >= "3.8.1" and python_version < "3.12" ipython==8.7.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -ipywidgets[notebook]==8.0.3 ; python_full_version >= "3.8.1" and python_version < "3.12" -isort==5.11.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -isort[colors]==5.11.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +ipywidgets[notebook]==8.0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" +isort==5.11.4 ; python_full_version >= "3.8.1" and python_version < "3.12" +isort[colors]==5.11.4 ; python_full_version >= "3.8.1" and python_version < "3.12" jedi==0.18.2 ; python_full_version >= "3.8.1" and python_version < "3.12" jinja2==3.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" joblib==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" joblib[sklearn]==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" json-tricks==3.16.1 ; python_full_version >= "3.8.1" and python_version < "3.12" jupyter-client==7.4.8 ; python_full_version >= "3.8.1" and python_version < "3.12" -jupyter-core==5.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -jupyterlab-widgets==3.0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" +jupyter-core==5.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +jupyterlab-widgets==3.0.5 ; 
python_full_version >= "3.8.1" and python_version < "3.12" kiwisolver==1.4.4 ; python_full_version >= "3.8.1" and python_version < "3.12" lazy-object-proxy==1.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" markdown==3.4.1 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -67,12 +67,12 @@ markupsafe==2.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib-inline==0.1.6 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib==3.6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" mccabe==0.7.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -multidict==6.0.3 ; python_full_version >= "3.8.1" and python_version < "3.12" +multidict==6.0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" multiprocess==0.70.14 ; python_full_version >= "3.8.1" and python_version < "3.12" mypy-extensions==0.4.3 ; python_full_version >= "3.8.1" and python_version < "3.12" nest-asyncio==1.5.6 ; python_full_version >= "3.8.1" and python_version < "3.12" nodeenv==1.7.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -numpy==1.23.5 ; python_version < "3.12" and python_full_version >= "3.8.1" +numpy==1.24.1 ; python_version < "3.12" and python_full_version >= "3.8.1" nvidia-cublas-cu11==11.10.3.66 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cuda-nvrtc-cu11==11.7.99 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cuda-runtime-cu11==11.7.99 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" @@ -85,11 +85,11 @@ pathspec==0.10.3 ; python_full_version >= "3.8.1" and python_version < "3.12" pbr==5.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pexpect==4.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform != "win32" pickleshare==0.7.5 ; python_full_version >= "3.8.1" and python_version < "3.12" -pillow==9.3.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -platformdirs==2.6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pillow==9.4.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +platformdirs==2.6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" pluggy==1.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" poethepoet==0.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -pre-commit==2.20.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pre-commit==2.21.0 ; python_full_version >= "3.8.1" and python_version < "3.12" prompt-toolkit==3.0.36 ; python_full_version >= "3.8.1" and python_version < "3.12" protobuf==3.20.2 ; python_full_version >= "3.8.1" and python_version < "3.12" psutil==5.9.4 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -100,13 +100,13 @@ pyarrow==10.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pycodestyle==2.10.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pycparser==2.21 ; python_full_version >= "3.8.1" and python_version < "3.12" and implementation_name == "pypy" pyflakes==3.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -pygments==2.13.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -pylint==2.15.8 ; python_full_version >= "3.8.1" and python_version < "3.12" +pygments==2.14.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pylint==2.15.9 ; python_full_version >= "3.8.1" and python_version < "3.12" 
pyparsing==3.0.9 ; python_full_version >= "3.8.1" and python_version < "3.12" pytest-cov==4.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pytest==7.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" python-dateutil==2.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -pytz==2022.6 ; python_full_version >= "3.8.1" and python_version < "3.12" +pytz==2022.7 ; python_full_version >= "3.8.1" and python_version < "3.12" pyupgrade==2.38.4 ; python_full_version >= "3.8.1" and python_version < "3.12" pywin32==305 ; sys_platform == "win32" and platform_python_implementation != "PyPy" and python_full_version >= "3.8.1" and python_version < "3.12" pyyaml==6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -122,7 +122,7 @@ safety==2.3.4 ; python_full_version >= "3.8.1" and python_version < "3.12" scikit-learn[sklearn]==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" scipy==1.9.3 ; python_full_version >= "3.8.1" and python_version < "3.12" sentencepiece==0.1.97 ; python_full_version >= "3.8.1" and python_version < "3.12" -setuptools-scm==7.0.5 ; python_full_version >= "3.8.1" and python_version < "3.12" +setuptools-scm==7.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" setuptools==65.6.3 ; python_full_version >= "3.8.1" and python_version < "3.12" six==1.16.0 ; python_full_version >= "3.8.1" and python_version < "3.12" smmap==5.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -152,7 +152,7 @@ torch==1.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" torchtyping==0.1.4 ; python_full_version >= "3.8.1" and python_version < "3.12" tornado==6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" tqdm==4.64.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -traitlets==5.7.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +traitlets==5.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" transformers[sentencepiece,tokenizers,torch]==4.25.1 ; python_full_version >= "3.8.1" and python_version < "3.12" typeguard==2.13.3 ; python_full_version >= "3.8.1" and python_version < "3.12" typing-extensions==4.4.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -160,8 +160,8 @@ urllib3==1.26.13 ; python_full_version >= "3.8.1" and python_version < "3.12" virtualenv==20.17.1 ; python_full_version >= "3.8.1" and python_version < "3.12" wcwidth==0.2.5 ; python_full_version >= "3.8.1" and python_version < "3.12" wheel==0.38.4 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" -widgetsnbextension==4.0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" +widgetsnbextension==4.0.5 ; python_full_version >= "3.8.1" and python_version < "3.12" wrapt==1.14.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -xxhash==3.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +xxhash==3.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" yarl==1.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" zipp==3.11.0 ; python_full_version >= "3.8.1" and python_version < "3.10" diff --git a/requirements.txt b/requirements.txt index 9238eb88..73e1b739 100644 --- a/requirements.txt +++ b/requirements.txt @@ -5,24 +5,24 @@ colorama==0.4.6 ; python_full_version >= "3.8.1" and python_version < "3.12" commonmark==0.9.1 ; python_full_version >= "3.8.1" and python_version < "3.12" contourpy==1.0.6 ; python_full_version >= "3.8.1" and python_version < "3.12" 
cycler==0.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -filelock==3.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +filelock==3.9.0 ; python_full_version >= "3.8.1" and python_version < "3.12" fonttools==4.38.0 ; python_full_version >= "3.8.1" and python_version < "3.12" huggingface-hub==0.11.1 ; python_full_version >= "3.8.1" and python_version < "3.12" idna==3.4 ; python_full_version >= "3.8.1" and python_version < "3.12" json-tricks==3.16.1 ; python_full_version >= "3.8.1" and python_version < "3.12" kiwisolver==1.4.4 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib==3.6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -numpy==1.23.5 ; python_full_version >= "3.8.1" and python_version < "3.12" +numpy==1.24.1 ; python_full_version >= "3.8.1" and python_version < "3.12" nvidia-cublas-cu11==11.10.3.66 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cuda-nvrtc-cu11==11.7.99 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cuda-runtime-cu11==11.7.99 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cudnn-cu11==8.5.0.96 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" packaging==22.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pastel==0.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -pillow==9.3.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pillow==9.4.0 ; python_full_version >= "3.8.1" and python_version < "3.12" poethepoet==0.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" protobuf==3.20.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -pygments==2.13.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +pygments==2.14.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pyparsing==3.0.9 ; python_full_version >= "3.8.1" and python_version < "3.12" python-dateutil==2.8.2 ; python_full_version >= "3.8.1" and python_version < "3.12" pyyaml==6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -31,7 +31,7 @@ requests==2.28.1 ; python_full_version >= "3.8.1" and python_version < "3.12" rich==10.16.2 ; python_full_version >= "3.8.1" and python_version < "3.12" scipy==1.9.3 ; python_full_version >= "3.8.1" and python_version < "3.12" sentencepiece==0.1.97 ; python_full_version >= "3.8.1" and python_version < "3.12" -setuptools-scm==7.0.5 ; python_full_version >= "3.8.1" and python_version < "3.12" +setuptools-scm==7.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" setuptools==65.6.3 ; python_full_version >= "3.8.1" and python_version < "3.12" six==1.16.0 ; python_full_version >= "3.8.1" and python_version < "3.12" tokenizers==0.13.2 ; python_full_version >= "3.8.1" and python_version < "3.12" From 06f89a82ed1614d2717bff4f7b39274131ef4b19 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 2 Jan 2023 15:52:35 +0100 Subject: [PATCH 13/30] include 3.11 as build target --- .github/workflows/build.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index d2ef27a8..46c77a89 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -12,7 +12,7 @@ jobs: if: github.actor != 'dependabot[bot]' && github.actor != 'dependabot-preview[bot]' strategy: matrix: - python-version: ["3.8", "3.9", "3.10"] + 
python-version: ["3.8", "3.9", "3.10", "3.11"] steps: - uses: actions/checkout@v3 From 7bcbe92f068cda85da657dba5d83a9b443ce4a02 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 2 Jan 2023 16:03:45 +0100 Subject: [PATCH 14/30] fix different attribution_step argument formatting --- inseq/attr/feat/attention_attribution.py | 49 ++++++++---------------- 1 file changed, 16 insertions(+), 33 deletions(-) diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index 25c2069c..2e8b11f5 100644 --- a/inseq/attr/feat/attention_attribution.py +++ b/inseq/attr/feat/attention_attribution.py @@ -13,13 +13,13 @@ # limitations under the License. """ Attention-based feature attribution methods. """ -from typing import Any, Callable, Dict, Union +from typing import Any, Dict, Union import logging -from ...data import EncoderDecoderBatch, FeatureAttributionStepOutput -from ...utils import Registry, pretty_tensor -from ...utils.typing import ModelIdentifier, SingleScorePerStepTensor, TargetIdsTensor +from ...data import FeatureAttributionStepOutput +from ...utils import Registry +from ...utils.typing import ModelIdentifier from ..attribution_decorators import set_hook, unset_hook from .feature_attribution import FeatureAttribution from .ops import AggregatedAttention, LastLayerAttention @@ -41,43 +41,26 @@ def unhook(self, **kwargs): def attribute_step( self, - batch: EncoderDecoderBatch, - target_ids: TargetIdsTensor, - attributed_fn: Callable[..., SingleScorePerStepTensor], - attribute_target: bool = False, + attribute_fn_main_args: Dict[str, Any], attribution_args: Dict[str, Any] = {}, - attributed_fn_args: Dict[str, Any] = {}, ) -> FeatureAttributionStepOutput: r""" - Performs a single attribution step for the specified target_ids, - given sources and targets in the batch. - - Abstract method, must be implemented by subclasses. + Performs a single attribution step for the specified attribution arguments. Args: - batch (:class:`~inseq.data.EncoderDecoderBatch`): The batch of sequences on which attribution is performed. - target_ids (:obj:`torch.Tensor`): Target token ids of size `(batch_size)` corresponding to tokens - for which the attribution step must be performed. - attributed_fn (:obj:`Callable[..., SingleScorePerStepTensor]`): The function of model outputs - representing what should be attributed (e.g. output probits of model best prediction after softmax). - The parameter must be a function that taking multiple keyword arguments and returns a :obj:`tensor` - of size (batch_size,). If not provided, the default attributed function for the model will be used - (change attribution_model.default_attributed_fn_id). - attribute_target (:obj:`bool`, optional): Whether to attribute the target prefix or not. Defaults to False. + attribute_fn_main_args (:obj:`dict`): Main arguments used for the attribution method. These are built from + model inputs at the current step of the feature attribution process. attribution_args (:obj:`dict`, `optional`): Additional arguments to pass to the attribution method. - Defaults to {}. - attributed_fn_args (:obj:`dict`, `optional`): Additional arguments to pass to the attributed function. - Defaults to {}. + These can be specified by the user while calling the top level `attribute` methods. Defaults to {}. + Returns: - :class:`~inseq.data.FeatureAttributionStepOutput`: A dataclass containing attribution tensors for source - and target attributions of size `(batch_size, source_length)` and `(batch_size, prefix length)`. 
- (target optional if attribute_target=True), plus batch information and any step score present. + :class:`~inseq.data.FeatureAttributionStepOutput`: A dataclass containing a tensor of source + attributions of size `(batch_size, source_length)`, possibly a tensor of target attributions of size + `(batch_size, prefix_length)` if attribute_target=True, and possibly a tensor of deltas of size + `(batch_size)` if the attribution step supports deltas and they are requested. At this point the batch + information is empty, and will later be filled by the enrich_step_output function. """ - logger.debug(f"batch: {batch},\ntarget_ids: {pretty_tensor(target_ids, lpad=4)}") - attribute_fn_args = self.format_attribute_args( - batch, target_ids, attributed_fn, attribute_target, attributed_fn_args, **attribution_args - ) - attr = self.method.attribute(**attribute_fn_args, **attribution_args) + attr = self.method.attribute(**attribute_fn_main_args, **attribution_args) deltas = None if ( attribution_args.get("return_convergence_delta", False) From b2fc73cfd51a9099eaf0be2cb7bd3deb1d6b66ab Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 2 Jan 2023 20:01:07 +0100 Subject: [PATCH 15/30] added basic decoder-only support --- inseq/attr/feat/ops/basic_attention.py | 103 +++++++++++++++++-------- 1 file changed, 72 insertions(+), 31 deletions(-) diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 959fe13d..8e9f12e2 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -104,24 +104,41 @@ def _get_batch_size(self, attention: torch.Tensor) -> int: return attention.size(0) def _extract_forward_pass_args( - self, inputs: MultiStepEmbeddingsTensor, forward_args: Optional[Tuple], is_target_attr: bool + self, + inputs: MultiStepEmbeddingsTensor, + forward_args: Optional[Tuple], + is_target_attr: bool, + is_encoder_decoder: bool, ) -> dict: """extracts the arguments needed for a standard forward pass from the `inputs` and `additional_forward_args` parameters used by Captum""" - use_embeddings = forward_args[6] if is_target_attr else forward_args[7] + if is_encoder_decoder: + + use_embeddings = forward_args[6] if is_target_attr else forward_args[7] + + forward_pass_args = { + "attention_mask": forward_args[4] if is_target_attr else forward_args[5], + "decoder_attention_mask": forward_args[5] if is_target_attr else forward_args[6], + } - forward_pass_args = { - "attention_mask": forward_args[4] if is_target_attr else forward_args[5], - "decoder_attention_mask": forward_args[5] if is_target_attr else forward_args[6], - } + if use_embeddings: + forward_pass_args["inputs_embeds"] = inputs[0] + forward_pass_args["decoder_inputs_embeds"] = inputs[1] if is_target_attr else forward_args[0] + else: + forward_pass_args["input_ids"] = forward_args[0] if is_target_attr else forward_args[1] + forward_pass_args["decoder_input_ids"] = forward_args[1] if is_target_attr else forward_args[2] - if use_embeddings: - forward_pass_args["inputs_embeds"] = inputs[0] - forward_pass_args["decoder_inputs_embeds"] = inputs[1] if is_target_attr else forward_args[0] else: - forward_pass_args["input_ids"] = forward_args[0] if is_target_attr else forward_args[1] - forward_pass_args["decoder_input_ids"] = forward_args[1] if is_target_attr else forward_args[2] + + use_embeddings = forward_args[4] + + forward_pass_args = {"attention_mask": forward_args[3]} + + if use_embeddings: + forward_pass_args["inputs_embeds"] = inputs[0] + else: + 
forward_pass_args["input_ids"] = forward_args[0] return forward_pass_args @@ -146,22 +163,33 @@ def attribute( is_target_attribution = True if len(inputs) > 1 else False - forward_pass_args = self._extract_forward_pass_args(inputs, additional_forward_args, is_target_attribution) + is_encoder_decoder = self.forward_func.is_encoder_decoder + + forward_pass_args = self._extract_forward_pass_args( + inputs, additional_forward_args, is_target_attribution, is_encoder_decoder + ) outputs = self.forward_func.model(**forward_pass_args) - cross_aggregation = torch.stack(outputs.cross_attentions).mean(0) - cross_aggregation = self._merge_attention_heads(cross_aggregation, merge_head_option, use_head) - cross_aggregation = cross_aggregation.select(1, -1) + if is_encoder_decoder: + cross_aggregation = torch.stack(outputs.cross_attentions).mean(0) + cross_aggregation = self._merge_attention_heads(cross_aggregation, merge_head_option, use_head) + cross_aggregation = cross_aggregation.select(1, -1) - attributions = (cross_aggregation,) + attributions = (cross_aggregation,) - if is_target_attribution: - decoder_aggregation = torch.stack(outputs.decoder_attentions).mean(0) - decoder_aggregation = self._merge_attention_heads(decoder_aggregation, merge_head_option, use_head) - decoder_aggregation = decoder_aggregation.select(1, -1) + if is_target_attribution: + decoder_aggregation = torch.stack(outputs.decoder_attentions).mean(0) + decoder_aggregation = self._merge_attention_heads(decoder_aggregation, merge_head_option, use_head) + decoder_aggregation = decoder_aggregation.select(1, -1) - attributions = attributions + (decoder_aggregation,) + attributions = attributions + (decoder_aggregation,) + else: + aggregation = torch.stack(outputs.attentions).mean(0) + aggregation = self._merge_attention_heads(aggregation, merge_head_option, use_head) + aggregation = aggregation.select(1, -1) + + attributions = (aggregation,) return _format_output(is_inputs_tuple, attributions) @@ -186,21 +214,34 @@ def attribute( is_target_attribution = True if len(inputs) > 1 else False - forward_pass_args = self._extract_forward_pass_args(inputs, additional_forward_args, is_target_attribution) + is_encoder_decoder = self.forward_func.is_encoder_decoder + + forward_pass_args = self._extract_forward_pass_args( + inputs, additional_forward_args, is_target_attribution, is_encoder_decoder + ) outputs = self.forward_func.model(**forward_pass_args) - last_layer_cross = outputs.cross_attentions[-1] - last_layer_cross = self._merge_attention_heads(last_layer_cross, merge_head_option, use_head) - last_layer_cross = torch.squeeze(last_layer_cross, 1).select(1, -1) + if is_encoder_decoder: + + last_layer_cross = outputs.cross_attentions[-1] + last_layer_cross = self._merge_attention_heads(last_layer_cross, merge_head_option, use_head) + last_layer_cross = torch.squeeze(last_layer_cross, 1).select(1, -1) - attributions = (last_layer_cross,) + attributions = (last_layer_cross,) + + if is_target_attribution: + last_layer_decoder = outputs.decoder_attentions[-1] + last_layer_decoder = self._merge_attention_heads(last_layer_decoder, merge_head_option, use_head) + last_layer_decoder = torch.squeeze(last_layer_decoder, 1).select(1, -1) + + attributions = attributions + (last_layer_decoder,) + else: - if is_target_attribution: - last_layer_decoder = outputs.decoder_attentions[-1] - last_layer_decoder = self._merge_attention_heads(last_layer_decoder, merge_head_option, use_head) - last_layer_decoder = torch.squeeze(last_layer_decoder, 1).select(1, -1) + 
aggregation = outputs.attentions[-1] + aggregation = self._merge_attention_heads(aggregation, merge_head_option, use_head) + aggregation = torch.squeeze(aggregation, 1).select(1, -1) - attributions = attributions + (last_layer_decoder,) + attributions = (aggregation,) return _format_output(is_inputs_tuple, attributions) From b044b4cb2a2c97e793e879e77ee002bb1db14839 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Tue, 3 Jan 2023 21:04:07 +0100 Subject: [PATCH 16/30] fixed output error for decoder-only models --- inseq/attr/feat/attention_attribution.py | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index 2e8b11f5..40242ef0 100644 --- a/inseq/attr/feat/attention_attribution.py +++ b/inseq/attr/feat/attention_attribution.py @@ -21,6 +21,7 @@ from ...utils import Registry from ...utils.typing import ModelIdentifier from ..attribution_decorators import set_hook, unset_hook +from .attribution_utils import get_source_target_attributions from .feature_attribution import FeatureAttribution from .ops import AggregatedAttention, LastLayerAttention @@ -68,11 +69,12 @@ def attribute_step( and self.method.has_convergence_delta() ): attr, deltas = attr + source_attributions, target_attributions = get_source_target_attributions( + attr, self.attribution_model.is_encoder_decoder + ) return FeatureAttributionStepOutput( - source_attributions=attr if not isinstance(attr, tuple) else attr[0], - target_attributions=None - if not isinstance(attr, tuple) or (isinstance(attr, tuple) and len(attr) == 1) - else attr[1], + source_attributions=source_attributions, + target_attributions=target_attributions, step_scores={"deltas": deltas} if deltas is not None else {}, ) From 8c344b757379b425fc594c21534b68ca1b8165c4 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Tue, 3 Jan 2023 22:15:46 +0100 Subject: [PATCH 17/30] removed unnecessary convergence delta references in attention attribution call --- inseq/attr/feat/attention_attribution.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index 40242ef0..27f8efb5 100644 --- a/inseq/attr/feat/attention_attribution.py +++ b/inseq/attr/feat/attention_attribution.py @@ -62,20 +62,14 @@ def attribute_step( information is empty, and will later be filled by the enrich_step_output function. 
""" attr = self.method.attribute(**attribute_fn_main_args, **attribution_args) - deltas = None - if ( - attribution_args.get("return_convergence_delta", False) - and hasattr(self.method, "has_convergence_delta") - and self.method.has_convergence_delta() - ): - attr, deltas = attr + source_attributions, target_attributions = get_source_target_attributions( attr, self.attribution_model.is_encoder_decoder ) return FeatureAttributionStepOutput( source_attributions=source_attributions, target_attributions=target_attributions, - step_scores={"deltas": deltas} if deltas is not None else {}, + step_scores={}, ) @classmethod From f51cf25b2a11731adef919a2c555863cb97247da Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Wed, 4 Jan 2023 17:26:02 +0100 Subject: [PATCH 18/30] allow negative indices when selecting a specific attention head for attribution --- inseq/attr/feat/ops/basic_attention.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 8e9f12e2..b0553267 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -74,9 +74,9 @@ def _merge_attention_heads( raise RuntimeError("An attention head has to be specified when choosing single-head attention attribution") if head is not None: - if head > num_heads: - raise RuntimeError( - f"Attention head index for attribution too high. The model only has {num_heads} heads." + if head in range(-num_heads, num_heads): + raise IndexError( + f"Attention head index for attribution out of range. The model only has {num_heads} heads." ) if option != "single": From c6a9e70e9abbc21375dc03aa26ed0f02e6291ac5 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Wed, 4 Jan 2023 17:29:42 +0100 Subject: [PATCH 19/30] added missing negation to head checking --- inseq/attr/feat/ops/basic_attention.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index b0553267..880c6343 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -74,7 +74,7 @@ def _merge_attention_heads( raise RuntimeError("An attention head has to be specified when choosing single-head attention attribution") if head is not None: - if head in range(-num_heads, num_heads): + if head not in range(-num_heads, num_heads): raise IndexError( f"Attention head index for attribution out of range. The model only has {num_heads} heads." 
) From 6c9cfaec982f1cb46327c2e5cf07209aed68ac12 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Wed, 4 Jan 2023 18:35:56 +0100 Subject: [PATCH 20/30] fixed last_layer_attention attribution --- inseq/attr/feat/ops/basic_attention.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 880c6343..eac7af8a 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -226,21 +226,21 @@ def attribute( last_layer_cross = outputs.cross_attentions[-1] last_layer_cross = self._merge_attention_heads(last_layer_cross, merge_head_option, use_head) - last_layer_cross = torch.squeeze(last_layer_cross, 1).select(1, -1) + last_layer_cross = last_layer_cross.select(1, -1) attributions = (last_layer_cross,) if is_target_attribution: last_layer_decoder = outputs.decoder_attentions[-1] last_layer_decoder = self._merge_attention_heads(last_layer_decoder, merge_head_option, use_head) - last_layer_decoder = torch.squeeze(last_layer_decoder, 1).select(1, -1) + last_layer_decoder = last_layer_decoder.select(1, -1) attributions = attributions + (last_layer_decoder,) else: aggregation = outputs.attentions[-1] aggregation = self._merge_attention_heads(aggregation, merge_head_option, use_head) - aggregation = torch.squeeze(aggregation, 1).select(1, -1) + aggregation = aggregation.select(1, -1) attributions = (aggregation,) From b78bcc18a20a5d2059ed3822733a1f26e17e6dbd Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 9 Jan 2023 18:14:47 +0100 Subject: [PATCH 21/30] use custom format_attribute_args function for attention methods --- .gitignore | 1 - Makefile | 5 ++ inseq/attr/feat/attention_attribution.py | 50 ++++++++++++++-- inseq/attr/feat/attribution_utils.py | 6 +- inseq/attr/feat/ops/basic_attention.py | 76 ++++-------------------- inseq/models/attribution_model.py | 1 + inseq/models/decoder_only.py | 6 +- inseq/models/encoder_decoder.py | 16 ++--- 8 files changed, 82 insertions(+), 79 deletions(-) diff --git a/.gitignore b/.gitignore index a4d8c752..05aaaebc 100644 --- a/.gitignore +++ b/.gitignore @@ -324,7 +324,6 @@ cython_debug/ ### VisualStudioCode ### .vscode/* !.vscode/tasks.json -!.vscode/launch.json !.vscode/extensions.json !.vscode/*.code-snippets diff --git a/Makefile b/Makefile index 7957d8d4..adf5e1ee 100644 --- a/Makefile +++ b/Makefile @@ -21,6 +21,7 @@ help: @echo "check-safety : run safety checks on all tests." @echo "lint : run linting on all files (check-style + check-safety)" @echo "test : run all tests." + @echo "test-cpu : run all tests that do not depend on Torch GPU support." @echo "fast-test : run all quick tests." @echo "codecov : check coverage of all the code." @echo "build-docs : build sphinx documentation." @@ -103,6 +104,10 @@ lint: check-style check-safety test: poetry run pytest -c pyproject.toml -v +.PHONY: test-cpu +test-cpu: + poetry run pytest -c pyproject.toml -v -m "not require_cuda_gpu" + .PHONY: fast-test fast-test: poetry run pytest -c pyproject.toml -v -m "not slow" diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index 27f8efb5..124de6f2 100644 --- a/inseq/attr/feat/attention_attribution.py +++ b/inseq/attr/feat/attention_attribution.py @@ -13,13 +13,13 @@ # limitations under the License. """ Attention-based feature attribution methods. 
""" -from typing import Any, Dict, Union +from typing import Any, Callable, Dict, Union import logging -from ...data import FeatureAttributionStepOutput -from ...utils import Registry -from ...utils.typing import ModelIdentifier +from ...data import Batch, EncoderDecoderBatch, FeatureAttributionStepOutput +from ...utils import Registry, pretty_tensor +from ...utils.typing import ModelIdentifier, SingleScorePerStepTensor, TargetIdsTensor from ..attribution_decorators import set_hook, unset_hook from .attribution_utils import get_source_target_attributions from .feature_attribution import FeatureAttribution @@ -40,6 +40,48 @@ def hook(self, **kwargs): def unhook(self, **kwargs): pass + def format_attribute_args( + self, + batch: Union[Batch, EncoderDecoderBatch], + target_ids: TargetIdsTensor, + attributed_fn: Callable[..., SingleScorePerStepTensor], + attribute_target: bool = False, + attributed_fn_args: Dict[str, Any] = {}, + **kwargs, + ) -> Dict[str, Any]: + """ + Formats inputs for the attention attribution methods + + Args: + batch (:class:`~inseq.data.Batch` or :class:`~inseq.data.EncoderDecoderBatch`): The batch of sequences on + which attribution is performed. + target_ids (:obj:`torch.Tensor`): Target token ids of size `(batch_size)` corresponding to tokens + for which the attribution step must be performed. + attributed_fn (:obj:`Callable[..., SingleScorePerStepTensor]`): The function of model outputs + representing what should be attributed (e.g. output probits of model best prediction after softmax). + The parameter must be a function that taking multiple keyword arguments and returns a :obj:`tensor` + of size (batch_size,). If not provided, the default attributed function for the model will be used + (change attribution_model.default_attributed_fn_id). + attribute_target (:obj:`bool`, optional): Whether to attribute the target prefix or not. Defaults to False. + attributed_fn_args (:obj:`dict`, `optional`): Additional arguments to pass to the attributed function. + Defaults to {}. + Returns: + :obj:`dict`: A dictionary containing the formatted attribution arguments. 
+ """ + logger.debug(f"batch: {batch},\ntarget_ids: {pretty_tensor(target_ids, lpad=4)}") + attribute_fn_args = { + "batch": batch, + "additional_forward_args": ( + attribute_target, + attributed_fn, + self.forward_batch_embeds, + list(attributed_fn_args.keys()), + ) + + tuple(attributed_fn_args.values()), + } + + return attribute_fn_args + def attribute_step( self, attribute_fn_main_args: Dict[str, Any], diff --git a/inseq/attr/feat/attribution_utils.py b/inseq/attr/feat/attribution_utils.py index 73668766..8a764e3b 100644 --- a/inseq/attr/feat/attribution_utils.py +++ b/inseq/attr/feat/attribution_utils.py @@ -104,7 +104,11 @@ def get_step_scores( if attribution_model is None: raise ValueError("Attribution model is not set.") with torch.no_grad(): - output = attribution_model.get_forward_output(**attribution_model.format_forward_args(batch)) + output = attribution_model.get_forward_output( + **attribution_model.format_forward_args( + batch, use_embeddings=attribution_model.attribution_method.forward_batch_embeds + ) + ) step_scores_args = attribution_model.format_step_function_args( forward_output=output, encoder_input_ids=batch.source_ids, diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index eac7af8a..b9b4f7f4 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -12,17 +12,16 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Optional, Tuple, Union +from typing import Any, Tuple, Union import logging import torch -from captum._utils.common import _format_output, _is_tuple -from captum._utils.typing import TargetType, TensorOrTupleOfTensorsGeneric +from captum._utils.typing import TensorOrTupleOfTensorsGeneric from captum.attr._utils.attribution import Attribution from captum.log import log_usage -from ....utils.typing import MultiStepEmbeddingsTensor +from ....data import Batch, EncoderDecoderBatch logger = logging.getLogger(__name__) @@ -103,45 +102,6 @@ def _get_batch_size(self, attention: torch.Tensor) -> int: """returns the batch size of a tensor of shape `(batch_size, heads, seq, seq)`""" return attention.size(0) - def _extract_forward_pass_args( - self, - inputs: MultiStepEmbeddingsTensor, - forward_args: Optional[Tuple], - is_target_attr: bool, - is_encoder_decoder: bool, - ) -> dict: - """extracts the arguments needed for a standard forward pass - from the `inputs` and `additional_forward_args` parameters used by Captum""" - - if is_encoder_decoder: - - use_embeddings = forward_args[6] if is_target_attr else forward_args[7] - - forward_pass_args = { - "attention_mask": forward_args[4] if is_target_attr else forward_args[5], - "decoder_attention_mask": forward_args[5] if is_target_attr else forward_args[6], - } - - if use_embeddings: - forward_pass_args["inputs_embeds"] = inputs[0] - forward_pass_args["decoder_inputs_embeds"] = inputs[1] if is_target_attr else forward_args[0] - else: - forward_pass_args["input_ids"] = forward_args[0] if is_target_attr else forward_args[1] - forward_pass_args["decoder_input_ids"] = forward_args[1] if is_target_attr else forward_args[2] - - else: - - use_embeddings = forward_args[4] - - forward_pass_args = {"attention_mask": forward_args[3]} - - if use_embeddings: - forward_pass_args["inputs_embeds"] = inputs[0] - else: - forward_pass_args["input_ids"] = forward_args[0] - - return forward_pass_args - class AggregatedAttention(AttentionAttribution): """ @@ -152,24 +112,17 @@ 
class AggregatedAttention(AttentionAttribution): @log_usage() def attribute( self, - inputs: MultiStepEmbeddingsTensor, - target: TargetType = None, + batch: Union[Batch, EncoderDecoderBatch], merge_head_option: str = "average", use_head: int = None, additional_forward_args: Any = None, ) -> Union[TensorOrTupleOfTensorsGeneric, Tuple[TensorOrTupleOfTensorsGeneric, torch.Tensor]]: - is_inputs_tuple = _is_tuple(inputs) - - is_target_attribution = True if len(inputs) > 1 else False + is_target_attribution = additional_forward_args[0] is_encoder_decoder = self.forward_func.is_encoder_decoder - forward_pass_args = self._extract_forward_pass_args( - inputs, additional_forward_args, is_target_attribution, is_encoder_decoder - ) - - outputs = self.forward_func.model(**forward_pass_args) + outputs = self.forward_func.get_forward_output(**self.forward_func.format_forward_args(batch)) if is_encoder_decoder: cross_aggregation = torch.stack(outputs.cross_attentions).mean(0) @@ -191,7 +144,7 @@ def attribute( attributions = (aggregation,) - return _format_output(is_inputs_tuple, attributions) + return attributions class LastLayerAttention(AttentionAttribution): @@ -203,24 +156,17 @@ class LastLayerAttention(AttentionAttribution): @log_usage() def attribute( self, - inputs: MultiStepEmbeddingsTensor, - target: TargetType = None, + batch: Union[Batch, EncoderDecoderBatch], merge_head_option: str = "average", use_head: int = None, additional_forward_args: Any = None, ) -> Union[TensorOrTupleOfTensorsGeneric, Tuple[TensorOrTupleOfTensorsGeneric, torch.Tensor]]: - is_inputs_tuple = _is_tuple(inputs) - - is_target_attribution = True if len(inputs) > 1 else False + is_target_attribution = additional_forward_args[0] is_encoder_decoder = self.forward_func.is_encoder_decoder - forward_pass_args = self._extract_forward_pass_args( - inputs, additional_forward_args, is_target_attribution, is_encoder_decoder - ) - - outputs = self.forward_func.model(**forward_pass_args) + outputs = self.forward_func.get_forward_output(**self.forward_func.format_forward_args(batch)) if is_encoder_decoder: @@ -244,4 +190,4 @@ def attribute( attributions = (aggregation,) - return _format_output(is_inputs_tuple, attributions) + return attributions diff --git a/inseq/models/attribution_model.py b/inseq/models/attribution_model.py index 149ead39..582f6cce 100644 --- a/inseq/models/attribution_model.py +++ b/inseq/models/attribution_model.py @@ -367,6 +367,7 @@ def prepare_inputs_for_attribution( @abstractmethod def format_forward_args( inputs: Union[DecoderOnlyBatch, EncoderDecoderBatch], + use_embeddings: bool = True, ) -> Dict[str, Any]: pass diff --git a/inseq/models/decoder_only.py b/inseq/models/decoder_only.py index eac20a5b..133c0f81 100644 --- a/inseq/models/decoder_only.py +++ b/inseq/models/decoder_only.py @@ -67,8 +67,12 @@ def prepare_inputs_for_attribution( @staticmethod def format_forward_args( inputs: DecoderOnlyBatch, + use_embeddings: bool = True, ) -> Dict[str, Any]: - return {"forward_tensor": inputs.input_embeds, "attention_mask": inputs.attention_mask} + return { + "forward_tensor": inputs.input_embeds if use_embeddings else inputs.input_ids, + "attention_mask": inputs.attention_mask, + } @staticmethod def format_attribution_args( diff --git a/inseq/models/encoder_decoder.py b/inseq/models/encoder_decoder.py index 3149ebfb..5658f18f 100644 --- a/inseq/models/encoder_decoder.py +++ b/inseq/models/encoder_decoder.py @@ -114,10 +114,12 @@ def prepare_inputs_for_attribution( @staticmethod def format_forward_args( inputs: 
EncoderDecoderBatch, + use_embeddings: bool = True, ) -> Dict[str, Any]: return { - "forward_tensor": inputs.sources.input_embeds, + "forward_tensor": inputs.sources.input_embeds if use_embeddings else inputs.sources.input_ids, "decoder_input_embeds": inputs.targets.input_embeds, + "decoder_input_ids": inputs.targets.input_ids, "encoder_attention_mask": inputs.sources.attention_mask, "decoder_attention_mask": inputs.targets.attention_mask, } @@ -238,21 +240,21 @@ def get_forward_output( self, forward_tensor: AttributionForwardInputs, encoder_attention_mask: Optional[IdsTensor] = None, - # decoder_input_ids: Optional[IdsTensor] = None, + decoder_input_ids: Optional[IdsTensor] = None, decoder_input_embeds: Optional[EmbeddingsTensor] = None, decoder_attention_mask: Optional[IdsTensor] = None, use_embeddings: bool = True, ) -> ModelOutput: encoder_embeds = forward_tensor if use_embeddings else None encoder_ids = None if use_embeddings else forward_tensor - # decoder_embeds = decoder_input_embeds if decoder_input_ids is None else None - # decoder_ids = decoder_input_ids if decoder_input_ids is not None else None + decoder_embeds = decoder_input_embeds if use_embeddings else None + decoder_ids = None if use_embeddings else decoder_input_ids return self.model( input_ids=encoder_ids, inputs_embeds=encoder_embeds, attention_mask=encoder_attention_mask, - # decoder_input_ids=decoder_input_ids, - decoder_inputs_embeds=decoder_input_embeds, + decoder_input_ids=decoder_ids, + decoder_inputs_embeds=decoder_embeds, decoder_attention_mask=decoder_attention_mask, ) @@ -275,7 +277,7 @@ def forward( output = self.get_forward_output( forward_tensor=encoder_tensors, encoder_attention_mask=encoder_attention_mask, - # decoder_input_ids=decoder_input_ids, + decoder_input_ids=decoder_input_ids, decoder_input_embeds=decoder_input_embeds, decoder_attention_mask=decoder_attention_mask, use_embeddings=use_embeddings, From d27f1c35d74748af5996a0c663d6c1e593466fc8 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 9 Jan 2023 18:36:22 +0100 Subject: [PATCH 22/30] always use decoder_input_embeds in forward output --- inseq/models/encoder_decoder.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/inseq/models/encoder_decoder.py b/inseq/models/encoder_decoder.py index 5658f18f..755c71da 100644 --- a/inseq/models/encoder_decoder.py +++ b/inseq/models/encoder_decoder.py @@ -119,7 +119,7 @@ def format_forward_args( return { "forward_tensor": inputs.sources.input_embeds if use_embeddings else inputs.sources.input_ids, "decoder_input_embeds": inputs.targets.input_embeds, - "decoder_input_ids": inputs.targets.input_ids, + # "decoder_input_ids": inputs.targets.input_ids, "encoder_attention_mask": inputs.sources.attention_mask, "decoder_attention_mask": inputs.targets.attention_mask, } @@ -240,21 +240,21 @@ def get_forward_output( self, forward_tensor: AttributionForwardInputs, encoder_attention_mask: Optional[IdsTensor] = None, - decoder_input_ids: Optional[IdsTensor] = None, + # decoder_input_ids: Optional[IdsTensor] = None, decoder_input_embeds: Optional[EmbeddingsTensor] = None, decoder_attention_mask: Optional[IdsTensor] = None, use_embeddings: bool = True, ) -> ModelOutput: encoder_embeds = forward_tensor if use_embeddings else None encoder_ids = None if use_embeddings else forward_tensor - decoder_embeds = decoder_input_embeds if use_embeddings else None - decoder_ids = None if use_embeddings else decoder_input_ids + # decoder_embeds = decoder_input_embeds if use_embeddings else None + 
# decoder_ids = None if use_embeddings else decoder_input_ids return self.model( input_ids=encoder_ids, inputs_embeds=encoder_embeds, attention_mask=encoder_attention_mask, - decoder_input_ids=decoder_ids, - decoder_inputs_embeds=decoder_embeds, + # decoder_input_ids=decoder_ids, + decoder_inputs_embeds=decoder_input_embeds, decoder_attention_mask=decoder_attention_mask, ) @@ -277,7 +277,7 @@ def forward( output = self.get_forward_output( forward_tensor=encoder_tensors, encoder_attention_mask=encoder_attention_mask, - decoder_input_ids=decoder_input_ids, + # decoder_input_ids=decoder_input_ids, decoder_input_embeds=decoder_input_embeds, decoder_attention_mask=decoder_attention_mask, use_embeddings=use_embeddings, From cacaa31f294217d72214ec9bd01c2821659abb14 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Mon, 9 Jan 2023 21:32:26 +0100 Subject: [PATCH 23/30] reworked LastLayerAttention to work with any single layer and allow a range of layers to be specified for averaging in AggregatedAttention --- inseq/attr/feat/__init__.py | 4 +-- inseq/attr/feat/attention_attribution.py | 10 +++--- inseq/attr/feat/ops/__init__.py | 4 +-- inseq/attr/feat/ops/basic_attention.py | 46 +++++++++++++++--------- 4 files changed, 38 insertions(+), 26 deletions(-) diff --git a/inseq/attr/feat/__init__.py b/inseq/attr/feat/__init__.py index 8333b3b4..376a8027 100644 --- a/inseq/attr/feat/__init__.py +++ b/inseq/attr/feat/__init__.py @@ -1,4 +1,4 @@ -from .attention_attribution import AggregatedAttentionAtribution, AttentionAtribution, LastLayerAttentionAttribution +from .attention_attribution import AggregatedAttentionAtribution, AttentionAtribution, SingleLayerAttentionAttribution from .attribution_utils import STEP_SCORES_MAP, extract_args, join_token_ids, list_step_scores, register_step_score from .feature_attribution import FeatureAttribution, list_feature_attribution_methods from .gradient_attribution import ( @@ -33,5 +33,5 @@ "LayerDeepLiftAttribution", "AttentionAtribution", "AggregatedAttentionAtribution", - "LastLayerAttentionAttribution", + "SingleLayerAttentionAttribution", ] diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index 124de6f2..43d7880c 100644 --- a/inseq/attr/feat/attention_attribution.py +++ b/inseq/attr/feat/attention_attribution.py @@ -23,7 +23,7 @@ from ..attribution_decorators import set_hook, unset_hook from .attribution_utils import get_source_target_attributions from .feature_attribution import FeatureAttribution -from .ops import AggregatedAttention, LastLayerAttention +from .ops import AggregatedAttention, SingleLayerAttention logger = logging.getLogger(__name__) @@ -153,14 +153,14 @@ def __init__(self, attribution_model, **kwargs): self.method = AggregatedAttention(attribution_model) -class LastLayerAttentionAttribution(AttentionAtribution): +class SingleLayerAttentionAttribution(AttentionAtribution): """ - Last-Layer attention attribution method. + Single-Layer attention attribution method. Only the raw attention of the last hidden layer is retrieved. 
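+    With this rework, any single layer can be selected through the underlying
+    :class:`~inseq.attr.feat.ops.SingleLayerAttention` op (the last layer stays
+    the default). A minimal usage sketch (hypothetical: it assumes extra keyword
+    arguments passed to ``attribute`` are forwarded to the method, and uses an
+    illustrative model identifier)::
+
+        import inseq
+
+        model = inseq.load_model("Helsinki-NLP/opus-mt-en-de", "single_layer_attention")
+        out = model.attribute("Hello world!", use_layer=-1)  # -1 reproduces the old last-layer behaviour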
""" - method_name = "last_layer_attention" + method_name = "single_layer_attention" def __init__(self, attribution_model, **kwargs): super().__init__(attribution_model) - self.method = LastLayerAttention(attribution_model) + self.method = SingleLayerAttention(attribution_model) diff --git a/inseq/attr/feat/ops/__init__.py b/inseq/attr/feat/ops/__init__.py index 5b70d9be..7b2525bb 100644 --- a/inseq/attr/feat/ops/__init__.py +++ b/inseq/attr/feat/ops/__init__.py @@ -1,6 +1,6 @@ -from .basic_attention import AggregatedAttention, LastLayerAttention +from .basic_attention import AggregatedAttention, SingleLayerAttention from .discretized_integrated_gradients import DiscretetizedIntegratedGradients from .monotonic_path_builder import MonotonicPathBuilder -__all__ = ["DiscretetizedIntegratedGradients", "MonotonicPathBuilder", "AggregatedAttention", "LastLayerAttention"] +__all__ = ["DiscretetizedIntegratedGradients", "MonotonicPathBuilder", "AggregatedAttention", "SingleLayerAttention"] diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index b9b4f7f4..5069f023 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -67,7 +67,7 @@ def _merge_attention_heads( Returns: `torch.Tensor`: The attention tensor with its attention heads merged. """ - num_heads = self._num_attention_heads(attention[0]) + num_heads = self._num_attention_heads(attention) if option == "single" and head is None: raise RuntimeError("An attention head has to be specified when choosing single-head attention attribution") @@ -90,7 +90,7 @@ def _merge_attention_heads( return attention.mean(1) elif option == "max": - return attention.max(1) + return attention.max(1)[0] else: raise RuntimeError( @@ -106,7 +106,7 @@ def _get_batch_size(self, attention: torch.Tensor) -> int: class AggregatedAttention(AttentionAttribution): """ A basic attention attribution approach. - It will return the attention values averaged across all layers. + It will return the attention values averaged across a range of layers (default is all layers). 
""" @log_usage() @@ -115,6 +115,7 @@ def attribute( batch: Union[Batch, EncoderDecoderBatch], merge_head_option: str = "average", use_head: int = None, + aggregate_layer: tuple = None, additional_forward_args: Any = None, ) -> Union[TensorOrTupleOfTensorsGeneric, Tuple[TensorOrTupleOfTensorsGeneric, torch.Tensor]]: @@ -125,20 +126,30 @@ def attribute( outputs = self.forward_func.get_forward_output(**self.forward_func.format_forward_args(batch)) if is_encoder_decoder: - cross_aggregation = torch.stack(outputs.cross_attentions).mean(0) + + if aggregate_layer is None: + aggregate_layer = (0, len(outputs.cross_attentions)) + + cross_aggregation = torch.stack(outputs.cross_attentions[aggregate_layer[0] : aggregate_layer[1]]).mean(0) cross_aggregation = self._merge_attention_heads(cross_aggregation, merge_head_option, use_head) cross_aggregation = cross_aggregation.select(1, -1) attributions = (cross_aggregation,) if is_target_attribution: - decoder_aggregation = torch.stack(outputs.decoder_attentions).mean(0) + decoder_aggregation = torch.stack( + outputs.decoder_attentions[aggregate_layer[0] : aggregate_layer[1]] + ).mean(0) decoder_aggregation = self._merge_attention_heads(decoder_aggregation, merge_head_option, use_head) decoder_aggregation = decoder_aggregation.select(1, -1) attributions = attributions + (decoder_aggregation,) else: - aggregation = torch.stack(outputs.attentions).mean(0) + + if aggregate_layer is None: + aggregate_layer = (0, len(outputs.attentions)) + + aggregation = torch.stack(outputs.attentions[aggregate_layer[0] : aggregate_layer[1]]).mean(0) aggregation = self._merge_attention_heads(aggregation, merge_head_option, use_head) aggregation = aggregation.select(1, -1) @@ -147,10 +158,10 @@ def attribute( return attributions -class LastLayerAttention(AttentionAttribution): +class SingleLayerAttention(AttentionAttribution): """ A basic attention attribution approach. - It will simply return the attention values of the last layer. + It will return the attention values of a single layer (default is the last layer). 
""" @log_usage() @@ -159,6 +170,7 @@ def attribute( batch: Union[Batch, EncoderDecoderBatch], merge_head_option: str = "average", use_head: int = None, + use_layer: int = -1, additional_forward_args: Any = None, ) -> Union[TensorOrTupleOfTensorsGeneric, Tuple[TensorOrTupleOfTensorsGeneric, torch.Tensor]]: @@ -170,21 +182,21 @@ def attribute( if is_encoder_decoder: - last_layer_cross = outputs.cross_attentions[-1] - last_layer_cross = self._merge_attention_heads(last_layer_cross, merge_head_option, use_head) - last_layer_cross = last_layer_cross.select(1, -1) + layer_cross = outputs.cross_attentions[use_layer] + layer_cross = self._merge_attention_heads(layer_cross, merge_head_option, use_head) + layer_cross = layer_cross.select(1, -1) - attributions = (last_layer_cross,) + attributions = (layer_cross,) if is_target_attribution: - last_layer_decoder = outputs.decoder_attentions[-1] - last_layer_decoder = self._merge_attention_heads(last_layer_decoder, merge_head_option, use_head) - last_layer_decoder = last_layer_decoder.select(1, -1) + layer_decoder = outputs.decoder_attentions[use_layer] + layer_decoder = self._merge_attention_heads(layer_decoder, merge_head_option, use_head) + layer_decoder = layer_decoder.select(1, -1) - attributions = attributions + (last_layer_decoder,) + attributions = attributions + (layer_decoder,) else: - aggregation = outputs.attentions[-1] + aggregation = outputs.attentions[use_layer] aggregation = self._merge_attention_heads(aggregation, merge_head_option, use_head) aggregation = aggregation.select(1, -1) From a8d5264be5ad5caecdfcae0f4c3ed30fd62c38d2 Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Tue, 10 Jan 2023 22:56:51 +0100 Subject: [PATCH 24/30] Minor bugfixes and version bumps --- .gitignore | 2 +- docs/source/examples/quickstart.rst | 2 +- .../main_classes/feature_attribution.rst | 11 +- inseq/attr/feat/__init__.py | 4 +- inseq/attr/feat/gradient_attribution.py | 18 +- inseq/data/attribution.py | 8 +- inseq/models/decoder_only.py | 2 + inseq/models/encoder_decoder.py | 7 +- poetry.lock | 412 +++++++++--------- pyproject.toml | 6 +- requirements-dev.txt | 32 +- requirements.txt | 6 +- tests/__init__.py | 1 - 13 files changed, 262 insertions(+), 249 deletions(-) diff --git a/.gitignore b/.gitignore index 05aaaebc..df47d8d5 100644 --- a/.gitignore +++ b/.gitignore @@ -763,4 +763,4 @@ FodyWeavers.xsd # End of https://www.toptal.com/developers/gitignore/api/osx,python,pycharm,windows,visualstudio,visualstudiocode,jupyternotebooks -/examples +/dev_examples diff --git a/docs/source/examples/quickstart.rst b/docs/source/examples/quickstart.rst index 3faa664d..fe707046 100644 --- a/docs/source/examples/quickstart.rst +++ b/docs/source/examples/quickstart.rst @@ -45,7 +45,7 @@ The :class:`~inseq.models.AttributionModel` class is a ``torch.nn.Module`` inten ``AttributionModel`` children classes belong to two categories: **architectural classes** like :class:`~inseq.models.EncoderDecoderAttributionModel` defines methods that are specific to a certain model architecture, while **framework classes** like :class:`~inseq.models.HuggingfaceModel` specify methods that are specific to a certain modeling framework (e.g. encoding with a tokenizer in |:hugging_face:| transformers). The final class that will be instantiated by the user is a combination of the two, e.g. :class:`~inseq.models.HuggingfaceEncoderDecoderModel` for a sequence-to-sequence model from the |:hugging_face:| transformers library. 
-When a model is loaded with :meth:`~inseq.load_model`, a :class:`~inseq.attr.feat.FeatureAttribution` can be attached to it to specify which feature attribution technique should be used on it. Different families of attribution methods such as :class:`~inseq.attr.feats.GradientAttribution` are made available, each containing multiple methods (e.g. :class:`~inseq.attr.feats.IntegratedGradientsAttribution`, :class:`~inseq.attr.feats.DeepLiftAttribution`). +When a model is loaded with :meth:`~inseq.load_model`, a :class:`~inseq.attr.feat.FeatureAttribution` can be attached to it to specify which feature attribution technique should be used on it. Different families of attribution methods such as :class:`~inseq.attr.feats.GradientAttributionRegistry` are made available, each containing multiple methods (e.g. :class:`~inseq.attr.feats.IntegratedGradientsAttribution`, :class:`~inseq.attr.feats.DeepLiftAttribution`). The following image provides a visual hierarchy of the division between ``AttributionModel`` and ``FeatureAttribution`` subclasses: diff --git a/docs/source/main_classes/feature_attribution.rst b/docs/source/main_classes/feature_attribution.rst index 5c6a4314..64a12808 100644 --- a/docs/source/main_classes/feature_attribution.rst +++ b/docs/source/main_classes/feature_attribution.rst @@ -20,7 +20,7 @@ Feature Attribution Gradient Attribution Methods ----------------------------------------------------------------------------------------------------------------------- -.. autoclass:: inseq.attr.feat.GradientAttribution +.. autoclass:: inseq.attr.feat.GradientAttributionRegistry :members: @@ -51,22 +51,13 @@ Layer Attribution Methods ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -LayerIntegratedGradientsAttribution -''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''' - .. autoclass:: inseq.attr.feat.LayerIntegratedGradientsAttribution :members: -LayerGradientXActivationAttribution -''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''' .. autoclass:: inseq.attr.feat.LayerGradientXActivationAttribution :members: -LayerDeepLiftAttribution -''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''' - .. 
autoclass:: inseq.attr.feat.LayerDeepLiftAttribution :members: diff --git a/inseq/attr/feat/__init__.py b/inseq/attr/feat/__init__.py index 376a8027..dfed90f4 100644 --- a/inseq/attr/feat/__init__.py +++ b/inseq/attr/feat/__init__.py @@ -4,7 +4,7 @@ from .gradient_attribution import ( DeepLiftAttribution, DiscretizedIntegratedGradientsAttribution, - GradientAttribution, + GradientAttributionRegistry, InputXGradientAttribution, IntegratedGradientsAttribution, LayerDeepLiftAttribution, @@ -22,7 +22,7 @@ "register_step_score", "join_token_ids", "list_step_scores", - "GradientAttribution", + "GradientAttributionRegistry", "DeepLiftAttribution", "InputXGradientAttribution", "IntegratedGradientsAttribution", diff --git a/inseq/attr/feat/gradient_attribution.py b/inseq/attr/feat/gradient_attribution.py index 12dca41d..8066c537 100644 --- a/inseq/attr/feat/gradient_attribution.py +++ b/inseq/attr/feat/gradient_attribution.py @@ -38,7 +38,7 @@ logger = logging.getLogger(__name__) -class GradientAttribution(FeatureAttribution, Registry): +class GradientAttributionRegistry(FeatureAttribution, Registry): r"""Gradient-based attribution method registry.""" @set_hook @@ -104,7 +104,7 @@ def attribute_step( ) -class DeepLiftAttribution(GradientAttribution): +class DeepLiftAttribution(GradientAttributionRegistry): """DeepLIFT attribution method. Reference implementation: @@ -119,7 +119,7 @@ def __init__(self, attribution_model, multiply_by_inputs: bool = True, **kwargs) self.use_baseline = True -class DiscretizedIntegratedGradientsAttribution(GradientAttribution): +class DiscretizedIntegratedGradientsAttribution(GradientAttributionRegistry): """Discretized Integrated Gradients attribution method Reference: https://arxiv.org/abs/2108.13654 @@ -157,7 +157,7 @@ def hook(self, **kwargs): super().hook(**other_kwargs) -class IntegratedGradientsAttribution(GradientAttribution): +class IntegratedGradientsAttribution(GradientAttributionRegistry): """Integrated Gradients attribution method. Reference implementation: @@ -172,7 +172,7 @@ def __init__(self, attribution_model, multiply_by_inputs: bool = True, **kwargs) self.use_baseline = True -class InputXGradientAttribution(GradientAttribution): +class InputXGradientAttribution(GradientAttributionRegistry): """Input x Gradient attribution method. Reference implementation: @@ -186,7 +186,7 @@ def __init__(self, attribution_model): self.method = InputXGradient(self.attribution_model) -class SaliencyAttribution(GradientAttribution): +class SaliencyAttribution(GradientAttributionRegistry): """Saliency attribution method. Reference implementation: @@ -203,7 +203,7 @@ def __init__(self, attribution_model): # Layer methods -class LayerIntegratedGradientsAttribution(GradientAttribution): +class LayerIntegratedGradientsAttribution(GradientAttributionRegistry): """Layer Integrated Gradients attribution method. Reference implementation: @@ -225,7 +225,7 @@ def __init__(self, attribution_model, multiply_by_inputs: bool = True, **kwargs) ) -class LayerGradientXActivationAttribution(GradientAttribution): +class LayerGradientXActivationAttribution(GradientAttributionRegistry): """Layer Integrated Gradients attribution method. Reference implementation: @@ -247,7 +247,7 @@ def __init__(self, attribution_model, multiply_by_inputs: bool = True, **kwargs) ) -class LayerDeepLiftAttribution(GradientAttribution): +class LayerDeepLiftAttribution(GradientAttributionRegistry): """Layer DeepLIFT attribution method. 
Reference implementation: diff --git a/inseq/data/attribution.py b/inseq/data/attribution.py index aa67b17a..2b105f0d 100644 --- a/inseq/data/attribution.py +++ b/inseq/data/attribution.py @@ -208,18 +208,18 @@ def show( def minimum(self) -> float: minimum = 0 if self.source_attributions is not None: - minimum = min(minimum, float(self.source_attributions.min())) + minimum = min(minimum, float(torch.nan_to_num(self.source_attributions).min())) if self.target_attributions is not None: - minimum = min(minimum, float(self.target_attributions.min())) + minimum = min(minimum, float(torch.nan_to_num(self.target_attributions).min())) return minimum @property def maximum(self) -> float: maximum = 0 if self.source_attributions is not None: - maximum = max(maximum, float(self.source_attributions.max())) + maximum = max(maximum, float(torch.nan_to_num(self.source_attributions).max())) if self.target_attributions is not None: - maximum = max(maximum, float(self.target_attributions.max())) + maximum = max(maximum, float(torch.nan_to_num(self.target_attributions).max())) return maximum def weight_attributions(self, step_score_id: str): diff --git a/inseq/models/decoder_only.py b/inseq/models/decoder_only.py index 133c0f81..4cb28cf9 100644 --- a/inseq/models/decoder_only.py +++ b/inseq/models/decoder_only.py @@ -175,6 +175,7 @@ def get_forward_output( forward_tensor: AttributionForwardInputs, attention_mask: Optional[IdsTensor] = None, use_embeddings: bool = True, + **kwargs, ) -> ModelOutput: embeds = forward_tensor if use_embeddings else None ids = None if use_embeddings else forward_tensor @@ -182,6 +183,7 @@ def get_forward_output( input_ids=ids, inputs_embeds=embeds, attention_mask=attention_mask, + **kwargs, ) def forward( diff --git a/inseq/models/encoder_decoder.py b/inseq/models/encoder_decoder.py index 755c71da..a1677d8d 100644 --- a/inseq/models/encoder_decoder.py +++ b/inseq/models/encoder_decoder.py @@ -240,22 +240,20 @@ def get_forward_output( self, forward_tensor: AttributionForwardInputs, encoder_attention_mask: Optional[IdsTensor] = None, - # decoder_input_ids: Optional[IdsTensor] = None, decoder_input_embeds: Optional[EmbeddingsTensor] = None, decoder_attention_mask: Optional[IdsTensor] = None, use_embeddings: bool = True, + **kwargs, ) -> ModelOutput: encoder_embeds = forward_tensor if use_embeddings else None encoder_ids = None if use_embeddings else forward_tensor - # decoder_embeds = decoder_input_embeds if use_embeddings else None - # decoder_ids = None if use_embeddings else decoder_input_ids return self.model( input_ids=encoder_ids, inputs_embeds=encoder_embeds, attention_mask=encoder_attention_mask, - # decoder_input_ids=decoder_ids, decoder_inputs_embeds=decoder_input_embeds, decoder_attention_mask=decoder_attention_mask, + **kwargs, ) def forward( @@ -277,7 +275,6 @@ def forward( output = self.get_forward_output( forward_tensor=encoder_tensors, encoder_attention_mask=encoder_attention_mask, - # decoder_input_ids=decoder_input_ids, decoder_input_embeds=decoder_input_embeds, decoder_attention_mask=decoder_attention_mask, use_embeddings=use_embeddings, diff --git a/poetry.lock b/poetry.lock index 3bc3b766..a5b6934d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -47,7 +47,7 @@ python-versions = "*" [[package]] name = "astroid" -version = "2.12.13" +version = "2.13.2" description = "An abstract syntax tree for Python with inference support." 
category = "dev" optional = false @@ -55,7 +55,7 @@ python-versions = ">=3.7.2" [package.dependencies] lazy-object-proxy = ">=1.4.0" -typing-extensions = {version = ">=3.10", markers = "python_version < \"3.10\""} +typing-extensions = ">=4.0.0" wrapt = [ {version = ">=1.11,<2", markers = "python_version < \"3.11\""}, {version = ">=1.14,<2", markers = "python_version >= \"3.11\""}, @@ -96,11 +96,10 @@ cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] dev = ["attrs[docs,tests]"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] tests = ["attrs[tests-no-zope]", "zope.interface"] -tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests_no_zope = ["cloudpickle", "hypothesis", "mypy (>=0.971,<0.990)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] -name = "Babel" +name = "babel" version = "2.11.0" description = "Internationalization utilities" category = "dev" @@ -161,7 +160,7 @@ uvloop = ["uvloop (>=0.15.2)"] [[package]] name = "captum" -version = "0.5.0" +version = "0.6.0" description = "Model interpretability for PyTorch" category = "main" optional = false @@ -173,9 +172,9 @@ numpy = "*" torch = ">=1.6" [package.extras] -dev = ["black (==21.4b2)", "flake8", "flask", "flask-compress", "ipython", "ipywidgets", "jupyter", "mypy (>=0.760)", "pytest", "pytest-cov", "scikit-learn", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-katex", "ufmt", "usort (==0.6.4)"] +dev = ["annoy", "black (==22.3.0)", "flake8", "flask", "flask-compress", "ipython", "ipywidgets", "jupyter", "mypy (>=0.760)", "parameterized", "pytest", "pytest-cov", "scikit-learn", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-katex", "ufmt", "usort (==1.0.2)"] insights = ["flask", "flask-compress", "ipython", "ipywidgets", "jupyter"] -test = ["pytest", "pytest-cov"] +test = ["parameterized", "pytest", "pytest-cov"] tutorials = ["flask", "flask-compress", "ipython", "ipywidgets", "jupyter", "torchtext", "torchvision"] [[package]] @@ -214,7 +213,7 @@ optional = false python-versions = ">=3.6.0" [package.extras] -unicode_backport = ["unicodedata2"] +unicode-backport = ["unicodedata2"] [[package]] name = "click" @@ -280,7 +279,7 @@ test-no-codebase = ["Pillow", "matplotlib", "pytest"] [[package]] name = "coverage" -version = "7.0.1" +version = "7.0.4" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -342,14 +341,14 @@ metrics-tests = ["Werkzeug (>=1.0.1)", "bert-score (>=0.3.6)", "jiwer", "langdet quality = ["black (>=22.0,<23.0)", "flake8 (>=3.8.3)", "isort (>=5.0.0)", "pyyaml (>=5.3.1)"] s3 = ["s3fs"] tensorflow = ["tensorflow (>=2.2.0,!=2.6.0,!=2.6.1)", "tensorflow-macos"] -tensorflow_gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] +tensorflow-gpu = ["tensorflow-gpu (>=2.2.0,!=2.6.0,!=2.6.1)"] tests = ["Pillow (>=6.2.1)", "absl-py", "apache-beam (>=2.26.0)", "elasticsearch (<8.0.0)", "faiss-cpu (>=1.6.4)", "librosa", "lz4", "py7zr", "pytest", "pytest-datadir", "pytest-xdist", "rarfile (>=4.0)", "s3fs (>=2021.11.1)", "soundfile", "tensorflow 
(>=2.3,!=2.6.0,!=2.6.1)", "tensorflow-macos", "torch", "torchaudio (<0.12.0)", "transformers", "zstandard"] torch = ["torch"] vision = ["Pillow (>=6.2.1)"] [[package]] name = "debugpy" -version = "1.6.4" +version = "1.6.5" description = "An implementation of the Debug Adapter Protocol for Python" category = "main" optional = true @@ -538,7 +537,7 @@ python-versions = ">=3.7" smmap = ">=3.0.1,<6" [[package]] -name = "GitPython" +name = "gitpython" version = "3.1.30" description = "GitPython is a python library used to interact with Git repositories" category = "dev" @@ -577,7 +576,7 @@ typing = ["types-PyYAML", "types-requests", "types-simplejson", "types-toml", "t [[package]] name = "identify" -version = "2.5.11" +version = "2.5.12" description = "File identification library for Python" category = "dev" optional = false @@ -620,15 +619,15 @@ testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packag [[package]] name = "iniconfig" -version = "1.1.1" -description = "iniconfig: brain-dead simple config-ini parsing" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" [[package]] name = "ipykernel" -version = "6.19.4" +version = "6.20.1" description = "IPython Kernel for Jupyter" category = "main" optional = true @@ -650,12 +649,14 @@ traitlets = ">=5.4.0" [package.extras] cov = ["coverage[toml]", "curio", "matplotlib", "pytest-cov", "trio"] -docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinxcontrib-github-alt"] +docs = ["myst-parser", "pydata-sphinx-theme", "sphinx", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "trio"] +pyqt5 = ["pyqt5"] +pyside6 = ["pyside6"] test = ["flaky", "ipyparallel", "pre-commit", "pytest (>=7.0)", "pytest-asyncio", "pytest-cov", "pytest-timeout"] [[package]] name = "ipython" -version = "8.7.0" +version = "8.8.0" description = "IPython: Productive Interactive Computing" category = "main" optional = true @@ -686,7 +687,7 @@ notebook = ["ipywidgets", "notebook"] parallel = ["ipyparallel"] qtconsole = ["qtconsole"] test = ["pytest (<7.1)", "pytest-asyncio", "testpath"] -test_extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] +test-extra = ["curio", "matplotlib (!=3.2.0)", "nbformat", "numpy (>=1.20)", "pandas", "pytest (<7.1)", "pytest-asyncio", "testpath", "trio"] [[package]] name = "ipywidgets" @@ -740,7 +741,7 @@ qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] testing = ["Django (<3.1)", "attrs", "colorama", "docopt", "pytest (<7.0.0)"] [[package]] -name = "Jinja2" +name = "jinja2" version = "3.1.2" description = "A very fast and expressive template engine." category = "dev" @@ -792,7 +793,7 @@ test = ["codecov", "coverage", "ipykernel (>=6.12)", "ipython", "mypy", "pre-com [[package]] name = "jupyter-core" -version = "5.1.1" +version = "5.1.3" description = "Jupyter core package. A base package on which Jupyter projects rely." 
category = "main" optional = true @@ -804,7 +805,7 @@ pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\" and platform_ traitlets = ">=5.3" [package.extras] -docs = ["myst-parser", "sphinxcontrib-github-alt", "traitlets"] +docs = ["myst-parser", "sphinx-autodoc-typehints", "sphinxcontrib-github-alt", "sphinxcontrib-spelling", "traitlets"] test = ["ipykernel", "pre-commit", "pytest", "pytest-cov", "pytest-timeout"] [[package]] @@ -825,14 +826,14 @@ python-versions = ">=3.7" [[package]] name = "lazy-object-proxy" -version = "1.8.0" +version = "1.9.0" description = "A fast and thorough lazy object proxy." category = "dev" optional = false python-versions = ">=3.7" [[package]] -name = "Markdown" +name = "markdown" version = "3.4.1" description = "Python implementation of Markdown." category = "dev" @@ -846,7 +847,7 @@ importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} testing = ["coverage", "pyyaml"] [[package]] -name = "MarkupSafe" +name = "markupsafe" version = "2.1.1" description = "Safely add untrusted strings to HTML/XML markup." category = "dev" @@ -996,7 +997,7 @@ wheel = "*" [[package]] name = "packaging" -version = "22.0" +version = "23.0" description = "Core utilities for Python packages" category = "main" optional = false @@ -1078,7 +1079,7 @@ optional = true python-versions = "*" [[package]] -name = "Pillow" +name = "pillow" version = "9.4.0" description = "Python Imaging Library (Fork)" category = "main" @@ -1126,7 +1127,7 @@ pastel = ">=0.2.1,<0.3.0" tomli = ">=1.2.2" [package.extras] -poetry_plugin = ["poetry (>=1.0,<2.0)"] +poetry-plugin = ["poetry (>=1.0,<2.0)"] [[package]] name = "pre-commit" @@ -1236,7 +1237,7 @@ optional = false python-versions = ">=3.6" [[package]] -name = "Pygments" +name = "pygments" version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" @@ -1248,7 +1249,7 @@ plugins = ["importlib-metadata"] [[package]] name = "pylint" -version = "2.15.9" +version = "2.15.10" description = "python code static checker" category = "dev" optional = false @@ -1357,7 +1358,7 @@ optional = true python-versions = "*" [[package]] -name = "PyYAML" +name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" category = "main" @@ -1413,7 +1414,7 @@ urllib3 = ">=1.21.1,<1.27" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use_chardet_on_py3 = ["chardet (>=3.0.2,<6)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "responses" @@ -1447,7 +1448,7 @@ pygments = ">=2.6.0,<3.0.0" jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] [[package]] -name = "ruamel.yaml" +name = "ruamel-yaml" version = "0.17.21" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" category = "dev" @@ -1462,7 +1463,7 @@ docs = ["ryd"] jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] -name = "ruamel.yaml.clib" +name = "ruamel-yaml-clib" version = "0.2.7" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" category = "dev" @@ -1511,19 +1512,19 @@ tests = ["black (>=22.3.0)", "flake8 (>=3.8.2)", "matplotlib (>=3.1.3)", "mypy ( [[package]] name = "scipy" -version = "1.9.3" +version = "1.10.0" description = "Fundamental algorithms for scientific computing in Python" category = "main" optional = false -python-versions = ">=3.8" +python-versions = "<3.12,>=3.8" [package.dependencies] -numpy = ">=1.18.5,<1.26.0" +numpy = ">=1.19.5,<1.27.0" [package.extras] -dev = ["flake8", "mypy", "pycodestyle", "typing_extensions"] -doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-panels (>=0.5.2)", "sphinx-tabs"] -test = ["asv", "gmpy2", "mpmath", "pytest", "pytest-cov", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +dev = ["click", "doit (>=0.36.0)", "flake8", "mypy", "pycodestyle", "pydevtool", "rich-click", "typing_extensions"] +doc = ["matplotlib (>2)", "numpydoc", "pydata-sphinx-theme (==0.9.0)", "sphinx (!=4.1.0)", "sphinx-design (>=0.2.0)"] +test = ["asv", "gmpy2", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] [[package]] name = "sentencepiece" @@ -1589,7 +1590,7 @@ optional = false python-versions = "*" [[package]] -name = "Sphinx" +name = "sphinx" version = "5.3.0" description = "Python documentation generator" category = "dev" @@ -1632,7 +1633,7 @@ python-versions = ">=3.6" sphinx = ">=1.8" [package.extras] -code_style = ["pre-commit (==2.12.1)"] +code-style = ["pre-commit (==2.12.1)"] rtd = ["ipython", "sphinx", "sphinx-book-theme"] [[package]] @@ -1676,11 +1677,11 @@ dev = ["bump2version", "sphinxcontrib-httpdomain", "transifex-client", "wheel"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +version = "1.0.3" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] @@ -1904,7 +1905,7 @@ telegram = ["requests"] [[package]] name = "traitlets" -version = "5.8.0" +version = "5.8.1" description = "Traitlets Python configuration system" category = "main" optional = true @@ -1947,7 +1948,7 @@ dev = 
["GitPython (<3.1.19)", "Pillow", "accelerate (>=0.10.0)", "beautifulsoup4 dev-tensorflow = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "isort (>=5.5.4)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "nltk", "onnxconverter-common", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pytest", "pytest-timeout", "pytest-xdist", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timeout-decorator", "tokenizers (>=0.11.1,!=0.11.3,<0.14)"] dev-torch = ["GitPython (<3.1.19)", "Pillow", "beautifulsoup4", "black (==22.3)", "codecarbon (==1.2.0)", "cookiecutter (==1.7.3)", "datasets (!=2.5.0)", "dill (<0.3.5)", "evaluate (>=0.2.0)", "faiss-cpu", "flake8 (>=3.8.3)", "fugashi (>=1.0)", "hf-doc-builder", "hf-doc-builder (>=0.3.0)", "ipadic (>=1.0.0,<2.0)", "isort (>=5.5.4)", "kenlm", "librosa", "nltk", "onnxruntime (>=1.4.0)", "onnxruntime-tools (>=1.4.2)", "optuna", "parameterized", "phonemizer", "protobuf (<=3.20.2)", "psutil", "pyctcdecode (>=0.4.0)", "pyknp (>=0.6.1)", "pytest", "pytest-timeout", "pytest-xdist", "ray[tune]", "rjieba", "rouge-score (!=0.0.7,!=0.0.8,!=0.1,!=0.1.1)", "sacrebleu (>=1.4.12,<2.0.0)", "sacremoses", "safetensors (>=0.2.1)", "scikit-learn", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "sudachidict-core (>=20220729)", "sudachipy (>=0.6.6)", "timeout-decorator", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio", "unidic (>=1.0.2)", "unidic-lite (>=1.0.7)"] docs = ["Pillow", "accelerate (>=0.10.0)", "codecarbon (==1.2.0)", "flax (>=0.4.1)", "hf-doc-builder", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "kenlm", "keras-nlp (>=0.3.1)", "librosa", "onnxconverter-common", "optax (>=0.0.8)", "optuna", "phonemizer", "protobuf (<=3.20.2)", "pyctcdecode (>=0.4.0)", "ray[tune]", "sentencepiece (>=0.1.91,!=0.1.92)", "sigopt", "tensorflow (>=2.4,<2.11)", "tensorflow-text", "tf2onnx", "timm", "tokenizers (>=0.11.1,!=0.11.3,<0.14)", "torch (>=1.7,!=1.12.0)", "torchaudio"] -docs_specific = ["hf-doc-builder"] +docs-specific = ["hf-doc-builder"] fairscale = ["fairscale (>0.3)"] flax = ["flax (>=0.4.1)", "jax (>=0.2.8,!=0.3.2,<=0.3.6)", "jaxlib (>=0.1.65,<=0.3.6)", "optax (>=0.0.8)"] flax-speech = ["kenlm", "librosa", "phonemizer", "pyctcdecode (>=0.4.0)"] @@ -2104,7 +2105,7 @@ sklearn = ["scikit-learn", "joblib"] [metadata] lock-version = "1.1" python-versions = ">=3.8.1,<3.12" -content-hash = "e1f95d71145b778f3f05d42294911fa3bb69450733f03522cf5c70d39ed76985" +content-hash = "25605c07eae2caa2b872b2af7606c18f13ba59c3634210277190dda6ef24472b" [metadata.files] aiohttp = [ @@ -2209,8 +2210,8 @@ appnope = [ {file = "appnope-0.1.3.tar.gz", hash = "sha256:02bd91c4de869fbb1e1c50aafc4098827a7a54ab2f39d9dcba6c9547ed920e24"}, ] astroid = [ - {file = "astroid-2.12.13-py3-none-any.whl", hash = "sha256:10e0ad5f7b79c435179d0d0f0df69998c4eef4597534aae44910db060baeb907"}, - {file = "astroid-2.12.13.tar.gz", hash = "sha256:1493fe8bd3dfd73dc35bd53c9d5b6e49ead98497c47b2307662556a5692d29d7"}, + {file = "astroid-2.13.2-py3-none-any.whl", hash = 
"sha256:8f6a8d40c4ad161d6fc419545ae4b2f275ed86d1c989c97825772120842ee0d2"}, + {file = "astroid-2.13.2.tar.gz", hash = "sha256:3bc7834720e1a24ca797fd785d77efb14f7a28ee8e635ef040b6e2d80ccb3303"}, ] asttokens = [ {file = "asttokens-2.2.1-py2.py3-none-any.whl", hash = "sha256:6b0ac9e93fb0335014d382b8fa9b3afa7df546984258005da0b9e7095b3deb1c"}, @@ -2224,7 +2225,7 @@ attrs = [ {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] -Babel = [ +babel = [ {file = "Babel-2.11.0-py3-none-any.whl", hash = "sha256:1ad3eca1c885218f6dce2ab67291178944f810a10a9b5f3cb8382a5a232b64fe"}, {file = "Babel-2.11.0.tar.gz", hash = "sha256:5ef4b3226b0180dedded4229651c8b0e1a3a6a2837d45a073272f313e4cf97f6"}, ] @@ -2251,8 +2252,8 @@ black = [ {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, ] captum = [ - {file = "captum-0.5.0-py3-none-any.whl", hash = "sha256:b27996b4f7f0606d035ca210b723104ab209be55ca44bb65f46e036bcdf788e0"}, - {file = "captum-0.5.0.tar.gz", hash = "sha256:84af2c8793d34c440a351793b5ca705b8542745e2dc8bc24afb1d9b86f3bf6ec"}, + {file = "captum-0.6.0-py3-none-any.whl", hash = "sha256:6b157d6c625cc365c5f9630f1380fbb283194001dd77c0f7c07c827bdbe58661"}, + {file = "captum-0.6.0.tar.gz", hash = "sha256:01bb374622d46b56acd590462757bf2087ed3721dcebf4922db0fa77548d6524"}, ] certifi = [ {file = "certifi-2022.12.7-py3-none-any.whl", hash = "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18"}, @@ -2420,57 +2421,57 @@ contourpy = [ {file = "contourpy-1.0.6.tar.gz", hash = "sha256:6e459ebb8bb5ee4c22c19cc000174f8059981971a33ce11e17dddf6aca97a142"}, ] coverage = [ - {file = "coverage-7.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b3695c4f4750bca943b3e1f74ad4be8d29e4aeab927d50772c41359107bd5d5c"}, - {file = "coverage-7.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fa6a5a224b7f4cfb226f4fc55a57e8537fcc096f42219128c2c74c0e7d0953e1"}, - {file = "coverage-7.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74f70cd92669394eaf8d7756d1b195c8032cf7bbbdfce3bc489d4e15b3b8cf73"}, - {file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b66bb21a23680dee0be66557dc6b02a3152ddb55edf9f6723fa4a93368f7158d"}, - {file = "coverage-7.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d87717959d4d0ee9db08a0f1d80d21eb585aafe30f9b0a54ecf779a69cb015f6"}, - {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:854f22fa361d1ff914c7efa347398374cc7d567bdafa48ac3aa22334650dfba2"}, - {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:1e414dc32ee5c3f36544ea466b6f52f28a7af788653744b8570d0bf12ff34bc0"}, - {file = "coverage-7.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6c5ad996c6fa4d8ed669cfa1e8551348729d008a2caf81489ab9ea67cfbc7498"}, - {file = "coverage-7.0.1-cp310-cp310-win32.whl", hash = "sha256:691571f31ace1837838b7e421d3a09a8c00b4aac32efacb4fc9bd0a5c647d25a"}, - {file = "coverage-7.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:89caf4425fe88889e2973a8e9a3f6f5f9bbe5dd411d7d521e86428c08a873a4a"}, - {file = "coverage-7.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:63d56165a7c76265468d7e0c5548215a5ba515fc2cba5232d17df97bffa10f6c"}, - {file = "coverage-7.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f943a3b2bc520102dd3e0bb465e1286e12c9a54f58accd71b9e65324d9c7c01"}, - {file = "coverage-7.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:830525361249dc4cd013652b0efad645a385707a5ae49350c894b67d23fbb07c"}, - {file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd1b9c5adc066db699ccf7fa839189a649afcdd9e02cb5dc9d24e67e7922737d"}, - {file = "coverage-7.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e00c14720b8b3b6c23b487e70bd406abafc976ddc50490f645166f111c419c39"}, - {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6d55d840e1b8c0002fce66443e124e8581f30f9ead2e54fbf6709fb593181f2c"}, - {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:66b18c3cf8bbab0cce0d7b9e4262dc830e93588986865a8c78ab2ae324b3ed56"}, - {file = "coverage-7.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:12a5aa77783d49e05439fbe6e6b427484f8a0f9f456b46a51d8aac022cfd024d"}, - {file = "coverage-7.0.1-cp311-cp311-win32.whl", hash = "sha256:b77015d1cb8fe941be1222a5a8b4e3fbca88180cfa7e2d4a4e58aeabadef0ab7"}, - {file = "coverage-7.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:fb992c47cb1e5bd6a01e97182400bcc2ba2077080a17fcd7be23aaa6e572e390"}, - {file = "coverage-7.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e78e9dcbf4f3853d3ae18a8f9272111242531535ec9e1009fa8ec4a2b74557dc"}, - {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e60bef2e2416f15fdc05772bf87db06c6a6f9870d1db08fdd019fbec98ae24a9"}, - {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9823e4789ab70f3ec88724bba1a203f2856331986cd893dedbe3e23a6cfc1e4e"}, - {file = "coverage-7.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9158f8fb06747ac17bd237930c4372336edc85b6e13bdc778e60f9d685c3ca37"}, - {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:486ee81fa694b4b796fc5617e376326a088f7b9729c74d9defa211813f3861e4"}, - {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:1285648428a6101b5f41a18991c84f1c3959cee359e51b8375c5882fc364a13f"}, - {file = "coverage-7.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2c44fcfb3781b41409d0f060a4ed748537557de9362a8a9282182fafb7a76ab4"}, - {file = "coverage-7.0.1-cp37-cp37m-win32.whl", hash = "sha256:d6814854c02cbcd9c873c0f3286a02e3ac1250625cca822ca6bc1018c5b19f1c"}, - {file = "coverage-7.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f66460f17c9319ea4f91c165d46840314f0a7c004720b20be58594d162a441d8"}, - {file = "coverage-7.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9b373c9345c584bb4b5f5b8840df7f4ab48c4cbb7934b58d52c57020d911b856"}, - {file = "coverage-7.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d3022c3007d3267a880b5adcf18c2a9bf1fc64469b394a804886b401959b8742"}, - {file = "coverage-7.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92651580bd46519067e36493acb394ea0607b55b45bd81dd4e26379ed1871f55"}, - {file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:3cfc595d2af13856505631be072835c59f1acf30028d1c860b435c5fc9c15b69"}, - {file = "coverage-7.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b4b3a4d9915b2be879aff6299c0a6129f3d08a775d5a061f503cf79571f73e4"}, - {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b6f22bb64cc39bcb883e5910f99a27b200fdc14cdd79df8696fa96b0005c9444"}, - {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72d1507f152abacea81f65fee38e4ef3ac3c02ff8bc16f21d935fd3a8a4ad910"}, - {file = "coverage-7.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0a79137fc99815fff6a852c233628e735ec15903cfd16da0f229d9c4d45926ab"}, - {file = "coverage-7.0.1-cp38-cp38-win32.whl", hash = "sha256:b3763e7fcade2ff6c8e62340af9277f54336920489ceb6a8cd6cc96da52fcc62"}, - {file = "coverage-7.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:09f6b5a8415b6b3e136d5fec62b552972187265cb705097bf030eb9d4ffb9b60"}, - {file = "coverage-7.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:978258fec36c154b5e250d356c59af7d4c3ba02bef4b99cda90b6029441d797d"}, - {file = "coverage-7.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:19ec666533f0f70a0993f88b8273057b96c07b9d26457b41863ccd021a043b9a"}, - {file = "coverage-7.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cfded268092a84605f1cc19e5c737f9ce630a8900a3589e9289622db161967e9"}, - {file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07bcfb1d8ac94af886b54e18a88b393f6a73d5959bb31e46644a02453c36e475"}, - {file = "coverage-7.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b4a923cc7566bbc7ae2dfd0ba5a039b61d19c740f1373791f2ebd11caea59"}, - {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:aec2d1515d9d39ff270059fd3afbb3b44e6ec5758af73caf18991807138c7118"}, - {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c20cfebcc149a4c212f6491a5f9ff56f41829cd4f607b5be71bb2d530ef243b1"}, - {file = "coverage-7.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:fd556ff16a57a070ce4f31c635953cc44e25244f91a0378c6e9bdfd40fdb249f"}, - {file = "coverage-7.0.1-cp39-cp39-win32.whl", hash = "sha256:b9ea158775c7c2d3e54530a92da79496fb3fb577c876eec761c23e028f1e216c"}, - {file = "coverage-7.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:d1991f1dd95eba69d2cd7708ff6c2bbd2426160ffc73c2b81f617a053ebcb1a8"}, - {file = "coverage-7.0.1-pp37.pp38.pp39-none-any.whl", hash = "sha256:3dd4ee135e08037f458425b8842d24a95a0961831a33f89685ff86b77d378f89"}, - {file = "coverage-7.0.1.tar.gz", hash = "sha256:a4a574a19eeb67575a5328a5760bbbb737faa685616586a9f9da4281f940109c"}, + {file = "coverage-7.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:daf91db39324e9939a9db919ee4fb42a1a23634a056616dae891a030e89f87ba"}, + {file = "coverage-7.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:55121fe140d7e42cb970999b93cf1c2b24484ce028b32bbd00238bb25c13e34a"}, + {file = "coverage-7.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c027fbb83a8c78a6e06a0302ea1799fdb70e5cda9845a5e000545b8e2b47ea39"}, + {file = "coverage-7.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:caf82db5b7f16b51ec32fe0bd2da0805b177c807aa8bfb478c7e6f893418c284"}, + {file = 
"coverage-7.0.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ba5cc54baf3c322c4388de2a43cc95f7809366f0600e743e5aae8ea9d1038b2"}, + {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:260854160083f8275a9d9d49a05ab0ffc7a1f08f2ccccbfaec94a18aae9f407c"}, + {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ea45f0dba5a993e93b158f1a9dcfff2770e3bcabf2b80dbe7aa15dce0bcb3bf3"}, + {file = "coverage-7.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6abc91f6f8b3cc0ae1034e2c03f38769fba1952ab70d0b26953aa01691265c39"}, + {file = "coverage-7.0.4-cp310-cp310-win32.whl", hash = "sha256:053cdc47cae08257051d7e934a0de4d095b60eb8a3024fa9f1b2322fa1547137"}, + {file = "coverage-7.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:1e9e94f2612ee549a4b3ee79cbc61bceed77e69cf38cfa05858bae939a886d16"}, + {file = "coverage-7.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5caa9dd91dcc5f054350dc57a02e053d79633907b9ccffff999568d13dcd19f8"}, + {file = "coverage-7.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:efc200fa75d9634525b40babc7a16342bd21c101db1a58ef84dc14f4bf6ac0fd"}, + {file = "coverage-7.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1791e5f74c5b52f76e83fe9f4bb9571cf76d40ee0c51952ee1e4ee935b7e98b9"}, + {file = "coverage-7.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d9201cfa5a98652b9cef36ab202f17fe3ea83f497b4ba2a8ed39399dfb8fcd4"}, + {file = "coverage-7.0.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22d8ef6865cb6834cab2b72fff20747a55c714b57b675f7e11c9624fe4f7cb45"}, + {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b84076e3de192fba0f95e279ac017b64c7c6ecd4f09f36f13420f5bed898a9c7"}, + {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:dcfbf8ffc046f20d75fd775a92c378f6fc7b9bded6c6f2ab88b6b9cb5805a184"}, + {file = "coverage-7.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4665a714af31f160403c2e448fb2fef330719d2e04e836b08d60d612707c1041"}, + {file = "coverage-7.0.4-cp311-cp311-win32.whl", hash = "sha256:2e59aef3fba5758059208c9eff10ae7ded3629e797972746ec33b56844f69411"}, + {file = "coverage-7.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:2b854f7985b48122b6fe346631e86d67b63293f8255cb59a93d79e3d9f1574e3"}, + {file = "coverage-7.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e44b60b0b49aa85d548d392a2dca2c6a581cd4084e72e9e16bd58bd86ec20816"}, + {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2904d7a0388911c61e7e3beefe48c29dfccaba938fc1158f63190101a21e04c2"}, + {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bc74b64bfa89e2f862ea45dd6ac1def371d7cc883b76680d20bdd61a6f3daa20"}, + {file = "coverage-7.0.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c06046f54e719da21c79f98ecc0962581d1aee0b3798dc6b12b1217da8bf93f4"}, + {file = "coverage-7.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:bc9c77004970a364a1e5454cf7cb884e4277592b959c287689b2a0fd027ef552"}, + {file = "coverage-7.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:0815a09b32384e8ff00a5939ec9cd10efce8742347e019c2daca1a32f5ac2aae"}, + {file = 
"coverage-7.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a78a80d131c067d67d8a6f9bd3d3f7ea7eac82c1c7259f97d7ab73f723da9d55"}, + {file = "coverage-7.0.4-cp37-cp37m-win32.whl", hash = "sha256:2b5936b624fbe711ed02dfd86edd678822e5ee68da02b6d231e5c01090b64590"}, + {file = "coverage-7.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:a63922765ee49d5b4c32afb2cd5516812c8665f3b78e64a0dd005bdfabf991b1"}, + {file = "coverage-7.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d68f2f7bddb3acdd3b36ef7f334b9d14f30b93e094f808fbbd8d288b8f9e2f9b"}, + {file = "coverage-7.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9dafdba3b2b9010abab08cb8c0dc6549bfca6e1630fe14d47b01dca00d39e694"}, + {file = "coverage-7.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0322354757b47640535daabd2d56384ff3cad2896248fc84d328c5fad4922d5c"}, + {file = "coverage-7.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e8267466662aff93d66fa72b9591d02122dfc8a729b0a43dd70e0fb07ed9b37"}, + {file = "coverage-7.0.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f684d88eb4924ed0630cf488fd5606e334c6835594bb5fe36b50a509b10383ed"}, + {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70c294bb15ba576fb96b580db35895bf03749d683df044212b74e938a7f6821f"}, + {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:34c0457e1ba450ae8b22dc8ea2fd36ada1010af61291e4c96963cd9d9633366f"}, + {file = "coverage-7.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b75aff2c35ceaa299691e772f7bf7c8aeab25f46acea2be3dd04cccb914a9860"}, + {file = "coverage-7.0.4-cp38-cp38-win32.whl", hash = "sha256:6c5554d55668381e131577f20e8f620d4882b04ad558f7e7f3f1f55b3124c379"}, + {file = "coverage-7.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c82f34fafaf5bc05d222fcf84423d6e156432ca35ca78672d4affd0c09c6ef6c"}, + {file = "coverage-7.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b8dfb5fed540f77e814bf4ec79619c241af6b4578fa1093c5e3389bbb7beab3f"}, + {file = "coverage-7.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee32a080bab779b71c4d09a3eb5254bfca43ee88828a683dab27dfe8f582516e"}, + {file = "coverage-7.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2dfbee0bf0d633be3a2ab068f5a5731a70adf147d0ba17d9f9932b46c7c5782b"}, + {file = "coverage-7.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32dc010713455ac0fe2fddb0e48aa43875cc7eb7b09768df10bad8ce45f9c430"}, + {file = "coverage-7.0.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cb88a3019ad042eaa69fc7639ef077793fedbf313e89207aa82fefe92c97ebd"}, + {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:73bc6114aab7753ca784f87bcd3b7613bc797aa255b5bca45e5654070ae9acfb"}, + {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:92f135d370fcd7a6fb9659fa2eb716dd2ca364719cbb1756f74d90a221bca1a7"}, + {file = "coverage-7.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f3d485e6ec6e09857bf2115ece572d666b7c498377d4c70e66bb06c63ed177c2"}, + {file = "coverage-7.0.4-cp39-cp39-win32.whl", hash = "sha256:c58921fcd9914b56444292e7546fe183d079db99528142c809549ddeaeacd8e9"}, + {file = "coverage-7.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:f092d9f2ddaa30235d33335fbdb61eb8f3657af519ef5f9dd6bdae65272def11"}, + {file = 
"coverage-7.0.4-pp37.pp38.pp39-none-any.whl", hash = "sha256:cb8cfa3bf3a9f18211279458917fef5edeb5e1fdebe2ea8b11969ec2ebe48884"}, + {file = "coverage-7.0.4.tar.gz", hash = "sha256:f6c4ad409a0caf7e2e12e203348b1a9b19c514e7d078520973147bf2d3dcbc6f"}, ] cycler = [ {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"}, @@ -2485,24 +2486,24 @@ datasets = [ {file = "datasets-2.8.0.tar.gz", hash = "sha256:a843b69593914071f921fc1086fde939f30a63415a34cdda5db3c0acdd58aff2"}, ] debugpy = [ - {file = "debugpy-1.6.4-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:6ae238943482c78867ac707c09122688efb700372b617ffd364261e5e41f7a2f"}, - {file = "debugpy-1.6.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a39e7da178e1f22f4bc04b57f085e785ed1bcf424aaf318835a1a7129eefe35"}, - {file = "debugpy-1.6.4-cp310-cp310-win32.whl", hash = "sha256:143f79d0798a9acea21cd1d111badb789f19d414aec95fa6389cfea9485ddfb1"}, - {file = "debugpy-1.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:563f148f94434365ec0ce94739c749aabf60bf67339e68a9446499f3582d62f3"}, - {file = "debugpy-1.6.4-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:1caee68f7e254267df908576c0d0938f8f88af16383f172cb9f0602e24c30c01"}, - {file = "debugpy-1.6.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40e2a83d31a16b83666f19fa06d97b2cc311af88e6266590579737949971a17e"}, - {file = "debugpy-1.6.4-cp37-cp37m-win32.whl", hash = "sha256:82229790442856962aec4767b98ba2559fe0998f897e9f21fb10b4fd24b6c436"}, - {file = "debugpy-1.6.4-cp37-cp37m-win_amd64.whl", hash = "sha256:67edf033f9e512958f7b472975ff9d9b7ff64bf4440f6f6ae44afdc66b89e6b6"}, - {file = "debugpy-1.6.4-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:4ab5e938925e5d973f567d6ef32751b17d10f3be3a8c4d73c52f53e727f69bf1"}, - {file = "debugpy-1.6.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8df268e9f72fc06efc2e75e8dc8e2b881d6a397356faec26efb2ee70b6863b7"}, - {file = "debugpy-1.6.4-cp38-cp38-win32.whl", hash = "sha256:86bd25f38f8b6c5d430a5e2931eebbd5f580c640f4819fcd236d0498790c7204"}, - {file = "debugpy-1.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:62ba4179b372a62abf9c89b56997d70a4100c6dea6c2a4e0e4be5f45920b3253"}, - {file = "debugpy-1.6.4-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d2968e589bda4e485a9c61f113754a28e48d88c5152ed8e0b2564a1fadbe50a5"}, - {file = "debugpy-1.6.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e62b8034ede98932b92268669318848a0d42133d857087a3b9cec03bb844c615"}, - {file = "debugpy-1.6.4-cp39-cp39-win32.whl", hash = "sha256:3d9c31baf64bf959a593996c108e911c5a9aa1693a296840e5469473f064bcec"}, - {file = "debugpy-1.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:ea4bf208054e6d41749f17612066da861dff10102729d32c85b47f155223cf2b"}, - {file = "debugpy-1.6.4-py2.py3-none-any.whl", hash = "sha256:e886a1296cd20a10172e94788009ce74b759e54229ebd64a43fa5c2b4e62cd76"}, - {file = "debugpy-1.6.4.zip", hash = "sha256:d5ab9bd3f4e7faf3765fd52c7c43c074104ab1e109621dc73219099ed1a5399d"}, + {file = "debugpy-1.6.5-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:696165f021a6a17da08163eaae84f3faf5d8be68fb78cd78488dd347e625279c"}, + {file = "debugpy-1.6.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17039e392d6f38388a68bd02c5f823b32a92142a851e96ba3ec52aeb1ce9d900"}, + {file = "debugpy-1.6.5-cp310-cp310-win32.whl", hash = "sha256:62a06eb78378292ba6c427d861246574dc8b84471904973797b29dd33c7c2495"}, 
+ {file = "debugpy-1.6.5-cp310-cp310-win_amd64.whl", hash = "sha256:9984fc00ab372c97f63786c400107f54224663ea293daab7b365a5b821d26309"}, + {file = "debugpy-1.6.5-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:048368f121c08b00bbded161e8583817af5055982d2722450a69efe2051621c2"}, + {file = "debugpy-1.6.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74e4eca42055759032e3f1909d1374ba1d729143e0c2729bb8cb5e8b5807c458"}, + {file = "debugpy-1.6.5-cp37-cp37m-win32.whl", hash = "sha256:0f9afcc8cad6424695f3356dc9a7406d5b18e37ee2e73f34792881a44b02cc50"}, + {file = "debugpy-1.6.5-cp37-cp37m-win_amd64.whl", hash = "sha256:b5a74ecebe5253344501d9b23f74459c46428b30437fa9254cfb8cb129943242"}, + {file = "debugpy-1.6.5-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:9e809ef787802c808995e5b6ade714a25fa187f892b41a412d418a15a9c4a432"}, + {file = "debugpy-1.6.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:947c686e8adb46726f3d5f19854f6aebf66c2edb91225643c7f44b40b064a235"}, + {file = "debugpy-1.6.5-cp38-cp38-win32.whl", hash = "sha256:377391341c4b86f403d93e467da8e2d05c22b683f08f9af3e16d980165b06b90"}, + {file = "debugpy-1.6.5-cp38-cp38-win_amd64.whl", hash = "sha256:286ae0c2def18ee0dc8a61fa76d51039ca8c11485b6ed3ef83e3efe8a23926ae"}, + {file = "debugpy-1.6.5-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:500dd4a9ff818f5c52dddb4a608c7de5371c2d7d905c505eb745556c579a9f11"}, + {file = "debugpy-1.6.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f3fab217fe7e2acb2d90732af1a871947def4e2b6654945ba1ebd94bd0bea26"}, + {file = "debugpy-1.6.5-cp39-cp39-win32.whl", hash = "sha256:15bc5febe0edc79726517b1f8d57d7ac7c784567b5ba804aab8b1c9d07a57018"}, + {file = "debugpy-1.6.5-cp39-cp39-win_amd64.whl", hash = "sha256:7e84d9e4420122384cb2cc762a00b4e17cbf998022890f89b195ce178f78ff47"}, + {file = "debugpy-1.6.5-py2.py3-none-any.whl", hash = "sha256:8116e40a1cd0593bd2aba01d4d560ee08f018da8e8fbd4cbd24ff09b5f0e41ef"}, + {file = "debugpy-1.6.5.zip", hash = "sha256:5e55e6c79e215239dd0794ee0bf655412b934735a58e9d705e5c544f596f1603"}, ] decorator = [ {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, @@ -2632,7 +2633,7 @@ gitdb = [ {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, ] -GitPython = [ +gitpython = [ {file = "GitPython-3.1.30-py3-none-any.whl", hash = "sha256:cd455b0000615c60e286208ba540271af9fe531fa6a87cc590a7298785ab2882"}, {file = "GitPython-3.1.30.tar.gz", hash = "sha256:769c2d83e13f5d938b7688479da374c4e3d49f71549aaf462b646db9602ea6f8"}, ] @@ -2641,8 +2642,8 @@ huggingface-hub = [ {file = "huggingface_hub-0.11.1.tar.gz", hash = "sha256:8b9ebf9bbb1782f6f0419ec490973a6487c6c4ed84293a8a325d34c4f898f53f"}, ] identify = [ - {file = "identify-2.5.11-py2.py3-none-any.whl", hash = "sha256:e7db36b772b188099616aaf2accbee122949d1c6a1bac4f38196720d6f9f06db"}, - {file = "identify-2.5.11.tar.gz", hash = "sha256:14b7076b29c99b1b0b8b08e96d448c7b877a9b07683cd8cfda2ea06af85ffa1c"}, + {file = "identify-2.5.12-py2.py3-none-any.whl", hash = "sha256:e8a400c3062d980243d27ce10455a52832205649bbcaf27ffddb3dfaaf477bad"}, + {file = "identify-2.5.12.tar.gz", hash = "sha256:0bc96b09c838310b6fcfcc61f78a981ea07f94836ef6ef553da5bb5d4745d662"}, ] idna = [ {file = "idna-3.4-py3-none-any.whl", hash = 
"sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, @@ -2657,16 +2658,16 @@ importlib-metadata = [ {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, ] iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] ipykernel = [ - {file = "ipykernel-6.19.4-py3-none-any.whl", hash = "sha256:0ecdae0060da61c5222ad221681f3b99b5bef739e11a3b1eb5778aa47f056f1f"}, - {file = "ipykernel-6.19.4.tar.gz", hash = "sha256:4140c282a6c71cdde59abe5eae2c71bf1eeb4a69316ab76e1c4c25150a49722b"}, + {file = "ipykernel-6.20.1-py3-none-any.whl", hash = "sha256:a314e6782a4f9e277783382976b3a93608a3787cd70a235b558b47f875134be1"}, + {file = "ipykernel-6.20.1.tar.gz", hash = "sha256:f6016ecbf581d0ea6e29ba16cee6cc1a9bbde3835900c46c6571a791692f4139"}, ] ipython = [ - {file = "ipython-8.7.0-py3-none-any.whl", hash = "sha256:352042ddcb019f7c04e48171b4dd78e4c4bb67bf97030d170e154aac42b656d9"}, - {file = "ipython-8.7.0.tar.gz", hash = "sha256:882899fe78d5417a0aa07f995db298fa28b58faeba2112d2e3a4c95fe14bb738"}, + {file = "ipython-8.8.0-py3-none-any.whl", hash = "sha256:da01e6df1501e6e7c32b5084212ddadd4ee2471602e2cf3e0190f4de6b0ea481"}, + {file = "ipython-8.8.0.tar.gz", hash = "sha256:f3bf2c08505ad2c3f4ed5c46ae0331a8547d36bf4b21a451e8ae80c0791db95b"}, ] ipywidgets = [ {file = "ipywidgets-8.0.4-py3-none-any.whl", hash = "sha256:ebb195e743b16c3947fe8827190fb87b4d00979c0fbf685afe4d2c4927059fa1"}, @@ -2680,7 +2681,7 @@ jedi = [ {file = "jedi-0.18.2-py2.py3-none-any.whl", hash = "sha256:203c1fd9d969ab8f2119ec0a3342e0b49910045abe6af0a3ae83a5764d54639e"}, {file = "jedi-0.18.2.tar.gz", hash = "sha256:bae794c30d07f6d910d32a7048af09b5a39ed740918da923c6b780790ebac612"}, ] -Jinja2 = [ +jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, ] @@ -2697,8 +2698,8 @@ jupyter-client = [ {file = "jupyter_client-7.4.8.tar.gz", hash = "sha256:109a3c33b62a9cf65aa8325850a0999a795fac155d9de4f7555aef5f310ee35a"}, ] jupyter-core = [ - {file = "jupyter_core-5.1.1-py3-none-any.whl", hash = "sha256:f1038179d0f179b0e92c8fa2289c012b29dafdc9484b41821079f1a496f5a0f2"}, - {file = "jupyter_core-5.1.1.tar.gz", hash = "sha256:f342d29eb6edb06f8dffa69adea987b3a9ee2b6702338a8cb6911516ea0b432d"}, + {file = "jupyter_core-5.1.3-py3-none-any.whl", hash = "sha256:d23ab7db81ca1759f13780cd6b65f37f59bf8e0186ac422d5ca4982cc7d56716"}, + {file = "jupyter_core-5.1.3.tar.gz", hash = "sha256:82e1cff0ef804c38677eff7070d5ff1d45037fef01a2d9ba9e6b7b8201831e9f"}, ] jupyterlab-widgets = [ {file = "jupyterlab_widgets-3.0.5-py3-none-any.whl", hash = "sha256:a04a42e50231b355b7087e16a818f541e53589f7647144ea0344c4bf16f300e5"}, @@ -2775,31 +2776,48 @@ kiwisolver = [ {file = "kiwisolver-1.4.4.tar.gz", hash = "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955"}, ] lazy-object-proxy = [ - {file = "lazy-object-proxy-1.8.0.tar.gz", hash = 
"sha256:c219a00245af0f6fa4e95901ed28044544f50152840c5b6a3e7b2568db34d156"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4fd031589121ad46e293629b39604031d354043bb5cdf83da4e93c2d7f3389fe"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win32.whl", hash = "sha256:b70d6e7a332eb0217e7872a73926ad4fdc14f846e85ad6749ad111084e76df25"}, - {file = "lazy_object_proxy-1.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:eb329f8d8145379bf5dbe722182410fe8863d186e51bf034d2075eb8d85ee25b"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4e2d9f764f1befd8bdc97673261b8bb888764dfdbd7a4d8f55e4fbcabb8c3fb7"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win32.whl", hash = "sha256:e20bfa6db17a39c706d24f82df8352488d2943a3b7ce7d4c22579cb89ca8896e"}, - {file = "lazy_object_proxy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:14010b49a2f56ec4943b6cf925f597b534ee2fe1f0738c84b3bce0c1a11ff10d"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6850e4aeca6d0df35bb06e05c8b934ff7c533734eb51d0ceb2d63696f1e6030c"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win32.whl", hash = "sha256:5b51d6f3bfeb289dfd4e95de2ecd464cd51982fe6f00e2be1d0bf94864d58acd"}, - {file = "lazy_object_proxy-1.8.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6f593f26c470a379cf7f5bc6db6b5f1722353e7bf937b8d0d0b3fba911998858"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0c1c7c0433154bb7c54185714c6929acc0ba04ee1b167314a779b9025517eada"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win32.whl", hash = "sha256:d176f392dbbdaacccf15919c77f526edf11a34aece58b55ab58539807b85436f"}, - {file = "lazy_object_proxy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:afcaa24e48bb23b3be31e329deb3f1858f1f1df86aea3d70cb5c8578bfe5261c"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:71d9ae8a82203511a6f60ca5a1b9f8ad201cac0fc75038b2dc5fa519589c9288"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win32.whl", hash = "sha256:8f6ce2118a90efa7f62dd38c7dbfffd42f468b180287b748626293bf12ed468f"}, - {file = "lazy_object_proxy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:eac3a9a5ef13b332c059772fd40b4b1c3d45a3a2b05e33a361dee48e54a4dad0"}, - {file = "lazy_object_proxy-1.8.0-pp37-pypy37_pp73-any.whl", hash = "sha256:ae032743794fba4d171b5b67310d69176287b5bf82a21f588282406a79498891"}, - {file = "lazy_object_proxy-1.8.0-pp38-pypy38_pp73-any.whl", hash = "sha256:7e1561626c49cb394268edd00501b289053a652ed762c58e1081224c8d881cec"}, - {file = "lazy_object_proxy-1.8.0-pp39-pypy39_pp73-any.whl", hash = "sha256:ce58b2b3734c73e68f0e30e4e725264d4d6be95818ec0a0be4bb6bf9a7e79aa8"}, -] -Markdown = [ + {file = "lazy-object-proxy-1.9.0.tar.gz", hash = "sha256:659fb5809fa4629b8a1ac5106f669cfc7bef26fbb389dda53b3e010d1ac4ebae"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b40387277b0ed2d0602b8293b94d7257e17d1479e257b4de114ea11a8cb7f2d7"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8c6cfb338b133fbdbc5cfaa10fe3c6aeea827db80c978dbd13bc9dd8526b7d4"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:721532711daa7db0d8b779b0bb0318fa87af1c10d7fe5e52ef30f8eff254d0cd"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:66a3de4a3ec06cd8af3f61b8e1ec67614fbb7c995d02fa224813cb7afefee701"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:1aa3de4088c89a1b69f8ec0dcc169aa725b0ff017899ac568fe44ddc1396df46"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win32.whl", hash = "sha256:f0705c376533ed2a9e5e97aacdbfe04cecd71e0aa84c7c0595d02ef93b6e4455"}, + {file = "lazy_object_proxy-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:ea806fd4c37bf7e7ad82537b0757999264d5f70c45468447bb2b91afdbe73a6e"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:946d27deaff6cf8452ed0dba83ba38839a87f4f7a9732e8f9fd4107b21e6ff07"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:79a31b086e7e68b24b99b23d57723ef7e2c6d81ed21007b6281ebcd1688acb0a"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f699ac1c768270c9e384e4cbd268d6e67aebcfae6cd623b4d7c3bfde5a35db59"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:bfb38f9ffb53b942f2b5954e0f610f1e721ccebe9cce9025a38c8ccf4a5183a4"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:189bbd5d41ae7a498397287c408617fe5c48633e7755287b21d741f7db2706a9"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win32.whl", hash = "sha256:81fc4d08b062b535d95c9ea70dbe8a335c45c04029878e62d744bdced5141586"}, + {file = "lazy_object_proxy-1.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:f2457189d8257dd41ae9b434ba33298aec198e30adf2dcdaaa3a28b9994f6adb"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9e25ef10a39e8afe59a5c348a4dbf29b4868ab76269f81ce1674494e2565a6e"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cbf9b082426036e19c6924a9ce90c740a9861e2bdc27a4834fd0a910742ac1e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f5fa4a61ce2438267163891961cfd5e32ec97a2c444e5b842d574251ade27d2"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:8fa02eaab317b1e9e03f69aab1f91e120e7899b392c4fc19807a8278a07a97e8"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e7c21c95cae3c05c14aafffe2865bbd5e377cfc1348c4f7751d9dc9a48ca4bda"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win32.whl", hash = "sha256:f12ad7126ae0c98d601a7ee504c1122bcef553d1d5e0c3bfa77b16b3968d2734"}, + {file = "lazy_object_proxy-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:edd20c5a55acb67c7ed471fa2b5fb66cb17f61430b7a6b9c3b4a1e40293b1671"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2d0daa332786cf3bb49e10dc6a17a52f6a8f9601b4cf5c295a4f85854d61de63"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cd077f3d04a58e83d04b20e334f678c2b0ff9879b9375ed107d5d07ff160171"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:660c94ea760b3ce47d1855a30984c78327500493d396eac4dfd8bd82041b22be"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:212774e4dfa851e74d393a2370871e174d7ff0ebc980907723bb67d25c8a7c30"}, + {file = 
"lazy_object_proxy-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f0117049dd1d5635bbff65444496c90e0baa48ea405125c088e93d9cf4525b11"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win32.whl", hash = "sha256:0a891e4e41b54fd5b8313b96399f8b0e173bbbfc03c7631f01efbe29bb0bcf82"}, + {file = "lazy_object_proxy-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:9990d8e71b9f6488e91ad25f322898c136b008d87bf852ff65391b004da5e17b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9e7551208b2aded9c1447453ee366f1c4070602b3d932ace044715d89666899b"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f83ac4d83ef0ab017683d715ed356e30dd48a93746309c8f3517e1287523ef4"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7322c3d6f1766d4ef1e51a465f47955f1e8123caee67dd641e67d539a534d006"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:18b78ec83edbbeb69efdc0e9c1cb41a3b1b1ed11ddd8ded602464c3fc6020494"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:09763491ce220c0299688940f8dc2c5d05fd1f45af1e42e636b2e8b2303e4382"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win32.whl", hash = "sha256:9090d8e53235aa280fc9239a86ae3ea8ac58eff66a705fa6aa2ec4968b95c821"}, + {file = "lazy_object_proxy-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:db1c1722726f47e10e0b5fdbf15ac3b8adb58c091d12b3ab713965795036985f"}, +] +markdown = [ {file = "Markdown-3.4.1-py3-none-any.whl", hash = "sha256:08fb8465cffd03d10b9dd34a5c3fea908e20391a2a90b88d66362cb05beed186"}, {file = "Markdown-3.4.1.tar.gz", hash = "sha256:3b809086bb6efad416156e00a0da66fe47618a5d6918dd688f53f40c8e4cfeff"}, ] -MarkupSafe = [ +markupsafe = [ {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, @@ -3044,8 +3062,8 @@ nvidia-cudnn-cu11 = [ {file = "nvidia_cudnn_cu11-8.5.0.96-py3-none-manylinux1_x86_64.whl", hash = "sha256:71f8111eb830879ff2836db3cccf03bbd735df9b0d17cd93761732ac50a8a108"}, ] packaging = [ - {file = "packaging-22.0-py3-none-any.whl", hash = "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3"}, - {file = "packaging-22.0.tar.gz", hash = "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] pandas = [ {file = "pandas-1.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9dbacd22555c2d47f262ef96bb4e30880e5956169741400af8b306bbb24a273"}, @@ -3100,7 +3118,14 @@ pickleshare = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] -Pillow = [ +pillow = [ + {file = 
"Pillow-9.4.0-1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1b4b4e9dda4f4e4c4e6896f93e84a8f0bcca3b059de9ddf67dac3c334b1195e1"}, + {file = "Pillow-9.4.0-1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fb5c1ad6bad98c57482236a21bf985ab0ef42bd51f7ad4e4538e89a997624e12"}, + {file = "Pillow-9.4.0-1-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:f0caf4a5dcf610d96c3bd32932bfac8aee61c96e60481c2a0ea58da435e25acd"}, + {file = "Pillow-9.4.0-1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3f4cc516e0b264c8d4ccd6b6cbc69a07c6d582d8337df79be1e15a5056b258c9"}, + {file = "Pillow-9.4.0-1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:b8c2f6eb0df979ee99433d8b3f6d193d9590f735cf12274c108bd954e30ca858"}, + {file = "Pillow-9.4.0-1-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b70756ec9417c34e097f987b4d8c510975216ad26ba6e57ccb53bc758f490dab"}, + {file = "Pillow-9.4.0-1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:43521ce2c4b865d385e78579a082b6ad1166ebed2b1a2293c3be1d68dd7ca3b9"}, {file = "Pillow-9.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:2968c58feca624bb6c8502f9564dd187d0e1389964898f5e9e1fbc8533169157"}, {file = "Pillow-9.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c5c1362c14aee73f50143d74389b2c158707b4abce2cb055b7ad37ce60738d47"}, {file = "Pillow-9.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd752c5ff1b4a870b7661234694f24b1d2b9076b8bf337321a814c612665f343"}, @@ -3276,13 +3301,13 @@ pyflakes = [ {file = "pyflakes-3.0.1-py2.py3-none-any.whl", hash = "sha256:ec55bf7fe21fff7f1ad2f7da62363d749e2a470500eab1b555334b67aa1ef8cf"}, {file = "pyflakes-3.0.1.tar.gz", hash = "sha256:ec8b276a6b60bd80defed25add7e439881c19e64850afd9b346283d4165fd0fd"}, ] -Pygments = [ +pygments = [ {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] pylint = [ - {file = "pylint-2.15.9-py3-none-any.whl", hash = "sha256:349c8cd36aede4d50a0754a8c0218b43323d13d5d88f4b2952ddfe3e169681eb"}, - {file = "pylint-2.15.9.tar.gz", hash = "sha256:18783cca3cfee5b83c6c5d10b3cdb66c6594520ffae61890858fe8d932e1c6b4"}, + {file = "pylint-2.15.10-py3-none-any.whl", hash = "sha256:9df0d07e8948a1c3ffa3b6e2d7e6e63d9fb457c5da5b961ed63106594780cc7e"}, + {file = "pylint-2.15.10.tar.gz", hash = "sha256:b3dc5ef7d33858f297ac0d06cc73862f01e4f2e74025ec3eff347ce0bc60baf5"}, ] pyparsing = [ {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, @@ -3324,7 +3349,7 @@ pywin32 = [ {file = "pywin32-305-cp39-cp39-win32.whl", hash = "sha256:9d968c677ac4d5cbdaa62fd3014ab241718e619d8e36ef8e11fb930515a1e918"}, {file = "pywin32-305-cp39-cp39-win_amd64.whl", hash = "sha256:50768c6b7c3f0b38b7fb14dd4104da93ebced5f1a50dc0e834594bff6fbe1271"}, ] -PyYAML = [ +pyyaml = [ {file = "PyYAML-6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53"}, {file = "PyYAML-6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c"}, {file = "PyYAML-6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc"}, @@ -3548,11 +3573,11 @@ rich = [ {file = "rich-10.16.2-py3-none-any.whl", hash = 
"sha256:c59d73bd804c90f747c8d7b1d023b88f2a9ac2454224a4aeaf959b21eeb42d03"}, {file = "rich-10.16.2.tar.gz", hash = "sha256:720974689960e06c2efdb54327f8bf0cdbdf4eae4ad73b6c94213cad405c371b"}, ] -"ruamel.yaml" = [ +ruamel-yaml = [ {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, ] -"ruamel.yaml.clib" = [ +ruamel-yaml-clib = [ {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d5859983f26d8cd7bb5c287ef452e8aacc86501487634573d260968f753e1d71"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:debc87a9516b237d0466a711b18b6ebeb17ba9f391eb7f91c649c5c4ec5006c7"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:df5828871e6648db72d1c19b4bd24819b80a755c4541d3409f0f7acd0f335c80"}, @@ -3561,7 +3586,6 @@ rich = [ {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4b3a93bb9bc662fc1f99c5c3ea8e623d8b23ad22f861eb6fce9377ac07ad6072"}, {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-macosx_12_0_arm64.whl", hash = "sha256:a234a20ae07e8469da311e182e70ef6b199d0fbeb6c6cc2901204dd87fb867e8"}, {file = "ruamel.yaml.clib-0.2.7-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:15910ef4f3e537eea7fe45f8a5d19997479940d9196f357152a09031c5be59f3"}, @@ -3616,27 +3640,27 @@ scikit-learn = [ {file = "scikit_learn-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:ceb0008f345188aa236e49c973dc160b9ed504a3abd7b321a0ecabcb669be0bd"}, ] scipy = [ - {file = "scipy-1.9.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1884b66a54887e21addf9c16fb588720a8309a57b2e258ae1c7986d4444d3bc0"}, - {file = "scipy-1.9.3-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:83b89e9586c62e787f5012e8475fbb12185bafb996a03257e9675cd73d3736dd"}, - {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a72d885fa44247f92743fc20732ae55564ff2a519e8302fb7e18717c5355a8b"}, - {file = "scipy-1.9.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01e1dd7b15bd2449c8bfc6b7cc67d630700ed655654f0dfcf121600bad205c9"}, - {file = "scipy-1.9.3-cp310-cp310-win_amd64.whl", hash = "sha256:68239b6aa6f9c593da8be1509a05cb7f9efe98b80f43a5861cd24c7557e98523"}, - {file = "scipy-1.9.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b41bc822679ad1c9a5f023bc93f6d0543129ca0f37c1ce294dd9d386f0a21096"}, - {file = "scipy-1.9.3-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:90453d2b93ea82a9f434e4e1cba043e779ff67b92f7a0e85d05d286a3625df3c"}, - {file = "scipy-1.9.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c06e62a390a9167da60bedd4575a14c1f58ca9dfde59830fc42e5197283dab"}, - {file = 
"scipy-1.9.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:abaf921531b5aeaafced90157db505e10345e45038c39e5d9b6c7922d68085cb"}, - {file = "scipy-1.9.3-cp311-cp311-win_amd64.whl", hash = "sha256:06d2e1b4c491dc7d8eacea139a1b0b295f74e1a1a0f704c375028f8320d16e31"}, - {file = "scipy-1.9.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5a04cd7d0d3eff6ea4719371cbc44df31411862b9646db617c99718ff68d4840"}, - {file = "scipy-1.9.3-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:545c83ffb518094d8c9d83cce216c0c32f8c04aaf28b92cc8283eda0685162d5"}, - {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d54222d7a3ba6022fdf5773931b5d7c56efe41ede7f7128c7b1637700409108"}, - {file = "scipy-1.9.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cff3a5295234037e39500d35316a4c5794739433528310e117b8a9a0c76d20fc"}, - {file = "scipy-1.9.3-cp38-cp38-win_amd64.whl", hash = "sha256:2318bef588acc7a574f5bfdff9c172d0b1bf2c8143d9582e05f878e580a3781e"}, - {file = "scipy-1.9.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d644a64e174c16cb4b2e41dfea6af722053e83d066da7343f333a54dae9bc31c"}, - {file = "scipy-1.9.3-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:da8245491d73ed0a994ed9c2e380fd058ce2fa8a18da204681f2fe1f57f98f95"}, - {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4db5b30849606a95dcf519763dd3ab6fe9bd91df49eba517359e450a7d80ce2e"}, - {file = "scipy-1.9.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c68db6b290cbd4049012990d7fe71a2abd9ffbe82c0056ebe0f01df8be5436b0"}, - {file = "scipy-1.9.3-cp39-cp39-win_amd64.whl", hash = "sha256:5b88e6d91ad9d59478fafe92a7c757d00c59e3bdc3331be8ada76a4f8d683f58"}, - {file = "scipy-1.9.3.tar.gz", hash = "sha256:fbc5c05c85c1a02be77b1ff591087c83bc44579c6d2bd9fb798bb64ea5e1a027"}, + {file = "scipy-1.10.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:b901b423c91281a974f6cd1c36f5c6c523e665b5a6d5e80fcb2334e14670eefd"}, + {file = "scipy-1.10.0-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:16ba05d3d1b9f2141004f3f36888e05894a525960b07f4c2bfc0456b955a00be"}, + {file = "scipy-1.10.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:151f066fe7d6653c3ffefd489497b8fa66d7316e3e0d0c0f7ff6acca1b802809"}, + {file = "scipy-1.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f9ea0a37aca111a407cb98aa4e8dfde6e5d9333bae06dfa5d938d14c80bb5c3"}, + {file = "scipy-1.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:27e548276b5a88b51212b61f6dda49a24acf5d770dff940bd372b3f7ced8c6c2"}, + {file = "scipy-1.10.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:42ab8b9e7dc1ebe248e55f54eea5307b6ab15011a7883367af48dd781d1312e4"}, + {file = "scipy-1.10.0-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:e096b062d2efdea57f972d232358cb068413dc54eec4f24158bcbb5cb8bddfd8"}, + {file = "scipy-1.10.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4df25a28bd22c990b22129d3c637fd5c3be4b7c94f975dca909d8bab3309b694"}, + {file = "scipy-1.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad449db4e0820e4b42baccefc98ec772ad7818dcbc9e28b85aa05a536b0f1a2"}, + {file = "scipy-1.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:6faf86ef7717891195ae0537e48da7524d30bc3b828b30c9b115d04ea42f076f"}, + {file = "scipy-1.10.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = 
"sha256:4bd0e3278126bc882d10414436e58fa3f1eca0aa88b534fcbf80ed47e854f46c"}, + {file = "scipy-1.10.0-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:38bfbd18dcc69eeb589811e77fae552fa923067fdfbb2e171c9eac749885f210"}, + {file = "scipy-1.10.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ab2a58064836632e2cec31ca197d3695c86b066bc4818052b3f5381bfd2a728"}, + {file = "scipy-1.10.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cd7a30970c29d9768a7164f564d1fbf2842bfc77b7d114a99bc32703ce0bf48"}, + {file = "scipy-1.10.0-cp38-cp38-win_amd64.whl", hash = "sha256:9b878c671655864af59c108c20e4da1e796154bd78c0ed6bb02bc41c84625686"}, + {file = "scipy-1.10.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:3afcbddb4488ac950ce1147e7580178b333a29cd43524c689b2e3543a080a2c8"}, + {file = "scipy-1.10.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:6e4497e5142f325a5423ff5fda2fff5b5d953da028637ff7c704378c8c284ea7"}, + {file = "scipy-1.10.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:441cab2166607c82e6d7a8683779cb89ba0f475b983c7e4ab88f3668e268c143"}, + {file = "scipy-1.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0490dc499fe23e4be35b8b6dd1e60a4a34f0c4adb30ac671e6332446b3cbbb5a"}, + {file = "scipy-1.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:954ff69d2d1bf666b794c1d7216e0a746c9d9289096a64ab3355a17c7c59db54"}, + {file = "scipy-1.10.0.tar.gz", hash = "sha256:c8b3cbc636a87a89b770c6afc999baa6bcbb01691b5ccbbc1b1791c7c0a07540"}, ] sentencepiece = [ {file = "sentencepiece-0.1.97-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:6f249c8f1852893be86eae66b19d522c5fb30bbad4fe2d1b07f06fdc86e1907e"}, @@ -3697,7 +3721,7 @@ snowballstemmer = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] -Sphinx = [ +sphinx = [ {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, ] @@ -3717,8 +3741,8 @@ sphinx-rtd-theme = [ {file = "sphinx_rtd_theme-1.1.1.tar.gz", hash = "sha256:6146c845f1e1947b3c3dd4432c28998a1693ccc742b4f9ad7c63129f0757c103"}, ] sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, + {file = "sphinxcontrib.applehelp-1.0.3-py3-none-any.whl", hash = "sha256:ba0f2a22e6eeada8da6428d0d520215ee8864253f32facf958cca81e426f661d"}, + {file = "sphinxcontrib.applehelp-1.0.3.tar.gz", hash = "sha256:83749f09f6ac843b8cb685277dbc818a8bf2d76cc19602699094fe9a74db529e"}, ] sphinxcontrib-devhelp = [ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, @@ -3858,8 +3882,8 @@ tqdm = [ {file = "tqdm-4.64.1.tar.gz", hash = "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4"}, ] traitlets = [ - {file = "traitlets-5.8.0-py3-none-any.whl", hash = "sha256:c864831efa0ba6576d09b44884b34e41defc18c0d7e720b4a2d6698c842cab3e"}, - {file = "traitlets-5.8.0.tar.gz", hash = 
"sha256:6cc57d6dc28c85d5365961726ffd19b538739347749e13ebe34e03323a0e8f84"}, + {file = "traitlets-5.8.1-py3-none-any.whl", hash = "sha256:a1ca5df6414f8b5760f7c5f256e326ee21b581742114545b462b35ffe3f04861"}, + {file = "traitlets-5.8.1.tar.gz", hash = "sha256:32500888f5ff7bbf3b9267ea31748fa657aaf34d56d85e60f91dda7dc7f5785b"}, ] transformers = [ {file = "transformers-4.25.1-py3-none-any.whl", hash = "sha256:60f1be15e17e4a54373c787c713ec149dabcc63464131ac45611618fe7c2016e"}, diff --git a/pyproject.toml b/pyproject.toml index 890e1c1f..fdd848e2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -49,9 +49,9 @@ python = ">=3.8.1,<3.12" rich = "^10.13.0" transformers = { version = "^4.22.0", extras = ["sentencepiece", "tokenizers", "torch"] } -protobuf = "^3.20.2" -captum = "^0.5.0" -numpy = "^1.22.4" +protobuf = "^3.20.1" +captum = "^0.6.0" +numpy = "^1.21.6" torchtyping = "^0.1.4" json-tricks = "^3.15.5" torch = "^1.13.1" diff --git a/requirements-dev.txt b/requirements-dev.txt index e146fbef..db1c0680 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -2,7 +2,7 @@ aiohttp==3.8.3 ; python_full_version >= "3.8.1" and python_version < "3.12" aiosignal==1.3.1 ; python_full_version >= "3.8.1" and python_version < "3.12" alabaster==0.7.12 ; python_full_version >= "3.8.1" and python_version < "3.12" appnope==0.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Darwin" or python_full_version >= "3.8.1" and python_version < "3.12" and sys_platform == "darwin" -astroid==2.12.13 ; python_full_version >= "3.8.1" and python_version < "3.12" +astroid==2.13.2 ; python_full_version >= "3.8.1" and python_version < "3.12" asttokens==2.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" async-timeout==4.0.2 ; python_full_version >= "3.8.1" and python_version < "3.12" attrs==22.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -10,7 +10,7 @@ babel==2.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" backcall==0.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" bandit==1.7.4 ; python_full_version >= "3.8.1" and python_version < "3.12" black==22.12.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -captum==0.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +captum==0.6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" certifi==2022.12.7 ; python_full_version >= "3.8.1" and python_version < "3.12" cffi==1.15.1 ; python_full_version >= "3.8.1" and python_version < "3.12" and implementation_name == "pypy" cfgv==3.3.1 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -20,11 +20,11 @@ colorama==0.4.6 ; python_full_version >= "3.8.1" and python_version < "3.12" comm==0.1.2 ; python_full_version >= "3.8.1" and python_version < "3.12" commonmark==0.9.1 ; python_full_version >= "3.8.1" and python_version < "3.12" contourpy==1.0.6 ; python_full_version >= "3.8.1" and python_version < "3.12" -coverage[toml]==7.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +coverage[toml]==7.0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" cycler==0.11.0 ; python_full_version >= "3.8.1" and python_version < "3.12" darglint==1.8.1 ; python_full_version >= "3.8.1" and python_version < "3.12" datasets[datasets]==2.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -debugpy==1.6.4 ; python_full_version >= "3.8.1" and python_version < "3.12" +debugpy==1.6.5 ; python_full_version >= "3.8.1" and python_version < "3.12" decorator==5.1.1 ; 
python_full_version >= "3.8.1" and python_version < "3.12" dill==0.3.6 ; python_version < "3.12" and python_full_version >= "3.8.1" distlib==0.3.6 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -41,14 +41,14 @@ fsspec[http]==2022.11.0 ; python_full_version >= "3.8.1" and python_version < "3 gitdb==4.0.10 ; python_full_version >= "3.8.1" and python_version < "3.12" gitpython==3.1.30 ; python_full_version >= "3.8.1" and python_version < "3.12" huggingface-hub==0.11.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -identify==2.5.11 ; python_full_version >= "3.8.1" and python_version < "3.12" +identify==2.5.12 ; python_full_version >= "3.8.1" and python_version < "3.12" idna==3.4 ; python_full_version >= "3.8.1" and python_version < "3.12" imagesize==1.4.1 ; python_full_version >= "3.8.1" and python_version < "3.12" importlib-metadata==6.0.0 ; python_full_version >= "3.8.1" and python_version < "3.10" -iniconfig==1.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -ipykernel==6.19.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -ipykernel[notebook]==6.19.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -ipython==8.7.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +iniconfig==2.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +ipykernel==6.20.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +ipykernel[notebook]==6.20.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +ipython==8.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" ipywidgets[notebook]==8.0.4 ; python_full_version >= "3.8.1" and python_version < "3.12" isort==5.11.4 ; python_full_version >= "3.8.1" and python_version < "3.12" isort[colors]==5.11.4 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -58,10 +58,10 @@ joblib==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" joblib[sklearn]==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" json-tricks==3.16.1 ; python_full_version >= "3.8.1" and python_version < "3.12" jupyter-client==7.4.8 ; python_full_version >= "3.8.1" and python_version < "3.12" -jupyter-core==5.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" +jupyter-core==5.1.3 ; python_full_version >= "3.8.1" and python_version < "3.12" jupyterlab-widgets==3.0.5 ; python_full_version >= "3.8.1" and python_version < "3.12" kiwisolver==1.4.4 ; python_full_version >= "3.8.1" and python_version < "3.12" -lazy-object-proxy==1.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +lazy-object-proxy==1.9.0 ; python_full_version >= "3.8.1" and python_version < "3.12" markdown==3.4.1 ; python_full_version >= "3.8.1" and python_version < "3.12" markupsafe==2.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" matplotlib-inline==0.1.6 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -77,7 +77,7 @@ nvidia-cublas-cu11==11.10.3.66 ; python_full_version >= "3.8.1" and python_versi nvidia-cuda-nvrtc-cu11==11.7.99 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cuda-runtime-cu11==11.7.99 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cudnn-cu11==8.5.0.96 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" -packaging==22.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +packaging==23.0 ; python_full_version >= "3.8.1" and python_version < "3.12" 
pandas==1.5.2 ; python_full_version >= "3.8.1" and python_version < "3.12" parso==0.8.3 ; python_full_version >= "3.8.1" and python_version < "3.12" pastel==0.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -101,7 +101,7 @@ pycodestyle==2.10.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pycparser==2.21 ; python_full_version >= "3.8.1" and python_version < "3.12" and implementation_name == "pypy" pyflakes==3.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pygments==2.14.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -pylint==2.15.9 ; python_full_version >= "3.8.1" and python_version < "3.12" +pylint==2.15.10 ; python_full_version >= "3.8.1" and python_version < "3.12" pyparsing==3.0.9 ; python_full_version >= "3.8.1" and python_version < "3.12" pytest-cov==4.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pytest==7.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -120,7 +120,7 @@ ruamel-yaml-clib==0.2.7 ; platform_python_implementation == "CPython" and python ruamel-yaml==0.17.21 ; python_full_version >= "3.8.1" and python_version < "3.12" safety==2.3.4 ; python_full_version >= "3.8.1" and python_version < "3.12" scikit-learn[sklearn]==1.2.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -scipy==1.9.3 ; python_full_version >= "3.8.1" and python_version < "3.12" +scipy==1.10.0 ; python_full_version >= "3.8.1" and python_version < "3.12" sentencepiece==0.1.97 ; python_full_version >= "3.8.1" and python_version < "3.12" setuptools-scm==7.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" setuptools==65.6.3 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -132,7 +132,7 @@ sphinx-gitstamp==0.3.3 ; python_full_version >= "3.8.1" and python_version < "3. 
sphinx-markdown-tables==0.0.17 ; python_full_version >= "3.8.1" and python_version < "3.12" sphinx-rtd-theme==1.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" sphinx==5.3.0 ; python_full_version >= "3.8.1" and python_version < "3.12" -sphinxcontrib-applehelp==1.0.2 ; python_full_version >= "3.8.1" and python_version < "3.12" +sphinxcontrib-applehelp==1.0.3 ; python_full_version >= "3.8.1" and python_version < "3.12" sphinxcontrib-devhelp==1.0.2 ; python_full_version >= "3.8.1" and python_version < "3.12" sphinxcontrib-htmlhelp==2.0.0 ; python_full_version >= "3.8.1" and python_version < "3.12" sphinxcontrib-jsmath==1.0.1 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -152,7 +152,7 @@ torch==1.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" torchtyping==0.1.4 ; python_full_version >= "3.8.1" and python_version < "3.12" tornado==6.2 ; python_full_version >= "3.8.1" and python_version < "3.12" tqdm==4.64.1 ; python_full_version >= "3.8.1" and python_version < "3.12" -traitlets==5.8.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +traitlets==5.8.1 ; python_full_version >= "3.8.1" and python_version < "3.12" transformers[sentencepiece,tokenizers,torch]==4.25.1 ; python_full_version >= "3.8.1" and python_version < "3.12" typeguard==2.13.3 ; python_full_version >= "3.8.1" and python_version < "3.12" typing-extensions==4.4.0 ; python_full_version >= "3.8.1" and python_version < "3.12" diff --git a/requirements.txt b/requirements.txt index 73e1b739..a6ce7701 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,4 +1,4 @@ -captum==0.5.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +captum==0.6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" certifi==2022.12.7 ; python_full_version >= "3.8.1" and python_version < "3.12" charset-normalizer==2.1.1 ; python_full_version >= "3.8.1" and python_version < "3.12" colorama==0.4.6 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -17,7 +17,7 @@ nvidia-cublas-cu11==11.10.3.66 ; python_full_version >= "3.8.1" and python_versi nvidia-cuda-nvrtc-cu11==11.7.99 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cuda-runtime-cu11==11.7.99 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" nvidia-cudnn-cu11==8.5.0.96 ; python_full_version >= "3.8.1" and python_version < "3.12" and platform_system == "Linux" -packaging==22.0 ; python_full_version >= "3.8.1" and python_version < "3.12" +packaging==23.0 ; python_full_version >= "3.8.1" and python_version < "3.12" pastel==0.2.1 ; python_full_version >= "3.8.1" and python_version < "3.12" pillow==9.4.0 ; python_full_version >= "3.8.1" and python_version < "3.12" poethepoet==0.13.1 ; python_full_version >= "3.8.1" and python_version < "3.12" @@ -29,7 +29,7 @@ pyyaml==6.0 ; python_full_version >= "3.8.1" and python_version < "3.12" regex==2022.10.31 ; python_full_version >= "3.8.1" and python_version < "3.12" requests==2.28.1 ; python_full_version >= "3.8.1" and python_version < "3.12" rich==10.16.2 ; python_full_version >= "3.8.1" and python_version < "3.12" -scipy==1.9.3 ; python_full_version >= "3.8.1" and python_version < "3.12" +scipy==1.10.0 ; python_full_version >= "3.8.1" and python_version < "3.12" sentencepiece==0.1.97 ; python_full_version >= "3.8.1" and python_version < "3.12" setuptools-scm==7.1.0 ; python_full_version >= "3.8.1" and python_version < "3.12" setuptools==65.6.3 ; python_full_version >= "3.8.1" 
and python_version < "3.12" diff --git a/tests/__init__.py b/tests/__init__.py index 5e807115..5e69ae86 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,4 @@ import os -import sys TEST_DIR = os.path.dirname(os.path.abspath(__file__)) From 966f63c64eb76d43cc32c310fd3ecd11b7201fcb Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Tue, 10 Jan 2023 23:00:00 +0100 Subject: [PATCH 25/30] Generalized attention attribution --- .../main_classes/feature_attribution.rst | 11 + inseq/attr/feat/__init__.py | 7 +- inseq/attr/feat/attention_attribution.py | 86 ++--- inseq/attr/feat/ops/__init__.py | 4 +- inseq/attr/feat/ops/basic_attention.py | 341 +++++++++++------- inseq/utils/typing.py | 3 + 6 files changed, 266 insertions(+), 186 deletions(-) diff --git a/docs/source/main_classes/feature_attribution.rst b/docs/source/main_classes/feature_attribution.rst index 64a12808..9973d6be 100644 --- a/docs/source/main_classes/feature_attribution.rst +++ b/docs/source/main_classes/feature_attribution.rst @@ -61,3 +61,14 @@ Layer Attribution Methods .. autoclass:: inseq.attr.feat.LayerDeepLiftAttribution :members: + + +Attention Attribution Methods +----------------------------------------------------------------------------------------------------------------------- + +.. autoclass:: inseq.attr.feat.AttentionAttributionRegistry + :members: + + +.. autoclass:: inseq.attr.feat.AttentionAttribution + :members: diff --git a/inseq/attr/feat/__init__.py b/inseq/attr/feat/__init__.py index dfed90f4..991985a8 100644 --- a/inseq/attr/feat/__init__.py +++ b/inseq/attr/feat/__init__.py @@ -1,4 +1,4 @@ -from .attention_attribution import AggregatedAttentionAtribution, AttentionAtribution, SingleLayerAttentionAttribution +from .attention_attribution import AttentionAttribution, AttentionAttributionRegistry from .attribution_utils import STEP_SCORES_MAP, extract_args, join_token_ids, list_step_scores, register_step_score from .feature_attribution import FeatureAttribution, list_feature_attribution_methods from .gradient_attribution import ( @@ -31,7 +31,6 @@ "LayerIntegratedGradientsAttribution", "LayerGradientXActivationAttribution", "LayerDeepLiftAttribution", - "AttentionAtribution", - "AggregatedAttentionAtribution", - "SingleLayerAttentionAttribution", + "AttentionAttributionRegistry", + "AttentionAttribution", ] diff --git a/inseq/attr/feat/attention_attribution.py b/inseq/attr/feat/attention_attribution.py index 43d7880c..bd60dbd4 100644 --- a/inseq/attr/feat/attention_attribution.py +++ b/inseq/attr/feat/attention_attribution.py @@ -19,17 +19,17 @@ from ...data import Batch, EncoderDecoderBatch, FeatureAttributionStepOutput from ...utils import Registry, pretty_tensor -from ...utils.typing import ModelIdentifier, SingleScorePerStepTensor, TargetIdsTensor +from ...utils.typing import SingleScorePerStepTensor, TargetIdsTensor from ..attribution_decorators import set_hook, unset_hook -from .attribution_utils import get_source_target_attributions +from .attribution_utils import STEP_SCORES_MAP, get_source_target_attributions from .feature_attribution import FeatureAttribution -from .ops import AggregatedAttention, SingleLayerAttention +from .ops import Attention logger = logging.getLogger(__name__) -class AttentionAtribution(FeatureAttribution, Registry): +class AttentionAttributionRegistry(FeatureAttribution, Registry): r"""Attention-based attribution method registry.""" @set_hook @@ -69,6 +69,12 @@ def format_attribute_args( :obj:`dict`: A dictionary containing the formatted attribution arguments. 
""" logger.debug(f"batch: {batch},\ntarget_ids: {pretty_tensor(target_ids, lpad=4)}") + if attributed_fn != STEP_SCORES_MAP[self.attribution_model.default_attributed_fn_id]: + logger.warning( + "Attention-based attribution methods are output agnostic, since they do not rely on specific output" + " targets to compute input saliency. As such, using a custom attributed function for attention" + " attribution methods does not produce any effect of the method's results." + ) attribute_fn_args = { "batch": batch, "additional_forward_args": ( @@ -114,53 +120,39 @@ def attribute_step( step_scores={}, ) - @classmethod - def load( - cls, - method_name: str, - attribution_model=None, - model_name_or_path: Union[ModelIdentifier, None] = None, - **kwargs, - ) -> "FeatureAttribution": - from inseq import AttributionModel - - if model_name_or_path is None == attribution_model is None: # noqa - raise RuntimeError( - "Only one among an initialized model and a model identifier " - "must be defined when loading the attribution method." - ) - if model_name_or_path: - attribution_model = AttributionModel.load(model_name_or_path) - model_name_or_path = None - - if not attribution_model.model.config.output_attentions: - raise RuntimeError( - "Attention-based attribution methods require the `output_attentions` parameter to be set on the model." - ) - return super().load(method_name, attribution_model, model_name_or_path, **kwargs) - - -class AggregatedAttentionAtribution(AttentionAtribution): - """ - Aggregated attention attribution method. - Attention values of all layers are averaged. - """ - - method_name = "aggregated_attention" - - def __init__(self, attribution_model, **kwargs): - super().__init__(attribution_model) - self.method = AggregatedAttention(attribution_model) - -class SingleLayerAttentionAttribution(AttentionAtribution): +class AttentionAttribution(AttentionAttributionRegistry): """ - Single-Layer attention attribution method. - Only the raw attention of the last hidden layer is retrieved. + The basic attention attribution method, which retrieves the attention weights from the model. + + Attribute Args: + aggregate_heads_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across heads. + Can be one of `average` (default if heads is tuple or None), `max`, or `single` (default if heads is + int), or a custom function defined by the user. + aggregate_layers_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across layers. + Can be one of `average` (default if layers is tuple), `max`, or `single` (default if layers is int or + None), or a custom function defined by the user. + heads (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified, + the head at the corresponding index is used. If a tuple of two indices is specified, all heads between + the indices will be aggregated using aggregate_fn. If a list of indices is specified, the respective + heads will be used for aggregation. If aggregate_fn is "single", a head must be specified. + Otherwise, all heads are passed to aggregate_fn by default. + layers (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified + , the layer at the corresponding index is used. If a tuple of two indices is specified, all layers + among the indices will be aggregated using aggregate_fn. If a list of indices is specified, the + respective layers will be used for aggregation. If aggregate_fn is "single", the last layer is + used by default. 
Otherwise, all available layers are passed to aggregate_fn by default. + + Example: + + - ``model.attribute(src)`` will return the average attention for all heads of the last layer. + - ``model.attribute(src, heads=0)`` will return the attention weights for the first head of the last layer. + - ``model.attribute(src, heads=(0, 5), aggregate_heads_fn="max", layers=[0, 2, 7])`` will return the maximum + attention weights for the first 5 heads averaged across the first, third, and eighth layers. """ - method_name = "single_layer_attention" + method_name = "attention" def __init__(self, attribution_model, **kwargs): super().__init__(attribution_model) - self.method = SingleLayerAttention(attribution_model) + self.method = Attention(attribution_model) diff --git a/inseq/attr/feat/ops/__init__.py b/inseq/attr/feat/ops/__init__.py index 7b2525bb..93533e2d 100644 --- a/inseq/attr/feat/ops/__init__.py +++ b/inseq/attr/feat/ops/__init__.py @@ -1,6 +1,6 @@ -from .basic_attention import AggregatedAttention, SingleLayerAttention +from .basic_attention import Attention from .discretized_integrated_gradients import DiscretetizedIntegratedGradients from .monotonic_path_builder import MonotonicPathBuilder -__all__ = ["DiscretetizedIntegratedGradients", "MonotonicPathBuilder", "AggregatedAttention", "SingleLayerAttention"] +__all__ = ["DiscretetizedIntegratedGradients", "MonotonicPathBuilder", "Attention"] diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 5069f023..ddb06044 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from typing import Any, Tuple, Union +from typing import Any, Dict, List, Protocol, Tuple, Union import logging @@ -22,184 +22,259 @@ from captum.log import log_usage from ....data import Batch, EncoderDecoderBatch +from ....utils.typing import AggregatedLayerAttentionTensor, FullAttentionTensor, FullLayerAttentionTensor logger = logging.getLogger(__name__) -class AttentionAttribution(Attribution): +class AggregateAttentionFunction(Protocol): + def __call__(self, attention: FullLayerAttentionTensor, dim: int, **kwargs) -> AggregatedLayerAttentionTensor: + ... + + +class BaseAttentionAttribution(Attribution): """ All attention based attribution algorithms extend this class. It requires a forward function, which most commonly is the forward function of the model that we want to interpret or the model itself. """ - merge_head_options = ["average", "max", "single"] + AGGREGATE_FN_OPTIONS: Dict[str, AggregateAttentionFunction] = { + "average": lambda x, dim: x.mean(dim), + "max": lambda x, dim: x.max(dim)[0], + "single": lambda x, dim, idx: x.select(dim, idx), + } def has_convergence_delta(self) -> bool: return False - def _num_attention_heads(self, attention: torch.Tensor) -> int: - """ - Returns the number of heads an attention tensor has. 
-
-        Args:
-            attention: an attention tensor of shape `(batch_size, num_heads, sequence_length, sequence_length)`
-
-        Returns:
-            `int`: The number of attention heads
-        """
+    def _num_attention_heads(self, attention: FullLayerAttentionTensor) -> int:
+        """Returns the number of heads contained in the attention tensor."""
         return attention.size(1)
 
-    def _merge_attention_heads(
-        self, attention: torch.Tensor, option: str = "average", head: int = None
-    ) -> torch.Tensor:
+    def _num_layers(self, attention: Tuple[FullAttentionTensor, ...]) -> int:
+        """Returns the number of layers contained in the attention tensor."""
+        return attention.size(0)
 
+    def _aggregate_attention_heads(
+        self,
+        attention: FullLayerAttentionTensor,
+        aggregate_fn: Union[str, AggregateAttentionFunction, None] = None,
+        heads: Union[int, Tuple[int, int], List[int], None] = None,
+    ) -> AggregatedLayerAttentionTensor:
         """
-        Merges the attention values of the different heads together by either averaging across them,
-        selecting the head with the maximal values or selecting a specific attention head.
+        Merges the attention values across the specified attention heads for the full sequence.
 
         Args:
-            attention: an attention tensor of shape `(batch_size, num_heads, sequence_length, sequence_length)`
-            option: The method to use for merging. Should be one of `average` (default), `max`, or `single`
-            head: The index of the head to use, when option is set to `single`
+            attention (:obj:`tuple` of :obj:`torch.Tensor`) attention tensor of shape
+                `(batch_size, num_heads, sequence_length, sequence_length)`
+            aggregate_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across heads.
+                Can be one of `average` (default if heads is tuple or None), `max`, or `single` (default if heads is
+                int), or a custom function defined by the user.
+            heads (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified,
+                the head at the corresponding index is used. If a tuple of two indices is specified, all heads between
+                the indices will be aggregated using aggregate_fn. If a list of indices is specified, the respective
+                heads will be used for aggregation. If aggregate_fn is "single", a head must be specified.
+                Otherwise, all heads are passed to aggregate_fn by default.
 
         Returns:
-            `torch.Tensor`: The attention tensor with its attention heads merged.
+            :obj:`torch.Tensor`: An aggregated attention tensor of shape
+                `(batch_size, num_heads, sequence_length, sequence_length)`
         """
-        num_heads = self._num_attention_heads(attention)
-
-        if option == "single" and head is None:
-            raise RuntimeError("An attention head has to be specified when choosing single-head attention attribution")
-
-        if head is not None:
-            if head not in range(-num_heads, num_heads):
-                raise IndexError(
-                    f"Attention head index for attribution out of range. The model only has {num_heads} heads."
-                )
-
-            if option != "single":
-                logger.warning(
-                    "Only single-head attention is possible if an attention head is specified.\n"
-                    "Switching to single-head attention
+        n_heads = self._num_attention_heads(attention)
+        aggregate_kwargs = {}
+
+        # If heads is not specified or a tuple, average aggregation is used by default
+        if aggregate_fn is None and not isinstance(heads, int):
+            aggregate_fn = "average"
+            logger.info("No attention head aggregation method specified. Using average aggregation by default.")
+        # If a single head index is specified, single aggregation is used by default
+        if aggregate_fn is None and isinstance(heads, int):
+            aggregate_fn = "single"
+
+        if aggregate_fn == "single":
+            if not isinstance(heads, int):
+                raise RuntimeError("A single head index must be specified for single-head attention attribution")
+            if heads not in range(-n_heads, n_heads):
+                raise IndexError(f"Attention head index out of range. The model only has {n_heads} heads.")
+            aggregate_kwargs = {"idx": heads}
+        else:
+            if isinstance(aggregate_fn, str):
+                if aggregate_fn not in self.AGGREGATE_FN_OPTIONS:
+                    raise RuntimeError(
+                        "Invalid aggregation method specified."
+                        f"Valid methods are: {self.AGGREGATE_FN_OPTIONS.keys()}"
+                    )
+            if heads is None:
+                heads = (0, n_heads)
+                logger.info("No attention head specified for attention extraction. Using all heads by default.")
+            if not hasattr(heads, "__iter__") or (
+                len(heads) == 2 and isinstance(heads, tuple) and heads[0] >= heads[1]
+            ):
+                raise RuntimeError(
+                    "A (start, end) tuple of indices representing a span or a list of individual indices"
+                    " must be specified for aggregated attention attribution."
                 )
+            if (not all(h in range(-n_heads, n_heads + 1) for h in heads) and isinstance(heads, tuple)) or (
+                not all(h in range(-n_heads, n_heads) for h in heads) and isinstance(heads, list)
+            ):
+                raise IndexError(f"One or more attention head index out of range. The model only has {n_heads} heads.")
+            if isinstance(heads, tuple):
+                attention = attention[:, heads[0] : heads[1]]
+            else:
+                attention = torch.index_select(attention, 1, torch.tensor(heads, device=attention.device))
+            aggregate_fn = self.AGGREGATE_FN_OPTIONS[aggregate_fn]
+        return aggregate_fn(attention, 1, **aggregate_kwargs)
+
+    def _aggregate_layers(
+        self,
+        attention: FullAttentionTensor,
+        aggregate_fn: Union[str, AggregateAttentionFunction, None] = None,
+        layers: Union[int, Tuple[int, int], List[int], None] = None,
+    ) -> FullLayerAttentionTensor:
+        """
+        Merges the attention values of every attention head across the specified layers for the full sequence.
 
-        return attention.select(1, head)
-
-        if option == "average":
-            return attention.mean(1)
-
-        elif option == "max":
-            return attention.max(1)[0]
+        Args:
+            attention (:obj:`tuple` of :obj:`torch.Tensor`) attention tensor of shape
+                `(n_layers, batch_size, num_heads, sequence_length, sequence_length)`
+            aggregate_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across layers.
+                Can be one of `average` (default if layers is tuple), `max`, or `single` (default if layers is int or
+                None), or a custom function defined by the user.
+            layers (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified
+                , the layer at the corresponding index is used. If a tuple of two indices is specified, all layers
+                among the indices will be aggregated using aggregate_fn. If a list of indices is specified, the
+                respective layers will be used for aggregation. If aggregate_fn is "single", the last layer is
+                used by default. Otherwise, all available layers are passed to aggregate_fn by default.
+
+        Returns:
+            :obj:`torch.Tensor`: An aggregated attention tensor of shape
+                `(batch_size, num_heads, sequence_length, sequence_length)`
+        """
+        n_layers = self._num_layers(attention)
+        aggregate_kwargs = {}
+
+        # If layers is not specified or an int, single layer aggregation is used by default
+        if aggregate_fn is None and not hasattr(layers, "__iter__"):
+            aggregate_fn = "single"
+            logger.info("No layer aggregation method specified. Using single layer by default.")
+        # If a tuple of indices for layers is specified, average aggregation is used by default
+        if aggregate_fn is None and hasattr(layers, "__iter__"):
+            aggregate_fn = "average"
+            logger.info("No layer aggregation method specified. Using average across layers by default.")
+
+        if aggregate_fn == "single":
+            if layers is None:
+                layers = -1
+                logger.info("No layer specified for attention extraction. Using last layer by default.")
+            if not isinstance(layers, int):
+                raise RuntimeError("A single layer index must be specified for single-layer attention attribution")
+            if layers not in range(-n_layers, n_layers):
+                raise IndexError(f"Layer index out of range. The model only has {n_layers} layers.")
+            aggregate_kwargs = {"idx": layers}
         else:
-            raise RuntimeError(
-                "Invalid merge method for attention heads specified. "
-                "Valid methods are: `average`, `max` and `single`"
-            )
-
-    def _get_batch_size(self, attention: torch.Tensor) -> int:
-        """returns the batch size of a tensor of shape `(batch_size, heads, seq, seq)`"""
-        return attention.size(0)
-
-
-class AggregatedAttention(AttentionAttribution):
+            if isinstance(aggregate_fn, str):
+                if aggregate_fn not in self.AGGREGATE_FN_OPTIONS:
+                    raise RuntimeError(
+                        "Invalid aggregation method specified."
+                        f"Valid methods are: {self.AGGREGATE_FN_OPTIONS.keys()}"
+                    )
+            if layers is None:
+                layers = (0, n_layers)
+                logger.info("No layer specified for attention extraction. Using all layers by default.")
+            if not hasattr(layers, "__iter__") or (
+                len(layers) == 2 and isinstance(layers, tuple) and layers[0] >= layers[1]
+            ):
+                raise RuntimeError(
+                    "A (start, end) tuple of indices representing a span or a list of individual indices"
+                    " must be specified for aggregated attention attribution."
+                )
+            if (not all(l in range(-n_layers, n_layers + 1) for l in layers) and isinstance(layers, tuple)) or (
+                not all(l in range(-n_layers, n_layers) for l in layers) and isinstance(layers, list)
+            ):
+                raise IndexError(f"One or more layer index out of range. The model only has {n_layers} layers.")
+            if isinstance(layers, tuple):
+                attention = attention[layers[0] : layers[1]]
+            else:
+                attention = torch.index_select(attention, 0, torch.tensor(layers, device=attention.device))
+            aggregate_fn = self.AGGREGATE_FN_OPTIONS[aggregate_fn]
+        return aggregate_fn(attention, 0, **aggregate_kwargs)
+
+
+class Attention(BaseAttentionAttribution):
     """
     A basic attention attribution approach.
-    It will return the attention values averaged across a range of layers (default is all layers).
+    It will return the attention values for the specified heads and layers, aggregated across the specified
+    ranges of heads and layers using the given aggregation functions.
+
+    Refer to :meth:`~inseq.attr.feat.ops.BaseAttentionAttribution._aggregate_layers` and
+    :meth:`~inseq.attr.feat.ops.BaseAttentionAttribution._aggregate_attention_heads` for more details on the
+    aggregation procedure and default values.
""" @log_usage() def attribute( self, batch: Union[Batch, EncoderDecoderBatch], - merge_head_option: str = "average", - use_head: int = None, - aggregate_layer: tuple = None, + aggregate_heads_fn: Union[str, AggregateAttentionFunction, None] = None, + aggregate_layers_fn: Union[str, AggregateAttentionFunction, None] = None, + heads: Union[int, Tuple[int, int], List[int], None] = None, + layers: Union[int, Tuple[int, int], List[int], None] = None, additional_forward_args: Any = None, - ) -> Union[TensorOrTupleOfTensorsGeneric, Tuple[TensorOrTupleOfTensorsGeneric, torch.Tensor]]: - - is_target_attribution = additional_forward_args[0] - - is_encoder_decoder = self.forward_func.is_encoder_decoder - - outputs = self.forward_func.get_forward_output(**self.forward_func.format_forward_args(batch)) - - if is_encoder_decoder: + ) -> TensorOrTupleOfTensorsGeneric: + """Performs basic attention attribution. - if aggregate_layer is None: - aggregate_layer = (0, len(outputs.cross_attentions)) - - cross_aggregation = torch.stack(outputs.cross_attentions[aggregate_layer[0] : aggregate_layer[1]]).mean(0) - cross_aggregation = self._merge_attention_heads(cross_aggregation, merge_head_option, use_head) - cross_aggregation = cross_aggregation.select(1, -1) - - attributions = (cross_aggregation,) - - if is_target_attribution: - decoder_aggregation = torch.stack( - outputs.decoder_attentions[aggregate_layer[0] : aggregate_layer[1]] - ).mean(0) - decoder_aggregation = self._merge_attention_heads(decoder_aggregation, merge_head_option, use_head) - decoder_aggregation = decoder_aggregation.select(1, -1) - - attributions = attributions + (decoder_aggregation,) - else: - - if aggregate_layer is None: - aggregate_layer = (0, len(outputs.attentions)) - - aggregation = torch.stack(outputs.attentions[aggregate_layer[0] : aggregate_layer[1]]).mean(0) - aggregation = self._merge_attention_heads(aggregation, merge_head_option, use_head) - aggregation = aggregation.select(1, -1) - - attributions = (aggregation,) - - return attributions - - -class SingleLayerAttention(AttentionAttribution): - """ - A basic attention attribution approach. - It will return the attention values of a single layer (default is the last layer). - """ + Args: + batch (`Union[Batch, EncoderDecoderBatch]`): + The input batch used for the forward pass to extract attention scores. + aggregate_heads_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across heads. + Can be one of `average` (default if heads is list, tuple or None), `max`, or `single` (default if heads + is int), or a custom function defined by the user. + aggregate_layers_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across layers. + Can be one of `average` (default if layers is tuple or list), `max`, or `single` (default if layers is + int or None), or a custom function defined by the user. + heads (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified, + the head at the corresponding index is used. If a tuple of two indices is specified, all heads between + the indices will be aggregated using aggregate_fn. If a list of indices is specified, the respective + heads will be used for aggregation. If aggregate_fn is "single", a head must be specified. + Otherwise, all heads are passed to aggregate_fn by default. + layers (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified + , the layer at the corresponding index is used. 
If a tuple of two indices is specified, all layers + among the indices will be aggregated using aggregate_fn. If a list of indices is specified, the + respective layers will be used for aggregation. If aggregate_fn is "single", the last layer is + used by default. Otherwise, all available layers are passed to aggregate_fn by default. - @log_usage() - def attribute( - self, - batch: Union[Batch, EncoderDecoderBatch], - merge_head_option: str = "average", - use_head: int = None, - use_layer: int = -1, - additional_forward_args: Any = None, - ) -> Union[TensorOrTupleOfTensorsGeneric, Tuple[TensorOrTupleOfTensorsGeneric, torch.Tensor]]: + Returns: + `TensorOrTupleOfTensorsGeneric`: Attribution outputs for source-only or source + target feature attribution + """ is_target_attribution = additional_forward_args[0] - is_encoder_decoder = self.forward_func.is_encoder_decoder - - outputs = self.forward_func.get_forward_output(**self.forward_func.format_forward_args(batch)) + outputs = self.forward_func.get_forward_output( + **self.forward_func.format_forward_args(batch), output_attentions=True + ) if is_encoder_decoder: - - layer_cross = outputs.cross_attentions[use_layer] - layer_cross = self._merge_attention_heads(layer_cross, merge_head_option, use_head) - layer_cross = layer_cross.select(1, -1) - - attributions = (layer_cross,) + cross_layer_aggregation = self._aggregate_layers( + torch.stack(outputs.cross_attentions), aggregate_layers_fn, layers + ) + cross_head_aggregation = self._aggregate_attention_heads( + cross_layer_aggregation, aggregate_heads_fn, heads + ) + attributions = (cross_head_aggregation.select(1, -1),) if is_target_attribution: - layer_decoder = outputs.decoder_attentions[use_layer] - layer_decoder = self._merge_attention_heads(layer_decoder, merge_head_option, use_head) - layer_decoder = layer_decoder.select(1, -1) - - attributions = attributions + (layer_decoder,) + decoder_layer_aggregation = self._aggregate_layers( + torch.stack(outputs.decoder_attentions), aggregate_layers_fn, layers + ) + decoder_head_aggregation = self._aggregate_attention_heads( + decoder_layer_aggregation, aggregate_heads_fn, heads + ) + attributions = attributions + (decoder_head_aggregation.select(1, -1),) else: - - aggregation = outputs.attentions[use_layer] - aggregation = self._merge_attention_heads(aggregation, merge_head_option, use_head) - aggregation = aggregation.select(1, -1) - - attributions = (aggregation,) + layer_aggregation = self._aggregate_layers(torch.stack(outputs.attentions), aggregate_layers_fn, layers) + head_aggregation = self._aggregate_attention_heads(layer_aggregation, aggregate_heads_fn, heads) + attributions = (head_aggregation.select(1, -1),) return attributions diff --git a/inseq/utils/typing.py b/inseq/utils/typing.py index 88e6b7e2..07d36f61 100644 --- a/inseq/utils/typing.py +++ b/inseq/utils/typing.py @@ -49,6 +49,9 @@ class TextSequences: MultiStepEmbeddingsTensor = TensorType["batch_size_x_n_steps", "seq_len", "embed_size", float] VocabularyEmbeddingsTensor = TensorType["vocab_size", "embed_size", float] FullLogitsTensor = TensorType["batch_size", "vocab_size", float] +FullAttentionTensor = TensorType["n_layers", "batch_size", "n_heads", "seq_len", "seq_len", float] +FullLayerAttentionTensor = TensorType["batch_size", "n_heads", "seq_len", "seq_len", float] +AggregatedLayerAttentionTensor = TensorType["batch_size", "seq_len", "seq_len", float] # Step and sequence objects used for stepwise scores (e.g. 
convergence deltas, probabilities)
SingleScorePerStepTensor = TensorType["batch_size", float]

From 1301a02d632046abf1c33a559294ba1f6bd0aa1b Mon Sep 17 00:00:00 2001
From: Ludwig Sickert
Date: Fri, 13 Jan 2023 23:44:26 +0100
Subject: [PATCH 26/30] updated documentation and added 'min' aggregation function

---
 README.md                              |  2 ++
 docs/source/examples/quickstart.rst    |  2 +-
 docs/source/index.rst                  |  2 +-
 inseq/attr/feat/ops/basic_attention.py | 31 +++++++++++++-------------
 inseq/utils/typing.py                  |  2 +-
 5 files changed, 21 insertions(+), 18 deletions(-)

diff --git a/README.md b/README.md
index b7e920a5..b375aba4 100644
--- a/README.md
+++ b/README.md
@@ -98,6 +98,8 @@ model.attribute(
 
 - Support for single and batched attribution using multiple gradient-based feature attribution methods from [Captum](https://captum.ai/docs/introduction)
 
+- Support for basic single-layer and layer-aggregation attention attribution methods
+
 - Post-hoc aggregation of feature attribution maps via `Aggregator` classes.
 
 - Attribution visualization in notebooks, browser and command line.
 
diff --git a/docs/source/examples/quickstart.rst b/docs/source/examples/quickstart.rst
index fe707046..b972baa0 100644
--- a/docs/source/examples/quickstart.rst
+++ b/docs/source/examples/quickstart.rst
@@ -25,7 +25,7 @@ The Inseq library is a Pytorch-based toolkit inteded to democratize the access t
 
 **Interpretability Methods**
 
-- At the moment, only gradient-based feature attribution methods sourced from the `Captum `_ library are available, but other popular occlusion and attention-based techniques will soon follow. The list of all available methods can be obtained by using the :meth:`~inseq.list_feature_attribution_methods` method. Each method either points to its original implementation, and is thoroughly documented in its docstring.
+- At the moment, only gradient-based feature attribution methods sourced from the `Captum `_ library and basic attention attribution methods are available, but other popular occlusion and attention-based techniques will soon follow. The list of all available methods can be obtained by using the :meth:`~inseq.list_feature_attribution_methods` method. Each method points to its original implementation and is thoroughly documented in its docstring.
 
 Installing Inseq
 ===================================
diff --git a/docs/source/index.rst b/docs/source/index.rst
index c76cc610..c622203d 100644
--- a/docs/source/index.rst
+++ b/docs/source/index.rst
@@ -41,7 +41,7 @@ To give a taste of what Inseq can do in a couple lines of code, here's a snippet
 
 
-Inseq is still in early development and is currently maintained by a small team of graduate students based working on interpretability for NLP/NLG led by `Gabriele Sarti `__. We are working hard to add more features and models. If you have any suggestions or feedback, please open an issue on our `GitHub repository `__. Happy hacking! |:bug:|
+Inseq is still in early development and is currently maintained by a small team of graduate students based working on interpretability for NLP/NLG led by `Gabriele Sarti `__. We are working hard to add more features and models. If you have any suggestions or feedback, please open an issue on our `GitHub repository `__. Happy hacking! |:bug:|
 
 ..
toctree:: :maxdepth: 2 diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index ddb06044..37c3c60e 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -43,6 +43,7 @@ class BaseAttentionAttribution(Attribution): AGGREGATE_FN_OPTIONS: Dict[str, AggregateAttentionFunction] = { "average": lambda x, dim: x.mean(dim), "max": lambda x, dim: x.max(dim)[0], + "min": lambda x, dim: x.min(dim)[0], "single": lambda x, dim, idx: x.select(dim, idx), } @@ -53,9 +54,9 @@ def _num_attention_heads(self, attention: FullLayerAttentionTensor) -> int: """Returns the number of heads contained in the attention tensor.""" return attention.size(1) - def _num_layers(self, attention: Tuple[FullAttentionTensor, ...]) -> int: + def _num_layers(self, attention: FullAttentionTensor) -> int: """Returns the number of layers contained in the attention tensor.""" - return attention.size(0) + return len(attention) def _aggregate_attention_heads( self, @@ -67,11 +68,11 @@ def _aggregate_attention_heads( Merges the attention values across the specified attention heads for the full sequence. Args: - attention (:obj:`tuple` of :obj:`torch.Tensor`) attention tensor of shape + attention (:obj:`torch.Tensor`) attention tensor of shape `(batch_size, num_heads, sequence_length, sequence_length)` aggregate_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across heads. - Can be one of `average` (default if heads is tuple or None), `max`, or `single` (default if heads is - int), or a custom function defined by the user. + Can be one of `average` (default if heads is tuple or None), `max`, `min` or `single` (default if heads + is int), or a custom function defined by the user. heads (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified, the head at the corresponding index is used. If a tuple of two indices is specified, all heads between the indices will be aggregated using aggregate_fn. If a list of indices is specified, the respective @@ -80,7 +81,7 @@ def _aggregate_attention_heads( Returns: :obj:`torch.Tensor`: An aggregated attention tensor of shape - `(batch_size, num_heads, sequence_length, sequence_length)` + `(batch_size, sequence_length, sequence_length)` """ n_heads = self._num_attention_heads(attention) aggregate_kwargs = {} @@ -108,7 +109,7 @@ def _aggregate_attention_heads( ) if heads is None: heads = (0, n_heads) - logger.info("No attention head specified for attention extraction. Using all heads by default.") + logger.info("No attention heads specified for attention extraction. Using all heads by default.") if not hasattr(heads, "__iter__") or ( len(heads) == 2 and isinstance(heads, tuple) and heads[0] >= heads[1] ): @@ -140,8 +141,8 @@ def _aggregate_layers( attention (:obj:`tuple` of :obj:`torch.Tensor`) attention tensor of shape `(n_layers, batch_size, num_heads, sequence_length, sequence_length)` aggregate_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across layers. - Can be one of `average` (default if layers is tuple), `max`, or `single` (default if layers is int or - None), or a custom function defined by the user. + Can be one of `average` (default if layers is tuple), `max`, `min` or `single` (default if layers is + int or None), or a custom function defined by the user. layers (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified , the layer at the corresponding index is used. 
If a tuple of two indices is specified, all layers
             among the indices will be aggregated using aggregate_fn. If a list of indices is specified, the
@@ -229,21 +230,21 @@ def attribute(
         batch (`Union[Batch, EncoderDecoderBatch]`):
             The input batch used for the forward pass to extract attention scores.
         aggregate_heads_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across heads.
-            Can be one of `average` (default if heads is list, tuple or None), `max`, or `single` (default if heads
-            is int), or a custom function defined by the user.
+            Can be one of `average` (default if heads is list, tuple or None), `max`, `min` or `single` (default
+            if heads is int), or a custom function defined by the user.
         aggregate_layers_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across layers.
-            Can be one of `average` (default if layers is tuple or list), `max`, or `single` (default if layers is
-            int or None), or a custom function defined by the user.
+            Can be one of `average` (default if layers is tuple or list), `max`, `min` or `single` (default if
+            layers is int or None), or a custom function defined by the user.
         heads (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified,
             the head at the corresponding index is used. If a tuple of two indices is specified, all heads between
             the indices will be aggregated using aggregate_fn. If a list of indices is specified, the respective
             heads will be used for aggregation. If aggregate_fn is "single", a head must be specified.
-            Otherwise, all heads are passed to aggregate_fn by default.
+            If no value is specified, all heads are passed to aggregate_fn by default.
         layers (:obj:`int` or :obj:`tuple[int, int]` or :obj:`list(int)`, optional): If a single value is specified
             , the layer at the corresponding index is used. If a tuple of two indices is specified, all layers
             among the indices will be aggregated using aggregate_fn. If a list of indices is specified, the
             respective layers will be used for aggregation. If aggregate_fn is "single", the last layer is
-            used by default. Otherwise, all available layers are passed to aggregate_fn by default.
+            used by default. If no value is specified, all available layers are passed to aggregate_fn by default.
 
         Returns:
             `TensorOrTupleOfTensorsGeneric`: Attribution outputs for source-only or source + target feature attribution
diff --git a/inseq/utils/typing.py b/inseq/utils/typing.py
index 07d36f61..35f6b44e 100644
--- a/inseq/utils/typing.py
+++ b/inseq/utils/typing.py
@@ -49,6 +49,9 @@ class TextSequences:
 MultiStepEmbeddingsTensor = TensorType["batch_size_x_n_steps", "seq_len", "embed_size", float]
 VocabularyEmbeddingsTensor = TensorType["vocab_size", "embed_size", float]
 FullLogitsTensor = TensorType["batch_size", "vocab_size", float]
-FullAttentionTensor = TensorType["n_layers", "batch_size", "n_heads", "seq_len", "seq_len", float]
 FullLayerAttentionTensor = TensorType["batch_size", "n_heads", "seq_len", "seq_len", float]
 AggregatedLayerAttentionTensor = TensorType["batch_size", "seq_len", "seq_len", float]
+FullAttentionTensor = Tuple[FullLayerAttentionTensor, ...]
 
 # Step and sequence objects used for stepwise scores (e.g.
convergence deltas, probabilities)
SingleScorePerStepTensor = TensorType["batch_size", float]

From 914ee8fc33d911a3d5dc0db24ac629e46c04166a Mon Sep 17 00:00:00 2001
From: Gabriele Sarti
Date: Sat, 14 Jan 2023 17:12:10 +0100
Subject: [PATCH 27/30] Tests, typing fix, additional checks

---
 README.md                              |  2 +-
 inseq/attr/feat/ops/basic_attention.py | 45 +++++++++++----
 inseq/utils/typing.py                  |  2 +-
 tests/models/test_huggingface_model.py | 79 ++++++++++++++++++++++++++
 4 files changed, 114 insertions(+), 14 deletions(-)

diff --git a/README.md b/README.md
index b375aba4..beb43594 100644
--- a/README.md
+++ b/README.md
@@ -98,7 +98,7 @@ model.attribute(
 
 - Support for single and batched attribution using multiple gradient-based feature attribution methods from [Captum](https://captum.ai/docs/introduction)
 
-- Support for basic single-layer and layer-aggregation attention attribution methods
+- Support for basic single-layer and layer-aggregation attention attribution methods with one or multiple aggregated heads.
 
 - Post-hoc aggregation of feature attribution maps via `Aggregator` classes.
 
diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py
index 37c3c60e..88c87b19 100644
--- a/inseq/attr/feat/ops/basic_attention.py
+++ b/inseq/attr/feat/ops/basic_attention.py
@@ -22,7 +22,7 @@
 from captum.log import log_usage
 
 from ....data import Batch, EncoderDecoderBatch
-from ....utils.typing import AggregatedLayerAttentionTensor, FullAttentionTensor, FullLayerAttentionTensor
+from ....utils.typing import AggregatedLayerAttentionTensor, FullAttentionOutput, FullLayerAttentionTensor
 
 logger = logging.getLogger(__name__)
 
@@ -54,7 +54,7 @@ def _num_attention_heads(self, attention: FullLayerAttentionTensor) -> int:
         """Returns the number of heads contained in the attention tensor."""
         return attention.size(1)
 
-    def _num_layers(self, attention: FullAttentionTensor) -> int:
+    def _num_layers(self, attention: FullAttentionOutput) -> int:
         """Returns the number of layers contained in the attention tensor."""
         return len(attention)
 
@@ -86,6 +86,12 @@ def _aggregate_attention_heads(
         n_heads = self._num_attention_heads(attention)
         aggregate_kwargs = {}
 
+        if hasattr(heads, "__iter__"):
+            if len(heads) == 0:
+                raise RuntimeError("At least two heads must be specified for aggregated attention attribution.")
+            if len(heads) == 1:
+                heads = heads[0]
+
         # If heads is not specified or a tuple, average aggregation is used by default
         if aggregate_fn is None and not isinstance(heads, int):
             aggregate_fn = "average"
@@ -110,6 +116,9 @@ def _aggregate_attention_heads(
             if heads is None:
                 heads = (0, n_heads)
                 logger.info("No attention heads specified for attention extraction. Using all heads by default.")
+            # Convert negative indices to positive indices
+            if hasattr(heads, "__iter__"):
+                heads = type(heads)([h_idx if h_idx >= 0 else n_heads + h_idx for h_idx in heads])
             if not hasattr(heads, "__iter__") or (
                 len(heads) == 2 and isinstance(heads, tuple) and heads[0] >= heads[1]
             ):
                 raise RuntimeError(
                     "A (start, end) tuple of indices representing a span or a list of individual indices"
                     " must be specified for aggregated attention attribution."
                 )
-        if (not all(h in range(-n_heads, n_heads + 1) for h in heads) and isinstance(heads, tuple)) or (
-            not all(h in range(-n_heads, n_heads) for h in heads) and isinstance(heads, list)
-        ):
+        max_idx_val = n_heads if isinstance(heads, list) else n_heads + 1
+        if not all(h in range(-n_heads, max_idx_val) for h in heads):
             raise IndexError(f"One or more attention head index out of range. The model only has {n_heads} heads.")
+        if len(set(heads)) != len(heads):
+            raise IndexError("Duplicate head indices are not allowed.")
         if isinstance(heads, tuple):
             attention = attention[:, heads[0] : heads[1]]
         else:
@@ -130,7 +140,7 @@ def _aggregate_attention_heads(
 
     def _aggregate_layers(
         self,
-        attention: FullAttentionTensor,
+        attention: FullAttentionOutput,
         aggregate_fn: Union[str, AggregateAttentionFunction, None] = None,
         layers: Union[int, Tuple[int, int], List[int], None] = None,
     ) -> FullLayerAttentionTensor:
@@ -154,8 +164,15 @@ def _aggregate_layers(
         `(batch_size, num_heads, sequence_length, sequence_length)`
         """
         n_layers = self._num_layers(attention)
+        attention = torch.stack(attention, dim=0)
         aggregate_kwargs = {}
 
+        if hasattr(layers, "__iter__"):
+            if len(layers) == 0:
+                raise RuntimeError("At least two layers must be specified for aggregated attention attribution.")
+            if len(layers) == 1:
+                layers = layers[0]
+
         # If layers is not specified or an int, single layer aggregation is used by default
         if aggregate_fn is None and not hasattr(layers, "__iter__"):
             aggregate_fn = "single"
@@ -184,6 +201,9 @@ def _aggregate_layers(
             if layers is None:
                 layers = (0, n_layers)
                 logger.info("No layer specified for attention extraction. Using all layers by default.")
+            # Convert negative indices to positive indices
+            if hasattr(layers, "__iter__"):
+                layers = type(layers)([l_idx if l_idx >= 0 else n_layers + l_idx for l_idx in layers])
             if not hasattr(layers, "__iter__") or (
                 len(layers) == 2 and isinstance(layers, tuple) and layers[0] >= layers[1]
             ):
                 raise RuntimeError(
                     "A (start, end) tuple of indices representing a span or a list of individual indices"
                     " must be specified for aggregated attention attribution."
                 )
-            if (not all(l in range(-n_layers, n_layers + 1) for l in layers) and isinstance(layers, tuple)) or (
-                not all(l in range(-n_layers, n_layers) for l in layers) and isinstance(layers, list)
-            ):
+            max_idx_val = n_layers if isinstance(layers, list) else n_layers + 1
+            if not all(l in range(max_idx_val) for l in layers):
                 raise IndexError(f"One or more layer index out of range.
The model only has {n_layers} layers.") + if len(set(layers)) != len(layers): + raise IndexError("Duplicate layer indices are not allowed.") if isinstance(layers, tuple): attention = attention[layers[0] : layers[1]] else: @@ -258,7 +279,7 @@ def attribute( if is_encoder_decoder: cross_layer_aggregation = self._aggregate_layers( - torch.stack(outputs.cross_attentions), aggregate_layers_fn, layers + outputs.cross_attentions, aggregate_layers_fn, layers ) cross_head_aggregation = self._aggregate_attention_heads( cross_layer_aggregation, aggregate_heads_fn, heads @@ -267,14 +288,14 @@ def attribute( if is_target_attribution: decoder_layer_aggregation = self._aggregate_layers( - torch.stack(outputs.decoder_attentions), aggregate_layers_fn, layers + outputs.decoder_attentions, aggregate_layers_fn, layers ) decoder_head_aggregation = self._aggregate_attention_heads( decoder_layer_aggregation, aggregate_heads_fn, heads ) attributions = attributions + (decoder_head_aggregation.select(1, -1),) else: - layer_aggregation = self._aggregate_layers(torch.stack(outputs.attentions), aggregate_layers_fn, layers) + layer_aggregation = self._aggregate_layers(outputs.attentions, aggregate_layers_fn, layers) head_aggregation = self._aggregate_attention_heads(layer_aggregation, aggregate_heads_fn, heads) attributions = (head_aggregation.select(1, -1),) diff --git a/inseq/utils/typing.py b/inseq/utils/typing.py index 35f6b44e..6a3bff1f 100644 --- a/inseq/utils/typing.py +++ b/inseq/utils/typing.py @@ -51,7 +51,7 @@ class TextSequences: FullLogitsTensor = TensorType["batch_size", "vocab_size", float] FullLayerAttentionTensor = TensorType["batch_size", "n_heads", "seq_len", "seq_len", float] AggregatedLayerAttentionTensor = TensorType["batch_size", "seq_len", "seq_len", float] -FullAttentionTensor = Tuple[FullLayerAttentionTensor, ...] +FullAttentionOutput = Tuple[FullLayerAttentionTensor, ...] # Step and sequence objects used for stepwise scores (e.g. 
convergence deltas, probabilities) SingleScorePerStepTensor = TensorType["batch_size", float] diff --git a/tests/models/test_huggingface_model.py b/tests/models/test_huggingface_model.py index 91660478..c43d714b 100644 --- a/tests/models/test_huggingface_model.py +++ b/tests/models/test_huggingface_model.py @@ -25,6 +25,9 @@ STEP_SCORES = [[], ["probability"]] ATTRIBUTION_METHODS = list_feature_attribution_methods() +ATTENTION_IDX = [-2, [0, 5, 1], (1, -2), None] +ATTENTION_AGGREGATE_FN = ["average", None] + @fixture(scope="session") def saliency_mt_model(): @@ -307,3 +310,79 @@ def test_attribute_decoder_forced_sliced(saliency_gpt2_model): assert out.info["attr_pos_end"] == 10 aggregated = [attr.aggregate(attr._aggregator) for attr in out.sequence_attributions] assert all(isinstance(aggr_attr, FeatureAttributionSequenceOutput) for aggr_attr in aggregated) + + +@mark.slow +@mark.parametrize(("texts", "reference_texts"), EXAMPLES["texts"]) +@mark.parametrize("layers", ATTENTION_IDX) +@mark.parametrize("heads", ATTENTION_IDX) +@mark.parametrize("aggregate_heads_fn", ATTENTION_AGGREGATE_FN) +@mark.parametrize("aggregate_layers_fn", ATTENTION_AGGREGATE_FN) +def test_attention_attribution_seq2seq( + texts, + reference_texts, + layers, + heads, + aggregate_heads_fn, + aggregate_layers_fn, + saliency_mt_model, +): + if isinstance(layers, int): + aggregate_layers_fn = "single" + if isinstance(heads, int): + aggregate_heads_fn = "single" + out = saliency_mt_model.attribute( + texts, + method="attention", + show_progress=False, + attribute_target=True, + device=get_default_device(), + layers=layers, + heads=heads, + aggregate_heads_fn=aggregate_heads_fn, + aggregate_layers_fn=aggregate_layers_fn, + ) + assert isinstance(out, FeatureAttributionOutput) + assert isinstance(out.sequence_attributions[0], FeatureAttributionSequenceOutput) + assert out.info["model_name"] == "Helsinki-NLP/opus-mt-en-it" + assert out.info["constrained_decoding"] is False + assert out.info["attribution_method"] == "attention" + assert out.info["attribute_target"] is True + assert len(out.sequence_attributions[0].source_attributions.shape) == 2 + + +@mark.slow +@mark.parametrize(("texts", "reference_texts"), EXAMPLES["texts"]) +@mark.parametrize("layers", ATTENTION_IDX) +@mark.parametrize("heads", ATTENTION_IDX) +@mark.parametrize("aggregate_heads_fn", ATTENTION_AGGREGATE_FN) +@mark.parametrize("aggregate_layers_fn", ATTENTION_AGGREGATE_FN) +def test_attention_attribution_decoder( + texts, + reference_texts, + layers, + heads, + aggregate_heads_fn, + aggregate_layers_fn, + saliency_gpt2_model, +): + if isinstance(layers, int): + aggregate_layers_fn = "single" + if isinstance(heads, int): + aggregate_heads_fn = "single" + out = saliency_gpt2_model.attribute( + texts, + method="attention", + show_progress=False, + device=get_default_device(), + layers=layers, + heads=heads, + aggregate_heads_fn=aggregate_heads_fn, + aggregate_layers_fn=aggregate_layers_fn, + ) + assert isinstance(out, FeatureAttributionOutput) + assert isinstance(out.sequence_attributions[0], FeatureAttributionSequenceOutput) + assert out.info["model_name"] == "gpt2" + assert out.info["constrained_decoding"] is False + assert out.info["attribution_method"] == "attention" + assert len(out.sequence_attributions[0].target_attributions.shape) == 2 \ No newline at end of file From 7c825ad48d94eb76efd6635736919f2635507530 Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Sat, 14 Jan 2023 20:48:38 +0100 Subject: [PATCH 28/30] Fix style --- 
inseq/attr/feat/ops/basic_attention.py | 4 +--- tests/models/test_huggingface_model.py | 6 +++--- 2 files changed, 4 insertions(+), 6 deletions(-) diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 88c87b19..58d926bd 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -278,9 +278,7 @@ def attribute( ) if is_encoder_decoder: - cross_layer_aggregation = self._aggregate_layers( - outputs.cross_attentions, aggregate_layers_fn, layers - ) + cross_layer_aggregation = self._aggregate_layers(outputs.cross_attentions, aggregate_layers_fn, layers) cross_head_aggregation = self._aggregate_attention_heads( cross_layer_aggregation, aggregate_heads_fn, heads ) diff --git a/tests/models/test_huggingface_model.py b/tests/models/test_huggingface_model.py index c43d714b..d53bc399 100644 --- a/tests/models/test_huggingface_model.py +++ b/tests/models/test_huggingface_model.py @@ -317,7 +317,7 @@ def test_attribute_decoder_forced_sliced(saliency_gpt2_model): @mark.parametrize("layers", ATTENTION_IDX) @mark.parametrize("heads", ATTENTION_IDX) @mark.parametrize("aggregate_heads_fn", ATTENTION_AGGREGATE_FN) -@mark.parametrize("aggregate_layers_fn", ATTENTION_AGGREGATE_FN) +@mark.parametrize("aggregate_layers_fn", ATTENTION_AGGREGATE_FN) def test_attention_attribution_seq2seq( texts, reference_texts, @@ -356,7 +356,7 @@ def test_attention_attribution_seq2seq( @mark.parametrize("layers", ATTENTION_IDX) @mark.parametrize("heads", ATTENTION_IDX) @mark.parametrize("aggregate_heads_fn", ATTENTION_AGGREGATE_FN) -@mark.parametrize("aggregate_layers_fn", ATTENTION_AGGREGATE_FN) +@mark.parametrize("aggregate_layers_fn", ATTENTION_AGGREGATE_FN) def test_attention_attribution_decoder( texts, reference_texts, @@ -385,4 +385,4 @@ def test_attention_attribution_decoder( assert out.info["model_name"] == "gpt2" assert out.info["constrained_decoding"] is False assert out.info["attribution_method"] == "attention" - assert len(out.sequence_attributions[0].target_attributions.shape) == 2 \ No newline at end of file + assert len(out.sequence_attributions[0].target_attributions.shape) == 2 From f6f0a64287f65420cef353571116cdcd57eeaed3 Mon Sep 17 00:00:00 2001 From: Ludwig Sickert Date: Sun, 15 Jan 2023 19:33:20 +0100 Subject: [PATCH 29/30] added tests for attention utils --- inseq/attr/feat/ops/basic_attention.py | 37 +++++----- tests/attr/feat/ops/test_attention_utils.py | 79 +++++++++++++++++++++ 2 files changed, 100 insertions(+), 16 deletions(-) create mode 100644 tests/attr/feat/ops/test_attention_utils.py diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 58d926bd..8a48fcf5 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -47,19 +47,23 @@ class BaseAttentionAttribution(Attribution): "single": lambda x, dim, idx: x.select(dim, idx), } - def has_convergence_delta(self) -> bool: + @classmethod + def has_convergence_delta(cls) -> bool: return False - def _num_attention_heads(self, attention: FullLayerAttentionTensor) -> int: + @classmethod + def _num_attention_heads(cls, attention: FullLayerAttentionTensor) -> int: """Returns the number of heads contained in the attention tensor.""" return attention.size(1) - def _num_layers(self, attention: FullAttentionOutput) -> int: + @classmethod + def _num_layers(cls, attention: FullAttentionOutput) -> int: """Returns the number of layers contained in the attention tensor.""" return len(attention) + 
@classmethod def _aggregate_attention_heads( - self, + cls, attention: FullLayerAttentionTensor, aggregate_fn: Union[str, AggregateAttentionFunction, None] = None, heads: Union[int, Tuple[int, int], List[int], None] = None, @@ -83,7 +87,7 @@ def _aggregate_attention_heads( :obj:`torch.Tensor`: An aggregated attention tensor of shape `(batch_size, sequence_length, sequence_length)` """ - n_heads = self._num_attention_heads(attention) + n_heads = cls._num_attention_heads(attention) aggregate_kwargs = {} if hasattr(heads, "__iter__"): @@ -106,13 +110,14 @@ def _aggregate_attention_heads( if heads not in range(-n_heads, n_heads): raise IndexError(f"Attention head index out of range. The model only has {n_heads} heads.") aggregate_kwargs = {"idx": heads} + aggregate_fn = cls.AGGREGATE_FN_OPTIONS[aggregate_fn] else: if isinstance(aggregate_fn, str): - if aggregate_fn not in self.AGGREGATE_FN_OPTIONS: + if aggregate_fn not in cls.AGGREGATE_FN_OPTIONS: raise RuntimeError( - "Invalid aggregation method specified." - f"Valid methods are: {self.AGGREGATE_FN_OPTIONS.keys()}" + "Invalid aggregation method specified." f"Valid methods are: {cls.AGGREGATE_FN_OPTIONS.keys()}" ) + aggregate_fn = cls.AGGREGATE_FN_OPTIONS[aggregate_fn] if heads is None: heads = (0, n_heads) logger.info("No attention heads specified for attention extraction. Using all heads by default.") @@ -135,11 +140,11 @@ def _aggregate_attention_heads( attention = attention[:, heads[0] : heads[1]] else: attention = torch.index_select(attention, 1, torch.tensor(heads, device=attention.device)) - aggregate_fn = self.AGGREGATE_FN_OPTIONS[aggregate_fn] return aggregate_fn(attention, 1, **aggregate_kwargs) + @classmethod def _aggregate_layers( - self, + cls, attention: FullAttentionOutput, aggregate_fn: Union[str, AggregateAttentionFunction, None] = None, layers: Union[int, Tuple[int, int], List[int], None] = None, @@ -148,7 +153,7 @@ def _aggregate_layers( Merges the attention values of every attention head across the specified layers for the full sequence. Args: - attention (:obj:`tuple` of :obj:`torch.Tensor`) attention tensor of shape + attention (:obj:`torch.Tensor`) attention tensor of shape `(n_layers, batch_size, num_heads, sequence_length, sequence_length)` aggregate_fn (:obj:`str` or :obj:`callable`): The method to use for aggregating across layers. Can be one of `average` (default if layers is tuple), `max`, `min` or `single` (default if layers is @@ -163,7 +168,7 @@ def _aggregate_layers( :obj:`torch.Tensor`: An aggregated attention tensor of shape `(batch_size, num_heads, sequence_length, sequence_length)` """ - n_layers = self._num_layers(attention) + n_layers = cls._num_layers(attention) attention = torch.stack(attention, dim=0) aggregate_kwargs = {} @@ -191,13 +196,14 @@ def _aggregate_layers( if layers not in range(-n_layers, n_layers): raise IndexError(f"Layer index out of range. The model only has {n_layers} layers.") aggregate_kwargs = {"idx": layers} + aggregate_fn = cls.AGGREGATE_FN_OPTIONS[aggregate_fn] else: if isinstance(aggregate_fn, str): - if aggregate_fn not in self.AGGREGATE_FN_OPTIONS: + if aggregate_fn not in cls.AGGREGATE_FN_OPTIONS: raise RuntimeError( - "Invalid aggregation method specified." - f"Valid methods are: {self.AGGREGATE_FN_OPTIONS.keys()}" + "Invalid aggregation method specified." f"Valid methods are: {cls.AGGREGATE_FN_OPTIONS.keys()}" ) + aggregate_fn = cls.AGGREGATE_FN_OPTIONS[aggregate_fn] if layers is None: layers = (0, n_layers) logger.info("No layer specified for attention extraction. 
Using all layers by default.") @@ -220,7 +226,6 @@ def _aggregate_layers( attention = attention[layers[0] : layers[1]] else: attention = torch.index_select(attention, 0, torch.tensor(layers, device=attention.device)) - aggregate_fn = self.AGGREGATE_FN_OPTIONS[aggregate_fn] return aggregate_fn(attention, 0, **aggregate_kwargs) diff --git a/tests/attr/feat/ops/test_attention_utils.py b/tests/attr/feat/ops/test_attention_utils.py new file mode 100644 index 00000000..ca3af3b6 --- /dev/null +++ b/tests/attr/feat/ops/test_attention_utils.py @@ -0,0 +1,79 @@ +import random + +import torch +from pytest import mark, skip + +from inseq.attr.feat.ops.basic_attention import BaseAttentionAttribution + + +AGGREGATE_FN_OPTIONS = list(BaseAttentionAttribution.AGGREGATE_FN_OPTIONS.keys()) + [None] + +AGGREGATE_OPTIONS = ["int", "range", "list", "none"] + + +@mark.parametrize("aggr_method", AGGREGATE_FN_OPTIONS) +@mark.parametrize("aggr_layers", AGGREGATE_OPTIONS) +def test_layer_aggregation(aggr_method: str, aggr_layers: str) -> None: + + layerAttention = tuple() + + shape = (5, 8, 7, 7) + + layers = 0 + + max_layer = random.randint(4, 10) + for i in range(max_layer): + attention = torch.rand(size=shape, dtype=torch.float) + layerAttention = layerAttention + (attention,) + + if aggr_method == "single": + if aggr_layers != "int" and aggr_layers != "none": + skip("only a single layer can be passed if single-layer aggregation is specified") + + if aggr_layers == "int": + if aggr_method != "single" and aggr_method is not None: + skip("only single-layer aggregation is possible if a single layer is passed") + layers = random.randrange(max_layer) + elif aggr_layers == "range": + layers = (1, max_layer) + elif aggr_layers == "list": + layers = [0, 1, max_layer - 1] + elif aggr_layers == "none": + layers = None + + layer_aggr_attention = BaseAttentionAttribution._aggregate_layers(layerAttention, aggr_method, layers) + + assert layer_aggr_attention.shape == shape + + +@mark.parametrize("aggr_method", AGGREGATE_FN_OPTIONS) +@mark.parametrize("aggr_heads", AGGREGATE_OPTIONS) +def test_head_aggregation(aggr_method: str, aggr_heads: str) -> None: + + num_heads = random.randint(4, 12) + + in_shape = (5, num_heads, 7, 7) + out_shape = (5, 7, 7) + + heads = 0 + + attention = torch.rand(size=in_shape, dtype=torch.float) + + if aggr_method == "single": + if aggr_heads != "int": + skip("A single head has to be passed if single-head aggregation is specified") + + if aggr_heads == "int": + if aggr_method != "single" and aggr_method is not None: + skip("only single-head aggregation is possible if a single head is passed") + heads = random.randrange(num_heads) + elif aggr_heads == "range": + heads = (1, num_heads) + elif aggr_heads == "list": + heads = [0, 1, num_heads - 1] + elif aggr_heads == "none": + heads = None + + head_aggr_attention = BaseAttentionAttribution._aggregate_attention_heads(attention, aggr_method, heads) + + assert head_aggr_attention.shape == out_shape From f40f63b81d6fe20f27ad3a268628a5443b0425ff Mon Sep 17 00:00:00 2001 From: Gabriele Sarti Date: Mon, 16 Jan 2023 14:27:09 +0100 Subject: [PATCH 30/30] classmethod -> staticmethod where possible --- inseq/attr/feat/ops/basic_attention.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py index 8a48fcf5..687003ce 100644 --- a/inseq/attr/feat/ops/basic_attention.py +++ b/inseq/attr/feat/ops/basic_attention.py @@ -47,17 +47,17 @@ class 
From f40f63b81d6fe20f27ad3a268628a5443b0425ff Mon Sep 17 00:00:00 2001
From: Gabriele Sarti
Date: Mon, 16 Jan 2023 14:27:09 +0100
Subject: [PATCH 30/30] classmethod -> staticmethod where possible

---
 inseq/attr/feat/ops/basic_attention.py | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/inseq/attr/feat/ops/basic_attention.py b/inseq/attr/feat/ops/basic_attention.py
index 8a48fcf5..687003ce 100644
--- a/inseq/attr/feat/ops/basic_attention.py
+++ b/inseq/attr/feat/ops/basic_attention.py
@@ -47,17 +47,17 @@ class BaseAttentionAttribution(Attribution):
         "single": lambda x, dim, idx: x.select(dim, idx),
     }
 
-    @classmethod
-    def has_convergence_delta(cls) -> bool:
+    @staticmethod
+    def has_convergence_delta() -> bool:
         return False
 
-    @classmethod
-    def _num_attention_heads(cls, attention: FullLayerAttentionTensor) -> int:
+    @staticmethod
+    def _num_attention_heads(attention: FullLayerAttentionTensor) -> int:
         """Returns the number of heads contained in the attention tensor."""
         return attention.size(1)
 
-    @classmethod
-    def _num_layers(cls, attention: FullAttentionOutput) -> int:
+    @staticmethod
+    def _num_layers(attention: FullAttentionOutput) -> int:
         """Returns the number of layers contained in the attention tensor."""
         return len(attention)