diff --git a/poetry.lock b/poetry.lock index d060dd1f..b474d91e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,7 +8,7 @@ python-versions = "*" [[package]] name = "anyio" -version = "3.5.0" +version = "3.6.1" description = "High level compatibility layer for multiple asynchronous event loop implementations" category = "main" optional = false @@ -21,7 +21,7 @@ typing-extensions = {version = "*", markers = "python_version < \"3.8\""} [package.extras] doc = ["packaging", "sphinx-rtd-theme", "sphinx-autodoc-typehints (>=1.2.0)"] -test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=6.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] +test = ["coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "contextlib2", "uvloop (<0.15)", "mock (>=4)", "uvloop (>=0.15)"] trio = ["trio (>=0.16)"] [[package]] @@ -74,17 +74,17 @@ compiler = ["black (>=19.3b0)", "jinja2 (>=2.11.2,<3.0.0)"] [[package]] name = "certifi" -version = "2021.10.8" +version = "2022.5.18.1" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "charset-normalizer" version = "2.0.12" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-category = "main" +category = "dev" optional = false python-versions = ">=3.5.0" @@ -101,7 +101,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" -version = "6.3.3" +version = "6.4.1" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -120,7 +120,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "grpcio" -version = "1.46.0" +version = "1.46.3" description = "HTTP/2-based RPC framework" category = "main" optional = false @@ -130,18 +130,18 @@ python-versions = ">=3.6" six = ">=1.5.2" [package.extras] -protobuf = ["grpcio-tools (>=1.46.0)"] +protobuf = ["grpcio-tools (>=1.46.3)"] [[package]] name = "grpcio-tools" -version = "1.46.0" +version = "1.46.3" description = "Protobuf code generator for gRPC" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] -grpcio = ">=1.46.0" +grpcio = ">=1.46.3" protobuf = ">=3.12.0,<4.0dev" [[package]] @@ -186,11 +186,11 @@ python-versions = ">=3.6.1" [[package]] name = "httpcore" -version = "0.14.7" +version = "0.15.0" description = "A minimal low-level HTTP client." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] anyio = ">=3.0.0,<4.0.0" @@ -204,22 +204,21 @@ socks = ["socksio (>=1.0.0,<2.0.0)"] [[package]] name = "httpx" -version = "0.22.0" +version = "0.23.0" description = "The next generation HTTP client." 
category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [package.dependencies] certifi = "*" -charset-normalizer = "*" -httpcore = ">=0.14.5,<0.15.0" +httpcore = ">=0.15.0,<0.16.0" rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} sniffio = "*" [package.extras] brotli = ["brotlicffi", "brotli"] -cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10.0.0,<11.0.0)", "pygments (>=2.0.0,<3.0.0)"] +cli = ["click (>=8.0.0,<9.0.0)", "rich (>=10,<13)", "pygments (>=2.0.0,<3.0.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (>=1.0.0,<2.0.0)"] @@ -249,7 +248,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "importlib-metadata" -version = "4.11.3" +version = "4.11.4" description = "Read metadata from Python packages" category = "dev" optional = false @@ -303,7 +302,7 @@ python-versions = ">=3.7" [[package]] name = "more-itertools" -version = "8.12.0" +version = "8.13.0" description = "More routines for operating on iterables, beyond itertools" category = "dev" optional = false @@ -368,8 +367,8 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pydantic" -version = "1.9.0" -description = "Data validation and settings management using python 3.6 type hinting" +version = "1.9.1" +description = "Data validation and settings management using python type hints" category = "main" optional = false python-versions = ">=3.6.1" @@ -391,7 +390,7 @@ python-versions = ">=3.6" [[package]] name = "pyparsing" -version = "3.0.8" +version = "3.0.9" description = "pyparsing module - Classes and methods to define and execute parsing grammars" category = "dev" optional = false @@ -458,7 +457,7 @@ sphinx = "*" type = "git" url = "https://github.com/qdrant/qdrant_sphinx_theme.git" reference = "master" -resolved_reference = "79266c4940edd3f44e465561ce05f3357e0d5330" +resolved_reference = "b16d6099e4dbf664d37aa51480da75b29887becc" [[package]] name = "requests" @@ -691,7 +690,7 @@ testing = 
["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest- [metadata] lock-version = "1.1" python-versions = ">=3.7,<4.0" -content-hash = "d566599eeef03540c9e0fab13e8808da5c6632bfbb403a5af76f785ba1045c8b" +content-hash = "3d005f401fb232eb3e4a85324a0923837d0b2b13fe6b43ff8395671cee95a4fc" [metadata.files] alabaster = [ @@ -699,8 +698,8 @@ alabaster = [ {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] anyio = [ - {file = "anyio-3.5.0-py3-none-any.whl", hash = "sha256:b5fa16c5ff93fa1046f2eeb5bbff2dad4d3514d6cda61d02816dba34fa8c3c2e"}, - {file = "anyio-3.5.0.tar.gz", hash = "sha256:a0aeffe2fb1fdf374a8e4b471444f0f3ac4fb9f5a5b542b48824475e0042a5a6"}, + {file = "anyio-3.6.1-py3-none-any.whl", hash = "sha256:cb29b9c70620506a9a8f87a309591713446953302d7d995344d0d7c6c0c9a7be"}, + {file = "anyio-3.6.1.tar.gz", hash = "sha256:413adf95f93886e442aea925f3ee43baa5a765a64a0f52c6081894f9992fdd0b"}, ] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, @@ -719,8 +718,8 @@ betterproto = [ {file = "betterproto-2.0.0b4.tar.gz", hash = "sha256:99bc6f866fe9c30100fe438662439205f35bc0e65e4e736c46a6ebfea02c3e7b"}, ] certifi = [ - {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, - {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, + {file = "certifi-2022.5.18.1-py3-none-any.whl", hash = "sha256:f1d53542ee8cbedbe2118b5686372fb33c297fcd6379b050cca0ef13a597382a"}, + {file = "certifi-2022.5.18.1.tar.gz", hash = "sha256:9c5705e395cd70084351dd8ad5c41e65655e08ce46f2ec9cf6c2c08390f71eb7"}, ] charset-normalizer = [ {file = "charset-normalizer-2.0.12.tar.gz", hash = "sha256:2857e29ff0d34db842cd7ca3230549d1a697f96ee6d3fb071cfa6c7393832597"}, @@ -731,163 +730,163 @@ colorama = [ 
{file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ - {file = "coverage-6.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df32ee0f4935a101e4b9a5f07b617d884a531ed5666671ff6ac66d2e8e8246d8"}, - {file = "coverage-6.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:75b5dbffc334e0beb4f6c503fb95e6d422770fd2d1b40a64898ea26d6c02742d"}, - {file = "coverage-6.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:114944e6061b68a801c5da5427b9173a0dd9d32cd5fcc18a13de90352843737d"}, - {file = "coverage-6.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ab88a01cd180b5640ccc9c47232e31924d5f9967ab7edd7e5c91c68eee47a69"}, - {file = "coverage-6.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad8f9068f5972a46d50fe5f32c09d6ee11da69c560fcb1b4c3baea246ca4109b"}, - {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4cd696aa712e6cd16898d63cf66139dc70d998f8121ab558f0e1936396dbc579"}, - {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c1a9942e282cc9d3ed522cd3e3cab081149b27ea3bda72d6f61f84eaf88c1a63"}, - {file = "coverage-6.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c06455121a089252b5943ea682187a4e0a5cf0a3fb980eb8e7ce394b144430a9"}, - {file = "coverage-6.3.3-cp310-cp310-win32.whl", hash = "sha256:cb5311d6ccbd22578c80028c5e292a7ab9adb91bd62c1982087fad75abe2e63d"}, - {file = "coverage-6.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:6d4a6f30f611e657495cc81a07ff7aa8cd949144e7667c5d3e680d73ba7a70e4"}, - {file = "coverage-6.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:79bf405432428e989cad7b8bc60581963238f7645ae8a404f5dce90236cc0293"}, - {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:338c417613f15596af9eb7a39353b60abec9d8ce1080aedba5ecee6a5d85f8d3"}, - {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db094a6a4ae6329ed322a8973f83630b12715654c197dd392410400a5bfa1a73"}, - {file = "coverage-6.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1414e8b124611bf4df8d77215bd32cba6e3425da8ce9c1f1046149615e3a9a31"}, - {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:93b16b08f94c92cab88073ffd185070cdcb29f1b98df8b28e6649145b7f2c90d"}, - {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:fbc86ae8cc129c801e7baaafe3addf3c8d49c9c1597c44bdf2d78139707c3c62"}, - {file = "coverage-6.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b5ba058610e8289a07db2a57bce45a1793ec0d3d11db28c047aae2aa1a832572"}, - {file = "coverage-6.3.3-cp37-cp37m-win32.whl", hash = "sha256:8329635c0781927a2c6ae068461e19674c564e05b86736ab8eb29c420ee7dc20"}, - {file = "coverage-6.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:e5af1feee71099ae2e3b086ec04f57f9950e1be9ecf6c420696fea7977b84738"}, - {file = "coverage-6.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e814a4a5a1d95223b08cdb0f4f57029e8eab22ffdbae2f97107aeef28554517e"}, - {file = "coverage-6.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:61f4fbf3633cb0713437291b8848634ea97f89c7e849c2be17a665611e433f53"}, - {file = "coverage-6.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3401b0d2ed9f726fadbfa35102e00d1b3547b73772a1de5508ef3bdbcb36afe7"}, - {file = "coverage-6.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8586b177b4407f988731eb7f41967415b2197f35e2a6ee1a9b9b561f6323c8e9"}, - {file = "coverage-6.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:892e7fe32191960da559a14536768a62e83e87bbb867e1b9c643e7e0fbce2579"}, - {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:afb03f981fadb5aed1ac6e3dd34f0488e1a0875623d557b6fad09b97a942b38a"}, - {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cbe91bc84be4e5ef0b1480d15c7b18e29c73bdfa33e07d3725da7d18e1b0aff2"}, - {file = "coverage-6.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:91502bf27cbd5c83c95cfea291ef387469f2387508645602e1ca0fd8a4ba7548"}, - {file = "coverage-6.3.3-cp38-cp38-win32.whl", hash = "sha256:c488db059848702aff30aa1d90ef87928d4e72e4f00717343800546fdbff0a94"}, - {file = "coverage-6.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:ceb6534fcdfb5c503affb6b1130db7b5bfc8a0f77fa34880146f7a5c117987d0"}, - {file = "coverage-6.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cc692c9ee18f0dd3214843779ba6b275ee4bb9b9a5745ba64265bce911aefd1a"}, - {file = "coverage-6.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:462105283de203df8de58a68c1bb4ba2a8a164097c2379f664fa81d6baf94b81"}, - {file = "coverage-6.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc972d829ad5ef4d4c5fcabd2bbe2add84ce8236f64ba1c0c72185da3a273130"}, - {file = "coverage-6.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:06f54765cdbce99901871d50fe9f41d58213f18e98b170a30ca34f47de7dd5e8"}, - {file = "coverage-6.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7835f76a081787f0ca62a53504361b3869840a1620049b56d803a8cb3a9eeea3"}, - {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6f5fee77ec3384b934797f1873758f796dfb4f167e1296dc00f8b2e023ce6ee9"}, - {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:baa8be8aba3dd1e976e68677be68a960a633a6d44c325757aefaa4d66175050f"}, - {file = "coverage-6.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", 
hash = "sha256:4d06380e777dd6b35ee936f333d55b53dc4a8271036ff884c909cf6e94be8b6c"}, - {file = "coverage-6.3.3-cp39-cp39-win32.whl", hash = "sha256:f8cabc5fd0091976ab7b020f5708335033e422de25e20ddf9416bdce2b7e07d8"}, - {file = "coverage-6.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c9441d57b0963cf8340268ad62fc83de61f1613034b79c2b1053046af0c5284"}, - {file = "coverage-6.3.3-pp36.pp37.pp38-none-any.whl", hash = "sha256:d522f1dc49127eab0bfbba4e90fa068ecff0899bbf61bf4065c790ddd6c177fe"}, - {file = "coverage-6.3.3.tar.gz", hash = "sha256:2781c43bffbbec2b8867376d4d61916f5e9c4cc168232528562a61d1b4b01879"}, + {file = "coverage-6.4.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f1d5aa2703e1dab4ae6cf416eb0095304f49d004c39e9db1d86f57924f43006b"}, + {file = "coverage-6.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4ce1b258493cbf8aec43e9b50d89982346b98e9ffdfaae8ae5793bc112fb0068"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83c4e737f60c6936460c5be330d296dd5b48b3963f48634c53b3f7deb0f34ec4"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:84e65ef149028516c6d64461b95a8dbcfce95cfd5b9eb634320596173332ea84"}, + {file = "coverage-6.4.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f69718750eaae75efe506406c490d6fc5a6161d047206cc63ce25527e8a3adad"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e57816f8ffe46b1df8f12e1b348f06d164fd5219beba7d9433ba79608ef011cc"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:01c5615d13f3dd3aa8543afc069e5319cfa0c7d712f6e04b920431e5c564a749"}, + {file = "coverage-6.4.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ab269400706fab15981fd4bd5080c56bd5cc07c3bccb86aab5e1d5a88dc8f4"}, + {file = "coverage-6.4.1-cp310-cp310-win32.whl", hash = 
"sha256:a7f3049243783df2e6cc6deafc49ea123522b59f464831476d3d1448e30d72df"}, + {file = "coverage-6.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:ee2ddcac99b2d2aec413e36d7a429ae9ebcadf912946b13ffa88e7d4c9b712d6"}, + {file = "coverage-6.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:fb73e0011b8793c053bfa85e53129ba5f0250fdc0392c1591fd35d915ec75c46"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:106c16dfe494de3193ec55cac9640dd039b66e196e4641fa8ac396181578b982"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87f4f3df85aa39da00fd3ec4b5abeb7407e82b68c7c5ad181308b0e2526da5d4"}, + {file = "coverage-6.4.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:961e2fb0680b4f5ad63234e0bf55dfb90d302740ae9c7ed0120677a94a1590cb"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:cec3a0f75c8f1031825e19cd86ee787e87cf03e4fd2865c79c057092e69e3a3b"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:129cd05ba6f0d08a766d942a9ed4b29283aff7b2cccf5b7ce279d50796860bb3"}, + {file = "coverage-6.4.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:bf5601c33213d3cb19d17a796f8a14a9eaa5e87629a53979a5981e3e3ae166f6"}, + {file = "coverage-6.4.1-cp37-cp37m-win32.whl", hash = "sha256:269eaa2c20a13a5bf17558d4dc91a8d078c4fa1872f25303dddcbba3a813085e"}, + {file = "coverage-6.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:f02cbbf8119db68455b9d763f2f8737bb7db7e43720afa07d8eb1604e5c5ae28"}, + {file = "coverage-6.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ffa9297c3a453fba4717d06df579af42ab9a28022444cae7fa605af4df612d54"}, + {file = "coverage-6.4.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:145f296d00441ca703a659e8f3eb48ae39fb083baba2d7ce4482fb2723e050d9"}, + {file = 
"coverage-6.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d67d44996140af8b84284e5e7d398e589574b376fb4de8ccd28d82ad8e3bea13"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2bd9a6fc18aab8d2e18f89b7ff91c0f34ff4d5e0ba0b33e989b3cd4194c81fd9"}, + {file = "coverage-6.4.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3384f2a3652cef289e38100f2d037956194a837221edd520a7ee5b42d00cc605"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9b3e07152b4563722be523e8cd0b209e0d1a373022cfbde395ebb6575bf6790d"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:1480ff858b4113db2718848d7b2d1b75bc79895a9c22e76a221b9d8d62496428"}, + {file = "coverage-6.4.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:865d69ae811a392f4d06bde506d531f6a28a00af36f5c8649684a9e5e4a85c83"}, + {file = "coverage-6.4.1-cp38-cp38-win32.whl", hash = "sha256:664a47ce62fe4bef9e2d2c430306e1428ecea207ffd68649e3b942fa8ea83b0b"}, + {file = "coverage-6.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:26dff09fb0d82693ba9e6231248641d60ba606150d02ed45110f9ec26404ed1c"}, + {file = "coverage-6.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d9c80df769f5ec05ad21ea34be7458d1dc51ff1fb4b2219e77fe24edf462d6df"}, + {file = "coverage-6.4.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:39ee53946bf009788108b4dd2894bf1349b4e0ca18c2016ffa7d26ce46b8f10d"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5b66caa62922531059bc5ac04f836860412f7f88d38a476eda0a6f11d4724f4"}, + {file = "coverage-6.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd180ed867e289964404051a958f7cccabdeed423f91a899829264bb7974d3d3"}, + {file = 
"coverage-6.4.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84631e81dd053e8a0d4967cedab6db94345f1c36107c71698f746cb2636c63e3"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:8c08da0bd238f2970230c2a0d28ff0e99961598cb2e810245d7fc5afcf1254e8"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d42c549a8f41dc103a8004b9f0c433e2086add8a719da00e246e17cbe4056f72"}, + {file = "coverage-6.4.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:309ce4a522ed5fca432af4ebe0f32b21d6d7ccbb0f5fcc99290e71feba67c264"}, + {file = "coverage-6.4.1-cp39-cp39-win32.whl", hash = "sha256:fdb6f7bd51c2d1714cea40718f6149ad9be6a2ee7d93b19e9f00934c0f2a74d9"}, + {file = "coverage-6.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:342d4aefd1c3e7f620a13f4fe563154d808b69cccef415415aece4c786665397"}, + {file = "coverage-6.4.1-pp36.pp37.pp38-none-any.whl", hash = "sha256:4803e7ccf93230accb928f3a68f00ffa80a88213af98ed338a57ad021ef06815"}, + {file = "coverage-6.4.1.tar.gz", hash = "sha256:4321f075095a096e70aff1d002030ee612b65a205a0a0f5b815280d5dc58100c"}, ] docutils = [ {file = "docutils-0.17.1-py2.py3-none-any.whl", hash = "sha256:cf316c8370a737a022b72b56874f6602acf974a37a9fba42ec2876387549fc61"}, {file = "docutils-0.17.1.tar.gz", hash = "sha256:686577d2e4c32380bb50cbb22f575ed742d58168cee37e99117a854bcd88f125"}, ] grpcio = [ - {file = "grpcio-1.46.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:fa4834022ca45fcde57fabcd12e5458fdb01372c4c8ab84030eabec24c6f39ca"}, - {file = "grpcio-1.46.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:bdad8c088e088e5d34e9c10a5db8871157cc1a7e42f49ea4bd320fd8b57e7eb2"}, - {file = "grpcio-1.46.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:9e70290273b9d7e6d1cd8f8a7a621c4e9a91a3a35be3068610ee014124a35e75"}, - {file = "grpcio-1.46.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:cbfc0c85a2eb34de711028fe9630b159a1c0df5580359368bff8429596c56c97"}, - {file = "grpcio-1.46.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c2fe454b7dd4c41a9cb8fbbb18474fd9a2f7935ac203b5f47a00216beec8aacd"}, - {file = "grpcio-1.46.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:666d40de9e323392f985921c4d112ebda8decd7a4532b9524f7e6f6fd5e4ca57"}, - {file = "grpcio-1.46.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:668cc3e277f2bb88189bb4f0d7dfece326be789096660f94553600040630969c"}, - {file = "grpcio-1.46.0-cp310-cp310-win32.whl", hash = "sha256:80aa6247a77cba60b56192df57cc5d78f0e2fe697fc6ebdf089ce93df894db3e"}, - {file = "grpcio-1.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:828078cb73008c65794af94201c975610d16c9440b00e5efefc9e45dd23de73b"}, - {file = "grpcio-1.46.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:4be2d7f283a7e2a15f9c5d70e1c9899e1824ea0650dbd82b7dc5e54d0c8061a5"}, - {file = "grpcio-1.46.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:6ab4aeadc6c76447bcae91da1c69eeff9d0b78af7051fdcebe18a4cdf766f727"}, - {file = "grpcio-1.46.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:c8c0eaede86ae97213548633eb07446dab75a48c771ad8bb3751bffbd9055ea9"}, - {file = "grpcio-1.46.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:b45f4f0815e1df26ced52e6e7012055d023d1b2d943e5d3d168e211bdbb823ad"}, - {file = "grpcio-1.46.0-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:63e827caff24f7d02c2d4d6fbca720001f7e5158a68abba37ea0c7eb447adfe5"}, - {file = "grpcio-1.46.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7c6958a8a6a8df1caa536314bda3fb54f9ca5c936c14e3a486ff51d150c342c7"}, - {file = "grpcio-1.46.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:edc0e052d349d7bac6719bddb5e779314b060eca1f53f99e0cc0be1aea66285a"}, - {file = "grpcio-1.46.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = 
"sha256:d2b99b28b75b1929d92d947b74b7c74610131ac6acf803f2dedde7d245bc8b90"}, - {file = "grpcio-1.46.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:518b3294dcdd734c4551a7c4cf3b457b9e0b949f4d855419600ceb7921de6f00"}, - {file = "grpcio-1.46.0-cp36-cp36m-win32.whl", hash = "sha256:eca51dd5d16b3a6b19c255cbcb236387d5cc9e058faeff024cd0c904d16f2495"}, - {file = "grpcio-1.46.0-cp36-cp36m-win_amd64.whl", hash = "sha256:206becfce3ad377f50c934b4d91f3fd5f101fe71db80ccce800d6bb898605448"}, - {file = "grpcio-1.46.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:c95497d9bf93c8553b558646dd61cb4b15269c28fcee1a8843892edd50f3754f"}, - {file = "grpcio-1.46.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:fed35c01a01c6d050f8d67456dad83b5196bf4aff6d88fadd9b70936fb732826"}, - {file = "grpcio-1.46.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1419ab58c830f2da40884f4e1b4583038b12d6609fcac1a5700eff9ca9a75070"}, - {file = "grpcio-1.46.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:65477bb9e884c9f46cde27c083d69c6588342f24ee5d56bbf731b9a4a14cc781"}, - {file = "grpcio-1.46.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:d5ef9194f9bc216c8d0c18885bb7db247b0018a219ded543a6a6c2fe9454b220"}, - {file = "grpcio-1.46.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ed29cc8cb0394cb5ae9cf0a56e32228e9d98b8bb79a088393a18346510a06132"}, - {file = "grpcio-1.46.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e4972b82ee1164eeee297e86a6351a2f358e1a9e5b65ae491a7a140d276cec4"}, - {file = "grpcio-1.46.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9ab12c5bfb13f294f6215a2580e446396eaac1b101e6cdb74d7bea3c6be3143e"}, - {file = "grpcio-1.46.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c1111369863d04ea49378b73c1c2890bafa4c558c9cf799da52bf922483c8a3c"}, - {file = "grpcio-1.46.0-cp37-cp37m-win32.whl", hash = 
"sha256:653d69bc4ac2e1f1bf36625aa42fbba8d399df609ded69a74b5820ca995e75dd"}, - {file = "grpcio-1.46.0-cp37-cp37m-win_amd64.whl", hash = "sha256:afe8cbd4ed74f7d955c7732195d5f46c6af7b0867dfe642c8628332585fa40ee"}, - {file = "grpcio-1.46.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:2f59d6beb12bbccd3d1ecd23d78f0f1a63324cddc42c744c6d13abeef6039496"}, - {file = "grpcio-1.46.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:170ade19379157d5c8e01c8176858a7ffbbf904b7896917c323134021afc1926"}, - {file = "grpcio-1.46.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:a6d45e6fbe3f60fa3a8907f55e8d626a4aa452eb108edfa7f533c9161d973ef9"}, - {file = "grpcio-1.46.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:47f0c0820d0b7f6e4930729b9067f346a07d4bbc632d109a2bcc7ca6f260c5f1"}, - {file = "grpcio-1.46.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:b3004fd04bfd3dba17f9d28b094bff76a32d7e85408f9f26f02594aa31fba040"}, - {file = "grpcio-1.46.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4d4a17d8afcd6c9e4f06cf52b3f7ce0ca06f33510a47358848d30a1aebef10b"}, - {file = "grpcio-1.46.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbfac305c16cb5fcff894f3b80923863877584f1d3be66164aa218ed32841bcb"}, - {file = "grpcio-1.46.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:d518477a73b467953ac8cff08022394b5250e8cfd7adfd167f76fd2d76969158"}, - {file = "grpcio-1.46.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2a751c533679dbc0194daf91a6e665d6163f9b423fc6f2e506035ddc17118f9d"}, - {file = "grpcio-1.46.0-cp38-cp38-win32.whl", hash = "sha256:20fde26fbd40547c65817ca47b15f1f51d4bb0a70fd8a836fa08c9ad9b284b03"}, - {file = "grpcio-1.46.0-cp38-cp38-win_amd64.whl", hash = "sha256:70b6d401a758e85318a2be038eccf8ab965a14082b9f89152f19b8f9b7ac762e"}, - {file = "grpcio-1.46.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:c8539a82debdd50c7fe3f0565b36b5efcd6a68f30ab635aced4175569d5f45e2"}, - {file = 
"grpcio-1.46.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:884b0182d89bb934a5615f9d056df44e8681473cd124e6262382b5888353691b"}, - {file = "grpcio-1.46.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:26ab8415e2e048e32cf05a86e7b6d76864bc018f837a93112c177130c2743766"}, - {file = "grpcio-1.46.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e30a1be3d1ec426f32d6fa22d9af9f5169a40d4b0955ce1fb111e869e0c0f44f"}, - {file = "grpcio-1.46.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:7c4fff11237fee6f07ac6937f2cff02a1f28d8bf2d675d1c57496423ddb8e01f"}, - {file = "grpcio-1.46.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:19646d7d51643231fbd3414134ddbf5c4c226db861a800bc8c04ac870533b614"}, - {file = "grpcio-1.46.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1efd92661c4d4b106cd97025d52a480255b387ba75d3070cee6c4677e375f1c5"}, - {file = "grpcio-1.46.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9e7ea7a8e7521664dd630fab35daab106a490b65e29254f90aeac66ec5cf1f68"}, - {file = "grpcio-1.46.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd58caa70b4228ebb31e1b5c9872053f9fde4412ef69a1be65b8a8eaae8cf072"}, - {file = "grpcio-1.46.0-cp39-cp39-win32.whl", hash = "sha256:4befe75c0122fe51ae046a4936b735c306ea63849405cd8dc0be534affd60ea0"}, - {file = "grpcio-1.46.0-cp39-cp39-win_amd64.whl", hash = "sha256:25cf4ede6f9703913b4381969159452ff6ca5dfb93d5f58b80d1763e9ad79b18"}, - {file = "grpcio-1.46.0.tar.gz", hash = "sha256:ef37ff444d248ff8ea5e175a7807ce19e324831bc00d466169191cd9aad0ee36"}, + {file = "grpcio-1.46.3-cp310-cp310-linux_armv7l.whl", hash = "sha256:4c05dbc164c2d3015109292ffeed68292807a6cb1225f9a36699bf2166634908"}, + {file = "grpcio-1.46.3-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:c6a460b6aaf43428d48fececad864cc562458b944df80568e490d985d8576292"}, + {file = "grpcio-1.46.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = 
"sha256:707b85fa0cf606a9ab02246bd3142c76e154f1c30f00f7346b2afa3d0b315d5a"}, + {file = "grpcio-1.46.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c63e7c61c0b06f838e8f45ffd3a7c68a520c4c026b2e0e8b1ad29c456d0f859"}, + {file = "grpcio-1.46.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c6fe85e5873d9784ab82cf261d9fc07ed67a4459ba69fbe1187ef8b8e3d9e30e"}, + {file = "grpcio-1.46.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:df980c4901a92ca649e18036ff67c7c8cad239b2759c2472694f7ab0f0b4ffb9"}, + {file = "grpcio-1.46.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b59982e405159385d5796aa1e0817ec83affb3eb4c2a5b7ca39413d17d7e332"}, + {file = "grpcio-1.46.3-cp310-cp310-win32.whl", hash = "sha256:6d51fa98bd40d4593f819a3fec8a078a192958d24f84c3daf15b5ad7705d4c48"}, + {file = "grpcio-1.46.3-cp310-cp310-win_amd64.whl", hash = "sha256:e9bba429eb743471715e6dadf006a70a77cb6afb065aa4a6eaa9efd76b09e336"}, + {file = "grpcio-1.46.3-cp36-cp36m-linux_armv7l.whl", hash = "sha256:a898b0f13bda2dfe786952cc1ea705762fa6c3ae799b4bb0525d7821605ae968"}, + {file = "grpcio-1.46.3-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:9014aee70e29911008d2f388011cabf2c7fe4fe29918ce5f71513a660494069a"}, + {file = "grpcio-1.46.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9c97106134de70f8323b12738ac0adf0615688b69253002910d0c5d42d202a77"}, + {file = "grpcio-1.46.3-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d41ea8efb87b1ae4e576b13d94f2b470297a1495ae6b2c9d1047952731bf168f"}, + {file = "grpcio-1.46.3-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:ab18e85082003d7883a4d069065436e61cb27c2c2150e7965ce93658f17bc8da"}, + {file = "grpcio-1.46.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:307ff1d6237d5c383196660a12db021c20280227f9f4423d88d6b2ab20c8b1d0"}, + {file = "grpcio-1.46.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:8c9106ef35239767b3aa9dc1a79856ad499655f853fca9f92f9dd3182d646627"}, + {file = "grpcio-1.46.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:e0ae8e8523308bf7ab0b7d6aa686011de59b19fb06abb253f302d0b5da2a5905"}, + {file = "grpcio-1.46.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:4fd0aa30a938893060defd5f222604461db55f9a81a028b154479b91deac7074"}, + {file = "grpcio-1.46.3-cp36-cp36m-win32.whl", hash = "sha256:f7637b55662e56a64c07846bc0d2da6232a6e893b22c39790f2e41d03ac1a826"}, + {file = "grpcio-1.46.3-cp36-cp36m-win_amd64.whl", hash = "sha256:97801afa96a819f911d030b490dbea95b246de02433bac69c5acf150081686e4"}, + {file = "grpcio-1.46.3-cp37-cp37m-linux_armv7l.whl", hash = "sha256:3585a6fa3d97fc8f030bbf0e88185b5eb345a340f6732e165d5c22df54de5bc6"}, + {file = "grpcio-1.46.3-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:dc6d15cbcceaebaacf2994280ed1c01d42b5772059b30afd8a76152e9d23daa4"}, + {file = "grpcio-1.46.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e0486485d59d5865149010966ef3df99c5df97ab8b01f10e26f8759d6e10fafc"}, + {file = "grpcio-1.46.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5210ec7a1638daa61da16487fbfafb3dbb7b8cd44382d9262316bbb58a5b1cf7"}, + {file = "grpcio-1.46.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:e278fa30d2b5652f7e43970c86ad34c639146443553678b746909aae204924dc"}, + {file = "grpcio-1.46.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d4148f1f76516b01cccf2273b45bc706847f1560ccb55aa6e29df851e9ca8cc"}, + {file = "grpcio-1.46.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01f3f7a6cdb111cf276ffff9c892fa32624e03999bac809d3f3d8321d98b6855"}, + {file = "grpcio-1.46.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:91aaccbe1c035ad2bcd1b8a25cebd11839070eb70fb6573e9d0197ddbca5d96b"}, + {file = "grpcio-1.46.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = 
"sha256:26136c19d96e2138f04412101f3730d66f5f1515dc912ac0d415587c8208d826"}, + {file = "grpcio-1.46.3-cp37-cp37m-win32.whl", hash = "sha256:a8f40dafcdc3e0e378387953528eaf4e35758161f3b10d96199f12b11afbe2c2"}, + {file = "grpcio-1.46.3-cp37-cp37m-win_amd64.whl", hash = "sha256:a6bb52df85a4bd6d3bad16b4e7cc43efe95469b74a856c87a2c5bef496c9147f"}, + {file = "grpcio-1.46.3-cp38-cp38-linux_armv7l.whl", hash = "sha256:2334ceeab4084e80433693451452cba26afc1607a7974133af3b3635fc8aa935"}, + {file = "grpcio-1.46.3-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:2c96a6103caec84985bb2cffac2b261f8cac2641e7a70d4b43b7d08754a6cfe7"}, + {file = "grpcio-1.46.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7a39d39da8855b03be2d7348387986bab6a322031fcc8b04fa5e72355e7b13a1"}, + {file = "grpcio-1.46.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4caf87a06de88e3611a4610c57ef55b78801843d1f5a9e5fd6b75e887dad3340"}, + {file = "grpcio-1.46.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:ffbbb228e6fc6f85b34aac428eb76b4fc6591d771e487ce46eb16b4b7e18b91d"}, + {file = "grpcio-1.46.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c89ae010c57333dd3c692e0892199a59df1ddfd467cdfea31f98331d0e8cf87"}, + {file = "grpcio-1.46.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:34b206cdf78dd1c14d93e10e7308750c36b4e6754d579895cba74341875e2fb5"}, + {file = "grpcio-1.46.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a19b3ecdb8ddf60e4b034def27636065e49ac1ee3c85854a16353cf52c2afd83"}, + {file = "grpcio-1.46.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:aac6e66acae82be5c99a0a40ab8f5733d7df76a04f242cf42ecc34cfb1e947bd"}, + {file = "grpcio-1.46.3-cp38-cp38-win32.whl", hash = "sha256:aff6d961d6bc5e34e12e148383671f8da5d17e47ed606ec15f483def3053b206"}, + {file = "grpcio-1.46.3-cp38-cp38-win_amd64.whl", hash = "sha256:71d46c2f3c0512bac3d658af3193e3d645c96123af56bd07a8416474c69df2cf"}, + {file = 
"grpcio-1.46.3-cp39-cp39-linux_armv7l.whl", hash = "sha256:5969f63f3cf92538f83f26949d393d9fc59de670f47cf7c2a0e1e0d30b770294"}, + {file = "grpcio-1.46.3-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:5f8134d4a7e76c8c6644bd3ce728b9894933575155d02c09922986d5d8d6e48c"}, + {file = "grpcio-1.46.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:53fff69fd4d315adddda226e7b71804d1f12adf3a4162126dc520725624a483a"}, + {file = "grpcio-1.46.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3af2cc4e41f87d3b57f624b1b14321c1d0f030b191da60f9eeeda5448d83240c"}, + {file = "grpcio-1.46.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:5fb7779ae01c20c4fad5831e98003b3f036acfe6b77697d6a9baa0f9a7f14daf"}, + {file = "grpcio-1.46.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:56636ebf8db63ba50d272dfd73c92538950525120311676246f8f6a81b0aa144"}, + {file = "grpcio-1.46.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a5012ba00cf8b7ce9e6ac2312ace0b0e16fe9502c18340c8c3ecb734a759831"}, + {file = "grpcio-1.46.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:be1679d814a292a701f45df324e25b060435dd13159e9b08a16e2a2396c4391c"}, + {file = "grpcio-1.46.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4faaba7db078a0001a8c1a4370d56dc454c03b4613b6acec01f14b90c8dd03cf"}, + {file = "grpcio-1.46.3-cp39-cp39-win32.whl", hash = "sha256:f5c6393fa645183ae858ebfbf72ab94e7ebafb5cd849dcf4ae8c53a83cce4e24"}, + {file = "grpcio-1.46.3-cp39-cp39-win_amd64.whl", hash = "sha256:158b90d4f1354f40e435f4c866057acc29a4364b214c31049c8b8c903646fbab"}, + {file = "grpcio-1.46.3.tar.gz", hash = "sha256:4b8fd8b1cd553635274b83cd984f0755e6779886eca53c1c71d48215962eb689"}, ] grpcio-tools = [ - {file = "grpcio-tools-1.46.0.tar.gz", hash = "sha256:9295bf9b1e6dd5bcb260d594745fa3d6a089daade28f3a80cb2bc976b5359b7d"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-linux_armv7l.whl", hash = 
"sha256:6bdfb6951a1bcf6567f8b157ca42aab16a747fb90fdb101d76f0d11bff567e8d"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:0df06b2dbd6b2c2365a9e4181182e8aef7f0f53765106d9866c126d008715da8"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:e1b8ea27abb550a7ba5103d9190904690bf9497069b3598de41bf971903d3a40"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eaff051f004e3670d403e1f71b15db3bd7ddb086526e97b55cde748cfec54944"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c788fb21f33931b56ff68d997ddaad56bba1be267e1d089df1feea4f53978e19"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:159c231889c0d3296cb52fdd45d1b145d7fbd5385d5f83985216103167065abd"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:127de9a16e25ae321abfb2806a4fd8abebc260c6d126d06e0d330069aac67c24"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-win32.whl", hash = "sha256:d13ec29f87689c9f590eb9f40a3fdca99050a162d6c87d8474592fc85c719153"}, - {file = "grpcio_tools-1.46.0-cp310-cp310-win_amd64.whl", hash = "sha256:d369af13f38feb547fdfa15e0e3632d5cc6ea815ba7b1706411810eae0888ff2"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-linux_armv7l.whl", hash = "sha256:ef4c8354c685f2291decf9bae7230fb0d9eaa3552b7264b615aa8c431c789d3f"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:d6e6def12d2e12e5bcf64aee6d5bec4a093cf32538b783547a574f993715a416"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b366bd173f452a6c16bcdb1aa59bd17601a48cc651e0765d4fc41e3b84cd5955"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a996aaefdfa6ea061dd4073a8a03273216c7eff332159f6984f8d64c54da84a9"}, - {file = 
"grpcio_tools-1.46.0-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:0d949951df9db47c4ee2c70a1b634683858655263e600c266aeb8618d10296ab"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0ae3625c281985517f41161db491063e4b0b17b7801151b4a990e3204e07569"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3bf515ae4bff486550e9b860421db398d052b3734355ec3bca78ea6efa1aa5e4"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:31c3b6a2c1f4619bc8c100d90470b701d8deb08fb5cf6c02fea1a23737738d17"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:67e189b436172e4384f320435397afaee546933a3c1a5bdb868b4cafb7c2c5b1"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-win32.whl", hash = "sha256:9cbc8f5ad5efbe8d7cb294592335438fcb5914abf99c317c48b855db9a3a831a"}, - {file = "grpcio_tools-1.46.0-cp36-cp36m-win_amd64.whl", hash = "sha256:6b111e079de15821e3d7184433857654c1ea7121b55998c6bec7ceb28e6c247c"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-linux_armv7l.whl", hash = "sha256:3e8bbed22719dbc5e91d28d7052e19f4055fc64c5a043aa7607cac1d790242e8"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:5c6f2865a4c896950fa1f7c2c9624b594f33fb1363ba299ccad6029a6fad0457"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:e6da50d98f91921843c2944b427aed548726f0d246576c9577788ee82f4a1555"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d112fb1a70a98d4d083b088e6f08fe0b04538f6cc3fb7265debe527f511167e5"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:8184e5ecbb9f3a11cc0498f71d13dd2dafaec5c71d86e36e16539ebccaaf68d6"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:c7f8d1e323151f0942ba8bfcaa63f1e93d0bf9ebec317c50970a8b2705e8896c"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:74fd43bb395962d197f5bf85cda8a26d4108c33267f1ca1806f2bd91baccb86b"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:34a2c52a32e7109d70448e7e9ef7b6752880daa37bee1ab519df183d35044872"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fdfea41e5693ac8b3d7d9ec1191fbe9f5e0a855e03db3b3482bf58094d763299"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-win32.whl", hash = "sha256:e61dcad694589c477ed0fa0ddeb98de027393a01dd6808739e0fe5cee1337303"}, - {file = "grpcio_tools-1.46.0-cp37-cp37m-win_amd64.whl", hash = "sha256:779abb8499e2d48c3802b52cac6f25faebf80fb9fa9a7499b0f4bc2754f4ceac"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-linux_armv7l.whl", hash = "sha256:8037f56844a31e5f1b5c46d794e6511a6efa4d978a7e44a7f130d2b407aa3464"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:13e24d1ab6d3f16fd482b19f19e119c9edffc43875420ed1f59fd5497493a7c3"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:696877e7eafe8c9b1b31c7c83c94da4095393599fa3ab37126e82ce23c4d121e"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:2d9d4bca70f4153869c2376b8c8ab267dcaaa020045618f07f5a8e6968924335"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-manylinux_2_17_aarch64.whl", hash = "sha256:9763083bb3f85f8096a24ae9472d79cea5a0a149757cbdc994744389118228c6"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a5b75626426bd77532f324dd532f6d0c5c050c1e6ff5e35b0021daab1ce96924"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf8d4def471eb631282709c59981e3d3d9a04e99969b42c0a62b0565e8d2634d"}, - {file = 
"grpcio_tools-1.46.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:eff55b20cfeda76c644e495ff464f0c4eb0face52ced3e5c88a87319716f1d7f"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3dff531bf0f629747be88d1fe97e25c567b04204a7fbe7161fcdc3acb9aa6f45"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-win32.whl", hash = "sha256:4ee26a0787613d43ee2ec3f8dad710f09bdceadb770d2c9398e3e08dc6d4f27f"}, - {file = "grpcio_tools-1.46.0-cp38-cp38-win_amd64.whl", hash = "sha256:4b62818c5ccfb99b7cc719f468df99a267d3ae0ca52a0cf30e944a75239c4258"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-linux_armv7l.whl", hash = "sha256:5d4d2ec7f1525c1a1f9072978e540b3590cb7303328987e8c3d4c404c5f543d8"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:6ed9f82c9107173b2858a87d245da05bc94ca681c5a361af2b091c47fd684535"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:876f1029af70889d3b030275bd0356f4a9640f6b20df71edf708278d82c7e0f7"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4c7a398b7279a8220e3b6f41c7b2a4288a906030c22ba737e2ac11c378511910"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:669f4b1abf80e6ace6f06307c55bef3441487a4b7ebd5733b39a81684f705005"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2641ff82234c398a71889d860cf13063e3ceb699c61501761e49f296f1df8f2f"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5b6fe2d5711a52358c7fef0de6c48afe1825d134d38030be7dfa29f0db6cb409"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c458c5264881a8327e76980c4ea8bea86382f7bffa395a6e6171419f587d9003"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9c33de1f53bd3e9d38b1fec362297c0e58017f82857d405f212f3c2a2b45bf5f"}, - 
{file = "grpcio_tools-1.46.0-cp39-cp39-win32.whl", hash = "sha256:2ba4d56b36b45c4e9cc4d42880106fbe0c9e1ed10ce9fbc42466cd6197cc7ac1"}, - {file = "grpcio_tools-1.46.0-cp39-cp39-win_amd64.whl", hash = "sha256:910ae66c1f6c710090db4342f52c960a12158005f7c20960e3cc0b5a1ec09496"}, + {file = "grpcio-tools-1.46.3.tar.gz", hash = "sha256:31fee436ace5b3bd950cc3a8e68d6b84de1d6dc755959db7badc3470cdf22f70"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-linux_armv7l.whl", hash = "sha256:5d30bfb595e2ef767082432b17ca9238a3a59c2fd3211e19f29700448d1c9162"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:c51266f343c3207fc24bf954619992bc5173c93b052de9d90ecdf1d3f42cf13a"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-manylinux_2_17_aarch64.whl", hash = "sha256:168520ee3a12002806b7436276bdeb042e4003cc8efba2c82db82a4447d6b3df"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0f9748b8d585c01151ca8178593152f4679db9b8846f11d0e54aba64b7b72fc"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:140288c4d9d1005fcb9e88ac5c1a37973c844bdc180fc34a263efa7aceb2c8fe"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b10828c9f62cfffb9aad65c5960cac5b65b5c879b29ef7650b6f253cd1d19b37"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0bb48a86591d63b34b12cea2468efc96886aa06ed404707a6fe24cd18032ad45"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-win32.whl", hash = "sha256:bc630ccaf97eb68ca814305f408a8172c6b0bb5ffdaee2baf16be4da0b704161"}, + {file = "grpcio_tools-1.46.3-cp310-cp310-win_amd64.whl", hash = "sha256:7cc27ec93aa105dcb86acc9eb4cb67fa85420f1bef2fccaf69955f3ee82028db"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-linux_armv7l.whl", hash = "sha256:83fa1aaf052dad33000b73a865ad38e7b19ba4ecedb374cd0594824d27aab3ba"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-macosx_10_10_x86_64.whl", hash 
= "sha256:fa404d7c9cd621836220f3b9cb593eb2c475182d5c768b449407506b61f91159"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:36622a56654d2e38b9c323f06786427143e2a44356d30eca5f0e663a58f1d5b4"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:68442c23892478bd6ab324f716e0a6b611fefd98c67f18cfb4ef6c63bce3ab47"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-manylinux_2_17_aarch64.whl", hash = "sha256:1763fc1da10d8b505f706d8147499f199cfa55868f7372a0b7fb198378e0f9d1"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c3a5c279b1ffbb5c90fd71d8ce94b1aa6d43b1264d086fa553acc825149bef9"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e8b37f6ef9b15349c5a575a0e0add11892bbbb107e0f21d5da45adba336daf05"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:920c57d342c8f9d877679c046407cab1c98a769c196fde35c0c5a14471ebc931"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:227e2bbc28519a384a064802e239a0231859a108220ebffe10fd9f9a63a1af16"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-win32.whl", hash = "sha256:e247d3b06b441f5516e9ffbfdb83ac15648965170e184d9b5950af61e5976648"}, + {file = "grpcio_tools-1.46.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bf1a4b9f7ba0d39b36ba4759d00a0bb51603a63cea532c5df07ca04db41ddaf9"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-linux_armv7l.whl", hash = "sha256:a1c2f0f1e13c185c381ddbe366f186a5cf6848014990ca3b8a3411dc06db9a70"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:096f0901089055f467df7133403a1a1ac782b2a078552ffa51215f5d7a064643"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:14b86251ed61133f860b5182801f7d8ac5371ea7a32a60aeca96b3f6d7be56c9"}, + {file = 
"grpcio_tools-1.46.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0c455e883e2bf0ee7cc3d0778f3b15dd7616e9a68db6fc4d36ad23d6be643035"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-manylinux_2_17_aarch64.whl", hash = "sha256:1ab7aead75c794a73cfb3777fca7cd8e234fb15256cff1de506f41de453693a9"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cd4084dd54bdeff97cdf648603fd4c63a496ebdf35455f6fc3ea3016231b1e41"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f781cc5f1bc3d70d62f2a2846ca1de588ed75513a39674dea78cb7a4c52a5e9"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2ab51956b500ebe5718024f083513e62f777f1812e280959e85529730f863e3c"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:15c26699c2c7188010e0dc579ef1a4fba02cfef12bfe56028f73b58feee61d30"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-win32.whl", hash = "sha256:3840304ca4ca9586978c790a5433bf14a2d1cb234274dd095cd3673d08f26f07"}, + {file = "grpcio_tools-1.46.3-cp37-cp37m-win_amd64.whl", hash = "sha256:d8a73c64cd788a7ba597fa8878b4faad928d3c40981449a40e35000c2639d702"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-linux_armv7l.whl", hash = "sha256:7a2c6e847808141ba88e5f45bda76e3d9e5e9866379496d2289e3892fafdd201"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:3721e8c5490f7c015b5961133bb5b605a3d65eb8aa873506d3291f1216e28b03"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:07c9aa1a2549569dfa2a25f495543570f4753e96b99fbd46eaf7e2f3d1948cc7"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3c6a510c29e466c489db8a3f24cad4386f5b0ca7223af93d821bc7ddfe73b172"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-manylinux_2_17_aarch64.whl", hash = 
"sha256:21a714df8b894d4d0554414393db6616e20a25d44c7356c545c23c922dcd4e17"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0395647f41561a00e5dea633b8171d0597899eb8649dc5bf6a6a0597f9848769"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:600ed9e6600ef4815746052833188471c07be38d14f000617791ed1ec1664d53"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:af9ed199b9b02a3cc66695b1d89b512084444fedc1e9240258cadcc36e1a9071"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:21bbbcee52c3a1d694d4f29bafc197a16274294ee6bb33f913398f9f1bc23f2e"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-win32.whl", hash = "sha256:7b29f82b0f5cf1410f8d4135c20653264a91d156a15394a7b3df5cc4ef4572eb"}, + {file = "grpcio_tools-1.46.3-cp38-cp38-win_amd64.whl", hash = "sha256:5a2a1037c0e5b9d3d417640ad233f9aae8a934f01bf54d79dea23ad53282f004"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-linux_armv7l.whl", hash = "sha256:d4dacd7bba601d00a5f9767b3ea58077ce91c2467bff977998b818f699f9a5ff"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:8e472a2ac4e98a827fcfad719d828dda203d21e63a70f7fbeff0b472d9fb15d5"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5466fa708d9ef9980a67e72f839d24c545b0d57d976b4a1cbec150d7d90891c4"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:cd3b9bec82650a47eb8c5d5c25479fcc8cd658deaa444fb8fa61118794b6a8c9"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-manylinux_2_17_aarch64.whl", hash = "sha256:1bea61acaab74f0ba8a578e7245085fff5021e44756a68dbe83407338776422a"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:790ba7e1de54480c11b70115dfed76dcf79db929b0f808314014a58a8163d07d"}, + {file = 
"grpcio_tools-1.46.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2270c63905fccabc50582e2a6e22700f15f87274e651a0b491771a75c9e68cdf"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:eddc8509d1d22b46d57cd35284cfe4bee8af2c31693a40a4244b9b2f3795aa36"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a78ae1e9e478bd433999522cde57f3d6e47bfa498de98e35f827b5ea83f87c45"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-win32.whl", hash = "sha256:8918bf43be899b5967aeeb955bb1a557c6f753db22456505091482511021b87f"}, + {file = "grpcio_tools-1.46.3-cp39-cp39-win_amd64.whl", hash = "sha256:6533916914dc80837d4caa1cf1aec884cc0390ce0543c4091fd817b2e87a87c5"}, ] grpclib = [ {file = "grpclib-0.4.2.tar.gz", hash = "sha256:ead080cb7d56d6a5e835aaf5255d1ef1dce475a7722566ea225f0188fce33b68"}, @@ -905,12 +904,12 @@ hpack = [ {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, ] httpcore = [ - {file = "httpcore-0.14.7-py3-none-any.whl", hash = "sha256:47d772f754359e56dd9d892d9593b6f9870a37aeb8ba51e9a88b09b3d68cfade"}, - {file = "httpcore-0.14.7.tar.gz", hash = "sha256:7503ec1c0f559066e7e39bc4003fd2ce023d01cf51793e3c173b864eb456ead1"}, + {file = "httpcore-0.15.0-py3-none-any.whl", hash = "sha256:1105b8b73c025f23ff7c36468e4432226cbb959176eab66864b8e31c4ee27fa6"}, + {file = "httpcore-0.15.0.tar.gz", hash = "sha256:18b68ab86a3ccf3e7dc0f43598eaddcf472b602aba29f9aa6ab85fe2ada3980b"}, ] httpx = [ - {file = "httpx-0.22.0-py3-none-any.whl", hash = "sha256:e35e83d1d2b9b2a609ef367cc4c1e66fd80b750348b20cc9e19d1952fc2ca3f6"}, - {file = "httpx-0.22.0.tar.gz", hash = "sha256:d8e778f76d9bbd46af49e7f062467e3157a5a3d2ae4876a4bbfd8a51ed9c9cb4"}, + {file = "httpx-0.23.0-py3-none-any.whl", hash = "sha256:42974f577483e1e932c3cdc3cd2303e883cbfba17fe228b0f63589764d7b9c4b"}, + {file = "httpx-0.23.0.tar.gz", hash = 
"sha256:f28eac771ec9eb4866d3fb4ab65abd42d38c424739e80c08d8d20570de60b0ef"}, ] hyperframe = [ {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, @@ -925,8 +924,8 @@ imagesize = [ {file = "imagesize-1.3.0.tar.gz", hash = "sha256:cd1750d452385ca327479d45b64d9c7729ecf0b3969a58148298c77092261f9d"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.11.3-py3-none-any.whl", hash = "sha256:1208431ca90a8cca1a6b8af391bb53c1a2db74e5d1cef6ddced95d4b2062edc6"}, - {file = "importlib_metadata-4.11.3.tar.gz", hash = "sha256:ea4c597ebf37142f827b8f39299579e31685c31d3a438b59f469406afd0f2539"}, + {file = "importlib_metadata-4.11.4-py3-none-any.whl", hash = "sha256:c58c8eb8a762858f49e18436ff552e83914778e50e9d2f1660535ffb364552ec"}, + {file = "importlib_metadata-4.11.4.tar.gz", hash = "sha256:5d26852efe48c0a32b0509ffbc583fda1a2266545a78d104a6f4aff3db17d700"}, ] jinja2 = [ {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, @@ -979,8 +978,8 @@ markupsafe = [ {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, ] more-itertools = [ - {file = "more-itertools-8.12.0.tar.gz", hash = "sha256:7dc6ad46f05f545f900dd59e8dfb4e84a4827b97b3cfecb175ea0c7d247f6064"}, - {file = "more_itertools-8.12.0-py3-none-any.whl", hash = "sha256:43e6dd9942dffd72661a2c4ef383ad7da1e6a3e968a927ad7a6083ab410a688b"}, + {file = "more-itertools-8.13.0.tar.gz", hash = "sha256:a42901a0a5b169d925f6f217cd5a190e32ef54360905b9c39ee7db5313bfec0f"}, + {file = "more_itertools-8.13.0-py3-none-any.whl", hash = "sha256:c5122bffc5f104d37c1626b8615b511f3427aa5389b94d61e5ef8236bfbc3ddb"}, ] multidict = [ {file = "multidict-6.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b9e95a740109c6047602f4db4da9949e6c5945cefbad34a1299775ddc9a62e2"}, @@ -1112,49 +1111,49 @@ py = [ {file = "py-1.11.0.tar.gz", hash = 
"sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pydantic = [ - {file = "pydantic-1.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cb23bcc093697cdea2708baae4f9ba0e972960a835af22560f6ae4e7e47d33f5"}, - {file = "pydantic-1.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1d5278bd9f0eee04a44c712982343103bba63507480bfd2fc2790fa70cd64cf4"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab624700dc145aa809e6f3ec93fb8e7d0f99d9023b713f6a953637429b437d37"}, - {file = "pydantic-1.9.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8d7da6f1c1049eefb718d43d99ad73100c958a5367d30b9321b092771e96c25"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:3c3b035103bd4e2e4a28da9da7ef2fa47b00ee4a9cf4f1a735214c1bcd05e0f6"}, - {file = "pydantic-1.9.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3011b975c973819883842c5ab925a4e4298dffccf7782c55ec3580ed17dc464c"}, - {file = "pydantic-1.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:086254884d10d3ba16da0588604ffdc5aab3f7f09557b998373e885c690dd398"}, - {file = "pydantic-1.9.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0fe476769acaa7fcddd17cadd172b156b53546ec3614a4d880e5d29ea5fbce65"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8e9dcf1ac499679aceedac7e7ca6d8641f0193c591a2d090282aaf8e9445a46"}, - {file = "pydantic-1.9.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1e4c28f30e767fd07f2ddc6f74f41f034d1dd6bc526cd59e63a82fe8bb9ef4c"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c86229333cabaaa8c51cf971496f10318c4734cf7b641f08af0a6fbf17ca3054"}, - {file = "pydantic-1.9.0-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:c0727bda6e38144d464daec31dff936a82917f431d9c39c39c60a26567eae3ed"}, - {file = 
"pydantic-1.9.0-cp36-cp36m-win_amd64.whl", hash = "sha256:dee5ef83a76ac31ab0c78c10bd7d5437bfdb6358c95b91f1ba7ff7b76f9996a1"}, - {file = "pydantic-1.9.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d9c9bdb3af48e242838f9f6e6127de9be7063aad17b32215ccc36a09c5cf1070"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ee7e3209db1e468341ef41fe263eb655f67f5c5a76c924044314e139a1103a2"}, - {file = "pydantic-1.9.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0b6037175234850ffd094ca77bf60fb54b08b5b22bc85865331dd3bda7a02fa1"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:b2571db88c636d862b35090ccf92bf24004393f85c8870a37f42d9f23d13e032"}, - {file = "pydantic-1.9.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8b5ac0f1c83d31b324e57a273da59197c83d1bb18171e512908fe5dc7278a1d6"}, - {file = "pydantic-1.9.0-cp37-cp37m-win_amd64.whl", hash = "sha256:bbbc94d0c94dd80b3340fc4f04fd4d701f4b038ebad72c39693c794fd3bc2d9d"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e0896200b6a40197405af18828da49f067c2fa1f821491bc8f5bde241ef3f7d7"}, - {file = "pydantic-1.9.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bdfdadb5994b44bd5579cfa7c9b0e1b0e540c952d56f627eb227851cda9db77"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:574936363cd4b9eed8acdd6b80d0143162f2eb654d96cb3a8ee91d3e64bf4cf9"}, - {file = "pydantic-1.9.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c556695b699f648c58373b542534308922c46a1cda06ea47bc9ca45ef5b39ae6"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f947352c3434e8b937e3aa8f96f47bdfe6d92779e44bb3f41e4c213ba6a32145"}, - {file = "pydantic-1.9.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:5e48ef4a8b8c066c4a31409d91d7ca372a774d0212da2787c0d32f8045b1e034"}, - {file = "pydantic-1.9.0-cp38-cp38-win_amd64.whl", hash = "sha256:96f240bce182ca7fe045c76bcebfa0b0534a1bf402ed05914a6f1dadff91877f"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:815ddebb2792efd4bba5488bc8fde09c29e8ca3227d27cf1c6990fc830fd292b"}, - {file = "pydantic-1.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c5b77947b9e85a54848343928b597b4f74fc364b70926b3c4441ff52620640c"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c68c3bc88dbda2a6805e9a142ce84782d3930f8fdd9655430d8576315ad97ce"}, - {file = "pydantic-1.9.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5a79330f8571faf71bf93667d3ee054609816f10a259a109a0738dac983b23c3"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f5a64b64ddf4c99fe201ac2724daada8595ada0d102ab96d019c1555c2d6441d"}, - {file = "pydantic-1.9.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a733965f1a2b4090a5238d40d983dcd78f3ecea221c7af1497b845a9709c1721"}, - {file = "pydantic-1.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cc6a4cb8a118ffec2ca5fcb47afbacb4f16d0ab8b7350ddea5e8ef7bcc53a16"}, - {file = "pydantic-1.9.0-py3-none-any.whl", hash = "sha256:085ca1de245782e9b46cefcf99deecc67d418737a1fd3f6a4f511344b613a5b3"}, - {file = "pydantic-1.9.0.tar.gz", hash = "sha256:742645059757a56ecd886faf4ed2441b9c0cd406079c2b4bee51bcc3fbcd510a"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c8098a724c2784bf03e8070993f6d46aa2eeca031f8d8a048dff277703e6e193"}, + {file = "pydantic-1.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c320c64dd876e45254bdd350f0179da737463eea41c43bacbee9d8c9d1021f11"}, + {file = "pydantic-1.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18f3e912f9ad1bdec27fb06b8198a2ccc32f201e24174cec1b3424dda605a310"}, + 
{file = "pydantic-1.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11951b404e08b01b151222a1cb1a9f0a860a8153ce8334149ab9199cd198131"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8bc541a405423ce0e51c19f637050acdbdf8feca34150e0d17f675e72d119580"}, + {file = "pydantic-1.9.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e565a785233c2d03724c4dc55464559639b1ba9ecf091288dd47ad9c629433bd"}, + {file = "pydantic-1.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:a4a88dcd6ff8fd47c18b3a3709a89adb39a6373f4482e04c1b765045c7e282fd"}, + {file = "pydantic-1.9.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:447d5521575f18e18240906beadc58551e97ec98142266e521c34968c76c8761"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:985ceb5d0a86fcaa61e45781e567a59baa0da292d5ed2e490d612d0de5796918"}, + {file = "pydantic-1.9.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:059b6c1795170809103a1538255883e1983e5b831faea6558ef873d4955b4a74"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:d12f96b5b64bec3f43c8e82b4aab7599d0157f11c798c9f9c528a72b9e0b339a"}, + {file = "pydantic-1.9.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:ae72f8098acb368d877b210ebe02ba12585e77bd0db78ac04a1ee9b9f5dd2166"}, + {file = "pydantic-1.9.1-cp36-cp36m-win_amd64.whl", hash = "sha256:79b485767c13788ee314669008d01f9ef3bc05db9ea3298f6a50d3ef596a154b"}, + {file = "pydantic-1.9.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:494f7c8537f0c02b740c229af4cb47c0d39840b829ecdcfc93d91dcbb0779892"}, + {file = "pydantic-1.9.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0f047e11febe5c3198ed346b507e1d010330d56ad615a7e0a89fae604065a0e"}, + {file = 
"pydantic-1.9.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:969dd06110cb780da01336b281f53e2e7eb3a482831df441fb65dd30403f4608"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:177071dfc0df6248fd22b43036f936cfe2508077a72af0933d0c1fa269b18537"}, + {file = "pydantic-1.9.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:9bcf8b6e011be08fb729d110f3e22e654a50f8a826b0575c7196616780683380"}, + {file = "pydantic-1.9.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a955260d47f03df08acf45689bd163ed9df82c0e0124beb4251b1290fa7ae728"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9ce157d979f742a915b75f792dbd6aa63b8eccaf46a1005ba03aa8a986bde34a"}, + {file = "pydantic-1.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0bf07cab5b279859c253d26a9194a8906e6f4a210063b84b433cf90a569de0c1"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d93d4e95eacd313d2c765ebe40d49ca9dd2ed90e5b37d0d421c597af830c195"}, + {file = "pydantic-1.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1542636a39c4892c4f4fa6270696902acb186a9aaeac6f6cf92ce6ae2e88564b"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a9af62e9b5b9bc67b2a195ebc2c2662fdf498a822d62f902bf27cccb52dbbf49"}, + {file = "pydantic-1.9.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:fe4670cb32ea98ffbf5a1262f14c3e102cccd92b1869df3bb09538158ba90fe6"}, + {file = "pydantic-1.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9f659a5ee95c8baa2436d392267988fd0f43eb774e5eb8739252e5a7e9cf07e0"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b83ba3825bc91dfa989d4eed76865e71aea3a6ca1388b59fc801ee04c4d8d0d6"}, + {file = "pydantic-1.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1dd8fecbad028cd89d04a46688d2fcc14423e8a196d5b0a5c65105664901f810"}, + {file = 
"pydantic-1.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02eefd7087268b711a3ff4db528e9916ac9aa18616da7bca69c1871d0b7a091f"}, + {file = "pydantic-1.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb57ba90929bac0b6cc2af2373893d80ac559adda6933e562dcfb375029acee"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:4ce9ae9e91f46c344bec3b03d6ee9612802682c1551aaf627ad24045ce090761"}, + {file = "pydantic-1.9.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:72ccb318bf0c9ab97fc04c10c37683d9eea952ed526707fabf9ac5ae59b701fd"}, + {file = "pydantic-1.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:61b6760b08b7c395975d893e0b814a11cf011ebb24f7d869e7118f5a339a82e1"}, + {file = "pydantic-1.9.1-py3-none-any.whl", hash = "sha256:4988c0f13c42bfa9ddd2fe2f569c9d54646ce84adc5de84228cfe83396f3bd58"}, + {file = "pydantic-1.9.1.tar.gz", hash = "sha256:1ed987c3ff29fff7fd8c3ea3a3ea877ad310aae2ef9889a119e22d3f2db0691a"}, ] pygments = [ {file = "Pygments-2.12.0-py3-none-any.whl", hash = "sha256:dc9c10fb40944260f6ed4c688ece0cd2048414940f1cea51b8b226318411c519"}, {file = "Pygments-2.12.0.tar.gz", hash = "sha256:5eb116118f9612ff1ee89ac96437bb6b49e8f04d8a13b514ba26f620208e26eb"}, ] pyparsing = [ - {file = "pyparsing-3.0.8-py3-none-any.whl", hash = "sha256:ef7b523f6356f763771559412c0d7134753f037822dad1b16945b7b846f7ad06"}, - {file = "pyparsing-3.0.8.tar.gz", hash = "sha256:7bf433498c016c4314268d95df76c81b842a4cb2b276fa3312cfb1e1d85f6954"}, + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, ] pytest = [ {file = "pytest-5.4.3-py3-none-any.whl", hash = "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1"}, diff --git a/pyproject.toml b/pyproject.toml index 
86bb3305..6b8b9bb6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "qdrant_client" -version = "0.7.3" +version = "0.8.0" description = "Client library for the Qdrant vector search engine" authors = ["Andrey Vasnetsov "] packages = [ @@ -15,7 +15,7 @@ keywords = ["vector", "search", "neural", "matching", "client"] [tool.poetry.dependencies] python = ">=3.7,<4.0" -httpx = "^0.22.0" +httpx = "^0.23.0" numpy = "^1.21" pydantic = "^1.8" tqdm = "^4.56.0" diff --git a/qdrant_client/conversions/common_types.py b/qdrant_client/conversions/common_types.py index 20cfb222..46508fd4 100644 --- a/qdrant_client/conversions/common_types.py +++ b/qdrant_client/conversions/common_types.py @@ -1,4 +1,4 @@ -from typing import Union, Type, List +from typing import Union, Type, List, Optional import betterproto from pydantic import BaseModel @@ -17,8 +17,16 @@ PayloadSchemaType = Union[rest.PayloadSchemaType, grpc.PayloadSchemaType] Points = Union[rest.Batch, List[Union[rest.PointStruct, grpc.PointStruct]]] PointsSelector = Union[rest.PointsSelector, grpc.PointsSelector] +AliasOperations = Union[ + rest.CreateAliasOperation, + rest.RenameAliasOperation, + rest.DeleteAliasOperation, + grpc.AliasOperations +] Payload = rest.Payload ScoredPoint = rest.ScoredPoint UpdateResult = rest.UpdateResult Record = rest.Record +CollectionsResponse = rest.CollectionsResponse +CollectionInfo = rest.CollectionInfo diff --git a/qdrant_client/conversions/conversion.py b/qdrant_client/conversions/conversion.py index 9216091f..9c6b7628 100644 --- a/qdrant_client/conversions/conversion.py +++ b/qdrant_client/conversions/conversion.py @@ -4,7 +4,7 @@ from qdrant_client import grpc from qdrant_client.http.models import models as rest -from betterproto.lib.google.protobuf import Value, ListValue, Struct, NullValue +from qdrant_client.grpc import Value, ListValue, Struct, NullValue def json_to_value(payload: Any) -> Value: @@ -13,9 +13,9 @@ def json_to_value(payload: Any) -> 
Value: if isinstance(payload, bool): return Value(bool_value=payload) if isinstance(payload, int): - return Value(number_value=payload) + return Value(integer_value=payload) if isinstance(payload, float): - return Value(number_value=payload) + return Value(double_value=payload) if isinstance(payload, str): return Value(string_value=payload) if isinstance(payload, list): @@ -27,23 +27,29 @@ def json_to_value(payload: Any) -> Value: def value_to_json(value: Value) -> Any: if isinstance(value, Value): - value = value.to_dict(casing=betterproto.Casing.CAMEL) - - if "numberValue" in value: - return value["numberValue"] - if "stringValue" in value: - return value["stringValue"] - if "boolValue" in value: - return value["boolValue"] - if "structValue" in value: - if 'fields' not in value['structValue']: + value_ = value.to_dict(casing=betterproto.Casing.CAMEL) + else: + value_ = value + + if "integerValue" in value_: + # by default int are represented as string for precision + # But in python it is OK to just use `int` + return int(value_["integerValue"]) + if "doubleValue" in value_: + return value_["doubleValue"] + if "stringValue" in value_: + return value_["stringValue"] + if "boolValue" in value_: + return value_["boolValue"] + if "structValue" in value_: + if 'fields' not in value_['structValue']: return {} - return dict((key, value_to_json(val)) for key, val in value["structValue"]['fields'].items()) - if "listValue" in value: - return list(value_to_json(val) for val in value["listValue"]['values']) - if "nullValue" in value: + return dict((key, value_to_json(val)) for key, val in value_["structValue"]['fields'].items()) + if "listValue" in value_: + return list(value_to_json(val) for val in value_["listValue"]['values']) + if "nullValue" in value_: return None - raise ValueError(f"Not supported value: {value}") # pragma: no cover + raise ValueError(f"Not supported value: {value_}") # pragma: no cover def payload_to_grpc(payload: Dict[str, Any]) -> Dict[str, 
Value]: @@ -160,7 +166,6 @@ def convert_optimizer_config(cls, model: grpc.OptimizersConfigDiff) -> rest.Opti max_optimization_threads=model.max_optimization_threads, max_segment_size=model.max_segment_size, memmap_threshold=model.memmap_threshold, - payload_indexing_threshold=model.payload_indexing_threshold, vacuum_min_vector_number=model.vacuum_min_vector_number ) @@ -371,7 +376,6 @@ def convert_optimizers_config_diff(cls, model: grpc.OptimizersConfigDiff) -> res max_optimization_threads=model.max_optimization_threads, max_segment_size=model.max_segment_size, memmap_threshold=model.memmap_threshold, - payload_indexing_threshold=model.payload_indexing_threshold, vacuum_min_vector_number=model.vacuum_min_vector_number, ) @@ -424,9 +428,9 @@ def convert_with_payload_selector(cls, model: grpc.WithPayloadSelector) -> rest. if name == "enable": return val if name == "include": - return val.include + return val.fields if name == "exclude": - return rest.PayloadSelectorExclude(exclude=val.exclude) + return rest.PayloadSelectorExclude(exclude=val.fields) raise ValueError(f"invalid WithPayloadSelector model: {model}") # pragma: no cover @@ -752,7 +756,6 @@ def convert_optimizers_config(cls, model: rest.OptimizersConfig) -> grpc.Optimiz max_optimization_threads=model.max_optimization_threads, max_segment_size=model.max_segment_size, memmap_threshold=model.memmap_threshold, - payload_indexing_threshold=model.payload_indexing_threshold, vacuum_min_vector_number=model.vacuum_min_vector_number, ) @@ -766,7 +769,6 @@ def convert_optimizers_config_diff(cls, model: rest.OptimizersConfigDiff) -> grp max_optimization_threads=model.max_optimization_threads, max_segment_size=model.max_segment_size, memmap_threshold=model.memmap_threshold, - payload_indexing_threshold=model.payload_indexing_threshold, vacuum_min_vector_number=model.vacuum_min_vector_number, ) @@ -849,11 +851,11 @@ def convert_condition(cls, model: rest.Condition) -> grpc.Condition: def convert_payload_selector(cls, 
model: rest.PayloadSelector) -> grpc.WithPayloadSelector: if isinstance(model, rest.PayloadSelectorInclude): return grpc.WithPayloadSelector( - include=grpc.PayloadIncludeSelector(include=model.include) + include=grpc.PayloadIncludeSelector(fields=model.include) ) if isinstance(model, rest.PayloadSelectorExclude): return grpc.WithPayloadSelector( - exclude=grpc.PayloadExcludeSelector(exclude=model.exclude) + exclude=grpc.PayloadExcludeSelector(fields=model.exclude) ) raise ValueError(f"invalid PayloadSelector model: {model}") # pragma: no cover diff --git a/qdrant_client/grpc/__init__.py b/qdrant_client/grpc/__init__.py index b245cbfd..2f5c066d 100644 --- a/qdrant_client/grpc/__init__.py +++ b/qdrant_client/grpc/__init__.py @@ -1,5 +1,5 @@ # Generated by the protocol buffer compiler. DO NOT EDIT! -# sources: collections.proto, collections_service.proto, points.proto, points_service.proto, qdrant.proto +# sources: collections.proto, collections_internal_service.proto, collections_service.proto, json_with_int.proto, points.proto, points_service.proto, qdrant.proto, raft_service.proto # plugin: python-betterproto from dataclasses import dataclass from typing import Dict, List, Optional @@ -31,6 +31,17 @@ class PayloadSchemaType(betterproto.Enum): Geo = 4 +class NullValue(betterproto.Enum): + """ + `NullValue` is a singleton enumeration to represent the null value for the + `Value` type union. The JSON representation for `NullValue` is JSON + `null`. + """ + + # Null value. 
+ NULL_VALUE = 0 + + class FieldType(betterproto.Enum): FieldTypeKeyword = 0 FieldTypeInteger = 1 @@ -87,10 +98,11 @@ class HnswConfigDiff(betterproto.Message): ef_construct: Optional[int] = betterproto.uint64_field( 2, optional=True, group="_ef_construct" ) - # Minimal amount of points for additional payload-based indexing.If payload - # chunk is smaller than `full_scan_threshold` additional indexing won't be - # used -in this case full-scan search should be preferred by query planner - # and additional indexing is not required. + # Minimal size (in KiloBytes) of vectors for additional payload-based + # indexing.If payload chunk is smaller than `full_scan_threshold` additional + # indexing won't be used -in this case full-scan search should be preferred + # by query planner and additional indexing is not required.Note: 1Kb = 1 + # vector of size 256 full_scan_threshold: Optional[int] = betterproto.uint64_field( 3, optional=True, group="_full_scan_threshold" ) @@ -126,38 +138,36 @@ class OptimizersConfigDiff(betterproto.Message): default_segment_number: Optional[int] = betterproto.uint64_field( 3, optional=True, group="_default_segment_number" ) - # Do not create segments larger this number of points.Large segments might + # Do not create segments larger this size (in KiloBytes).Large segments might # require disproportionately long indexation times,therefore it makes sense # to limit the size of segments.If indexation speed have more priority for # your - make this parameter lower.If search speed is more important - make - # this parameter higher. + # this parameter higher.Note: 1Kb = 1 vector of size 256 max_segment_size: Optional[int] = betterproto.uint64_field( 4, optional=True, group="_max_segment_size" ) - # Maximum number of vectors to store in-memory per segment.Segments larger - # than this threshold will be stored as read-only memmaped file. 
+ # Maximum size (in KiloBytes) of vectors to store in-memory per + # segment.Segments larger than this threshold will be stored as read-only + # memmaped file.To enable memmap storage, lower the thresholdNote: 1Kb = 1 + # vector of size 256 memmap_threshold: Optional[int] = betterproto.uint64_field( 5, optional=True, group="_memmap_threshold" ) - # Maximum number of vectors allowed for plain index.Default value based on - # https://github.com/google-research/google- - # research/blob/master/scann/docs/algorithms.md + # Maximum size (in KiloBytes) of vectors allowed for plain index.Default + # value based on https://github.com/google-research/google- + # research/blob/master/scann/docs/algorithms.mdNote: 1Kb = 1 vector of size + # 256 indexing_threshold: Optional[int] = betterproto.uint64_field( 6, optional=True, group="_indexing_threshold" ) - # Starting from this amount of vectors per-segment the engine will start - # building index for payload. - payload_indexing_threshold: Optional[int] = betterproto.uint64_field( - 7, optional=True, group="_payload_indexing_threshold" - ) # Interval between forced flushes. flush_interval_sec: Optional[int] = betterproto.uint64_field( - 8, optional=True, group="_flush_interval_sec" + 7, optional=True, group="_flush_interval_sec" ) # Max number of threads, which can be used for optimization. 
If 0 - `NUM_CPU # - 1` will be used max_optimization_threads: Optional[int] = betterproto.uint64_field( - 9, optional=True, group="_max_optimization_threads" + 8, optional=True, group="_max_optimization_threads" ) @@ -178,6 +188,12 @@ class CreateCollection(betterproto.Message): shard_number: Optional[int] = betterproto.uint32_field( 7, optional=True, group="_shard_number" ) + on_disk_payload: Optional[bool] = betterproto.bool_field( + 8, optional=True, group="_on_disk_payload" + ) + timeout: Optional[int] = betterproto.uint64_field( + 9, optional=True, group="_timeout" + ) @dataclass(eq=False, repr=False) @@ -186,11 +202,17 @@ class UpdateCollection(betterproto.Message): optimizers_config: Optional["OptimizersConfigDiff"] = betterproto.message_field( 2, optional=True, group="_optimizers_config" ) + timeout: Optional[int] = betterproto.uint64_field( + 3, optional=True, group="_timeout" + ) @dataclass(eq=False, repr=False) class DeleteCollection(betterproto.Message): collection_name: str = betterproto.string_field(1) + timeout: Optional[int] = betterproto.uint64_field( + 2, optional=True, group="_timeout" + ) @dataclass(eq=False, repr=False) @@ -204,6 +226,7 @@ class CollectionParams(betterproto.Message): vector_size: int = betterproto.uint64_field(1) distance: "Distance" = betterproto.enum_field(2) shard_number: int = betterproto.uint32_field(3) + on_disk_payload: bool = betterproto.bool_field(4) @dataclass(eq=False, repr=False) @@ -236,6 +259,9 @@ class CollectionInfo(betterproto.Message): @dataclass(eq=False, repr=False) class ChangeAliases(betterproto.Message): actions: List["AliasOperations"] = betterproto.message_field(1) + timeout: Optional[int] = betterproto.uint64_field( + 2, optional=True, group="_timeout" + ) @dataclass(eq=False, repr=False) @@ -262,6 +288,68 @@ class DeleteAlias(betterproto.Message): alias_name: str = betterproto.string_field(1) +@dataclass(eq=False, repr=False) +class GetCollectionInfoRequestInternal(betterproto.Message): + 
get_collection_info_request: "GetCollectionInfoRequest" = betterproto.message_field( + 1 + ) + shard_id: int = betterproto.uint32_field(2) + + +@dataclass(eq=False, repr=False) +class Struct(betterproto.Message): + """ + `Struct` represents a structured data value, consisting of fields which map + to dynamically typed values. In some languages, `Struct` might be supported + by a native representation. For example, in scripting languages like JS a + struct is represented as an object. The details of that representation are + described together with the proto support for the language. The JSON + representation for `Struct` is JSON object. + """ + + # Unordered map of dynamically typed values. + fields: Dict[str, "Value"] = betterproto.map_field( + 1, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE + ) + + +@dataclass(eq=False, repr=False) +class Value(betterproto.Message): + """ + `Value` represents a dynamically typed value which can be either null, a + number, a string, a boolean, a recursive struct value, or a list of values. + A producer of value is expected to set one of that variants, absence of any + variant indicates an error. The JSON representation for `Value` is JSON + value. + """ + + # Represents a null value. + null_value: "NullValue" = betterproto.enum_field(1, group="kind") + # Represents a double value. + double_value: float = betterproto.double_field(2, group="kind") + # Represents an integer value + integer_value: int = betterproto.int64_field(3, group="kind") + # Represents a string value. + string_value: str = betterproto.string_field(4, group="kind") + # Represents a boolean value. + bool_value: bool = betterproto.bool_field(5, group="kind") + # Represents a structured value. + struct_value: "Struct" = betterproto.message_field(6, group="kind") + # Represents a repeated `Value`. 
+ list_value: "ListValue" = betterproto.message_field(7, group="kind") + + +@dataclass(eq=False, repr=False) +class ListValue(betterproto.Message): + """ + `ListValue` is a wrapper around a repeated field of values. The JSON + representation for `ListValue` is JSON array. + """ + + # Repeated field of dynamically typed values. + values: List["Value"] = betterproto.message_field(1) + + @dataclass(eq=False, repr=False) class PointId(betterproto.Message): num: int = betterproto.uint64_field(1, group="point_id_options") @@ -296,7 +384,7 @@ class GetPoints(betterproto.Message): class SetPayloadPoints(betterproto.Message): collection_name: str = betterproto.string_field(1) wait: Optional[bool] = betterproto.bool_field(2, optional=True, group="_wait") - payload: Dict[str, "betterproto_lib_google_protobuf.Value"] = betterproto.map_field( + payload: Dict[str, "Value"] = betterproto.map_field( 3, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE ) points: List["PointId"] = betterproto.message_field(4) @@ -336,12 +424,12 @@ class DeleteFieldIndexCollection(betterproto.Message): @dataclass(eq=False, repr=False) class PayloadIncludeSelector(betterproto.Message): - include: List[str] = betterproto.string_field(1) + fields: List[str] = betterproto.string_field(1) @dataclass(eq=False, repr=False) class PayloadExcludeSelector(betterproto.Message): - exclude: List[str] = betterproto.string_field(1) + fields: List[str] = betterproto.string_field(1) @dataclass(eq=False, repr=False) @@ -375,6 +463,9 @@ class SearchPoints(betterproto.Message): ) with_payload: "WithPayloadSelector" = betterproto.message_field(6) params: "SearchParams" = betterproto.message_field(7) + score_threshold: Optional[float] = betterproto.float_field( + 8, optional=True, group="_score_threshold" + ) @dataclass(eq=False, repr=False) @@ -403,6 +494,9 @@ class RecommendPoints(betterproto.Message): ) with_payload: "WithPayloadSelector" = betterproto.message_field(7) params: "SearchParams" = 
betterproto.message_field(8) + score_threshold: Optional[float] = betterproto.float_field( + 9, optional=True, group="_score_threshold" + ) @dataclass(eq=False, repr=False) @@ -420,7 +514,7 @@ class UpdateResult(betterproto.Message): @dataclass(eq=False, repr=False) class ScoredPoint(betterproto.Message): id: "PointId" = betterproto.message_field(1) - payload: Dict[str, "betterproto_lib_google_protobuf.Value"] = betterproto.map_field( + payload: Dict[str, "Value"] = betterproto.map_field( 2, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE ) score: float = betterproto.float_field(3) @@ -446,7 +540,7 @@ class ScrollResponse(betterproto.Message): @dataclass(eq=False, repr=False) class RetrievedPoint(betterproto.Message): id: "PointId" = betterproto.message_field(1) - payload: Dict[str, "betterproto_lib_google_protobuf.Value"] = betterproto.map_field( + payload: Dict[str, "Value"] = betterproto.map_field( 2, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE ) vector: List[float] = betterproto.float_field(3) @@ -553,7 +647,7 @@ class PointsIdsList(betterproto.Message): class PointStruct(betterproto.Message): id: "PointId" = betterproto.message_field(1) vector: List[float] = betterproto.float_field(2) - payload: Dict[str, "betterproto_lib_google_protobuf.Value"] = betterproto.map_field( + payload: Dict[str, "Value"] = betterproto.map_field( 3, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE ) @@ -564,6 +658,39 @@ class GeoPoint(betterproto.Message): lat: float = betterproto.double_field(2) +@dataclass(eq=False, repr=False) +class RaftMessage(betterproto.Message): + message: bytes = betterproto.bytes_field(1) + + +@dataclass(eq=False, repr=False) +class AllPeers(betterproto.Message): + all_peers: List["Peer"] = betterproto.message_field(1) + + +@dataclass(eq=False, repr=False) +class Peer(betterproto.Message): + uri: str = betterproto.string_field(1) + id: int = betterproto.uint64_field(2) + + +@dataclass(eq=False, repr=False) +class 
AddPeerToKnownMessage(betterproto.Message): + uri: Optional[str] = betterproto.string_field(1, optional=True, group="_uri") + port: Optional[int] = betterproto.uint32_field(2, optional=True, group="_port") + id: int = betterproto.uint64_field(3) + + +@dataclass(eq=False, repr=False) +class PeerId(betterproto.Message): + id: int = betterproto.uint64_field(1) + + +@dataclass(eq=False, repr=False) +class Uri(betterproto.Message): + uri: str = betterproto.string_field(1) + + @dataclass(eq=False, repr=False) class HealthCheckRequest(betterproto.Message): pass @@ -575,6 +702,24 @@ class HealthCheckReply(betterproto.Message): version: str = betterproto.string_field(2) +class CollectionsInternalStub(betterproto.ServiceStub): + async def get( + self, + *, + get_collection_info_request: "GetCollectionInfoRequest" = None, + shard_id: int = 0 + ) -> "GetCollectionInfoResponse": + + request = GetCollectionInfoRequestInternal() + if get_collection_info_request is not None: + request.get_collection_info_request = get_collection_info_request + request.shard_id = shard_id + + return await self._unary_unary( + "/qdrant.CollectionsInternal/Get", request, GetCollectionInfoResponse + ) + + class CollectionsStub(betterproto.ServiceStub): async def get(self, *, collection_name: str = "") -> "GetCollectionInfoResponse": @@ -602,7 +747,9 @@ async def create( hnsw_config: Optional["HnswConfigDiff"] = None, wal_config: Optional["WalConfigDiff"] = None, optimizers_config: Optional["OptimizersConfigDiff"] = None, - shard_number: Optional[int] = None + shard_number: Optional[int] = None, + on_disk_payload: Optional[bool] = None, + timeout: Optional[int] = None ) -> "CollectionOperationResponse": request = CreateCollection() @@ -616,6 +763,8 @@ async def create( if optimizers_config is not None: request.optimizers_config = optimizers_config request.shard_number = shard_number + request.on_disk_payload = on_disk_payload + request.timeout = timeout return await self._unary_unary( 
"/qdrant.Collections/Create", request, CollectionOperationResponse @@ -625,37 +774,44 @@ async def update( self, *, collection_name: str = "", - optimizers_config: Optional["OptimizersConfigDiff"] = None + optimizers_config: Optional["OptimizersConfigDiff"] = None, + timeout: Optional[int] = None ) -> "CollectionOperationResponse": request = UpdateCollection() request.collection_name = collection_name if optimizers_config is not None: request.optimizers_config = optimizers_config + request.timeout = timeout return await self._unary_unary( "/qdrant.Collections/Update", request, CollectionOperationResponse ) async def delete( - self, *, collection_name: str = "" + self, *, collection_name: str = "", timeout: Optional[int] = None ) -> "CollectionOperationResponse": request = DeleteCollection() request.collection_name = collection_name + request.timeout = timeout return await self._unary_unary( "/qdrant.Collections/Delete", request, CollectionOperationResponse ) async def update_aliases( - self, *, actions: Optional[List["AliasOperations"]] = None + self, + *, + actions: Optional[List["AliasOperations"]] = None, + timeout: Optional[int] = None ) -> "CollectionOperationResponse": actions = actions or [] request = ChangeAliases() if actions is not None: request.actions = actions + request.timeout = timeout return await self._unary_unary( "/qdrant.Collections/UpdateAliases", request, CollectionOperationResponse @@ -725,7 +881,7 @@ async def set_payload( *, collection_name: str = "", wait: Optional[bool] = None, - payload: Dict[str, "betterproto_lib_google_protobuf.Value"] = None, + payload: Dict[str, "Value"] = None, points: Optional[List["PointId"]] = None ) -> "PointsOperationResponse": points = points or [] @@ -826,7 +982,8 @@ async def search( top: int = 0, with_vector: Optional[bool] = None, with_payload: "WithPayloadSelector" = None, - params: "SearchParams" = None + params: "SearchParams" = None, + score_threshold: Optional[float] = None ) -> "SearchResponse": 
vector = vector or [] @@ -841,6 +998,7 @@ async def search( request.with_payload = with_payload if params is not None: request.params = params + request.score_threshold = score_threshold return await self._unary_unary("/qdrant.Points/Search", request, SearchResponse) @@ -878,7 +1036,8 @@ async def recommend( top: int = 0, with_vector: Optional[bool] = None, with_payload: "WithPayloadSelector" = None, - params: "SearchParams" = None + params: "SearchParams" = None, + score_threshold: Optional[float] = None ) -> "RecommendResponse": positive = positive or [] negative = negative or [] @@ -897,12 +1056,57 @@ async def recommend( request.with_payload = with_payload if params is not None: request.params = params + request.score_threshold = score_threshold return await self._unary_unary( "/qdrant.Points/Recommend", request, RecommendResponse ) +class RaftStub(betterproto.ServiceStub): + async def send( + self, *, message: bytes = b"" + ) -> "betterproto_lib_google_protobuf.Empty": + + request = RaftMessage() + request.message = message + + return await self._unary_unary( + "/qdrant.Raft/Send", request, betterproto_lib_google_protobuf.Empty + ) + + async def who_is(self, *, id: int = 0) -> "Uri": + + request = PeerId() + request.id = id + + return await self._unary_unary("/qdrant.Raft/WhoIs", request, Uri) + + async def add_peer_to_known( + self, *, uri: Optional[str] = None, port: Optional[int] = None, id: int = 0 + ) -> "AllPeers": + + request = AddPeerToKnownMessage() + request.uri = uri + request.port = port + request.id = id + + return await self._unary_unary("/qdrant.Raft/AddPeerToKnown", request, AllPeers) + + async def add_peer_as_participant( + self, *, id: int = 0 + ) -> "betterproto_lib_google_protobuf.Empty": + + request = PeerId() + request.id = id + + return await self._unary_unary( + "/qdrant.Raft/AddPeerAsParticipant", + request, + betterproto_lib_google_protobuf.Empty, + ) + + class QdrantStub(betterproto.ServiceStub): async def health_check(self) -> 
"HealthCheckReply": @@ -913,6 +1117,34 @@ async def health_check(self) -> "HealthCheckReply": ) +class CollectionsInternalBase(ServiceBase): + async def get( + self, get_collection_info_request: "GetCollectionInfoRequest", shard_id: int + ) -> "GetCollectionInfoResponse": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def __rpc_get(self, stream: grpclib.server.Stream) -> None: + request = await stream.recv_message() + + request_kwargs = { + "get_collection_info_request": request.get_collection_info_request, + "shard_id": request.shard_id, + } + + response = await self.get(**request_kwargs) + await stream.send_message(response) + + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + return { + "/qdrant.CollectionsInternal/Get": grpclib.const.Handler( + self.__rpc_get, + grpclib.const.Cardinality.UNARY_UNARY, + GetCollectionInfoRequestInternal, + GetCollectionInfoResponse, + ), + } + + class CollectionsBase(ServiceBase): async def get(self, collection_name: str) -> "GetCollectionInfoResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) @@ -929,19 +1161,26 @@ async def create( wal_config: Optional["WalConfigDiff"], optimizers_config: Optional["OptimizersConfigDiff"], shard_number: Optional[int], + on_disk_payload: Optional[bool], + timeout: Optional[int], ) -> "CollectionOperationResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) async def update( - self, collection_name: str, optimizers_config: Optional["OptimizersConfigDiff"] + self, + collection_name: str, + optimizers_config: Optional["OptimizersConfigDiff"], + timeout: Optional[int], ) -> "CollectionOperationResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) - async def delete(self, collection_name: str) -> "CollectionOperationResponse": + async def delete( + self, collection_name: str, timeout: Optional[int] + ) -> "CollectionOperationResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) async def 
update_aliases( - self, actions: Optional[List["AliasOperations"]] + self, actions: Optional[List["AliasOperations"]], timeout: Optional[int] ) -> "CollectionOperationResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) @@ -974,6 +1213,8 @@ async def __rpc_create(self, stream: grpclib.server.Stream) -> None: "wal_config": request.wal_config, "optimizers_config": request.optimizers_config, "shard_number": request.shard_number, + "on_disk_payload": request.on_disk_payload, + "timeout": request.timeout, } response = await self.create(**request_kwargs) @@ -985,6 +1226,7 @@ async def __rpc_update(self, stream: grpclib.server.Stream) -> None: request_kwargs = { "collection_name": request.collection_name, "optimizers_config": request.optimizers_config, + "timeout": request.timeout, } response = await self.update(**request_kwargs) @@ -995,6 +1237,7 @@ async def __rpc_delete(self, stream: grpclib.server.Stream) -> None: request_kwargs = { "collection_name": request.collection_name, + "timeout": request.timeout, } response = await self.delete(**request_kwargs) @@ -1005,6 +1248,7 @@ async def __rpc_update_aliases(self, stream: grpclib.server.Stream) -> None: request_kwargs = { "actions": request.actions, + "timeout": request.timeout, } response = await self.update_aliases(**request_kwargs) @@ -1078,7 +1322,7 @@ async def set_payload( self, collection_name: str, wait: Optional[bool], - payload: Dict[str, "betterproto_lib_google_protobuf.Value"], + payload: Dict[str, "Value"], points: Optional[List["PointId"]], ) -> "PointsOperationResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) @@ -1120,6 +1364,7 @@ async def search( with_vector: Optional[bool], with_payload: "WithPayloadSelector", params: "SearchParams", + score_threshold: Optional[float], ) -> "SearchResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) @@ -1144,6 +1389,7 @@ async def recommend( with_vector: Optional[bool], with_payload: "WithPayloadSelector", params: 
"SearchParams", + score_threshold: Optional[float], ) -> "RecommendResponse": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) @@ -1258,6 +1504,7 @@ async def __rpc_search(self, stream: grpclib.server.Stream) -> None: "with_vector": request.with_vector, "with_payload": request.with_payload, "params": request.params, + "score_threshold": request.score_threshold, } response = await self.search(**request_kwargs) @@ -1290,6 +1537,7 @@ async def __rpc_recommend(self, stream: grpclib.server.Stream) -> None: "with_vector": request.with_vector, "with_payload": request.with_payload, "params": request.params, + "score_threshold": request.score_threshold, } response = await self.recommend(**request_kwargs) @@ -1366,6 +1614,96 @@ def __mapping__(self) -> Dict[str, grpclib.const.Handler]: } +class RaftBase(ServiceBase): + async def send(self, message: bytes) -> "betterproto_lib_google_protobuf.Empty": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def who_is(self, id: int) -> "Uri": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def add_peer_to_known( + self, uri: Optional[str], port: Optional[int], id: int + ) -> "AllPeers": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def add_peer_as_participant( + self, id: int + ) -> "betterproto_lib_google_protobuf.Empty": + raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) + + async def __rpc_send(self, stream: grpclib.server.Stream) -> None: + request = await stream.recv_message() + + request_kwargs = { + "message": request.message, + } + + response = await self.send(**request_kwargs) + await stream.send_message(response) + + async def __rpc_who_is(self, stream: grpclib.server.Stream) -> None: + request = await stream.recv_message() + + request_kwargs = { + "id": request.id, + } + + response = await self.who_is(**request_kwargs) + await stream.send_message(response) + + async def __rpc_add_peer_to_known(self, stream: grpclib.server.Stream) 
-> None: + request = await stream.recv_message() + + request_kwargs = { + "uri": request.uri, + "port": request.port, + "id": request.id, + } + + response = await self.add_peer_to_known(**request_kwargs) + await stream.send_message(response) + + async def __rpc_add_peer_as_participant( + self, stream: grpclib.server.Stream + ) -> None: + request = await stream.recv_message() + + request_kwargs = { + "id": request.id, + } + + response = await self.add_peer_as_participant(**request_kwargs) + await stream.send_message(response) + + def __mapping__(self) -> Dict[str, grpclib.const.Handler]: + return { + "/qdrant.Raft/Send": grpclib.const.Handler( + self.__rpc_send, + grpclib.const.Cardinality.UNARY_UNARY, + RaftMessage, + betterproto_lib_google_protobuf.Empty, + ), + "/qdrant.Raft/WhoIs": grpclib.const.Handler( + self.__rpc_who_is, + grpclib.const.Cardinality.UNARY_UNARY, + PeerId, + Uri, + ), + "/qdrant.Raft/AddPeerToKnown": grpclib.const.Handler( + self.__rpc_add_peer_to_known, + grpclib.const.Cardinality.UNARY_UNARY, + AddPeerToKnownMessage, + AllPeers, + ), + "/qdrant.Raft/AddPeerAsParticipant": grpclib.const.Handler( + self.__rpc_add_peer_as_participant, + grpclib.const.Cardinality.UNARY_UNARY, + PeerId, + betterproto_lib_google_protobuf.Empty, + ), + } + + class QdrantBase(ServiceBase): async def health_check(self) -> "HealthCheckReply": raise grpclib.GRPCError(grpclib.const.Status.UNIMPLEMENTED) diff --git a/qdrant_client/grpc/collections.proto b/qdrant_client/grpc/collections.proto index 9b0ea154..3ad782c0 100644 --- a/qdrant_client/grpc/collections.proto +++ b/qdrant_client/grpc/collections.proto @@ -1,9 +1,4 @@ syntax = "proto3"; - -option java_multiple_files = true; -option java_package = "tech.qdrant.grpc"; -option java_outer_classname = "QdrantProto"; - package qdrant; message GetCollectionInfoRequest { @@ -64,9 +59,10 @@ message HnswConfigDiff { */ optional uint64 ef_construct = 2; /* - Minimal amount of points for additional payload-based indexing. 
+ Minimal size (in KiloBytes) of vectors for additional payload-based indexing. If payload chunk is smaller than `full_scan_threshold` additional indexing won't be used - in this case full-scan search should be preferred by query planner and additional indexing is not required. + Note: 1Kb = 1 vector of size 256 */ optional uint64 full_scan_threshold = 3; } @@ -97,36 +93,36 @@ message OptimizersConfigDiff { */ optional uint64 default_segment_number = 3; /* - Do not create segments larger this number of points. + Do not create segments larger this size (in KiloBytes). Large segments might require disproportionately long indexation times, therefore it makes sense to limit the size of segments. If indexation speed have more priority for your - make this parameter lower. If search speed is more important - make this parameter higher. + Note: 1Kb = 1 vector of size 256 */ optional uint64 max_segment_size = 4; /* - Maximum number of vectors to store in-memory per segment. + Maximum size (in KiloBytes) of vectors to store in-memory per segment. Segments larger than this threshold will be stored as read-only memmaped file. + To enable memmap storage, lower the threshold + Note: 1Kb = 1 vector of size 256 */ optional uint64 memmap_threshold = 5; /* - Maximum number of vectors allowed for plain index. + Maximum size (in KiloBytes) of vectors allowed for plain index. Default value based on https://github.com/google-research/google-research/blob/master/scann/docs/algorithms.md + Note: 1Kb = 1 vector of size 256 */ optional uint64 indexing_threshold = 6; /* - Starting from this amount of vectors per-segment the engine will start building index for payload. - */ - optional uint64 payload_indexing_threshold = 7; - /* Interval between forced flushes. */ - optional uint64 flush_interval_sec = 8; + optional uint64 flush_interval_sec = 7; /* Max number of threads, which can be used for optimization. 
If 0 - `NUM_CPU - 1` will be used */ - optional uint64 max_optimization_threads = 9; + optional uint64 max_optimization_threads = 8; } message CreateCollection { @@ -137,15 +133,19 @@ message CreateCollection { optional WalConfigDiff wal_config = 5; // Configuration of the Write-Ahead-Log optional OptimizersConfigDiff optimizers_config = 6; // Configuration of the optimizers optional uint32 shard_number = 7; // Number of shards in the collection, default = 1 + optional bool on_disk_payload = 8; // If true - point's payload will not be stored in memory + optional uint64 timeout = 9; // Wait timeout for operation commit in seconds, if not specified - default value will be supplied } message UpdateCollection { string collection_name = 1; // Name of the collection optional OptimizersConfigDiff optimizers_config = 2; // New configuration parameters for the collection + optional uint64 timeout = 3; // Wait timeout for operation commit in seconds, if not specified - default value will be supplied } message DeleteCollection { string collection_name = 1; // Name of the collection + optional uint64 timeout = 2; // Wait timeout for operation commit in seconds, if not specified - default value will be supplied } message CollectionOperationResponse { @@ -157,6 +157,7 @@ message CollectionParams { uint64 vector_size = 1; // Size of the vectors Distance distance = 2; // Distance function used for comparing vectors uint32 shard_number = 3; // Number of shards in collection + bool on_disk_payload = 4; // If true - point's payload will not be stored in memory } message CollectionConfig { @@ -183,6 +184,7 @@ message CollectionInfo { message ChangeAliases { repeated AliasOperations actions = 1; // List of actions + optional uint64 timeout = 2; // Wait timeout for operation commit in seconds, if not specified - default value will be supplied } message AliasOperations { diff --git a/qdrant_client/grpc/collections_service.proto b/qdrant_client/grpc/collections_service.proto index 
449e6b1a..d8bac8e4 100644 --- a/qdrant_client/grpc/collections_service.proto +++ b/qdrant_client/grpc/collections_service.proto @@ -2,10 +2,6 @@ syntax = "proto3"; import "collections.proto"; -option java_multiple_files = true; -option java_package = "tech.qdrant.grpc"; -option java_outer_classname = "QdrantProto"; - package qdrant; service Collections { diff --git a/qdrant_client/grpc/json_with_int.proto b/qdrant_client/grpc/json_with_int.proto new file mode 100644 index 00000000..8d03757c --- /dev/null +++ b/qdrant_client/grpc/json_with_int.proto @@ -0,0 +1,61 @@ +// Fork of the google.protobuf.Value with explicit support for integer values + +syntax = "proto3"; + +package qdrant; + +// `Struct` represents a structured data value, consisting of fields +// which map to dynamically typed values. In some languages, `Struct` +// might be supported by a native representation. For example, in +// scripting languages like JS a struct is represented as an +// object. The details of that representation are described together +// with the proto support for the language. +// +// The JSON representation for `Struct` is JSON object. +message Struct { + // Unordered map of dynamically typed values. + map fields = 1; +} + +// `Value` represents a dynamically typed value which can be either +// null, a number, a string, a boolean, a recursive struct value, or a +// list of values. A producer of value is expected to set one of that +// variants, absence of any variant indicates an error. +// +// The JSON representation for `Value` is JSON value. +message Value { + // The kind of value. + oneof kind { + // Represents a null value. + NullValue null_value = 1; + // Represents a double value. + double double_value = 2; + // Represents an integer value + int64 integer_value = 3; + // Represents a string value. + string string_value = 4; + // Represents a boolean value. + bool bool_value = 5; + // Represents a structured value. 
+ Struct struct_value = 6; + // Represents a repeated `Value`. + ListValue list_value = 7; + } +} + +// `NullValue` is a singleton enumeration to represent the null value for the +// `Value` type union. +// +// The JSON representation for `NullValue` is JSON `null`. +enum NullValue { + // Null value. + NULL_VALUE = 0; +} + +// `ListValue` is a wrapper around a repeated field of values. +// +// The JSON representation for `ListValue` is JSON array. +message ListValue { + // Repeated field of dynamically typed values. + repeated Value values = 1; +} diff --git a/qdrant_client/grpc/points.proto b/qdrant_client/grpc/points.proto index d1517d88..ece1a684 100644 --- a/qdrant_client/grpc/points.proto +++ b/qdrant_client/grpc/points.proto @@ -1,12 +1,8 @@ syntax = "proto3"; -option java_multiple_files = true; -option java_package = "tech.qdrant.grpc"; -option java_outer_classname = "QdrantProto"; - package qdrant; -import "google/protobuf/struct.proto"; +import "json_with_int.proto"; // --------------------------------------------- // ------------- Point Id Requests ------------- @@ -46,7 +42,7 @@ message GetPoints { message SetPayloadPoints { string collection_name = 1; // name of the collection optional bool wait = 2; // Wait until the changes have been applied? 
- map payload = 3; // New payload values + map payload = 3; // New payload values repeated PointId points = 4; // List of point to modify } @@ -84,11 +80,11 @@ message DeleteFieldIndexCollection { } message PayloadIncludeSelector { - repeated string include = 1; // List of payload keys to include into result + repeated string fields = 1; // List of payload keys to include into result } message PayloadExcludeSelector { - repeated string exclude = 1; // List of payload keys to exclude from the result + repeated string fields = 1; // List of payload keys to exclude from the result } message WithPayloadSelector { @@ -117,6 +113,7 @@ message SearchPoints { optional bool with_vector = 5; // Return point vector with the result. WithPayloadSelector with_payload = 6; // Options for specifying which payload to include or not SearchParams params = 7; // Search config + optional float score_threshold = 8; // If provided - cut off results with worse scores } message ScrollPoints { @@ -137,6 +134,7 @@ message RecommendPoints { optional bool with_vector = 6; // Return point vector with the result. 
WithPayloadSelector with_payload = 7; // Options for specifying which payload to include or not SearchParams params = 8; // Search config + optional float score_threshold = 9; // If provided - cut off results with worse scores } // --------------------------------------------- @@ -161,7 +159,7 @@ enum UpdateStatus { message ScoredPoint { PointId id = 1; // Point id - map payload = 2; // Payload + map payload = 2; // Payload float score = 3; // Similarity score repeated float vector = 4; // Vector uint64 version = 5; // Last update operation applied to this point @@ -180,7 +178,7 @@ message ScrollResponse { message RetrievedPoint { PointId id = 1; - map payload = 2; + map payload = 2; repeated float vector = 3; } @@ -285,7 +283,7 @@ message PointsIdsList { message PointStruct { PointId id = 1; repeated float vector = 2; - map payload = 3; + map payload = 3; } diff --git a/qdrant_client/grpc/points_service.proto b/qdrant_client/grpc/points_service.proto index 2a694552..095c3b1a 100644 --- a/qdrant_client/grpc/points_service.proto +++ b/qdrant_client/grpc/points_service.proto @@ -2,10 +2,6 @@ syntax = "proto3"; import "points.proto"; -option java_multiple_files = true; -option java_package = "tech.qdrant.grpc"; -option java_outer_classname = "QdrantProto"; - package qdrant; import "google/protobuf/struct.proto"; diff --git a/qdrant_client/grpc/qdrant.proto b/qdrant_client/grpc/qdrant.proto index cf9a58f0..61f4b34c 100644 --- a/qdrant_client/grpc/qdrant.proto +++ b/qdrant_client/grpc/qdrant.proto @@ -1,11 +1,9 @@ syntax = "proto3"; import "collections_service.proto"; +import "collections_internal_service.proto"; import "points_service.proto"; - -option java_multiple_files = true; -option java_package = "tech.qdrant.grpc"; -option java_outer_classname = "QdrantProto"; +import "points_internal_service.proto"; package qdrant; diff --git a/qdrant_client/http/api/cluster_api.py b/qdrant_client/http/api/cluster_api.py new file mode 100644 index 00000000..12e7a1e0 --- 
/dev/null +++ b/qdrant_client/http/api/cluster_api.py @@ -0,0 +1,185 @@ +# flake8: noqa E501 +from enum import Enum +from pathlib import PurePath +from types import GeneratorType +from typing import TYPE_CHECKING, Any, Callable, Dict, List, Set, Tuple, Union + +from pydantic.json import ENCODERS_BY_TYPE +from pydantic.main import BaseModel +from qdrant_client.http.models import models as m + +SetIntStr = Set[Union[int, str]] +DictIntStrAny = Dict[Union[int, str], Any] + + +def generate_encoders_by_class_tuples(type_encoder_map: Dict[Any, Callable]) -> Dict[Callable, Tuple]: + encoders_by_classes: Dict[Callable, List] = {} + for type_, encoder in type_encoder_map.items(): + encoders_by_classes.setdefault(encoder, []).append(type_) + encoders_by_class_tuples: Dict[Callable, Tuple] = {} + for encoder, classes in encoders_by_classes.items(): + encoders_by_class_tuples[encoder] = tuple(classes) + return encoders_by_class_tuples + + +encoders_by_class_tuples = generate_encoders_by_class_tuples(ENCODERS_BY_TYPE) + + +def jsonable_encoder( + obj: Any, + include: Union[SetIntStr, DictIntStrAny] = None, + exclude=None, + by_alias: bool = True, + skip_defaults: bool = None, + exclude_unset: bool = False, + include_none: bool = True, + custom_encoder=None, + sqlalchemy_safe: bool = True, +) -> Any: + if exclude is None: + exclude = set() + if custom_encoder is None: + custom_encoder = {} + if include is not None and not isinstance(include, set): + include = set(include) + if exclude is not None and not isinstance(exclude, set): + exclude = set(exclude) + if isinstance(obj, BaseModel): + encoder = getattr(obj.Config, "json_encoders", {}) + if custom_encoder: + encoder.update(custom_encoder) + obj_dict = obj.dict( + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=bool(exclude_unset or skip_defaults), + ) + + return jsonable_encoder( + obj_dict, + include_none=include_none, + custom_encoder=encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + if 
isinstance(obj, Enum): + return obj.value + if isinstance(obj, PurePath): + return str(obj) + if isinstance(obj, (str, int, float, type(None))): + return obj + if isinstance(obj, dict): + encoded_dict = {} + for key, value in obj.items(): + if ( + (not sqlalchemy_safe or (not isinstance(key, str)) or (not key.startswith("_sa"))) + and (value is not None or include_none) + and ((include and key in include) or key not in exclude) + ): + encoded_key = jsonable_encoder( + key, + by_alias=by_alias, + exclude_unset=exclude_unset, + include_none=include_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_value = jsonable_encoder( + value, + by_alias=by_alias, + exclude_unset=exclude_unset, + include_none=include_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + encoded_dict[encoded_key] = encoded_value + return encoded_dict + if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)): + encoded_list = [] + for item in obj: + encoded_list.append( + jsonable_encoder( + item, + include=include, + exclude=exclude, + by_alias=by_alias, + exclude_unset=exclude_unset, + include_none=include_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + ) + return encoded_list + + if custom_encoder: + if type(obj) in custom_encoder: + return custom_encoder[type(obj)](obj) + else: + for encoder_type, encoder in custom_encoder.items(): + if isinstance(obj, encoder_type): + return encoder(obj) + + if type(obj) in ENCODERS_BY_TYPE: + return ENCODERS_BY_TYPE[type(obj)](obj) + for encoder, classes_tuple in encoders_by_class_tuples.items(): + if isinstance(obj, classes_tuple): + return encoder(obj) + + errors: List[Exception] = [] + try: + data = dict(obj) + except Exception as e: + errors.append(e) + try: + data = vars(obj) + except Exception as e: + errors.append(e) + raise ValueError(errors) + return jsonable_encoder( + data, + by_alias=by_alias, + exclude_unset=exclude_unset, + 
include_none=include_none, + custom_encoder=custom_encoder, + sqlalchemy_safe=sqlalchemy_safe, + ) + + +if TYPE_CHECKING: + from qdrant_client.http.api_client import ApiClient + + +class _ClusterApi: + def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"): + self.api_client = api_client + + def _build_for_cluster_status( + self, + ): + """ + Get information about the current state and composition of the cluster + """ + return self.api_client.request( + type_=m.InlineResponse200, + method="GET", + url="/cluster", + ) + + +class AsyncClusterApi(_ClusterApi): + async def cluster_status( + self, + ) -> m.InlineResponse200: + """ + Get information about the current state and composition of the cluster + """ + return await self._build_for_cluster_status() + + +class SyncClusterApi(_ClusterApi): + def cluster_status( + self, + ) -> m.InlineResponse200: + """ + Get information about the current state and composition of the cluster + """ + return self._build_for_cluster_status() diff --git a/qdrant_client/http/api/collections_api.py b/qdrant_client/http/api/collections_api.py index 147e7456..fc615c28 100644 --- a/qdrant_client/http/api/collections_api.py +++ b/qdrant_client/http/api/collections_api.py @@ -155,6 +155,7 @@ def __init__(self, api_client: "Union[ApiClient, AsyncApiClient]"): def _build_for_create_collection( self, collection_name: str, + timeout: int = None, create_collection: m.CreateCollection = None, ): """ @@ -164,13 +165,18 @@ def _build_for_create_collection( "collection_name": str(collection_name), } + query_params = {} + if timeout is not None: + query_params["timeout"] = str(timeout) + body = jsonable_encoder(create_collection) return self.api_client.request( - type_=m.InlineResponse2001, + type_=m.InlineResponse2003, method="PUT", url="/collections/{collection_name}", path_params=path_params, + params=query_params, json=body, ) @@ -194,7 +200,7 @@ def _build_for_create_field_index( body = jsonable_encoder(create_field_index) return 
self.api_client.request( - type_=m.InlineResponse2003, + type_=m.InlineResponse2004, method="PUT", url="/collections/{collection_name}/index", path_params=path_params, @@ -205,6 +211,7 @@ def _build_for_create_field_index( def _build_for_delete_collection( self, collection_name: str, + timeout: int = None, ): """ Drop collection and all associated data @@ -213,11 +220,16 @@ def _build_for_delete_collection( "collection_name": str(collection_name), } + query_params = {} + if timeout is not None: + query_params["timeout"] = str(timeout) + return self.api_client.request( - type_=m.InlineResponse2001, + type_=m.InlineResponse2003, method="DELETE", url="/collections/{collection_name}", path_params=path_params, + params=query_params, ) def _build_for_delete_field_index( @@ -239,7 +251,7 @@ def _build_for_delete_field_index( query_params["wait"] = str(wait).lower() return self.api_client.request( - type_=m.InlineResponse2003, + type_=m.InlineResponse2004, method="DELETE", url="/collections/{collection_name}/index/{field_name}", path_params=path_params, @@ -271,22 +283,30 @@ def _build_for_get_collections( Get list name of all existing collections """ return self.api_client.request( - type_=m.InlineResponse200, + type_=m.InlineResponse2001, method="GET", url="/collections", ) def _build_for_update_aliases( self, + timeout: int = None, change_aliases_operation: m.ChangeAliasesOperation = None, ): + query_params = {} + if timeout is not None: + query_params["timeout"] = str(timeout) + body = jsonable_encoder(change_aliases_operation) - return self.api_client.request(type_=m.InlineResponse2001, method="POST", url="/collections/aliases", json=body) + return self.api_client.request( + type_=m.InlineResponse2003, method="POST", url="/collections/aliases", params=query_params, json=body + ) def _build_for_update_collection( self, collection_name: str, + timeout: int = None, update_collection: m.UpdateCollection = None, ): """ @@ -296,39 +316,35 @@ def 
_build_for_update_collection( "collection_name": str(collection_name), } + query_params = {} + if timeout is not None: + query_params["timeout"] = str(timeout) + body = jsonable_encoder(update_collection) return self.api_client.request( - type_=m.InlineResponse2001, + type_=m.InlineResponse2003, method="PATCH", url="/collections/{collection_name}", path_params=path_params, + params=query_params, json=body, ) - def _build_for_update_collections( - self, - collection_meta_operations: m.CollectionMetaOperations = None, - ): - """ - Perform update, create, remove or alias change operations on collections - """ - body = jsonable_encoder(collection_meta_operations) - - return self.api_client.request(type_=m.InlineResponse2001, method="POST", url="/collections", json=body) - class AsyncCollectionsApi(_CollectionsApi): async def create_collection( self, collection_name: str, + timeout: int = None, create_collection: m.CreateCollection = None, - ) -> m.InlineResponse2001: + ) -> m.InlineResponse2003: """ Create new collection with given parameters """ return await self._build_for_create_collection( collection_name=collection_name, + timeout=timeout, create_collection=create_collection, ) @@ -337,7 +353,7 @@ async def create_field_index( collection_name: str, wait: bool = None, create_field_index: m.CreateFieldIndex = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Create index for field in collection """ @@ -350,12 +366,14 @@ async def create_field_index( async def delete_collection( self, collection_name: str, - ) -> m.InlineResponse2001: + timeout: int = None, + ) -> m.InlineResponse2003: """ Drop collection and all associated data """ return await self._build_for_delete_collection( collection_name=collection_name, + timeout=timeout, ) async def delete_field_index( @@ -363,7 +381,7 @@ async def delete_field_index( collection_name: str, field_name: str, wait: bool = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Delete field index 
for collection """ @@ -386,7 +404,7 @@ async def get_collection( async def get_collections( self, - ) -> m.InlineResponse200: + ) -> m.InlineResponse2001: """ Get list name of all existing collections """ @@ -394,48 +412,43 @@ async def get_collections( async def update_aliases( self, + timeout: int = None, change_aliases_operation: m.ChangeAliasesOperation = None, - ) -> m.InlineResponse2001: + ) -> m.InlineResponse2003: return await self._build_for_update_aliases( + timeout=timeout, change_aliases_operation=change_aliases_operation, ) async def update_collection( self, collection_name: str, + timeout: int = None, update_collection: m.UpdateCollection = None, - ) -> m.InlineResponse2001: + ) -> m.InlineResponse2003: """ Update parameters of the existing collection """ return await self._build_for_update_collection( collection_name=collection_name, + timeout=timeout, update_collection=update_collection, ) - async def update_collections( - self, - collection_meta_operations: m.CollectionMetaOperations = None, - ) -> m.InlineResponse2001: - """ - Perform update, create, remove or alias change operations on collections - """ - return await self._build_for_update_collections( - collection_meta_operations=collection_meta_operations, - ) - class SyncCollectionsApi(_CollectionsApi): def create_collection( self, collection_name: str, + timeout: int = None, create_collection: m.CreateCollection = None, - ) -> m.InlineResponse2001: + ) -> m.InlineResponse2003: """ Create new collection with given parameters """ return self._build_for_create_collection( collection_name=collection_name, + timeout=timeout, create_collection=create_collection, ) @@ -444,7 +457,7 @@ def create_field_index( collection_name: str, wait: bool = None, create_field_index: m.CreateFieldIndex = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Create index for field in collection """ @@ -457,12 +470,14 @@ def create_field_index( def delete_collection( self, collection_name: str, - ) -> 
m.InlineResponse2001: + timeout: int = None, + ) -> m.InlineResponse2003: """ Drop collection and all associated data """ return self._build_for_delete_collection( collection_name=collection_name, + timeout=timeout, ) def delete_field_index( @@ -470,7 +485,7 @@ def delete_field_index( collection_name: str, field_name: str, wait: bool = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Delete field index for collection """ @@ -493,7 +508,7 @@ def get_collection( def get_collections( self, - ) -> m.InlineResponse200: + ) -> m.InlineResponse2001: """ Get list name of all existing collections """ @@ -501,32 +516,25 @@ def get_collections( def update_aliases( self, + timeout: int = None, change_aliases_operation: m.ChangeAliasesOperation = None, - ) -> m.InlineResponse2001: + ) -> m.InlineResponse2003: return self._build_for_update_aliases( + timeout=timeout, change_aliases_operation=change_aliases_operation, ) def update_collection( self, collection_name: str, + timeout: int = None, update_collection: m.UpdateCollection = None, - ) -> m.InlineResponse2001: + ) -> m.InlineResponse2003: """ Update parameters of the existing collection """ return self._build_for_update_collection( collection_name=collection_name, + timeout=timeout, update_collection=update_collection, ) - - def update_collections( - self, - collection_meta_operations: m.CollectionMetaOperations = None, - ) -> m.InlineResponse2001: - """ - Perform update, create, remove or alias change operations on collections - """ - return self._build_for_update_collections( - collection_meta_operations=collection_meta_operations, - ) diff --git a/qdrant_client/http/api/points_api.py b/qdrant_client/http/api/points_api.py index 6193b25c..2df9fe89 100644 --- a/qdrant_client/http/api/points_api.py +++ b/qdrant_client/http/api/points_api.py @@ -172,7 +172,7 @@ def _build_for_clear_payload( body = jsonable_encoder(points_selector) return self.api_client.request( - type_=m.InlineResponse2003, + 
type_=m.InlineResponse2004, method="POST", url="/collections/{collection_name}/points/payload/clear", path_params=path_params, @@ -200,7 +200,7 @@ def _build_for_delete_payload( body = jsonable_encoder(delete_payload) return self.api_client.request( - type_=m.InlineResponse2003, + type_=m.InlineResponse2004, method="POST", url="/collections/{collection_name}/points/payload/delete", path_params=path_params, @@ -228,7 +228,7 @@ def _build_for_delete_points( body = jsonable_encoder(points_selector) return self.api_client.request( - type_=m.InlineResponse2003, + type_=m.InlineResponse2004, method="POST", url="/collections/{collection_name}/points/delete", path_params=path_params, @@ -250,7 +250,7 @@ def _build_for_get_point( } return self.api_client.request( - type_=m.InlineResponse2007, + type_=m.InlineResponse2005, method="GET", url="/collections/{collection_name}/points/{id}", path_params=path_params, @@ -271,7 +271,7 @@ def _build_for_get_points( body = jsonable_encoder(point_request) return self.api_client.request( - type_=m.InlineResponse2004, + type_=m.InlineResponse2006, method="POST", url="/collections/{collection_name}/points", path_params=path_params, @@ -293,7 +293,7 @@ def _build_for_recommend_points( body = jsonable_encoder(recommend_request) return self.api_client.request( - type_=m.InlineResponse2005, + type_=m.InlineResponse2008, method="POST", url="/collections/{collection_name}/points/recommend", path_params=path_params, @@ -315,7 +315,7 @@ def _build_for_scroll_points( body = jsonable_encoder(scroll_request) return self.api_client.request( - type_=m.InlineResponse2006, + type_=m.InlineResponse2007, method="POST", url="/collections/{collection_name}/points/scroll", path_params=path_params, @@ -337,7 +337,7 @@ def _build_for_search_points( body = jsonable_encoder(search_request) return self.api_client.request( - type_=m.InlineResponse2005, + type_=m.InlineResponse2008, method="POST", url="/collections/{collection_name}/points/search", 
path_params=path_params, @@ -364,7 +364,7 @@ def _build_for_set_payload( body = jsonable_encoder(set_payload) return self.api_client.request( - type_=m.InlineResponse2003, + type_=m.InlineResponse2004, method="POST", url="/collections/{collection_name}/points/payload", path_params=path_params, @@ -372,34 +372,6 @@ def _build_for_set_payload( json=body, ) - def _build_for_update_points( - self, - collection_name: str, - wait: bool = None, - collection_update_operations: m.CollectionUpdateOperations = None, - ): - """ - Perform point update operation (vectors, payloads, indexes) in collection - """ - path_params = { - "collection_name": str(collection_name), - } - - query_params = {} - if wait is not None: - query_params["wait"] = str(wait).lower() - - body = jsonable_encoder(collection_update_operations) - - return self.api_client.request( - type_=m.InlineResponse2003, - method="POST", - url="/collections/{collection_name}", - path_params=path_params, - params=query_params, - json=body, - ) - def _build_for_upsert_points( self, collection_name: str, @@ -420,7 +392,7 @@ def _build_for_upsert_points( body = jsonable_encoder(point_insert_operations) return self.api_client.request( - type_=m.InlineResponse2003, + type_=m.InlineResponse2004, method="PUT", url="/collections/{collection_name}/points", path_params=path_params, @@ -435,7 +407,7 @@ async def clear_payload( collection_name: str, wait: bool = None, points_selector: m.PointsSelector = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Remove all payload for specified points """ @@ -450,7 +422,7 @@ async def delete_payload( collection_name: str, wait: bool = None, delete_payload: m.DeletePayload = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Delete specified key payload for points """ @@ -465,7 +437,7 @@ async def delete_points( collection_name: str, wait: bool = None, points_selector: m.PointsSelector = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ 
Delete points """ @@ -479,7 +451,7 @@ async def get_point( self, collection_name: str, id: m.ExtendedPointId, - ) -> m.InlineResponse2007: + ) -> m.InlineResponse2005: """ Retrieve full information of single point by id """ @@ -492,7 +464,7 @@ async def get_points( self, collection_name: str, point_request: m.PointRequest = None, - ) -> m.InlineResponse2004: + ) -> m.InlineResponse2006: """ Retrieve multiple points by specified IDs """ @@ -505,7 +477,7 @@ async def recommend_points( self, collection_name: str, recommend_request: m.RecommendRequest = None, - ) -> m.InlineResponse2005: + ) -> m.InlineResponse2008: """ Look for the points which are closer to stored positive examples and at the same time further to negative examples. """ @@ -518,7 +490,7 @@ async def scroll_points( self, collection_name: str, scroll_request: m.ScrollRequest = None, - ) -> m.InlineResponse2006: + ) -> m.InlineResponse2007: """ Scroll request - paginate over all points which matches given filtering condition """ @@ -531,7 +503,7 @@ async def search_points( self, collection_name: str, search_request: m.SearchRequest = None, - ) -> m.InlineResponse2005: + ) -> m.InlineResponse2008: """ Retrieve closest points based on vector similarity and given filtering conditions """ @@ -545,7 +517,7 @@ async def set_payload( collection_name: str, wait: bool = None, set_payload: m.SetPayload = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Set payload for points """ @@ -555,27 +527,12 @@ async def set_payload( set_payload=set_payload, ) - async def update_points( - self, - collection_name: str, - wait: bool = None, - collection_update_operations: m.CollectionUpdateOperations = None, - ) -> m.InlineResponse2003: - """ - Perform point update operation (vectors, payloads, indexes) in collection - """ - return await self._build_for_update_points( - collection_name=collection_name, - wait=wait, - collection_update_operations=collection_update_operations, - ) - async def upsert_points( 
self, collection_name: str, wait: bool = None, point_insert_operations: m.PointInsertOperations = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Perform insert + updates on points. If point with given ID already exists - it will be overwritten. """ @@ -592,7 +549,7 @@ def clear_payload( collection_name: str, wait: bool = None, points_selector: m.PointsSelector = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Remove all payload for specified points """ @@ -607,7 +564,7 @@ def delete_payload( collection_name: str, wait: bool = None, delete_payload: m.DeletePayload = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Delete specified key payload for points """ @@ -622,7 +579,7 @@ def delete_points( collection_name: str, wait: bool = None, points_selector: m.PointsSelector = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Delete points """ @@ -636,7 +593,7 @@ def get_point( self, collection_name: str, id: m.ExtendedPointId, - ) -> m.InlineResponse2007: + ) -> m.InlineResponse2005: """ Retrieve full information of single point by id """ @@ -649,7 +606,7 @@ def get_points( self, collection_name: str, point_request: m.PointRequest = None, - ) -> m.InlineResponse2004: + ) -> m.InlineResponse2006: """ Retrieve multiple points by specified IDs """ @@ -662,7 +619,7 @@ def recommend_points( self, collection_name: str, recommend_request: m.RecommendRequest = None, - ) -> m.InlineResponse2005: + ) -> m.InlineResponse2008: """ Look for the points which are closer to stored positive examples and at the same time further to negative examples. 
""" @@ -675,7 +632,7 @@ def scroll_points( self, collection_name: str, scroll_request: m.ScrollRequest = None, - ) -> m.InlineResponse2006: + ) -> m.InlineResponse2007: """ Scroll request - paginate over all points which matches given filtering condition """ @@ -688,7 +645,7 @@ def search_points( self, collection_name: str, search_request: m.SearchRequest = None, - ) -> m.InlineResponse2005: + ) -> m.InlineResponse2008: """ Retrieve closest points based on vector similarity and given filtering conditions """ @@ -702,7 +659,7 @@ def set_payload( collection_name: str, wait: bool = None, set_payload: m.SetPayload = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Set payload for points """ @@ -712,27 +669,12 @@ def set_payload( set_payload=set_payload, ) - def update_points( - self, - collection_name: str, - wait: bool = None, - collection_update_operations: m.CollectionUpdateOperations = None, - ) -> m.InlineResponse2003: - """ - Perform point update operation (vectors, payloads, indexes) in collection - """ - return self._build_for_update_points( - collection_name=collection_name, - wait=wait, - collection_update_operations=collection_update_operations, - ) - def upsert_points( self, collection_name: str, wait: bool = None, point_insert_operations: m.PointInsertOperations = None, - ) -> m.InlineResponse2003: + ) -> m.InlineResponse2004: """ Perform insert + updates on points. If point with given ID already exists - it will be overwritten. 
""" diff --git a/qdrant_client/http/api_client.py b/qdrant_client/http/api_client.py index 95a03137..4e6601f3 100644 --- a/qdrant_client/http/api_client.py +++ b/qdrant_client/http/api_client.py @@ -4,6 +4,7 @@ from httpx import AsyncClient, Client, Request, Response from pydantic import ValidationError +from qdrant_client.http.api.cluster_api import AsyncClusterApi, SyncClusterApi from qdrant_client.http.api.collections_api import AsyncCollectionsApi, SyncCollectionsApi from qdrant_client.http.api.points_api import AsyncPointsApi, SyncPointsApi from qdrant_client.http.exceptions import ResponseHandlingException, UnexpectedResponse @@ -16,6 +17,7 @@ class AsyncApis(Generic[AsyncClientT]): def __init__(self, host: str = None, **kwargs: Any): self.client = AsyncApiClient(host, **kwargs) + self.cluster_api = AsyncClusterApi(self.client) self.collections_api = AsyncCollectionsApi(self.client) self.points_api = AsyncPointsApi(self.client) @@ -24,6 +26,7 @@ class SyncApis(Generic[ClientT]): def __init__(self, host: str = None, **kwargs: Any): self.client = ApiClient(host, **kwargs) + self.cluster_api = SyncClusterApi(self.client) self.collections_api = SyncCollectionsApi(self.client) self.points_api = SyncPointsApi(self.client) diff --git a/qdrant_client/http/models/models.py b/qdrant_client/http/models/models.py index 73abe8f2..dda7cc5e 100644 --- a/qdrant_client/http/models/models.py +++ b/qdrant_client/http/models/models.py @@ -8,13 +8,13 @@ from typing_extensions import Literal from pydantic import BaseModel, Field -from pydantic.types import StrictInt, StrictStr +from pydantic.types import StrictBool, StrictInt, StrictStr class Batch(BaseModel): ids: List["ExtendedPointId"] = Field(..., description="") - payloads: Optional[List["Payload"]] = Field(None, description="") vectors: List[List[float]] = Field(..., description="") + payloads: Optional[List["Payload"]] = Field(None, description="") class ChangeAliasesOperation(BaseModel): @@ -28,10 +28,29 @@ class 
ChangeAliasesOperation(BaseModel): ) +class ClusterStatusOneOf(BaseModel): + status: Literal[ + "disabled", + ] = Field(..., description="") + + +class ClusterStatusOneOf1(BaseModel): + """ + Description of enabled cluster + """ + + status: Literal[ + "enabled", + ] = Field(..., description="Description of enabled cluster") + peer_id: int = Field(..., description="ID of this peer") + peers: Dict[str, "PeerInfo"] = Field(..., description="Peers composition of the cluster with main information") + raft_info: "RaftInfo" = Field(..., description="Description of enabled cluster") + + class CollectionConfig(BaseModel): + params: "CollectionParams" = Field(..., description="") hnsw_config: "HnswConfig" = Field(..., description="") optimizer_config: "OptimizersConfig" = Field(..., description="") - params: "CollectionParams" = Field(..., description="") wal_config: "WalConfig" = Field(..., description="") @@ -44,38 +63,26 @@ class CollectionInfo(BaseModel): Current statistics and configuration of the collection """ - config: "CollectionConfig" = Field(..., description="Current statistics and configuration of the collection") - disk_data_size: int = Field(..., description="Disk space, used by collection") + status: "CollectionStatus" = Field(..., description="Current statistics and configuration of the collection") optimizer_status: "OptimizersStatus" = Field( ..., description="Current statistics and configuration of the collection" ) - payload_schema: Dict[str, "PayloadIndexInfo"] = Field(..., description="Types of stored payload") - ram_data_size: int = Field(..., description="RAM used by collection") - segments_count: int = Field(..., description="Number of segments in collection") - status: "CollectionStatus" = Field(..., description="Current statistics and configuration of the collection") vectors_count: int = Field(..., description="Number of vectors in collection") - - -class CollectionMetaOperationsOneOf(BaseModel): - create_collection: "CreateCollectionOperation" = 
Field(..., description="") - - -class CollectionMetaOperationsOneOf1(BaseModel): - update_collection: "UpdateCollectionOperation" = Field(..., description="") - - -class CollectionMetaOperationsOneOf2(BaseModel): - delete_collection: str = Field(..., description="Operation for deleting collection with given name") - - -class CollectionMetaOperationsOneOf3(BaseModel): - change_aliases: "ChangeAliasesOperation" = Field(..., description="") + segments_count: int = Field(..., description="Number of segments in collection") + disk_data_size: int = Field(..., description="Disk space, used by collection") + ram_data_size: int = Field(..., description="RAM used by collection") + config: "CollectionConfig" = Field(..., description="Current statistics and configuration of the collection") + payload_schema: Dict[str, "PayloadIndexInfo"] = Field(..., description="Types of stored payload") class CollectionParams(BaseModel): + vector_size: int = Field(..., description="Size of a vectors used") distance: "Distance" = Field(..., description="") shard_number: Optional[int] = Field(1, description="Number of shards the collection has") - vector_size: int = Field(..., description="Size of a vectors used") + on_disk_payload: Optional[bool] = Field( + False, + description="If true - point's payload will not be stored in memory. It will be read from the disk every time it is requested. This setting saves RAM by (slightly) increasing the response time. Note: those payload values that are involved in filtering and are indexed - remain in RAM.", + ) class CollectionStatus(str, Enum): @@ -93,11 +100,11 @@ class CreateAlias(BaseModel): Create alternative name for a collection. Collection will be available under both names for search, retrieve, """ - alias_name: str = Field( + collection_name: str = Field( ..., description="Create alternative name for a collection. 
Collection will be available under both names for search, retrieve,", ) - collection_name: str = Field( + alias_name: str = Field( ..., description="Create alternative name for a collection. Collection will be available under both names for search, retrieve,", ) @@ -112,48 +119,29 @@ class CreateCollection(BaseModel): Operation for creating new collection and (optionally) specify index params """ - distance: "Distance" = Field( - ..., description="Operation for creating new collection and (optionally) specify index params" - ) - hnsw_config: Optional["HnswConfigDiff"] = Field( - None, description="Custom params for HNSW index. If none - values from service configuration file are used." - ) - optimizers_config: Optional["OptimizersConfigDiff"] = Field( - None, description="Custom params for Optimizers. If none - values from service configuration file are used." - ) - shard_number: Optional[int] = Field(1, description="Number of shards in collection. Default is 1, minimum is 1.") vector_size: int = Field( ..., description="Operation for creating new collection and (optionally) specify index params" ) - wal_config: Optional["WalConfigDiff"] = Field( - None, description="Custom params for WAL. If none - values from service configuration file are used." - ) - - -class CreateCollectionOperation(BaseModel): - """ - Operation for creating new collection and (optionally) specify index params - """ - - collection_name: str = Field( - ..., description="Operation for creating new collection and (optionally) specify index params" - ) distance: "Distance" = Field( ..., description="Operation for creating new collection and (optionally) specify index params" ) - hnsw_config: Optional["HnswConfigDiff"] = Field( - None, description="Custom params for HNSW index. If none - values from service configuration file are used." + shard_number: Optional[int] = Field( + None, + description="Number of shards in collection. 
Default is 1 for standalone, otherwise equal to the number of nodes Minimum is 1", ) - optimizers_config: Optional["OptimizersConfigDiff"] = Field( - None, description="Custom params for Optimizers. If none - values from service configuration file are used." + on_disk_payload: Optional[bool] = Field( + None, + description="If true - point's payload will not be stored in memory. It will be read from the disk every time it is requested. This setting saves RAM by (slightly) increasing the response time. Note: those payload values that are involved in filtering and are indexed - remain in RAM.", ) - shard_number: Optional[int] = Field(1, description="Number of shards in collection. Default is 1, minimum is 1.") - vector_size: int = Field( - ..., description="Operation for creating new collection and (optionally) specify index params" + hnsw_config: Optional["HnswConfigDiff"] = Field( + None, description="Custom params for HNSW index. If none - values from service configuration file are used." ) wal_config: Optional["WalConfigDiff"] = Field( None, description="Custom params for WAL. If none - values from service configuration file are used." ) + optimizers_config: Optional["OptimizersConfigDiff"] = Field( + None, description="Custom params for Optimizers. If none - values from service configuration file are used." 
+ ) class CreateFieldIndex(BaseModel): @@ -161,11 +149,6 @@ class CreateFieldIndex(BaseModel): field_type: Optional["PayloadSchemaType"] = Field(None, description="") -class CreateIndex(BaseModel): - field_name: str = Field(..., description="") - field_type: Optional["PayloadSchemaType"] = Field(None, description="") - - class DeleteAlias(BaseModel): """ Delete alias if exists @@ -194,9 +177,9 @@ class Distance(str, Enum): class ErrorResponse(BaseModel): - result: Optional[Any] = Field(None, description="") - status: Optional["ErrorResponseStatus"] = Field(None, description="") time: Optional[float] = Field(None, description="Time spent to process this request") + status: Optional["ErrorResponseStatus"] = Field(None, description="") + result: Optional[Any] = Field(None, description="") class ErrorResponseStatus(BaseModel): @@ -208,36 +191,20 @@ class FieldCondition(BaseModel): All possible payload filtering conditions """ + key: str = Field(..., description="Payload key") + match: Optional["Match"] = Field(None, description="Check if point has field with a given value") + range: Optional["Range"] = Field(None, description="Check if points value lies in a given range") geo_bounding_box: Optional["GeoBoundingBox"] = Field( None, description="Check if points geo location lies in a given area" ) geo_radius: Optional["GeoRadius"] = Field(None, description="Check if geo point is within a given radius") - key: str = Field(..., description="Payload key") - match: Optional["Match"] = Field(None, description="Check if point has field with a given value") - range: Optional["Range"] = Field(None, description="Check if points value lies in a given range") values_count: Optional["ValuesCount"] = Field(None, description="Check number of values of the field") -class FieldIndexOperationsOneOf(BaseModel): - """ - Create index for payload field - """ - - create_index: "CreateIndex" = Field(..., description="Create index for payload field") - - -class 
FieldIndexOperationsOneOf1(BaseModel): - """ - Delete index for the field - """ - - delete_index: str = Field(..., description="Delete index for the field") - - class Filter(BaseModel): + should: Optional[List["Condition"]] = Field(None, description="At least one of those conditions should match") must: Optional[List["Condition"]] = Field(None, description="All conditions must match") must_not: Optional[List["Condition"]] = Field(None, description="All conditions must NOT match") - should: Optional[List["Condition"]] = Field(None, description="At least one of thous conditions should match") class FilterSelector(BaseModel): @@ -249,11 +216,11 @@ class GeoBoundingBox(BaseModel): Geo filter request Matches coordinates inside the rectangle, described by coordinates of lop-left and bottom-right edges """ - bottom_right: "GeoPoint" = Field( + top_left: "GeoPoint" = Field( ..., description="Geo filter request Matches coordinates inside the rectangle, described by coordinates of lop-left and bottom-right edges", ) - top_left: "GeoPoint" = Field( + bottom_right: "GeoPoint" = Field( ..., description="Geo filter request Matches coordinates inside the rectangle, described by coordinates of lop-left and bottom-right edges", ) @@ -264,8 +231,8 @@ class GeoPoint(BaseModel): Geo point payload schema """ - lat: float = Field(..., description="Geo point payload schema") lon: float = Field(..., description="Geo point payload schema") + lat: float = Field(..., description="Geo point payload schema") class GeoRadius(BaseModel): @@ -293,97 +260,105 @@ class HnswConfig(BaseModel): Config of HNSW index """ + m: int = Field( + ..., + description="Number of edges per node in the index graph. Larger the value - more accurate the search, more space required.", + ) ef_construct: int = Field( ..., description="Number of neighbours to consider during the index building. 
Larger the value - more accurate the search, more time required to build index.", ) full_scan_threshold: int = Field( ..., - description="Minimal amount of points for additional payload-based indexing. If payload chunk is smaller than `full_scan_threshold` additional indexing won't be used - in this case full-scan search should be preferred by query planner and additional indexing is not required.", - ) - m: int = Field( - ..., - description="Number of edges per node in the index graph. Larger the value - more accurate the search, more space required.", + description="Minimal size (in KiloBytes) of vectors for additional payload-based indexing. If payload chunk is smaller than `full_scan_threshold_kb` additional indexing won't be used - in this case full-scan search should be preferred by query planner and additional indexing is not required. Note: 1Kb = 1 vector of size 256", ) class HnswConfigDiff(BaseModel): + m: Optional[int] = Field( + None, + description="Number of edges per node in the index graph. Larger the value - more accurate the search, more space required.", + ) ef_construct: Optional[int] = Field( None, description="Number of neighbours to consider during the index building. Larger the value - more accurate the search, more time required to build index.", ) full_scan_threshold: Optional[int] = Field( None, - description="Minimal amount of points for additional payload-based indexing. If payload chunk is smaller than `full_scan_threshold` additional indexing won't be used - in this case full-scan search should be preferred by query planner and additional indexing is not required.", - ) - m: Optional[int] = Field( - None, - description="Number of edges per node in the index graph. Larger the value - more accurate the search, more space required.", + description="Minimal size (in KiloBytes) of vectors for additional payload-based indexing. 
If payload chunk is smaller than `full_scan_threshold_kb` additional indexing won't be used - in this case full-scan search should be preferred by query planner and additional indexing is not required. Note: 1Kb = 1 vector of size 256", ) class InlineResponse200(BaseModel): - result: Optional["CollectionsResponse"] = Field(None, description="") + time: Optional[float] = Field(None, description="Time spent to process this request") status: Literal[ "ok", ] = Field(None, description="") - time: Optional[float] = Field(None, description="Time spent to process this request") + result: Optional["ClusterStatus"] = Field(None, description="") class InlineResponse2001(BaseModel): - result: Optional[bool] = Field(None, description="") + time: Optional[float] = Field(None, description="Time spent to process this request") status: Literal[ "ok", ] = Field(None, description="") - time: Optional[float] = Field(None, description="Time spent to process this request") + result: Optional["CollectionsResponse"] = Field(None, description="") class InlineResponse2002(BaseModel): - result: Optional["CollectionInfo"] = Field(None, description="") + time: Optional[float] = Field(None, description="Time spent to process this request") status: Literal[ "ok", ] = Field(None, description="") - time: Optional[float] = Field(None, description="Time spent to process this request") + result: Optional["CollectionInfo"] = Field(None, description="") class InlineResponse2003(BaseModel): - result: Optional["UpdateResult"] = Field(None, description="") + time: Optional[float] = Field(None, description="Time spent to process this request") status: Literal[ "ok", ] = Field(None, description="") - time: Optional[float] = Field(None, description="Time spent to process this request") + result: Optional[bool] = Field(None, description="") class InlineResponse2004(BaseModel): - result: Optional[List["Record"]] = Field(None, description="") + time: Optional[float] = Field(None, description="Time spent to 
process this request") status: Literal[ "ok", ] = Field(None, description="") - time: Optional[float] = Field(None, description="Time spent to process this request") + result: Optional["UpdateResult"] = Field(None, description="") class InlineResponse2005(BaseModel): - result: Optional[List["ScoredPoint"]] = Field(None, description="") + time: Optional[float] = Field(None, description="Time spent to process this request") status: Literal[ "ok", ] = Field(None, description="") - time: Optional[float] = Field(None, description="Time spent to process this request") + result: Optional["Record"] = Field(None, description="") class InlineResponse2006(BaseModel): - result: Optional["ScrollResult"] = Field(None, description="") + time: Optional[float] = Field(None, description="Time spent to process this request") status: Literal[ "ok", ] = Field(None, description="") - time: Optional[float] = Field(None, description="Time spent to process this request") + result: Optional[List["Record"]] = Field(None, description="") class InlineResponse2007(BaseModel): - result: Optional["Record"] = Field(None, description="") + time: Optional[float] = Field(None, description="Time spent to process this request") status: Literal[ "ok", ] = Field(None, description="") + result: Optional["ScrollResult"] = Field(None, description="") + + +class InlineResponse2008(BaseModel): time: Optional[float] = Field(None, description="Time spent to process this request") + status: Literal[ + "ok", + ] = Field(None, description="") + result: Optional[List["ScoredPoint"]] = Field(None, description="") class IsEmptyCondition(BaseModel): @@ -415,68 +390,60 @@ class MatchValue(BaseModel): class OptimizersConfig(BaseModel): - default_segment_number: int = Field( - ..., - description="Target amount of segments optimizer will try to keep. 
Real amount of segments may vary depending on multiple parameters: - Amount of stored points - Current write RPS It is recommended to select default number of segments as a factor of the number of search threads, so that each segment would be handled evenly by one of the threads", - ) deleted_threshold: float = Field( ..., description="The minimal fraction of deleted vectors in a segment, required to perform segment optimization", ) - flush_interval_sec: int = Field(..., description="Minimum interval between forced flushes.") - indexing_threshold: int = Field( + vacuum_min_vector_number: int = Field( + ..., description="The minimal number of vectors in a segment, required to perform segment optimization" + ) + default_segment_number: int = Field( ..., - description="Maximum number of vectors allowed for plain index. Default value based on https://github.com/google-research/google-research/blob/master/scann/docs/algorithms.md", + description="Target amount of segments optimizer will try to keep. Real amount of segments may vary depending on multiple parameters: - Amount of stored points - Current write RPS It is recommended to select default number of segments as a factor of the number of search threads, so that each segment would be handled evenly by one of the threads If `default_segment_number = 0`, will be automatically selected by the number of available CPUs", ) - max_optimization_threads: int = Field(..., description="Maximum available threads for optimization workers") max_segment_size: int = Field( ..., - description="Do not create segments larger this number of points. Large segments might require disproportionately long indexation times, therefore it makes sense to limit the size of segments. If indexation speed have more priority for your - make this parameter lower. If search speed is more important - make this parameter higher.", + description="Do not create segments larger this size (in KiloBytes). 
Large segments might require disproportionately long indexation times, therefore it makes sense to limit the size of segments. If indexation speed have more priority for your - make this parameter lower. If search speed is more important - make this parameter higher. Note: 1Kb = 1 vector of size 256", ) memmap_threshold: int = Field( ..., - description="Maximum number of vectors to store in-memory per segment. Segments larger than this threshold will be stored as read-only memmaped file.", + description="Maximum size (in KiloBytes) of vectors to store in-memory per segment. Segments larger than this threshold will be stored as read-only memmaped file. To enable memmap storage, lower the threshold Note: 1Kb = 1 vector of size 256", ) - payload_indexing_threshold: int = Field( + indexing_threshold: int = Field( ..., - description="Starting from this amount of vectors per-segment the engine will start building index for payload.", - ) - vacuum_min_vector_number: int = Field( - ..., description="The minimal number of vectors in a segment, required to perform segment optimization" + description="Maximum size (in KiloBytes) of vectors allowed for plain index. Default value based on https://github.com/google-research/google-research/blob/master/scann/docs/algorithms.md Note: 1Kb = 1 vector of size 256", ) + flush_interval_sec: int = Field(..., description="Minimum interval between forced flushes.") + max_optimization_threads: int = Field(..., description="Maximum available threads for optimization workers") class OptimizersConfigDiff(BaseModel): - default_segment_number: Optional[int] = Field( - None, - description="Target amount of segments optimizer will try to keep. 
Real amount of segments may vary depending on multiple parameters: - Amount of stored points - Current write RPS It is recommended to select default number of segments as a factor of the number of search threads, so that each segment would be handled evenly by one of the threads", - ) deleted_threshold: Optional[float] = Field( None, description="The minimal fraction of deleted vectors in a segment, required to perform segment optimization", ) - flush_interval_sec: Optional[int] = Field(None, description="Minimum interval between forced flushes.") - indexing_threshold: Optional[int] = Field( - None, - description="Maximum number of vectors allowed for plain index. Default value based on https://github.com/google-research/google-research/blob/master/scann/docs/algorithms.md", + vacuum_min_vector_number: Optional[int] = Field( + None, description="The minimal number of vectors in a segment, required to perform segment optimization" ) - max_optimization_threads: Optional[int] = Field( - None, description="Maximum available threads for optimization workers" + default_segment_number: Optional[int] = Field( + None, + description="Target amount of segments optimizer will try to keep. Real amount of segments may vary depending on multiple parameters: - Amount of stored points - Current write RPS It is recommended to select default number of segments as a factor of the number of search threads, so that each segment would be handled evenly by one of the threads If `default_segment_number = 0`, will be automatically selected by the number of available CPUs", ) max_segment_size: Optional[int] = Field( None, - description="Do not create segments larger this number of points. Large segments might require disproportionately long indexation times, therefore it makes sense to limit the size of segments. If indexation speed have more priority for your - make this parameter lower. 
If search speed is more important - make this parameter higher.", + description="Do not create segments larger this size (in KiloBytes). Large segments might require disproportionately long indexation times, therefore it makes sense to limit the size of segments. If indexation speed have more priority for your - make this parameter lower. If search speed is more important - make this parameter higher. Note: 1Kb = 1 vector of size 256", ) memmap_threshold: Optional[int] = Field( None, - description="Maximum number of vectors to store in-memory per segment. Segments larger than this threshold will be stored as read-only memmaped file.", + description="Maximum size (in KiloBytes) of vectors to store in-memory per segment. Segments larger than this threshold will be stored as read-only memmaped file. To enable memmap storage, lower the threshold Note: 1Kb = 1 vector of size 256", ) - payload_indexing_threshold: Optional[int] = Field( + indexing_threshold: Optional[int] = Field( None, - description="Starting from this amount of vectors per-segment the engine will start building index for payload.", + description="Maximum size (in KiloBytes) of vectors allowed for plain index. 
Default value based on https://github.com/google-research/google-research/blob/master/scann/docs/algorithms.md Note: 1Kb = 1 vector of size 256", ) - vacuum_min_vector_number: Optional[int] = Field( - None, description="The minimal number of vectors in a segment, required to perform segment optimization" + flush_interval_sec: Optional[int] = Field(None, description="Minimum interval between forced flushes.") + max_optimization_threads: Optional[int] = Field( + None, description="Maximum available threads for optimization workers" ) @@ -511,44 +478,6 @@ class PayloadIndexInfo(BaseModel): data_type: "PayloadSchemaType" = Field(..., description="Payload field type & index information") -class PayloadOpsOneOf(BaseModel): - """ - Set payload value, overrides if it is already exists - """ - - set_payload: "SetPayload" = Field(..., description="Set payload value, overrides if it is already exists") - - -class PayloadOpsOneOf1(BaseModel): - """ - Deletes specified payload values if they are assigned - """ - - delete_payload: "DeletePayload" = Field(..., description="Deletes specified payload values if they are assigned") - - -class PayloadOpsOneOf2(BaseModel): - """ - Drops all Payload values associated with given points. - """ - - clear_payload: "PayloadOpsOneOf2ClearPayload" = Field( - ..., description="Drops all Payload values associated with given points." - ) - - -class PayloadOpsOneOf2ClearPayload(BaseModel): - points: List["ExtendedPointId"] = Field(..., description="") - - -class PayloadOpsOneOf3(BaseModel): - """ - Clear all Payload values by given filter criteria. 
- """ - - clear_payload_by_filter: "Filter" = Field(..., description="Clear all Payload values by given filter criteria.") - - class PayloadSchemaType(str, Enum): KEYWORD = "keyword" INTEGER = "integer" @@ -564,36 +493,16 @@ class PayloadSelectorInclude(BaseModel): include: List[str] = Field(..., description="Only include this payload keys") -class PointIdsList(BaseModel): - points: List["ExtendedPointId"] = Field(..., description="") - - -class PointOperationsOneOf(BaseModel): - """ - Insert or update points - """ - - upsert_points: "PointInsertOperations" = Field(..., description="Insert or update points") - - -class PointOperationsOneOf1(BaseModel): +class PeerInfo(BaseModel): """ - Delete point if exists + Information of a peer in the cluster """ - delete_points: "PointOperationsOneOf1DeletePoints" = Field(..., description="Delete point if exists") - - -class PointOperationsOneOf1DeletePoints(BaseModel): - ids: List["ExtendedPointId"] = Field(..., description="") + uri: str = Field(..., description="Information of a peer in the cluster") -class PointOperationsOneOf2(BaseModel): - """ - Delete points by given filter criteria - """ - - delete_points_by_filter: "Filter" = Field(..., description="Delete points by given filter criteria") +class PointIdsList(BaseModel): + points: List["ExtendedPointId"] = Field(..., description="") class PointRequest(BaseModel): @@ -606,8 +515,8 @@ class PointRequest(BaseModel): class PointStruct(BaseModel): id: "ExtendedPointId" = Field(..., description="") - payload: Optional["Payload"] = Field(None, description="Payload values (optional)") vector: List[float] = Field(..., description="Vector") + payload: Optional["Payload"] = Field(None, description="Payload values (optional)") class PointsBatch(BaseModel): @@ -618,14 +527,33 @@ class PointsList(BaseModel): points: List["PointStruct"] = Field(..., description="") +class RaftInfo(BaseModel): + """ + Summary information about the current raft state + """ + + term: int = Field( + 
..., + description="Raft divides time into terms of arbitrary length, each beginning with an election. If a candidate wins the election, it remains the leader for the rest of the term. The term number increases monotonically. Each server stores the current term number which is also exchanged in every communication.", + ) + commit: int = Field( + ..., description="The index of the latest committed (finalized) operation that this peer is aware of." + ) + pending_operations: int = Field( + ..., description="Number of consensus operations pending to be applied on this peer" + ) + leader: Optional[int] = Field(None, description="Leader of the current term") + role: Optional["StateRole"] = Field(None, description="Role of this peer in the current term") + + class Range(BaseModel): """ Range filter request """ + lt: Optional[float] = Field(None, description="point.key < range.lt") gt: Optional[float] = Field(None, description="point.key > range.gt") gte: Optional[float] = Field(None, description="point.key >= range.gte") - lt: Optional[float] = Field(None, description="point.key < range.lt") lte: Optional[float] = Field(None, description="point.key <= range.lte") @@ -634,15 +562,19 @@ class RecommendRequest(BaseModel): Recommendation request. Provides positive and negative examples of the vectors, which are already stored in the collection. Service should look for the points which are closer to positive examples and at the same time further to negative examples. The concrete way of how to compare negative and positive distances is up to implementation in `segment` crate. 
""" - filter: Optional["Filter"] = Field(None, description="Look only for points which satisfies this conditions") + positive: List["ExtendedPointId"] = Field(..., description="Look for vectors closest to those") negative: List["ExtendedPointId"] = Field(..., description="Try to avoid vectors like this") + filter: Optional["Filter"] = Field(None, description="Look only for points which satisfies this conditions") params: Optional["SearchParams"] = Field(None, description="Additional search params") - positive: List["ExtendedPointId"] = Field(..., description="Look for vectors closest to those") top: int = Field(..., description="Max number of result to return") with_payload: Optional["WithPayloadInterface"] = Field( None, description="Select which payload to return with the response. Default: None" ) with_vector: Optional[bool] = Field(False, description="Whether to return the point vector with the result?") + score_threshold: Optional[float] = Field( + None, + description="Define a minimal score threshold for the result. If defined, less similar results will not be returned. Score of the returned result might be higher or smaller than the threshold depending on the Distance function used. E.g. 
for cosine similarity only higher scores will be returned.", + ) class Record(BaseModel): @@ -660,8 +592,8 @@ class RenameAlias(BaseModel): Change alias to a new one """ - new_alias_name: str = Field(..., description="Change alias to a new one") old_alias_name: str = Field(..., description="Change alias to a new one") + new_alias_name: str = Field(..., description="Change alias to a new one") class RenameAliasOperation(BaseModel): @@ -678,10 +610,10 @@ class ScoredPoint(BaseModel): """ id: "ExtendedPointId" = Field(..., description="Search result") - payload: Optional["Payload"] = Field(None, description="Payload - values assigned to the point") + version: int = Field(..., description="Point version") score: float = Field(..., description="Points vector distance to the query vector") + payload: Optional["Payload"] = Field(None, description="Payload - values assigned to the point") vector: Optional[List[float]] = Field(None, description="Vector of the point") - version: int = Field(..., description="Point version") class ScrollRequest(BaseModel): @@ -689,11 +621,11 @@ class ScrollRequest(BaseModel): Scroll request - paginate over all points which matches given condition """ + offset: Optional["ExtendedPointId"] = Field(None, description="Start ID to read points from.") + limit: Optional[int] = Field(None, description="Page size. Default: 10") filter: Optional["Filter"] = Field( None, description="Look only for points which satisfies this conditions. If not provided - all points." ) - limit: Optional[int] = Field(None, description="Page size. Default: 10") - offset: Optional["ExtendedPointId"] = Field(None, description="Start ID to read points from.") with_payload: Optional["WithPayloadInterface"] = Field( None, description="Select which payload to return with the response. 
Default: All" ) @@ -705,10 +637,10 @@ class ScrollResult(BaseModel): Result of the points read request """ + points: List["Record"] = Field(..., description="List of retrieved points") next_page_offset: Optional["ExtendedPointId"] = Field( None, description="Offset which should be used to retrieve a next page result" ) - points: List["Record"] = Field(..., description="List of retrieved points") class SearchParams(BaseModel): @@ -727,14 +659,18 @@ class SearchRequest(BaseModel): Search request. Holds all conditions and parameters for the search of most similar points by vector similarity given the filtering restrictions. """ + vector: List[float] = Field(..., description="Look for vectors closest to this") filter: Optional["Filter"] = Field(None, description="Look only for points which satisfies this conditions") params: Optional["SearchParams"] = Field(None, description="Additional search params") top: int = Field(..., description="Max number of result to return") - vector: List[float] = Field(..., description="Look for vectors closest to this") with_payload: Optional["WithPayloadInterface"] = Field( None, description="Select which payload to return with the response. Default: None" ) with_vector: Optional[bool] = Field(False, description="Whether to return the point vector with the result?") + score_threshold: Optional[float] = Field( + None, + description="Define a minimal score threshold for the result. If defined, less similar results will not be returned. Score of the returned result might be higher or smaller than the threshold depending on the Distance function used. E.g. 
for cosine similarity only higher scores will be returned.", + ) class SetPayload(BaseModel): @@ -742,23 +678,18 @@ class SetPayload(BaseModel): points: List["ExtendedPointId"] = Field(..., description="Assigns payload to each point in this list") -class UpdateCollection(BaseModel): - """ - Operation for updating parameters of the existing collection - """ +class StateRole(str, Enum): + FOLLOWER = "Follower" + CANDIDATE = "Candidate" + LEADER = "Leader" + PRECANDIDATE = "PreCandidate" - optimizers_config: Optional["OptimizersConfigDiff"] = Field( - None, - description="Custom params for Optimizers. If none - values from service configuration file are used. This operation is blocking, it will only proceed ones all current optimizations are complete", - ) - -class UpdateCollectionOperation(BaseModel): +class UpdateCollection(BaseModel): """ Operation for updating parameters of the existing collection """ - collection_name: str = Field(..., description="Operation for updating parameters of the existing collection") optimizers_config: Optional["OptimizersConfigDiff"] = Field( None, description="Custom params for Optimizers. If none - values from service configuration file are used. 
This operation is blocking, it will only proceed ones all current optimizations are complete", @@ -780,9 +711,9 @@ class ValuesCount(BaseModel): Values count filter request """ + lt: Optional[int] = Field(None, description="point.key.length() < values_count.lt") gt: Optional[int] = Field(None, description="point.key.length() > values_count.gt") gte: Optional[int] = Field(None, description="point.key.length() >= values_count.gte") - lt: Optional[int] = Field(None, description="point.key.length() < values_count.lt") lte: Optional[int] = Field(None, description="point.key.length() <= values_count.lte") @@ -803,11 +734,9 @@ class WalConfigDiff(BaseModel): DeleteAliasOperation, RenameAliasOperation, ] -CollectionMetaOperations = Union[ - CollectionMetaOperationsOneOf, - CollectionMetaOperationsOneOf1, - CollectionMetaOperationsOneOf2, - CollectionMetaOperationsOneOf3, +ClusterStatus = Union[ + ClusterStatusOneOf, + ClusterStatusOneOf1, ] Condition = Union[ FieldCondition, @@ -819,10 +748,6 @@ class WalConfigDiff(BaseModel): StrictInt, StrictStr, ] -FieldIndexOperations = Union[ - FieldIndexOperationsOneOf, - FieldIndexOperationsOneOf1, -] Match = Union[ MatchValue, MatchKeyword, @@ -832,12 +757,6 @@ class WalConfigDiff(BaseModel): OptimizersStatusOneOf, OptimizersStatusOneOf1, ] -PayloadOps = Union[ - PayloadOpsOneOf, - PayloadOpsOneOf1, - PayloadOpsOneOf2, - PayloadOpsOneOf3, -] PayloadSelector = Union[ PayloadSelectorInclude, PayloadSelectorExclude, @@ -846,27 +765,17 @@ class WalConfigDiff(BaseModel): PointsBatch, PointsList, ] -PointOperations = Union[ - PointOperationsOneOf, - PointOperationsOneOf1, - PointOperationsOneOf2, -] PointsSelector = Union[ PointIdsList, FilterSelector, ] ValueVariants = Union[ - bool, + StrictBool, StrictInt, StrictStr, ] -CollectionUpdateOperations = Union[ - PointOperations, - PayloadOps, - FieldIndexOperations, -] WithPayloadInterface = Union[ PayloadSelector, List[StrictStr], - bool, + StrictBool, ] diff --git 
a/qdrant_client/qdrant_client.py b/qdrant_client/qdrant_client.py index 238d5ad5..7176c620 100644 --- a/qdrant_client/qdrant_client.py +++ b/qdrant_client/qdrant_client.py @@ -1,4 +1,5 @@ import asyncio +import json from typing import Optional, Iterable, List, Union, Tuple import numpy as np @@ -125,6 +126,7 @@ def search(self, top: int = 10, with_payload: Union[bool, List[str], types.PayloadSelector] = True, with_vector: bool = False, + score_threshold: Optional[float] = None, append_payload=True) -> List[types.ScoredPoint]: """Search for closest vectors in collection taking into account filtering conditions @@ -146,6 +148,12 @@ def search(self, - If `True` - Attach stored vector to the search result. - If `False` - Do not attach vector. - Default: `False` + score_threshold: + Define a minimal score threshold for the result. + If defined, less similar results will not be returned. + Score of the returned result might be higher or smaller than the threshold depending + on the Distance function used. + E.g. for cosine similarity only higher scores will be returned. append_payload: Same as `with_payload`. Deprecated. Examples: @@ -201,7 +209,8 @@ def search(self, top=top, with_vector=with_vector, with_payload=with_payload, - params=search_params + params=search_params, + score_threshold=score_threshold )) return [GrpcToRest.convert_scored_point(hit) for hit in res.result] @@ -223,7 +232,9 @@ def search(self, filter=query_filter, top=top, params=search_params, - with_payload=with_payload + with_vector=with_vector, + with_payload=with_payload, + score_threshold=score_threshold ) ) @@ -239,6 +250,7 @@ def recommend( top: int = 10, with_payload: Union[bool, List[str], types.PayloadSelector] = True, with_vector: bool = False, + score_threshold: Optional[float] = None, ) -> List[types.ScoredPoint]: """Recommend points: search for similar points based on already stored in Qdrant examples. 
@@ -270,6 +282,12 @@ def recommend( - If `True` - Attach stored vector to the search result. - If `False` - Do not attach vector. - Default: `False` + score_threshold: + Define a minimal score threshold for the result. + If defined, less similar results will not be returned. + Score of the returned result might be higher or smaller than the threshold depending + on the Distance function used. + E.g. for cosine similarity only higher scores will be returned. Returns: List of recommended points with similarity scores. @@ -313,7 +331,8 @@ def recommend( top=top, with_vector=with_vector, with_payload=with_payload, - params=search_params + params=search_params, + score_threshold=score_threshold )) return [GrpcToRest.convert_scored_point(hit) for hit in res.result] @@ -347,6 +366,7 @@ def recommend( top=top, with_payload=with_payload, with_vector=with_vector, + score_threshold=score_threshold ) ).result @@ -411,9 +431,9 @@ def scroll( )) return [ - GrpcToRest.convert_retrieved_point(point) - for point in res.result - ], res.next_page_offset + GrpcToRest.convert_retrieved_point(point) + for point in res.result + ], res.next_page_offset else: if isinstance(offset, grpc.PointId): offset = GrpcToRest.convert_point_id(offset) @@ -759,25 +779,116 @@ def clear_payload( points_selector=points_selector ).result - def delete_collection(self, collection_name: str): + def update_collection_aliases( + self, + change_aliases_operations: List[types.AliasOperations], + timeout: Optional[int] = None + ): + """Operation for performing changes of collection aliases. + + Alias changes are atomic, meaning that no collection modifications can happen between alias operations. + + Args: + change_aliases_operations: List of operations to perform + timeout: + Wait for operation commit timeout in seconds. + If timeout is reached - request will return with service error. 
+ + Returns: + Operation result + """ + change_aliases_operation = [ + GrpcToRest.convert_alias_operations(operation) + if isinstance(operation, grpc.AliasOperations) else operation + for operation in change_aliases_operations + ] + + return self.http.collections_api.update_aliases( + timeout=timeout, + change_aliases_operation=rest.ChangeAliasesOperation( + actions=change_aliases_operation + ) + ) + + def get_collections(self) -> types.CollectionsResponse: + """Get list name of all existing collections + + Returns: + List of the collections + """ + return self.http.collections_api.get_collections().result + + def get_collection(self, collection_name: str) -> types.CollectionInfo: + """Get detailed information about specified existing collection + + Args: + collection_name: Name of the collection + + Returns: + Detailed information about the collection + """ + return self.http.collections_api.get_collection(collection_name=collection_name).result + + def update_collection( + self, + collection_name: str, + optimizer_config: Optional[types.OptimizersConfigDiff], + timeout: Optional[int] = None + ): + """Update parameters of the collection + + Args: + collection_name: Name of the collection + optimizer_config: Override for optimizer configuration + timeout: + Wait for operation commit timeout in seconds. + If timeout is reached - request will return with service error. 
+ + Returns: + Operation result + """ + if isinstance(optimizer_config, grpc.OptimizersConfigDiff): + optimizer_config = GrpcToRest.convert_optimizers_config_diff(optimizer_config) + return self.http.collections_api.update_collection( + collection_name, + update_collection=rest.UpdateCollection( + optimizers_config=optimizer_config + ), + timeout=timeout + ) + + def delete_collection( + self, + collection_name: str, + timeout: Optional[int] = None + ): """Removes collection and all it's data Args: collection_name: Name of the collection to delete + timeout: + Wait for operation commit timeout in seconds. + If timeout is reached - request will return with service error. Returns: Operation result """ - return self.http.collections_api.delete_collection(collection_name) + return self.http.collections_api.delete_collection( + collection_name, + timeout=timeout + ) def recreate_collection(self, collection_name: str, vector_size: int, distance: types.Distance, + shard_number: Optional[int] = None, + on_disk_payload: Optional[bool] = None, hnsw_config: Optional[types.HnswConfigDiff] = None, optimizers_config: Optional[types.OptimizersConfigDiff] = None, wal_config: Optional[types.WalConfigDiff] = None, + timeout: Optional[int] = None ): """Delete and create empty collection with given parameters @@ -785,9 +896,18 @@ def recreate_collection(self, collection_name: Name of the collection to recreate vector_size: Vector size of the collection distance: Which metric to use + shard_number: Number of shards in collection. Default is 1, minimum is 1. + on_disk_payload: + If true - point`s payload will not be stored in memory. + It will be read from the disk every time it is requested. + This setting saves RAM by (slightly) increasing the response time. + Note: those payload values that are involved in filtering and are indexed - remain in RAM. 
 hnsw_config: Params for HNSW index optimizers_config: Params for optimizer wal_config: Params for Write-Ahead-Log + timeout: + Wait for operation commit timeout in seconds. + If timeout is reached - request will return with service error. Returns: Operation result @@ -807,15 +927,20 @@ def recreate_collection(self, self.delete_collection(collection_name) + create_collection_request = rest.CreateCollection( + distance=distance, + vector_size=vector_size, + shard_number=shard_number, + on_disk_payload=on_disk_payload, + hnsw_config=hnsw_config, + optimizers_config=optimizers_config, + wal_config=wal_config + ) + self.http.collections_api.create_collection( collection_name=collection_name, - create_collection=rest.CreateCollection( - distance=distance, - vector_size=vector_size, - hnsw_config=hnsw_config, - optimizers_config=optimizers_config, - wal_config=wal_config - ) + create_collection=create_collection_request, + timeout=timeout ) def upload_collection(self, diff --git a/tests/conversions/fixtures.py b/tests/conversions/fixtures.py index b790f7fb..209130c6 100644 --- a/tests/conversions/fixtures.py +++ b/tests/conversions/fixtures.py @@ -122,7 +122,6 @@ max_segment_size=200000, memmap_threshold=50000, indexing_threshold=10000, - payload_indexing_threshold=10000, flush_interval_sec=10, max_optimization_threads=0 ) @@ -284,8 +283,8 @@ alias_operations_delete = grpc.AliasOperations(delete_alias=delete_alias) with_payload_bool = grpc.WithPayloadSelector(enable=True) -with_payload_include = grpc.WithPayloadSelector(include=grpc.PayloadIncludeSelector(include=["color", "price"])) -with_payload_exclude = grpc.WithPayloadSelector(exclude=grpc.PayloadExcludeSelector(exclude=["color", "price"])) +with_payload_include = grpc.WithPayloadSelector(include=grpc.PayloadIncludeSelector(fields=["color", "price"])) +with_payload_exclude = grpc.WithPayloadSelector(exclude=grpc.PayloadExcludeSelector(fields=["color", 
"price"])) retrieved_point = grpc.RetrievedPoint( id=point_id_1, diff --git a/tests/integration-tests.sh b/tests/integration-tests.sh index 044ef8dd..1181ff07 100755 --- a/tests/integration-tests.sh +++ b/tests/integration-tests.sh @@ -11,7 +11,7 @@ function stop_docker() # Ensure current path is project root cd "$(dirname "$0")/../" -QDRANT_VERSION='v0.7.0' +QDRANT_VERSION='v0.8.0' QDRANT_HOST='localhost:6333' diff --git a/tests/test_qdrant_client.py b/tests/test_qdrant_client.py index a74fe17d..f895ef4e 100644 --- a/tests/test_qdrant_client.py +++ b/tests/test_qdrant_client.py @@ -10,13 +10,14 @@ from qdrant_client import QdrantClient from qdrant_client.conversions.conversion import grpc_to_payload, json_to_value -from qdrant_client.http.models import Filter, FieldCondition, Range, PointsList, PointStruct, PointRequest, \ - SetPayload, HasIdCondition, PointIdsList, PayloadSchemaType, MatchValue, Distance +from qdrant_client.http.models import Filter, FieldCondition, Range, PointStruct, HasIdCondition, PointIdsList, \ + PayloadSchemaType, MatchValue, Distance, CreateAliasOperation, CreateAlias, OptimizersConfigDiff from qdrant_client.uploader.grpc_uploader import payload_to_grpc DIM = 100 NUM_VECTORS = 1_000 COLLECTION_NAME = 'client_test' +COLLECTION_NAME_ALIAS = 'client_test_alias' def random_payload(): @@ -56,14 +57,14 @@ def test_qdrant_client_integration(prefer_grpc): ) # Call Qdrant API to retrieve list of existing collections - collections = client.http.collections_api.get_collections().result.collections + collections = client.get_collections().collections # Print all existing collections for collection in collections: print(collection.dict()) # Retrieve detailed information about newly created collection - test_collection = client.http.collections_api.get_collection(COLLECTION_NAME) + test_collection = client.get_collection(COLLECTION_NAME) pprint(test_collection.dict()) # Upload data to a new collection @@ -80,13 +81,24 @@ def 
test_qdrant_client_integration(prefer_grpc): # If you need to change this behaviour - simply enable synchronous processing by enabling `wait=true` sleep(1) + client.update_collection_aliases( + change_aliases_operations=[ + CreateAliasOperation( + create_alias=CreateAlias( + collection_name=COLLECTION_NAME, + alias_name=COLLECTION_NAME_ALIAS + ) + ) + ] + ) + # Create payload index for field `random_num` # If indexed field appear in filtering condition - search operation could be performed faster index_create_result = client.create_payload_index(COLLECTION_NAME, "random_num", PayloadSchemaType.FLOAT) pprint(index_create_result.dict()) # Let's now check details about our new collection - test_collection = client.http.collections_api.get_collection(COLLECTION_NAME) + test_collection = client.get_collection(COLLECTION_NAME_ALIAS) pprint(test_collection.dict()) # Now we can actually search in the collection @@ -109,6 +121,15 @@ def test_qdrant_client_integration(prefer_grpc): for hit in hits: print(hit) + client.update_collection( + collection_name=COLLECTION_NAME, + optimizer_config=OptimizersConfigDiff( + max_segment_size=10000 + ) + ) + + assert client.get_collection(COLLECTION_NAME).config.optimizer_config.max_segment_size == 10000 + # Let's now query same vector with filter condition hits = client.search( collection_name=COLLECTION_NAME, diff --git a/tools/generate_grpc_client.sh b/tools/generate_grpc_client.sh index a0458585..1613c6d9 100644 --- a/tools/generate_grpc_client.sh +++ b/tools/generate_grpc_client.sh @@ -21,7 +21,11 @@ cp $PROTO_DIR/*.proto $CLIENT_DIR/ # Remove internal services *.proto rm $CLIENT_DIR/points_internal_service.proto +rm $CLIENT_DIR/collections_internal_service.proto +rm $CLIENT_DIR/raft_service.proto +cat $CLIENT_DIR/qdrant.proto | grep -v 'collections_internal_service.proto' > $CLIENT_DIR/qdrant_tmp.proto cat $CLIENT_DIR/qdrant.proto | grep -v 'points_internal_service.proto' > $CLIENT_DIR/qdrant_tmp.proto +cat $CLIENT_DIR/qdrant.proto 
| grep -v 'raft_service.proto' | grep -v 'collections_internal_service.proto' | grep -v 'points_internal_service.proto' > $CLIENT_DIR/qdrant_tmp.proto mv $CLIENT_DIR/qdrant_tmp.proto $CLIENT_DIR/qdrant.proto python -m grpc_tools.protoc -I $CLIENT_DIR --python_betterproto_out=$CLIENT_DIR $CLIENT_DIR/*.proto