diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index 2b807ad..44a4d72 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -20,9 +20,8 @@
"charliermarsh.ruff",
"d-biehl.robotcode",
"tamasfe.even-better-toml",
- "ms-azuretools.vscode-docker",
- "Gruntfuggly.todo-tree",
- "shardulm94.trailing-spaces"
+ "ms-azuretools.vscode-docker"
+
]
}
}
diff --git a/.github/workflows/on-push.yml b/.github/workflows/on-push.yml
index 30b2c26..7ba4cc4 100644
--- a/.github/workflows/on-push.yml
+++ b/.github/workflows/on-push.yml
@@ -53,7 +53,7 @@ jobs:
strategy:
matrix:
os: [ 'ubuntu-latest', 'windows-latest']
- python-version: ['3.10', '3.11', '3.12', '3.13']
+ python-version: ['3.10', '3.11', '3.12', '3.13', '3.14']
robot-version: ['6.1.1', '7.3.2']
exclude:
- os: 'windows-latest'
@@ -62,6 +62,8 @@ jobs:
python-version: '3.11'
- os: 'windows-latest'
python-version: '3.12'
+ - os: 'windows-latest'
+ python-version: '3.13'
- os: 'ubuntu-latest'
python-version: '3.10'
robot-version: '6.1.1'
@@ -71,6 +73,9 @@ jobs:
- os: 'ubuntu-latest'
python-version: '3.12'
robot-version: '6.1.1'
+ - os: 'ubuntu-latest'
+ python-version: '3.13'
+ robot-version: '6.1.1'
fail-fast: false
steps:
- uses: actions/checkout@v6
diff --git a/.gitignore b/.gitignore
index 65fd3fe..26adcdd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -25,9 +25,10 @@ coverage.xml
env/
venv/
-# IDE config
+# IDE config and local tool settings
.vscode/launch.json
.vscode/settings.json
+.robot.toml
# default logs location for the repo
tests/logs
diff --git a/docs/releases.md b/docs/releases.md
index fc45a8a..b10eae4 100644
--- a/docs/releases.md
+++ b/docs/releases.md
@@ -1,14 +1,48 @@
# Release notes
-## OpenApiTools v1.0.5
+## OpenApiTools v2.0.0
+
+### Major changes and new features
+- Request bodies now support all JSON types, not just `objects` (`dicts`).
+ - This closes issue *#9: No body generated when root is a list*.
+ - The `Relations` still need to be reworked to align with this change.
+- Refactored the retrieval and loading of the OpenAPI spec.
+ - This closes issue *#93: SSL error even if cert / verify is set*.
+- Improved handling of `treat_as_mandatory` on a `PropertyValueConstraint`.
+- Added support for using `IGNORE` as `invalid_value` on a `PropertyValueConstraint` (see the sketch after this list).
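+
+A minimal sketch of how these two relation options might be combined in a custom mappings file. Only `treat_as_mandatory`, `IGNORE` and `invalid_value` come from the notes above; the import path, the `Dto` subclass pattern, the class name and the property names are assumptions based on pre-2.0 usage and may differ in v2.0.0:
+
+```python
+# Hypothetical mappings entry; the import path, class name and property name
+# are illustrative assumptions, not taken from the release notes.
+from OpenApiLibCore import IGNORE, Dto, PropertyValueConstraint
+
+
+class WagegroupDto(Dto):
+    @staticmethod
+    def get_relations() -> list:
+        return [
+            PropertyValueConstraint(
+                property_name="overtime_percentage",
+                values=[100, 150, 200],
+                # treat this schema-optional property as if it were required
+                treat_as_mandatory=True,
+                # use the IGNORE sentinel as the invalid value (new in v2.0.0)
+                invalid_value=IGNORE,
+                invalid_value_error_code=422,
+            ),
+        ]
+```
+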
### Bugfixes
-- `parameters` at path level are not taken into account at operation level
+- Added support for the `nullable` property in OAS 3.0 schemas when generating data.
+ - This closes issue *#81: nullable not taken into account in get_valid_value*.
+- Fixed validation errors caused by the `Content-Type` header not being handled case-insensitively.
+- Fixed an exception during validation caused by `charset` being included in the `Content-Type` header for `application/json`.
+
+### Breaking changes
+- The `invalid_property_default_response` library parameter was renamed to `invalid_data_default_response`.
+- The `RequestData` class that is returned by a number of keywords has been changed (see the sketch after this list):
+ - The `dto` property was removed.
+ - The `valid_data` property was added.
+ - The `constrait_mapping` property was added.
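+
+A minimal before/after sketch of this change. Only the `dto`, `valid_data` and `constrait_mapping` property names come from the notes above; the constructor arguments, the `get_request_data` call shape and the `/employees` path are assumptions for illustration only:
+
+```python
+# Hypothetical usage; everything except the dto / valid_data / constrait_mapping
+# properties is an assumption, not part of the release notes.
+from OpenApiLibCore import OpenApiLibCore
+
+library = OpenApiLibCore(source="http://localhost:8000/openapi.json")
+request_data = library.get_request_data(endpoint="/employees", method="post")
+
+# v1.x: the generated body was reached through the (now removed) dto property.
+# v2.0.0: the generated JSON value is exposed directly...
+json_body = request_data.valid_data
+# ...together with the constraint mapping that was used to generate it.
+mapping = request_data.constrait_mapping
+```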
+
+### Additional changes
+- Special handling of `"format": "byte"` for `"type": "string"` (OAS 3.0) was removed.
+ - While some logic related to this worked, the result was never JSON-serializable.
+- The devcontainer setup was updated.
+- The GitHub pipeline was updated to include Python 3.14.
+- Updated minimum version markers for many dependencies.
+- Type annotations are now complete (as far as is possible while still supporting Python 3.10).
## Previous versions
+### OpenApiTools v1.0.5
+
+#### Bugfixes
+- `parameters` defined at the path level were not taken into account at the operation level.
+
+---
+
### OpenApiTools v1.0.4
#### Bugfixes
@@ -41,7 +75,7 @@
### OpenApiTools v1.0.1
#### Bugfixes
-- `openapitools_docs` was missing from package distribution
+- `openapitools_docs` was missing from package distribution.
---
diff --git a/poetry.lock b/poetry.lock
index 7c67acb..964681b 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,15 +1,15 @@
-# This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand.
[[package]]
name = "annotated-doc"
-version = "0.0.3"
+version = "0.0.4"
description = "Document parameters, class attributes, return types, and variables inline, with Annotated."
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
- {file = "annotated_doc-0.0.3-py3-none-any.whl", hash = "sha256:348ec6664a76f1fd3be81f43dffbee4c7e8ce931ba71ec67cc7f4ade7fbbb580"},
- {file = "annotated_doc-0.0.3.tar.gz", hash = "sha256:e18370014c70187422c33e945053ff4c286f453a984eba84d0dbfa0c935adeda"},
+ {file = "annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320"},
+ {file = "annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4"},
]
[[package]]
@@ -47,14 +47,14 @@ trio = ["trio (>=0.31.0)"]
[[package]]
name = "astroid"
-version = "4.0.1"
+version = "4.0.2"
description = "An abstract syntax tree for Python with inference support."
optional = false
python-versions = ">=3.10.0"
groups = ["lint-and-format"]
files = [
- {file = "astroid-4.0.1-py3-none-any.whl", hash = "sha256:37ab2f107d14dc173412327febf6c78d39590fdafcb44868f03b6c03452e3db0"},
- {file = "astroid-4.0.1.tar.gz", hash = "sha256:0d778ec0def05b935e198412e62f9bcca8b3b5c39fdbe50b0ba074005e477aab"},
+ {file = "astroid-4.0.2-py3-none-any.whl", hash = "sha256:d7546c00a12efc32650b19a2bb66a153883185d3179ab0d4868086f807338b9b"},
+ {file = "astroid-4.0.2.tar.gz", hash = "sha256:ac8fb7ca1c08eb9afec91ccc23edbd8ac73bb22cbdd7da1d488d9fb8d6579070"},
]
[package.dependencies]
@@ -72,64 +72,16 @@ files = [
{file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"},
]
-[[package]]
-name = "black"
-version = "25.9.0"
-description = "The uncompromising code formatter."
-optional = false
-python-versions = ">=3.9"
-groups = ["main"]
-files = [
- {file = "black-25.9.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce41ed2614b706fd55fd0b4a6909d06b5bab344ffbfadc6ef34ae50adba3d4f7"},
- {file = "black-25.9.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2ab0ce111ef026790e9b13bd216fa7bc48edd934ffc4cbf78808b235793cbc92"},
- {file = "black-25.9.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f96b6726d690c96c60ba682955199f8c39abc1ae0c3a494a9c62c0184049a713"},
- {file = "black-25.9.0-cp310-cp310-win_amd64.whl", hash = "sha256:d119957b37cc641596063cd7db2656c5be3752ac17877017b2ffcdb9dfc4d2b1"},
- {file = "black-25.9.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:456386fe87bad41b806d53c062e2974615825c7a52159cde7ccaeb0695fa28fa"},
- {file = "black-25.9.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a16b14a44c1af60a210d8da28e108e13e75a284bf21a9afa6b4571f96ab8bb9d"},
- {file = "black-25.9.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aaf319612536d502fdd0e88ce52d8f1352b2c0a955cc2798f79eeca9d3af0608"},
- {file = "black-25.9.0-cp311-cp311-win_amd64.whl", hash = "sha256:c0372a93e16b3954208417bfe448e09b0de5cc721d521866cd9e0acac3c04a1f"},
- {file = "black-25.9.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1b9dc70c21ef8b43248f1d86aedd2aaf75ae110b958a7909ad8463c4aa0880b0"},
- {file = "black-25.9.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8e46eecf65a095fa62e53245ae2795c90bdecabd53b50c448d0a8bcd0d2e74c4"},
- {file = "black-25.9.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9101ee58ddc2442199a25cb648d46ba22cd580b00ca4b44234a324e3ec7a0f7e"},
- {file = "black-25.9.0-cp312-cp312-win_amd64.whl", hash = "sha256:77e7060a00c5ec4b3367c55f39cf9b06e68965a4f2e61cecacd6d0d9b7ec945a"},
- {file = "black-25.9.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0172a012f725b792c358d57fe7b6b6e8e67375dd157f64fa7a3097b3ed3e2175"},
- {file = "black-25.9.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3bec74ee60f8dfef564b573a96b8930f7b6a538e846123d5ad77ba14a8d7a64f"},
- {file = "black-25.9.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b756fc75871cb1bcac5499552d771822fd9db5a2bb8db2a7247936ca48f39831"},
- {file = "black-25.9.0-cp313-cp313-win_amd64.whl", hash = "sha256:846d58e3ce7879ec1ffe816bb9df6d006cd9590515ed5d17db14e17666b2b357"},
- {file = "black-25.9.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ef69351df3c84485a8beb6f7b8f9721e2009e20ef80a8d619e2d1788b7816d47"},
- {file = "black-25.9.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e3c1f4cd5e93842774d9ee4ef6cd8d17790e65f44f7cdbaab5f2cf8ccf22a823"},
- {file = "black-25.9.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:154b06d618233fe468236ba1f0e40823d4eb08b26f5e9261526fde34916b9140"},
- {file = "black-25.9.0-cp39-cp39-win_amd64.whl", hash = "sha256:e593466de7b998374ea2585a471ba90553283fb9beefcfa430d84a2651ed5933"},
- {file = "black-25.9.0-py3-none-any.whl", hash = "sha256:474b34c1342cdc157d307b56c4c65bce916480c4a8f6551fdc6bf9b486a7c4ae"},
- {file = "black-25.9.0.tar.gz", hash = "sha256:0474bca9a0dd1b51791fcc507a4e02078a1c63f6d4e4ae5544b9848c7adfb619"},
-]
-
-[package.dependencies]
-click = ">=8.0.0"
-mypy-extensions = ">=0.4.3"
-packaging = ">=22.0"
-pathspec = ">=0.9.0"
-platformdirs = ">=2"
-pytokens = ">=0.1.10"
-tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}
-typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
-
-[package.extras]
-colorama = ["colorama (>=0.4.3)"]
-d = ["aiohttp (>=3.10)"]
-jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"]
-uvloop = ["uvloop (>=0.15.2)"]
-
[[package]]
name = "certifi"
-version = "2025.10.5"
+version = "2025.11.12"
description = "Python package for providing Mozilla's CA Bundle."
optional = false
python-versions = ">=3.7"
groups = ["main", "dev"]
files = [
- {file = "certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de"},
- {file = "certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43"},
+ {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"},
+ {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"},
]
[[package]]
@@ -269,14 +221,14 @@ files = [
[[package]]
name = "click"
-version = "8.3.0"
+version = "8.3.1"
description = "Composable command line interface toolkit"
optional = false
python-versions = ">=3.10"
groups = ["main", "dev", "lint-and-format", "type-checking"]
files = [
- {file = "click-8.3.0-py3-none-any.whl", hash = "sha256:9b9f285302c6e3064f4330c05f05b81945b2a39544279343e6e7c5f27a9baddc"},
- {file = "click-8.3.0.tar.gz", hash = "sha256:e7b8232224eba16f4ebe410c25ced9f7875cb5f3263ffc93cc3e8da705e229c4"},
+ {file = "click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6"},
+ {file = "click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a"},
]
[package.dependencies]
@@ -297,104 +249,104 @@ markers = {main = "platform_system == \"Windows\"", lint-and-format = "platform_
[[package]]
name = "coverage"
-version = "7.11.0"
+version = "7.12.0"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.10"
groups = ["dev"]
files = [
- {file = "coverage-7.11.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eb53f1e8adeeb2e78962bade0c08bfdc461853c7969706ed901821e009b35e31"},
- {file = "coverage-7.11.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d9a03ec6cb9f40a5c360f138b88266fd8f58408d71e89f536b4f91d85721d075"},
- {file = "coverage-7.11.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0d7f0616c557cbc3d1c2090334eddcbb70e1ae3a40b07222d62b3aa47f608fab"},
- {file = "coverage-7.11.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:e44a86a47bbdf83b0a3ea4d7df5410d6b1a0de984fbd805fa5101f3624b9abe0"},
- {file = "coverage-7.11.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:596763d2f9a0ee7eec6e643e29660def2eef297e1de0d334c78c08706f1cb785"},
- {file = "coverage-7.11.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ef55537ff511b5e0a43edb4c50a7bf7ba1c3eea20b4f49b1490f1e8e0e42c591"},
- {file = "coverage-7.11.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:9cbabd8f4d0d3dc571d77ae5bdbfa6afe5061e679a9d74b6797c48d143307088"},
- {file = "coverage-7.11.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e24045453384e0ae2a587d562df2a04d852672eb63051d16096d3f08aa4c7c2f"},
- {file = "coverage-7.11.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:7161edd3426c8d19bdccde7d49e6f27f748f3c31cc350c5de7c633fea445d866"},
- {file = "coverage-7.11.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d4ed4de17e692ba6415b0587bc7f12bc80915031fc9db46a23ce70fc88c9841"},
- {file = "coverage-7.11.0-cp310-cp310-win32.whl", hash = "sha256:765c0bc8fe46f48e341ef737c91c715bd2a53a12792592296a095f0c237e09cf"},
- {file = "coverage-7.11.0-cp310-cp310-win_amd64.whl", hash = "sha256:24d6f3128f1b2d20d84b24f4074475457faedc3d4613a7e66b5e769939c7d969"},
- {file = "coverage-7.11.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d58ecaa865c5b9fa56e35efc51d1014d4c0d22838815b9fce57a27dd9576847"},
- {file = "coverage-7.11.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b679e171f1c104a5668550ada700e3c4937110dbdd153b7ef9055c4f1a1ee3cc"},
- {file = "coverage-7.11.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca61691ba8c5b6797deb221a0d09d7470364733ea9c69425a640f1f01b7c5bf0"},
- {file = "coverage-7.11.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:aef1747ede4bd8ca9cfc04cc3011516500c6891f1b33a94add3253f6f876b7b7"},
- {file = "coverage-7.11.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a1839d08406e4cba2953dcc0ffb312252f14d7c4c96919f70167611f4dee2623"},
- {file = "coverage-7.11.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e0eb0a2dcc62478eb5b4cbb80b97bdee852d7e280b90e81f11b407d0b81c4287"},
- {file = "coverage-7.11.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc1fbea96343b53f65d5351d8fd3b34fd415a2670d7c300b06d3e14a5af4f552"},
- {file = "coverage-7.11.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:214b622259dd0cf435f10241f1333d32caa64dbc27f8790ab693428a141723de"},
- {file = "coverage-7.11.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:258d9967520cca899695d4eb7ea38be03f06951d6ca2f21fb48b1235f791e601"},
- {file = "coverage-7.11.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cf9e6ff4ca908ca15c157c409d608da77a56a09877b97c889b98fb2c32b6465e"},
- {file = "coverage-7.11.0-cp311-cp311-win32.whl", hash = "sha256:fcc15fc462707b0680cff6242c48625da7f9a16a28a41bb8fd7a4280920e676c"},
- {file = "coverage-7.11.0-cp311-cp311-win_amd64.whl", hash = "sha256:865965bf955d92790f1facd64fe7ff73551bd2c1e7e6b26443934e9701ba30b9"},
- {file = "coverage-7.11.0-cp311-cp311-win_arm64.whl", hash = "sha256:5693e57a065760dcbeb292d60cc4d0231a6d4b6b6f6a3191561e1d5e8820b745"},
- {file = "coverage-7.11.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9c49e77811cf9d024b95faf86c3f059b11c0c9be0b0d61bc598f453703bd6fd1"},
- {file = "coverage-7.11.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a61e37a403a778e2cda2a6a39abcc895f1d984071942a41074b5c7ee31642007"},
- {file = "coverage-7.11.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c79cae102bb3b1801e2ef1511fb50e91ec83a1ce466b2c7c25010d884336de46"},
- {file = "coverage-7.11.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:16ce17ceb5d211f320b62df002fa7016b7442ea0fd260c11cec8ce7730954893"},
- {file = "coverage-7.11.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80027673e9d0bd6aef86134b0771845e2da85755cf686e7c7c59566cf5a89115"},
- {file = "coverage-7.11.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4d3ffa07a08657306cd2215b0da53761c4d73cb54d9143b9303a6481ec0cd415"},
- {file = "coverage-7.11.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a3b6a5f8b2524fd6c1066bc85bfd97e78709bb5e37b5b94911a6506b65f47186"},
- {file = "coverage-7.11.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fcc0a4aa589de34bc56e1a80a740ee0f8c47611bdfb28cd1849de60660f3799d"},
- {file = "coverage-7.11.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:dba82204769d78c3fd31b35c3d5f46e06511936c5019c39f98320e05b08f794d"},
- {file = "coverage-7.11.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:81b335f03ba67309a95210caf3eb43bd6fe75a4e22ba653ef97b4696c56c7ec2"},
- {file = "coverage-7.11.0-cp312-cp312-win32.whl", hash = "sha256:037b2d064c2f8cc8716fe4d39cb705779af3fbf1ba318dc96a1af858888c7bb5"},
- {file = "coverage-7.11.0-cp312-cp312-win_amd64.whl", hash = "sha256:d66c0104aec3b75e5fd897e7940188ea1892ca1d0235316bf89286d6a22568c0"},
- {file = "coverage-7.11.0-cp312-cp312-win_arm64.whl", hash = "sha256:d91ebeac603812a09cf6a886ba6e464f3bbb367411904ae3790dfe28311b15ad"},
- {file = "coverage-7.11.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cc3f49e65ea6e0d5d9bd60368684fe52a704d46f9e7fc413918f18d046ec40e1"},
- {file = "coverage-7.11.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f39ae2f63f37472c17b4990f794035c9890418b1b8cca75c01193f3c8d3e01be"},
- {file = "coverage-7.11.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7db53b5cdd2917b6eaadd0b1251cf4e7d96f4a8d24e174bdbdf2f65b5ea7994d"},
- {file = "coverage-7.11.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10ad04ac3a122048688387828b4537bc9cf60c0bf4869c1e9989c46e45690b82"},
- {file = "coverage-7.11.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4036cc9c7983a2b1f2556d574d2eb2154ac6ed55114761685657e38782b23f52"},
- {file = "coverage-7.11.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:7ab934dd13b1c5e94b692b1e01bd87e4488cb746e3a50f798cb9464fd128374b"},
- {file = "coverage-7.11.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:59a6e5a265f7cfc05f76e3bb53eca2e0dfe90f05e07e849930fecd6abb8f40b4"},
- {file = "coverage-7.11.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:df01d6c4c81e15a7c88337b795bb7595a8596e92310266b5072c7e301168efbd"},
- {file = "coverage-7.11.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:8c934bd088eed6174210942761e38ee81d28c46de0132ebb1801dbe36a390dcc"},
- {file = "coverage-7.11.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a03eaf7ec24078ad64a07f02e30060aaf22b91dedf31a6b24d0d98d2bba7f48"},
- {file = "coverage-7.11.0-cp313-cp313-win32.whl", hash = "sha256:695340f698a5f56f795b2836abe6fb576e7c53d48cd155ad2f80fd24bc63a040"},
- {file = "coverage-7.11.0-cp313-cp313-win_amd64.whl", hash = "sha256:2727d47fce3ee2bac648528e41455d1b0c46395a087a229deac75e9f88ba5a05"},
- {file = "coverage-7.11.0-cp313-cp313-win_arm64.whl", hash = "sha256:0efa742f431529699712b92ecdf22de8ff198df41e43aeaaadf69973eb93f17a"},
- {file = "coverage-7.11.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:587c38849b853b157706407e9ebdca8fd12f45869edb56defbef2daa5fb0812b"},
- {file = "coverage-7.11.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b971bdefdd75096163dd4261c74be813c4508477e39ff7b92191dea19f24cd37"},
- {file = "coverage-7.11.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:269bfe913b7d5be12ab13a95f3a76da23cf147be7fa043933320ba5625f0a8de"},
- {file = "coverage-7.11.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:dadbcce51a10c07b7c72b0ce4a25e4b6dcb0c0372846afb8e5b6307a121eb99f"},
- {file = "coverage-7.11.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9ed43fa22c6436f7957df036331f8fe4efa7af132054e1844918866cd228af6c"},
- {file = "coverage-7.11.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9516add7256b6713ec08359b7b05aeff8850c98d357784c7205b2e60aa2513fa"},
- {file = "coverage-7.11.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:eb92e47c92fcbcdc692f428da67db33337fa213756f7adb6a011f7b5a7a20740"},
- {file = "coverage-7.11.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:d06f4fc7acf3cabd6d74941d53329e06bab00a8fe10e4df2714f0b134bfc64ef"},
- {file = "coverage-7.11.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:6fbcee1a8f056af07ecd344482f711f563a9eb1c2cad192e87df00338ec3cdb0"},
- {file = "coverage-7.11.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:dbbf012be5f32533a490709ad597ad8a8ff80c582a95adc8d62af664e532f9ca"},
- {file = "coverage-7.11.0-cp313-cp313t-win32.whl", hash = "sha256:cee6291bb4fed184f1c2b663606a115c743df98a537c969c3c64b49989da96c2"},
- {file = "coverage-7.11.0-cp313-cp313t-win_amd64.whl", hash = "sha256:a386c1061bf98e7ea4758e4313c0ab5ecf57af341ef0f43a0bf26c2477b5c268"},
- {file = "coverage-7.11.0-cp313-cp313t-win_arm64.whl", hash = "sha256:f9ea02ef40bb83823b2b04964459d281688fe173e20643870bb5d2edf68bc836"},
- {file = "coverage-7.11.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c770885b28fb399aaf2a65bbd1c12bf6f307ffd112d6a76c5231a94276f0c497"},
- {file = "coverage-7.11.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a3d0e2087dba64c86a6b254f43e12d264b636a39e88c5cc0a01a7c71bcfdab7e"},
- {file = "coverage-7.11.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:73feb83bb41c32811973b8565f3705caf01d928d972b72042b44e97c71fd70d1"},
- {file = "coverage-7.11.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c6f31f281012235ad08f9a560976cc2fc9c95c17604ff3ab20120fe480169bca"},
- {file = "coverage-7.11.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e9570ad567f880ef675673992222746a124b9595506826b210fbe0ce3f0499cd"},
- {file = "coverage-7.11.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8badf70446042553a773547a61fecaa734b55dc738cacf20c56ab04b77425e43"},
- {file = "coverage-7.11.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:a09c1211959903a479e389685b7feb8a17f59ec5a4ef9afde7650bd5eabc2777"},
- {file = "coverage-7.11.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:5ef83b107f50db3f9ae40f69e34b3bd9337456c5a7fe3461c7abf8b75dd666a2"},
- {file = "coverage-7.11.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f91f927a3215b8907e214af77200250bb6aae36eca3f760f89780d13e495388d"},
- {file = "coverage-7.11.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:cdbcd376716d6b7fbfeedd687a6c4be019c5a5671b35f804ba76a4c0a778cba4"},
- {file = "coverage-7.11.0-cp314-cp314-win32.whl", hash = "sha256:bab7ec4bb501743edc63609320aaec8cd9188b396354f482f4de4d40a9d10721"},
- {file = "coverage-7.11.0-cp314-cp314-win_amd64.whl", hash = "sha256:3d4ba9a449e9364a936a27322b20d32d8b166553bfe63059bd21527e681e2fad"},
- {file = "coverage-7.11.0-cp314-cp314-win_arm64.whl", hash = "sha256:ce37f215223af94ef0f75ac68ea096f9f8e8c8ec7d6e8c346ee45c0d363f0479"},
- {file = "coverage-7.11.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:f413ce6e07e0d0dc9c433228727b619871532674b45165abafe201f200cc215f"},
- {file = "coverage-7.11.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:05791e528a18f7072bf5998ba772fe29db4da1234c45c2087866b5ba4dea710e"},
- {file = "coverage-7.11.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cacb29f420cfeb9283b803263c3b9a068924474ff19ca126ba9103e1278dfa44"},
- {file = "coverage-7.11.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:314c24e700d7027ae3ab0d95fbf8d53544fca1f20345fd30cd219b737c6e58d3"},
- {file = "coverage-7.11.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:630d0bd7a293ad2fc8b4b94e5758c8b2536fdf36c05f1681270203e463cbfa9b"},
- {file = "coverage-7.11.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e89641f5175d65e2dbb44db15fe4ea48fade5d5bbb9868fdc2b4fce22f4a469d"},
- {file = "coverage-7.11.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c9f08ea03114a637dab06cedb2e914da9dc67fa52c6015c018ff43fdde25b9c2"},
- {file = "coverage-7.11.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce9f3bde4e9b031eaf1eb61df95c1401427029ea1bfddb8621c1161dcb0fa02e"},
- {file = "coverage-7.11.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:e4dc07e95495923d6fd4d6c27bf70769425b71c89053083843fd78f378558996"},
- {file = "coverage-7.11.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:424538266794db2861db4922b05d729ade0940ee69dcf0591ce8f69784db0e11"},
- {file = "coverage-7.11.0-cp314-cp314t-win32.whl", hash = "sha256:4c1eeb3fb8eb9e0190bebafd0462936f75717687117339f708f395fe455acc73"},
- {file = "coverage-7.11.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b56efee146c98dbf2cf5cffc61b9829d1e94442df4d7398b26892a53992d3547"},
- {file = "coverage-7.11.0-cp314-cp314t-win_arm64.whl", hash = "sha256:b5c2705afa83f49bd91962a4094b6b082f94aef7626365ab3f8f4bd159c5acf3"},
- {file = "coverage-7.11.0-py3-none-any.whl", hash = "sha256:4b7589765348d78fb4e5fb6ea35d07564e387da2fc5efff62e0222971f155f68"},
- {file = "coverage-7.11.0.tar.gz", hash = "sha256:167bd504ac1ca2af7ff3b81d245dfea0292c5032ebef9d66cc08a7d28c1b8050"},
+ {file = "coverage-7.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:32b75c2ba3f324ee37af3ccee5b30458038c50b349ad9b88cee85096132a575b"},
+ {file = "coverage-7.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cb2a1b6ab9fe833714a483a915de350abc624a37149649297624c8d57add089c"},
+ {file = "coverage-7.12.0-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5734b5d913c3755e72f70bf6cc37a0518d4f4745cde760c5d8e12005e62f9832"},
+ {file = "coverage-7.12.0-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b527a08cdf15753279b7afb2339a12073620b761d79b81cbe2cdebdb43d90daa"},
+ {file = "coverage-7.12.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9bb44c889fb68004e94cab71f6a021ec83eac9aeabdbb5a5a88821ec46e1da73"},
+ {file = "coverage-7.12.0-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:4b59b501455535e2e5dde5881739897967b272ba25988c89145c12d772810ccb"},
+ {file = "coverage-7.12.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d8842f17095b9868a05837b7b1b73495293091bed870e099521ada176aa3e00e"},
+ {file = "coverage-7.12.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c5a6f20bf48b8866095c6820641e7ffbe23f2ac84a2efc218d91235e404c7777"},
+ {file = "coverage-7.12.0-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:5f3738279524e988d9da2893f307c2093815c623f8d05a8f79e3eff3a7a9e553"},
+ {file = "coverage-7.12.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0d68c1f7eabbc8abe582d11fa393ea483caf4f44b0af86881174769f185c94d"},
+ {file = "coverage-7.12.0-cp310-cp310-win32.whl", hash = "sha256:7670d860e18b1e3ee5930b17a7d55ae6287ec6e55d9799982aa103a2cc1fa2ef"},
+ {file = "coverage-7.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:f999813dddeb2a56aab5841e687b68169da0d3f6fc78ccf50952fa2463746022"},
+ {file = "coverage-7.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa124a3683d2af98bd9d9c2bfa7a5076ca7e5ab09fdb96b81fa7d89376ae928f"},
+ {file = "coverage-7.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d93fbf446c31c0140208dcd07c5d882029832e8ed7891a39d6d44bd65f2316c3"},
+ {file = "coverage-7.12.0-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:52ca620260bd8cd6027317bdd8b8ba929be1d741764ee765b42c4d79a408601e"},
+ {file = "coverage-7.12.0-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f3433ffd541380f3a0e423cff0f4926d55b0cc8c1d160fdc3be24a4c03aa65f7"},
+ {file = "coverage-7.12.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f7bbb321d4adc9f65e402c677cd1c8e4c2d0105d3ce285b51b4d87f1d5db5245"},
+ {file = "coverage-7.12.0-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:22a7aade354a72dff3b59c577bfd18d6945c61f97393bc5fb7bd293a4237024b"},
+ {file = "coverage-7.12.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3ff651dcd36d2fea66877cd4a82de478004c59b849945446acb5baf9379a1b64"},
+ {file = "coverage-7.12.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:31b8b2e38391a56e3cea39d22a23faaa7c3fc911751756ef6d2621d2a9daf742"},
+ {file = "coverage-7.12.0-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:297bc2da28440f5ae51c845a47c8175a4db0553a53827886e4fb25c66633000c"},
+ {file = "coverage-7.12.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6ff7651cc01a246908eac162a6a86fc0dbab6de1ad165dfb9a1e2ec660b44984"},
+ {file = "coverage-7.12.0-cp311-cp311-win32.whl", hash = "sha256:313672140638b6ddb2c6455ddeda41c6a0b208298034544cfca138978c6baed6"},
+ {file = "coverage-7.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:a1783ed5bd0d5938d4435014626568dc7f93e3cb99bc59188cc18857c47aa3c4"},
+ {file = "coverage-7.12.0-cp311-cp311-win_arm64.whl", hash = "sha256:4648158fd8dd9381b5847622df1c90ff314efbfc1df4550092ab6013c238a5fc"},
+ {file = "coverage-7.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:29644c928772c78512b48e14156b81255000dcfd4817574ff69def189bcb3647"},
+ {file = "coverage-7.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8638cbb002eaa5d7c8d04da667813ce1067080b9a91099801a0053086e52b736"},
+ {file = "coverage-7.12.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:083631eeff5eb9992c923e14b810a179798bb598e6a0dd60586819fc23be6e60"},
+ {file = "coverage-7.12.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:99d5415c73ca12d558e07776bd957c4222c687b9f1d26fa0e1b57e3598bdcde8"},
+ {file = "coverage-7.12.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e949ebf60c717c3df63adb4a1a366c096c8d7fd8472608cd09359e1bd48ef59f"},
+ {file = "coverage-7.12.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6d907ddccbca819afa2cd014bc69983b146cca2735a0b1e6259b2a6c10be1e70"},
+ {file = "coverage-7.12.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:b1518ecbad4e6173f4c6e6c4a46e49555ea5679bf3feda5edb1b935c7c44e8a0"},
+ {file = "coverage-7.12.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51777647a749abdf6f6fd8c7cffab12de68ab93aab15efc72fbbb83036c2a068"},
+ {file = "coverage-7.12.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:42435d46d6461a3b305cdfcad7cdd3248787771f53fe18305548cba474e6523b"},
+ {file = "coverage-7.12.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5bcead88c8423e1855e64b8057d0544e33e4080b95b240c2a355334bb7ced937"},
+ {file = "coverage-7.12.0-cp312-cp312-win32.whl", hash = "sha256:dcbb630ab034e86d2a0f79aefd2be07e583202f41e037602d438c80044957baa"},
+ {file = "coverage-7.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:2fd8354ed5d69775ac42986a691fbf68b4084278710cee9d7c3eaa0c28fa982a"},
+ {file = "coverage-7.12.0-cp312-cp312-win_arm64.whl", hash = "sha256:737c3814903be30695b2de20d22bcc5428fdae305c61ba44cdc8b3252984c49c"},
+ {file = "coverage-7.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:47324fffca8d8eae7e185b5bb20c14645f23350f870c1649003618ea91a78941"},
+ {file = "coverage-7.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ccf3b2ede91decd2fb53ec73c1f949c3e034129d1e0b07798ff1d02ea0c8fa4a"},
+ {file = "coverage-7.12.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b365adc70a6936c6b0582dc38746b33b2454148c02349345412c6e743efb646d"},
+ {file = "coverage-7.12.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bc13baf85cd8a4cfcf4a35c7bc9d795837ad809775f782f697bf630b7e200211"},
+ {file = "coverage-7.12.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:099d11698385d572ceafb3288a5b80fe1fc58bf665b3f9d362389de488361d3d"},
+ {file = "coverage-7.12.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:473dc45d69694069adb7680c405fb1e81f60b2aff42c81e2f2c3feaf544d878c"},
+ {file = "coverage-7.12.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:583f9adbefd278e9de33c33d6846aa8f5d164fa49b47144180a0e037f0688bb9"},
+ {file = "coverage-7.12.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b2089cc445f2dc0af6f801f0d1355c025b76c24481935303cf1af28f636688f0"},
+ {file = "coverage-7.12.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:950411f1eb5d579999c5f66c62a40961f126fc71e5e14419f004471957b51508"},
+ {file = "coverage-7.12.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b1aab7302a87bafebfe76b12af681b56ff446dc6f32ed178ff9c092ca776e6bc"},
+ {file = "coverage-7.12.0-cp313-cp313-win32.whl", hash = "sha256:d7e0d0303c13b54db495eb636bc2465b2fb8475d4c8bcec8fe4b5ca454dfbae8"},
+ {file = "coverage-7.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:ce61969812d6a98a981d147d9ac583a36ac7db7766f2e64a9d4d059c2fe29d07"},
+ {file = "coverage-7.12.0-cp313-cp313-win_arm64.whl", hash = "sha256:bcec6f47e4cb8a4c2dc91ce507f6eefc6a1b10f58df32cdc61dff65455031dfc"},
+ {file = "coverage-7.12.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:459443346509476170d553035e4a3eed7b860f4fe5242f02de1010501956ce87"},
+ {file = "coverage-7.12.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:04a79245ab2b7a61688958f7a855275997134bc84f4a03bc240cf64ff132abf6"},
+ {file = "coverage-7.12.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:09a86acaaa8455f13d6a99221d9654df249b33937b4e212b4e5a822065f12aa7"},
+ {file = "coverage-7.12.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:907e0df1b71ba77463687a74149c6122c3f6aac56c2510a5d906b2f368208560"},
+ {file = "coverage-7.12.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9b57e2d0ddd5f0582bae5437c04ee71c46cd908e7bc5d4d0391f9a41e812dd12"},
+ {file = "coverage-7.12.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:58c1c6aa677f3a1411fe6fb28ec3a942e4f665df036a3608816e0847fad23296"},
+ {file = "coverage-7.12.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4c589361263ab2953e3c4cd2a94db94c4ad4a8e572776ecfbad2389c626e4507"},
+ {file = "coverage-7.12.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:91b810a163ccad2e43b1faa11d70d3cf4b6f3d83f9fd5f2df82a32d47b648e0d"},
+ {file = "coverage-7.12.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:40c867af715f22592e0d0fb533a33a71ec9e0f73a6945f722a0c85c8c1cbe3a2"},
+ {file = "coverage-7.12.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:68b0d0a2d84f333de875666259dadf28cc67858bc8fd8b3f1eae84d3c2bec455"},
+ {file = "coverage-7.12.0-cp313-cp313t-win32.whl", hash = "sha256:73f9e7fbd51a221818fd11b7090eaa835a353ddd59c236c57b2199486b116c6d"},
+ {file = "coverage-7.12.0-cp313-cp313t-win_amd64.whl", hash = "sha256:24cff9d1f5743f67db7ba46ff284018a6e9aeb649b67aa1e70c396aa1b7cb23c"},
+ {file = "coverage-7.12.0-cp313-cp313t-win_arm64.whl", hash = "sha256:c87395744f5c77c866d0f5a43d97cc39e17c7f1cb0115e54a2fe67ca75c5d14d"},
+ {file = "coverage-7.12.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a1c59b7dc169809a88b21a936eccf71c3895a78f5592051b1af8f4d59c2b4f92"},
+ {file = "coverage-7.12.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8787b0f982e020adb732b9f051f3e49dd5054cebbc3f3432061278512a2b1360"},
+ {file = "coverage-7.12.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5ea5a9f7dc8877455b13dd1effd3202e0bca72f6f3ab09f9036b1bcf728f69ac"},
+ {file = "coverage-7.12.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fdba9f15849534594f60b47c9a30bc70409b54947319a7c4fd0e8e3d8d2f355d"},
+ {file = "coverage-7.12.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a00594770eb715854fb1c57e0dea08cce6720cfbc531accdb9850d7c7770396c"},
+ {file = "coverage-7.12.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:5560c7e0d82b42eb1951e4f68f071f8017c824ebfd5a6ebe42c60ac16c6c2434"},
+ {file = "coverage-7.12.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:d6c2e26b481c9159c2773a37947a9718cfdc58893029cdfb177531793e375cfc"},
+ {file = "coverage-7.12.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:6e1a8c066dabcde56d5d9fed6a66bc19a2883a3fe051f0c397a41fc42aedd4cc"},
+ {file = "coverage-7.12.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:f7ba9da4726e446d8dd8aae5a6cd872511184a5d861de80a86ef970b5dacce3e"},
+ {file = "coverage-7.12.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e0f483ab4f749039894abaf80c2f9e7ed77bbf3c737517fb88c8e8e305896a17"},
+ {file = "coverage-7.12.0-cp314-cp314-win32.whl", hash = "sha256:76336c19a9ef4a94b2f8dc79f8ac2da3f193f625bb5d6f51a328cd19bfc19933"},
+ {file = "coverage-7.12.0-cp314-cp314-win_amd64.whl", hash = "sha256:7c1059b600aec6ef090721f8f633f60ed70afaffe8ecab85b59df748f24b31fe"},
+ {file = "coverage-7.12.0-cp314-cp314-win_arm64.whl", hash = "sha256:172cf3a34bfef42611963e2b661302a8931f44df31629e5b1050567d6b90287d"},
+ {file = "coverage-7.12.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:aa7d48520a32cb21c7a9b31f81799e8eaec7239db36c3b670be0fa2403828d1d"},
+ {file = "coverage-7.12.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:90d58ac63bc85e0fb919f14d09d6caa63f35a5512a2205284b7816cafd21bb03"},
+ {file = "coverage-7.12.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:ca8ecfa283764fdda3eae1bdb6afe58bf78c2c3ec2b2edcb05a671f0bba7b3f9"},
+ {file = "coverage-7.12.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:874fe69a0785d96bd066059cd4368022cebbec1a8958f224f0016979183916e6"},
+ {file = "coverage-7.12.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b3c889c0b8b283a24d721a9eabc8ccafcfc3aebf167e4cd0d0e23bf8ec4e339"},
+ {file = "coverage-7.12.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8bb5b894b3ec09dcd6d3743229dc7f2c42ef7787dc40596ae04c0edda487371e"},
+ {file = "coverage-7.12.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:79a44421cd5fba96aa57b5e3b5a4d3274c449d4c622e8f76882d76635501fd13"},
+ {file = "coverage-7.12.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:33baadc0efd5c7294f436a632566ccc1f72c867f82833eb59820ee37dc811c6f"},
+ {file = "coverage-7.12.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:c406a71f544800ef7e9e0000af706b88465f3573ae8b8de37e5f96c59f689ad1"},
+ {file = "coverage-7.12.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e71bba6a40883b00c6d571599b4627f50c360b3d0d02bfc658168936be74027b"},
+ {file = "coverage-7.12.0-cp314-cp314t-win32.whl", hash = "sha256:9157a5e233c40ce6613dead4c131a006adfda70e557b6856b97aceed01b0e27a"},
+ {file = "coverage-7.12.0-cp314-cp314t-win_amd64.whl", hash = "sha256:e84da3a0fd233aeec797b981c51af1cabac74f9bd67be42458365b30d11b5291"},
+ {file = "coverage-7.12.0-cp314-cp314t-win_arm64.whl", hash = "sha256:01d24af36fedda51c2b1aca56e4330a3710f83b02a5ff3743a6b015ffa7c9384"},
+ {file = "coverage-7.12.0-py3-none-any.whl", hash = "sha256:159d50c0b12e060b15ed3d39f87ed43d4f7f7ad40b8a534f4dd331adbb51104a"},
+ {file = "coverage-7.12.0.tar.gz", hash = "sha256:fc11e0a4e372cb5f282f16ef90d4a585034050ccda536451901abfb19a57f40c"},
]
[package.dependencies]
@@ -433,27 +385,27 @@ profile = ["gprof2dot (>=2022.7.29)"]
[[package]]
name = "docutils"
-version = "0.22.2"
+version = "0.22.3"
description = "Docutils -- Python Documentation Utilities"
optional = false
python-versions = ">=3.9"
groups = ["main"]
files = [
- {file = "docutils-0.22.2-py3-none-any.whl", hash = "sha256:b0e98d679283fc3bb0ead8a5da7f501baa632654e7056e9c5846842213d674d8"},
- {file = "docutils-0.22.2.tar.gz", hash = "sha256:9fdb771707c8784c8f2728b67cb2c691305933d68137ef95a75db5f4dfbc213d"},
+ {file = "docutils-0.22.3-py3-none-any.whl", hash = "sha256:bd772e4aca73aff037958d44f2be5229ded4c09927fcf8690c577b66234d6ceb"},
+ {file = "docutils-0.22.3.tar.gz", hash = "sha256:21486ae730e4ca9f622677b1412b879af1791efcfba517e4c6f60be543fc8cdd"},
]
[[package]]
name = "exceptiongroup"
-version = "1.3.0"
+version = "1.3.1"
description = "Backport of PEP 654 (exception groups)"
optional = false
python-versions = ">=3.7"
groups = ["dev"]
-markers = "python_version == \"3.10\""
+markers = "python_version < \"3.11\""
files = [
- {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"},
- {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"},
+ {file = "exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598"},
+ {file = "exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219"},
]
[package.dependencies]
@@ -464,14 +416,14 @@ test = ["pytest (>=6)"]
[[package]]
name = "faker"
-version = "37.12.0"
+version = "38.2.0"
description = "Faker is a Python package that generates fake data for you."
optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
groups = ["main"]
files = [
- {file = "faker-37.12.0-py3-none-any.whl", hash = "sha256:afe7ccc038da92f2fbae30d8e16d19d91e92e242f8401ce9caf44de892bab4c4"},
- {file = "faker-37.12.0.tar.gz", hash = "sha256:7505e59a7e02fa9010f06c3e1e92f8250d4cfbb30632296140c2d6dbef09b0fa"},
+ {file = "faker-38.2.0-py3-none-any.whl", hash = "sha256:35fe4a0a79dee0dc4103a6083ee9224941e7d3594811a50e3969e547b0d2ee65"},
+ {file = "faker-38.2.0.tar.gz", hash = "sha256:20672803db9c7cb97f9b56c18c54b915b6f1d8991f63d1d673642dc43f5ce7ab"},
]
[package.dependencies]
@@ -479,20 +431,20 @@ tzdata = "*"
[[package]]
name = "fastapi"
-version = "0.120.4"
+version = "0.122.0"
description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production"
optional = false
python-versions = ">=3.8"
groups = ["dev"]
files = [
- {file = "fastapi-0.120.4-py3-none-any.whl", hash = "sha256:9bdf192308676480d3593e10fd05094e56d6fdc7d9283db26053d8104d5f82a0"},
- {file = "fastapi-0.120.4.tar.gz", hash = "sha256:2d856bc847893ca4d77896d4504ffdec0fb04312b705065fca9104428eca3868"},
+ {file = "fastapi-0.122.0-py3-none-any.whl", hash = "sha256:a456e8915dfc6c8914a50d9651133bd47ec96d331c5b44600baa635538a30d67"},
+ {file = "fastapi-0.122.0.tar.gz", hash = "sha256:cd9b5352031f93773228af8b4c443eedc2ac2aa74b27780387b853c3726fb94b"},
]
[package.dependencies]
annotated-doc = ">=0.0.2"
pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0"
-starlette = ">=0.40.0,<0.50.0"
+starlette = ">=0.40.0,<0.51.0"
typing-extensions = ">=4.8.0"
[package.extras]
@@ -502,14 +454,14 @@ standard-no-fastapi-cloud-cli = ["email-validator (>=2.0.0)", "fastapi-cli[stand
[[package]]
name = "genbadge"
-version = "1.1.2"
+version = "1.1.3"
description = "Generate badges for tools that do not provide one."
optional = false
python-versions = "*"
groups = ["dev"]
files = [
- {file = "genbadge-1.1.2-py2.py3-none-any.whl", hash = "sha256:4e3073cb56c2745fbef4b7da97eb85b28a18a22af519b66acb6706b6546279f1"},
- {file = "genbadge-1.1.2.tar.gz", hash = "sha256:987ed2feaf6e9cc2850fc3883320d8706b3849eb6c9f436156254dcac438515c"},
+ {file = "genbadge-1.1.3-py2.py3-none-any.whl", hash = "sha256:6e4316c171c6f0f84becae4eb116258340bdc054458632abc622d36b8040655e"},
+ {file = "genbadge-1.1.3.tar.gz", hash = "sha256:2292ea9cc20af4463dfde952c6b15544fdab9d6e50945f63a42cc400c521fa74"},
]
[package.dependencies]
@@ -945,7 +897,7 @@ version = "1.1.0"
description = "Type system extensions for programs checked with the mypy type checker."
optional = false
python-versions = ">=3.8"
-groups = ["main", "type-checking"]
+groups = ["type-checking"]
files = [
{file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"},
{file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"},
@@ -1072,7 +1024,7 @@ version = "0.12.1"
description = "Utility library for gitignore style pattern matching of file paths."
optional = false
python-versions = ">=3.8"
-groups = ["main", "lint-and-format", "type-checking"]
+groups = ["lint-and-format", "type-checking"]
files = [
{file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"},
{file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"},
@@ -1193,7 +1145,7 @@ version = "4.5.0"
description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`."
optional = false
python-versions = ">=3.10"
-groups = ["main", "dev", "lint-and-format", "type-checking"]
+groups = ["dev", "lint-and-format", "type-checking"]
files = [
{file = "platformdirs-4.5.0-py3-none-any.whl", hash = "sha256:e578a81bb873cbb89a41fcc904c7ef523cc18284b7e3b3ccf06aca1403b7ebd3"},
{file = "platformdirs-4.5.0.tar.gz", hash = "sha256:70ddccdd7c99fc5942e9fc25636a8b34d04c24b335100223152c2803e4063312"},
@@ -1249,19 +1201,19 @@ ssv = ["swagger-spec-validator (>=3.0.4,<3.1.0)"]
[[package]]
name = "pydantic"
-version = "2.12.3"
+version = "2.12.5"
description = "Data validation using Python type hints"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
- {file = "pydantic-2.12.3-py3-none-any.whl", hash = "sha256:6986454a854bc3bc6e5443e1369e06a3a456af9d339eda45510f517d9ea5c6bf"},
- {file = "pydantic-2.12.3.tar.gz", hash = "sha256:1da1c82b0fc140bb0103bc1441ffe062154c8d38491189751ee00fd8ca65ce74"},
+ {file = "pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d"},
+ {file = "pydantic-2.12.5.tar.gz", hash = "sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49"},
]
[package.dependencies]
annotated-types = ">=0.6.0"
-pydantic-core = "2.41.4"
+pydantic-core = "2.41.5"
typing-extensions = ">=4.14.1"
typing-inspection = ">=0.4.2"
@@ -1271,129 +1223,133 @@ timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows
[[package]]
name = "pydantic-core"
-version = "2.41.4"
+version = "2.41.5"
description = "Core functionality for Pydantic validation and serialization"
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
- {file = "pydantic_core-2.41.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2442d9a4d38f3411f22eb9dd0912b7cbf4b7d5b6c92c4173b75d3e1ccd84e36e"},
- {file = "pydantic_core-2.41.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:30a9876226dda131a741afeab2702e2d127209bde3c65a2b8133f428bc5d006b"},
- {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d55bbac04711e2980645af68b97d445cdbcce70e5216de444a6c4b6943ebcccd"},
- {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1d778fb7849a42d0ee5927ab0f7453bf9f85eef8887a546ec87db5ddb178945"},
- {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1b65077a4693a98b90ec5ad8f203ad65802a1b9b6d4a7e48066925a7e1606706"},
- {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:62637c769dee16eddb7686bf421be48dfc2fae93832c25e25bc7242e698361ba"},
- {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dfe3aa529c8f501babf6e502936b9e8d4698502b2cfab41e17a028d91b1ac7b"},
- {file = "pydantic_core-2.41.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca2322da745bf2eeb581fc9ea3bbb31147702163ccbcbf12a3bb630e4bf05e1d"},
- {file = "pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e8cd3577c796be7231dcf80badcf2e0835a46665eaafd8ace124d886bab4d700"},
- {file = "pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:1cae8851e174c83633f0833e90636832857297900133705ee158cf79d40f03e6"},
- {file = "pydantic_core-2.41.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a26d950449aae348afe1ac8be5525a00ae4235309b729ad4d3399623125b43c9"},
- {file = "pydantic_core-2.41.4-cp310-cp310-win32.whl", hash = "sha256:0cf2a1f599efe57fa0051312774280ee0f650e11152325e41dfd3018ef2c1b57"},
- {file = "pydantic_core-2.41.4-cp310-cp310-win_amd64.whl", hash = "sha256:a8c2e340d7e454dc3340d3d2e8f23558ebe78c98aa8f68851b04dcb7bc37abdc"},
- {file = "pydantic_core-2.41.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:28ff11666443a1a8cf2a044d6a545ebffa8382b5f7973f22c36109205e65dc80"},
- {file = "pydantic_core-2.41.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:61760c3925d4633290292bad462e0f737b840508b4f722247d8729684f6539ae"},
- {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eae547b7315d055b0de2ec3965643b0ab82ad0106a7ffd29615ee9f266a02827"},
- {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ef9ee5471edd58d1fcce1c80ffc8783a650e3e3a193fe90d52e43bb4d87bff1f"},
- {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:15dd504af121caaf2c95cb90c0ebf71603c53de98305621b94da0f967e572def"},
- {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a926768ea49a8af4d36abd6a8968b8790f7f76dd7cbd5a4c180db2b4ac9a3a2"},
- {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6916b9b7d134bff5440098a4deb80e4cb623e68974a87883299de9124126c2a8"},
- {file = "pydantic_core-2.41.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5cf90535979089df02e6f17ffd076f07237efa55b7343d98760bde8743c4b265"},
- {file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7533c76fa647fade2d7ec75ac5cc079ab3f34879626dae5689b27790a6cf5a5c"},
- {file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:37e516bca9264cbf29612539801ca3cd5d1be465f940417b002905e6ed79d38a"},
- {file = "pydantic_core-2.41.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0c19cb355224037c83642429b8ce261ae108e1c5fbf5c028bac63c77b0f8646e"},
- {file = "pydantic_core-2.41.4-cp311-cp311-win32.whl", hash = "sha256:09c2a60e55b357284b5f31f5ab275ba9f7f70b7525e18a132ec1f9160b4f1f03"},
- {file = "pydantic_core-2.41.4-cp311-cp311-win_amd64.whl", hash = "sha256:711156b6afb5cb1cb7c14a2cc2c4a8b4c717b69046f13c6b332d8a0a8f41ca3e"},
- {file = "pydantic_core-2.41.4-cp311-cp311-win_arm64.whl", hash = "sha256:6cb9cf7e761f4f8a8589a45e49ed3c0d92d1d696a45a6feaee8c904b26efc2db"},
- {file = "pydantic_core-2.41.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:ab06d77e053d660a6faaf04894446df7b0a7e7aba70c2797465a0a1af00fc887"},
- {file = "pydantic_core-2.41.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c53ff33e603a9c1179a9364b0a24694f183717b2e0da2b5ad43c316c956901b2"},
- {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:304c54176af2c143bd181d82e77c15c41cbacea8872a2225dd37e6544dce9999"},
- {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:025ba34a4cf4fb32f917d5d188ab5e702223d3ba603be4d8aca2f82bede432a4"},
- {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9f5f30c402ed58f90c70e12eff65547d3ab74685ffe8283c719e6bead8ef53f"},
- {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd96e5d15385d301733113bcaa324c8bcf111275b7675a9c6e88bfb19fc05e3b"},
- {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98f348cbb44fae6e9653c1055db7e29de67ea6a9ca03a5fa2c2e11a47cff0e47"},
- {file = "pydantic_core-2.41.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec22626a2d14620a83ca583c6f5a4080fa3155282718b6055c2ea48d3ef35970"},
- {file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a95d4590b1f1a43bf33ca6d647b990a88f4a3824a8c4572c708f0b45a5290ed"},
- {file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:f9672ab4d398e1b602feadcffcdd3af44d5f5e6ddc15bc7d15d376d47e8e19f8"},
- {file = "pydantic_core-2.41.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:84d8854db5f55fead3b579f04bda9a36461dab0730c5d570e1526483e7bb8431"},
- {file = "pydantic_core-2.41.4-cp312-cp312-win32.whl", hash = "sha256:9be1c01adb2ecc4e464392c36d17f97e9110fbbc906bcbe1c943b5b87a74aabd"},
- {file = "pydantic_core-2.41.4-cp312-cp312-win_amd64.whl", hash = "sha256:d682cf1d22bab22a5be08539dca3d1593488a99998f9f412137bc323179067ff"},
- {file = "pydantic_core-2.41.4-cp312-cp312-win_arm64.whl", hash = "sha256:833eebfd75a26d17470b58768c1834dfc90141b7afc6eb0429c21fc5a21dcfb8"},
- {file = "pydantic_core-2.41.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:85e050ad9e5f6fe1004eec65c914332e52f429bc0ae12d6fa2092407a462c746"},
- {file = "pydantic_core-2.41.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e7393f1d64792763a48924ba31d1e44c2cfbc05e3b1c2c9abb4ceeadd912cced"},
- {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94dab0940b0d1fb28bcab847adf887c66a27a40291eedf0b473be58761c9799a"},
- {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de7c42f897e689ee6f9e93c4bec72b99ae3b32a2ade1c7e4798e690ff5246e02"},
- {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:664b3199193262277b8b3cd1e754fb07f2c6023289c815a1e1e8fb415cb247b1"},
- {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95b253b88f7d308b1c0b417c4624f44553ba4762816f94e6986819b9c273fb2"},
- {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a1351f5bbdbbabc689727cb91649a00cb9ee7203e0a6e54e9f5ba9e22e384b84"},
- {file = "pydantic_core-2.41.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1affa4798520b148d7182da0615d648e752de4ab1a9566b7471bc803d88a062d"},
- {file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7b74e18052fea4aa8dea2fb7dbc23d15439695da6cbe6cfc1b694af1115df09d"},
- {file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:285b643d75c0e30abda9dc1077395624f314a37e3c09ca402d4015ef5979f1a2"},
- {file = "pydantic_core-2.41.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:f52679ff4218d713b3b33f88c89ccbf3a5c2c12ba665fb80ccc4192b4608dbab"},
- {file = "pydantic_core-2.41.4-cp313-cp313-win32.whl", hash = "sha256:ecde6dedd6fff127c273c76821bb754d793be1024bc33314a120f83a3c69460c"},
- {file = "pydantic_core-2.41.4-cp313-cp313-win_amd64.whl", hash = "sha256:d081a1f3800f05409ed868ebb2d74ac39dd0c1ff6c035b5162356d76030736d4"},
- {file = "pydantic_core-2.41.4-cp313-cp313-win_arm64.whl", hash = "sha256:f8e49c9c364a7edcbe2a310f12733aad95b022495ef2a8d653f645e5d20c1564"},
- {file = "pydantic_core-2.41.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ed97fd56a561f5eb5706cebe94f1ad7c13b84d98312a05546f2ad036bafe87f4"},
- {file = "pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a870c307bf1ee91fc58a9a61338ff780d01bfae45922624816878dce784095d2"},
- {file = "pydantic_core-2.41.4-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25e97bc1f5f8f7985bdc2335ef9e73843bb561eb1fa6831fdfc295c1c2061cf"},
- {file = "pydantic_core-2.41.4-cp313-cp313t-win_amd64.whl", hash = "sha256:d405d14bea042f166512add3091c1af40437c2e7f86988f3915fabd27b1e9cd2"},
- {file = "pydantic_core-2.41.4-cp313-cp313t-win_arm64.whl", hash = "sha256:19f3684868309db5263a11bace3c45d93f6f24afa2ffe75a647583df22a2ff89"},
- {file = "pydantic_core-2.41.4-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:e9205d97ed08a82ebb9a307e92914bb30e18cdf6f6b12ca4bedadb1588a0bfe1"},
- {file = "pydantic_core-2.41.4-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:82df1f432b37d832709fbcc0e24394bba04a01b6ecf1ee87578145c19cde12ac"},
- {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3b4cc4539e055cfa39a3763c939f9d409eb40e85813257dcd761985a108554"},
- {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b1eb1754fce47c63d2ff57fdb88c351a6c0150995890088b33767a10218eaa4e"},
- {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6ab5ab30ef325b443f379ddb575a34969c333004fca5a1daa0133a6ffaad616"},
- {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:31a41030b1d9ca497634092b46481b937ff9397a86f9f51bd41c4767b6fc04af"},
- {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a44ac1738591472c3d020f61c6df1e4015180d6262ebd39bf2aeb52571b60f12"},
- {file = "pydantic_core-2.41.4-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d72f2b5e6e82ab8f94ea7d0d42f83c487dc159c5240d8f83beae684472864e2d"},
- {file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:c4d1e854aaf044487d31143f541f7aafe7b482ae72a022c664b2de2e466ed0ad"},
- {file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:b568af94267729d76e6ee5ececda4e283d07bbb28e8148bb17adad93d025d25a"},
- {file = "pydantic_core-2.41.4-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:6d55fb8b1e8929b341cc313a81a26e0d48aa3b519c1dbaadec3a6a2b4fcad025"},
- {file = "pydantic_core-2.41.4-cp314-cp314-win32.whl", hash = "sha256:5b66584e549e2e32a1398df11da2e0a7eff45d5c2d9db9d5667c5e6ac764d77e"},
- {file = "pydantic_core-2.41.4-cp314-cp314-win_amd64.whl", hash = "sha256:557a0aab88664cc552285316809cab897716a372afaf8efdbef756f8b890e894"},
- {file = "pydantic_core-2.41.4-cp314-cp314-win_arm64.whl", hash = "sha256:3f1ea6f48a045745d0d9f325989d8abd3f1eaf47dd00485912d1a3a63c623a8d"},
- {file = "pydantic_core-2.41.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6c1fe4c5404c448b13188dd8bd2ebc2bdd7e6727fa61ff481bcc2cca894018da"},
- {file = "pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:523e7da4d43b113bf8e7b49fa4ec0c35bf4fe66b2230bfc5c13cc498f12c6c3e"},
- {file = "pydantic_core-2.41.4-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5729225de81fb65b70fdb1907fcf08c75d498f4a6f15af005aabb1fdadc19dfa"},
- {file = "pydantic_core-2.41.4-cp314-cp314t-win_amd64.whl", hash = "sha256:de2cfbb09e88f0f795fd90cf955858fc2c691df65b1f21f0aa00b99f3fbc661d"},
- {file = "pydantic_core-2.41.4-cp314-cp314t-win_arm64.whl", hash = "sha256:d34f950ae05a83e0ede899c595f312ca976023ea1db100cd5aa188f7005e3ab0"},
- {file = "pydantic_core-2.41.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:646e76293345954acea6966149683047b7b2ace793011922208c8e9da12b0062"},
- {file = "pydantic_core-2.41.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cc8e85a63085a137d286e2791037f5fdfff0aabb8b899483ca9c496dd5797338"},
- {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:692c622c8f859a17c156492783902d8370ac7e121a611bd6fe92cc71acf9ee8d"},
- {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d1e2906efb1031a532600679b424ef1d95d9f9fb507f813951f23320903adbd7"},
- {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e04e2f7f8916ad3ddd417a7abdd295276a0bf216993d9318a5d61cc058209166"},
- {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:df649916b81822543d1c8e0e1d079235f68acdc7d270c911e8425045a8cfc57e"},
- {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66c529f862fdba70558061bb936fe00ddbaaa0c647fd26e4a4356ef1d6561891"},
- {file = "pydantic_core-2.41.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc3b4c5a1fd3a311563ed866c2c9b62da06cb6398bee186484ce95c820db71cb"},
- {file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:6e0fc40d84448f941df9b3334c4b78fe42f36e3bf631ad54c3047a0cdddc2514"},
- {file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:44e7625332683b6c1c8b980461475cde9595eff94447500e80716db89b0da005"},
- {file = "pydantic_core-2.41.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:170ee6835f6c71081d031ef1c3b4dc4a12b9efa6a9540f93f95b82f3c7571ae8"},
- {file = "pydantic_core-2.41.4-cp39-cp39-win32.whl", hash = "sha256:3adf61415efa6ce977041ba9745183c0e1f637ca849773afa93833e04b163feb"},
- {file = "pydantic_core-2.41.4-cp39-cp39-win_amd64.whl", hash = "sha256:a238dd3feee263eeaeb7dc44aea4ba1364682c4f9f9467e6af5596ba322c2332"},
- {file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:a1b2cfec3879afb742a7b0bcfa53e4f22ba96571c9e54d6a3afe1052d17d843b"},
- {file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:d175600d975b7c244af6eb9c9041f10059f20b8bbffec9e33fdd5ee3f67cdc42"},
- {file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f184d657fa4947ae5ec9c47bd7e917730fa1cbb78195037e32dcbab50aca5ee"},
- {file = "pydantic_core-2.41.4-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ed810568aeffed3edc78910af32af911c835cc39ebbfacd1f0ab5dd53028e5c"},
- {file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:4f5d640aeebb438517150fdeec097739614421900e4a08db4a3ef38898798537"},
- {file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:4a9ab037b71927babc6d9e7fc01aea9e66dc2a4a34dff06ef0724a4049629f94"},
- {file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4dab9484ec605c3016df9ad4fd4f9a390bc5d816a3b10c6550f8424bb80b18c"},
- {file = "pydantic_core-2.41.4-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8a5028425820731d8c6c098ab642d7b8b999758e24acae03ed38a66eca8335"},
- {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1e5ab4fc177dd41536b3c32b2ea11380dd3d4619a385860621478ac2d25ceb00"},
- {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:3d88d0054d3fa11ce936184896bed3c1c5441d6fa483b498fac6a5d0dd6f64a9"},
- {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b2a054a8725f05b4b6503357e0ac1c4e8234ad3b0c2ac130d6ffc66f0e170e2"},
- {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b0d9db5a161c99375a0c68c058e227bee1d89303300802601d76a3d01f74e258"},
- {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:6273ea2c8ffdac7b7fda2653c49682db815aebf4a89243a6feccf5e36c18c347"},
- {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:4c973add636efc61de22530b2ef83a65f39b6d6f656df97f678720e20de26caa"},
- {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:b69d1973354758007f46cf2d44a4f3d0933f10b6dc9bf15cf1356e037f6f731a"},
- {file = "pydantic_core-2.41.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:3619320641fd212aaf5997b6ca505e97540b7e16418f4a241f44cdf108ffb50d"},
- {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:491535d45cd7ad7e4a2af4a5169b0d07bebf1adfd164b0368da8aa41e19907a5"},
- {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:54d86c0cada6aba4ec4c047d0e348cbad7063b87ae0f005d9f8c9ad04d4a92a2"},
- {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca1124aced216b2500dc2609eade086d718e8249cb9696660ab447d50a758bd"},
- {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6c9024169becccf0cb470ada03ee578d7348c119a0d42af3dcf9eda96e3a247c"},
- {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:26895a4268ae5a2849269f4991cdc97236e4b9c010e51137becf25182daac405"},
- {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:ca4df25762cf71308c446e33c9b1fdca2923a3f13de616e2a949f38bf21ff5a8"},
- {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:5a28fcedd762349519276c36634e71853b4541079cab4acaaac60c4421827308"},
- {file = "pydantic_core-2.41.4-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:c173ddcd86afd2535e2b695217e82191580663a1d1928239f877f5a1649ef39f"},
- {file = "pydantic_core-2.41.4.tar.gz", hash = "sha256:70e47929a9d4a1905a67e4b687d5946026390568a8e952b92824118063cee4d5"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:77b63866ca88d804225eaa4af3e664c5faf3568cea95360d21f4725ab6e07146"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dfa8a0c812ac681395907e71e1274819dec685fec28273a28905df579ef137e2"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5921a4d3ca3aee735d9fd163808f5e8dd6c6972101e4adbda9a4667908849b97"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e25c479382d26a2a41b7ebea1043564a937db462816ea07afa8a44c0866d52f9"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f547144f2966e1e16ae626d8ce72b4cfa0caedc7fa28052001c94fb2fcaa1c52"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f52298fbd394f9ed112d56f3d11aabd0d5bd27beb3084cc3d8ad069483b8941"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:100baa204bb412b74fe285fb0f3a385256dad1d1879f0a5cb1499ed2e83d132a"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05a2c8852530ad2812cb7914dc61a1125dc4e06252ee98e5638a12da6cc6fb6c"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:29452c56df2ed968d18d7e21f4ab0ac55e71dc59524872f6fc57dcf4a3249ed2"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:d5160812ea7a8a2ffbe233d8da666880cad0cbaf5d4de74ae15c313213d62556"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:df3959765b553b9440adfd3c795617c352154e497a4eaf3752555cfb5da8fc49"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-win32.whl", hash = "sha256:1f8d33a7f4d5a7889e60dc39856d76d09333d8a6ed0f5f1190635cbec70ec4ba"},
+ {file = "pydantic_core-2.41.5-cp310-cp310-win_amd64.whl", hash = "sha256:62de39db01b8d593e45871af2af9e497295db8d73b085f6bfd0b18c83c70a8f9"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:a3a52f6156e73e7ccb0f8cced536adccb7042be67cb45f9562e12b319c119da6"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7f3bf998340c6d4b0c9a2f02d6a400e51f123b59565d74dc60d252ce888c260b"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:378bec5c66998815d224c9ca994f1e14c0c21cb95d2f52b6021cc0b2a58f2a5a"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b576130c69225432866fe2f4a469a85a54ade141d96fd396dffcf607b558f8"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6cb58b9c66f7e4179a2d5e0f849c48eff5c1fca560994d6eb6543abf955a149e"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88942d3a3dff3afc8288c21e565e476fc278902ae4d6d134f1eeda118cc830b1"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f31d95a179f8d64d90f6831d71fa93290893a33148d890ba15de25642c5d075b"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c1df3d34aced70add6f867a8cf413e299177e0c22660cc767218373d0779487b"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4009935984bd36bd2c774e13f9a09563ce8de4abaa7226f5108262fa3e637284"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:34a64bc3441dc1213096a20fe27e8e128bd3ff89921706e83c0b1ac971276594"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:c9e19dd6e28fdcaa5a1de679aec4141f691023916427ef9bae8584f9c2fb3b0e"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-win32.whl", hash = "sha256:2c010c6ded393148374c0f6f0bf89d206bf3217f201faa0635dcd56bd1520f6b"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-win_amd64.whl", hash = "sha256:76ee27c6e9c7f16f47db7a94157112a2f3a00e958bc626e2f4ee8bec5c328fbe"},
+ {file = "pydantic_core-2.41.5-cp311-cp311-win_arm64.whl", hash = "sha256:4bc36bbc0b7584de96561184ad7f012478987882ebf9f9c389b23f432ea3d90f"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815"},
+ {file = "pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11"},
+ {file = "pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf"},
+ {file = "pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c"},
+ {file = "pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:8bfeaf8735be79f225f3fefab7f941c712aaca36f1128c9d7e2352ee1aa87bdf"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:346285d28e4c8017da95144c7f3acd42740d637ff41946af5ce6e5e420502dd5"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75dafbf87d6276ddc5b2bf6fae5254e3d0876b626eb24969a574fff9149ee5d"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7b93a4d08587e2b7e7882de461e82b6ed76d9026ce91ca7915e740ecc7855f60"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8465ab91a4bd96d36dde3263f06caa6a8a6019e4113f24dc753d79a8b3a3f82"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:299e0a22e7ae2b85c1a57f104538b2656e8ab1873511fd718a1c1c6f149b77b5"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:707625ef0983fcfb461acfaf14de2067c5942c6bb0f3b4c99158bed6fedd3cf3"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f41eb9797986d6ebac5e8edff36d5cef9de40def462311b3eb3eeded1431e425"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0384e2e1021894b1ff5a786dbf94771e2986ebe2869533874d7e43bc79c6f504"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:f0cd744688278965817fd0839c4a4116add48d23890d468bc436f78beb28abf5"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:753e230374206729bf0a807954bcc6c150d3743928a73faffee51ac6557a03c3"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-win32.whl", hash = "sha256:873e0d5b4fb9b89ef7c2d2a963ea7d02879d9da0da8d9d4933dee8ee86a8b460"},
+ {file = "pydantic_core-2.41.5-cp39-cp39-win_amd64.whl", hash = "sha256:e4f4a984405e91527a0d62649ee21138f8e3d0ef103be488c1dc11a80d7f184b"},
+ {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_10_12_x86_64.whl", hash = "sha256:b96d5f26b05d03cc60f11a7761a5ded1741da411e7fe0909e27a5e6a0cb7b034"},
+ {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-macosx_11_0_arm64.whl", hash = "sha256:634e8609e89ceecea15e2d61bc9ac3718caaaa71963717bf3c8f38bfde64242c"},
+ {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:93e8740d7503eb008aa2df04d3b9735f845d43ae845e6dcd2be0b55a2da43cd2"},
+ {file = "pydantic_core-2.41.5-graalpy311-graalpy242_311_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f15489ba13d61f670dcc96772e733aad1a6f9c429cc27574c6cdaed82d0146ad"},
+ {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd"},
+ {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc"},
+ {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56"},
+ {file = "pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b5819cd790dbf0c5eb9f82c73c16b39a65dd6dd4d1439dcdea7816ec9adddab8"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:5a4e67afbc95fa5c34cf27d9089bca7fcab4e51e57278d710320a70b956d1b9a"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ece5c59f0ce7d001e017643d8d24da587ea1f74f6993467d85ae8a5ef9d4f42b"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16f80f7abe3351f8ea6858914ddc8c77e02578544a0ebc15b4c2e1a0e813b0b2"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:33cb885e759a705b426baada1fe68cbb0a2e68e34c5d0d0289a364cf01709093"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:c8d8b4eb992936023be7dee581270af5c6e0697a8559895f527f5b7105ecd36a"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:242a206cd0318f95cd21bdacff3fcc3aab23e79bba5cac3db5a841c9ef9c6963"},
+ {file = "pydantic_core-2.41.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d3a978c4f57a597908b7e697229d996d77a6d3c94901e9edee593adada95ce1a"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b2379fa7ed44ddecb5bfe4e48577d752db9fc10be00a6b7446e9663ba143de26"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:266fb4cbf5e3cbd0b53669a6d1b039c45e3ce651fd5442eff4d07c2cc8d66808"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58133647260ea01e4d0500089a8c4f07bd7aa6ce109682b1426394988d8aaacc"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:287dad91cfb551c363dc62899a80e9e14da1f0e2b6ebde82c806612ca2a13ef1"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:03b77d184b9eb40240ae9fd676ca364ce1085f203e1b1256f8ab9984dca80a84"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:a668ce24de96165bb239160b3d854943128f4334822900534f2fe947930e5770"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f14f8f046c14563f8eb3f45f499cc658ab8d10072961e07225e507adb700e93f"},
+ {file = "pydantic_core-2.41.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:56121965f7a4dc965bff783d70b907ddf3d57f6eba29b6d2e5dabfaf07799c51"},
+ {file = "pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e"},
]

[package.dependencies]
@@ -1416,18 +1372,18 @@ windows-terminal = ["colorama (>=0.4.6)"]
[[package]]
name = "pylint"
-version = "4.0.2"
+version = "4.0.3"
description = "python code static checker"
optional = false
python-versions = ">=3.10.0"
groups = ["lint-and-format"]
files = [
- {file = "pylint-4.0.2-py3-none-any.whl", hash = "sha256:9627ccd129893fb8ee8e8010261cb13485daca83e61a6f854a85528ee579502d"},
- {file = "pylint-4.0.2.tar.gz", hash = "sha256:9c22dfa52781d3b79ce86ab2463940f874921a3e5707bcfc98dd0c019945014e"},
+ {file = "pylint-4.0.3-py3-none-any.whl", hash = "sha256:896d09afb0e78bbf2e030cd1f3d8dc92771a51f7e46828cbc3948a89cd03433a"},
+ {file = "pylint-4.0.3.tar.gz", hash = "sha256:a427fe76e0e5355e9fb9b604fd106c419cafb395886ba7f3cebebb03f30e081d"},
]

[package.dependencies]
-astroid = ">=4.0.1,<=4.1.dev0"
+astroid = ">=4.0.2,<=4.1.dev0"
colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""}
dill = [
{version = ">=0.2", markers = "python_version < \"3.11\""},
@@ -1465,21 +1421,6 @@ all = ["nodejs-wheel-binaries", "twine (>=3.4.1)"]
dev = ["twine (>=3.4.1)"]
nodejs = ["nodejs-wheel-binaries"]

-[[package]]
-name = "pytokens"
-version = "0.2.0"
-description = "A Fast, spec compliant Python 3.13+ tokenizer that runs on older Pythons."
-optional = false
-python-versions = ">=3.8"
-groups = ["main"]
-files = [
- {file = "pytokens-0.2.0-py3-none-any.whl", hash = "sha256:74d4b318c67f4295c13782ddd9abcb7e297ec5630ad060eb90abf7ebbefe59f8"},
- {file = "pytokens-0.2.0.tar.gz", hash = "sha256:532d6421364e5869ea57a9523bf385f02586d4662acbcc0342afd69511b4dd43"},
-]
-
-[package.extras]
-dev = ["black", "build", "mypy", "pytest", "pytest-cov", "setuptools", "tox", "twine", "wheel"]
-
[[package]]
name = "pytz"
version = "2025.2"
@@ -1672,62 +1613,62 @@ docs = ["markdown-include (>=0.8.1)", "mike (>=2.1.3)", "mkdocs-github-admonitio
[[package]]
name = "robotcode"
-version = "2.0.1"
+version = "2.0.4"
description = "Command line interface for RobotCode"
optional = false
python-versions = ">=3.10"
groups = ["dev", "type-checking"]
files = [
- {file = "robotcode-2.0.1-py3-none-any.whl", hash = "sha256:4259fc1a3b261c7d01194c019fd7c9cd800c93a0d18a5d3b45d5bd21293ac913"},
- {file = "robotcode-2.0.1.tar.gz", hash = "sha256:2fe0509d91e2f7e351c24bc83cb493ef443ca12c32b0b0a1499309015dd544d1"},
+ {file = "robotcode-2.0.4-py3-none-any.whl", hash = "sha256:bed5fcceb69cf2b65547cfc2cf9a2e4493992b448e3ef5da0b301f09c594b3ff"},
+ {file = "robotcode-2.0.4.tar.gz", hash = "sha256:d58d602c02899a65de6dc4fff393a2b667b1c7c04b7c0fccb526b4fb1ed586ed"},
]

[package.dependencies]
-robotcode-core = "2.0.1"
-robotcode-plugin = "2.0.1"
-robotcode-robot = "2.0.1"
+robotcode-core = "2.0.4"
+robotcode-plugin = "2.0.4"
+robotcode-robot = "2.0.4"

[package.extras]
-all = ["docutils", "pyyaml (>=5.4)", "rich", "robotcode-analyze (==2.0.1)", "robotcode-debugger (==2.0.1)", "robotcode-language-server (==2.0.1)", "robotcode-repl (==2.0.1)", "robotcode-repl-server (==2.0.1)", "robotcode-runner (==2.0.1)", "robotframework-robocop (>=2.0.0)"]
-analyze = ["robotcode-analyze (==2.0.1)"]
+all = ["docutils", "pyyaml (>=5.4)", "rich", "robotcode-analyze (==2.0.4)", "robotcode-debugger (==2.0.4)", "robotcode-language-server (==2.0.4)", "robotcode-repl (==2.0.4)", "robotcode-repl-server (==2.0.4)", "robotcode-runner (==2.0.4)", "robotframework-robocop (>=2.0.0)"]
+analyze = ["robotcode-analyze (==2.0.4)"]
colored = ["rich"]
-debugger = ["robotcode-debugger (==2.0.1)"]
-languageserver = ["robotcode-language-server (==2.0.1)"]
+debugger = ["robotcode-debugger (==2.0.4)"]
+languageserver = ["robotcode-language-server (==2.0.4)"]
lint = ["robotframework-robocop (>=2.0.0)"]
-repl = ["robotcode-repl (==2.0.1)"]
-replserver = ["robotcode-repl-server (==2.0.1)"]
+repl = ["robotcode-repl (==2.0.4)"]
+replserver = ["robotcode-repl-server (==2.0.4)"]
rest = ["docutils"]
-runner = ["robotcode-runner (==2.0.1)"]
+runner = ["robotcode-runner (==2.0.4)"]
yaml = ["pyyaml (>=5.4)"]

[[package]]
name = "robotcode-analyze"
-version = "2.0.1"
+version = "2.0.4"
description = "RobotCode analyze plugin for Robot Framework"
optional = false
python-versions = ">=3.10"
groups = ["type-checking"]
files = [
- {file = "robotcode_analyze-2.0.1-py3-none-any.whl", hash = "sha256:b6589fb93b90d82b8506301833157bd243bded858cf2d890b78387bc9ca9d5bf"},
- {file = "robotcode_analyze-2.0.1.tar.gz", hash = "sha256:4e57805e8ee79f8fb5c210c15f39e817ed109200e53d26c9fa81dd9474a87eab"},
+ {file = "robotcode_analyze-2.0.4-py3-none-any.whl", hash = "sha256:75660bbe9d8c3c83968e92f8492aaf2011eaac65b6f915f173f1e08564afae79"},
+ {file = "robotcode_analyze-2.0.4.tar.gz", hash = "sha256:c7d76f544b4e27cb970f2cec030c375e1765627db91ff1b26c6bf45a360b6534"},
]

[package.dependencies]
-robotcode = "2.0.1"
-robotcode-plugin = "2.0.1"
-robotcode-robot = "2.0.1"
+robotcode = "2.0.4"
+robotcode-plugin = "2.0.4"
+robotcode-robot = "2.0.4"
robotframework = ">=4.1.0"

[[package]]
name = "robotcode-core"
-version = "2.0.1"
+version = "2.0.4"
description = "Some core classes for RobotCode"
optional = false
python-versions = ">=3.10"
groups = ["dev", "type-checking"]
files = [
- {file = "robotcode_core-2.0.1-py3-none-any.whl", hash = "sha256:0f77be39d42ad4e331e5b7e19809fc631fb046b1424426d3f582ac2a83eb127a"},
- {file = "robotcode_core-2.0.1.tar.gz", hash = "sha256:0e5240064f057ff9e64641e896ed16c23c134ecdf9b5116a4c574a5477e4e679"},
+ {file = "robotcode_core-2.0.4-py3-none-any.whl", hash = "sha256:2100975759c6bc385643fc3ee578740f735619a92febd318f07a2910833a6a67"},
+ {file = "robotcode_core-2.0.4.tar.gz", hash = "sha256:99e17b824dd1d37c8e2c013b5ed8920af8aaa83fdd7a8938326db952a669683a"},
]

[package.dependencies]
@@ -1735,14 +1676,14 @@ typing-extensions = ">=4.4.0"
[[package]]
name = "robotcode-modifiers"
-version = "2.0.1"
+version = "2.0.4"
description = "Some Robot Framework Modifiers for RobotCode"
optional = false
python-versions = ">=3.10"
groups = ["dev"]
files = [
- {file = "robotcode_modifiers-2.0.1-py3-none-any.whl", hash = "sha256:827dc9c1ed4f63b6ebce6be67a105d2030cc18266dd4cf8de14f5411ea968678"},
- {file = "robotcode_modifiers-2.0.1.tar.gz", hash = "sha256:ce6159925d86360a1f205d4a45a57c7facf7fd3b3c95a1a966805a66b5dd7fab"},
+ {file = "robotcode_modifiers-2.0.4-py3-none-any.whl", hash = "sha256:9abbf02d7da5e01bfd0b9f440b5a005366b9f4079001467b066b1797da6bac7c"},
+ {file = "robotcode_modifiers-2.0.4.tar.gz", hash = "sha256:36dec0d624eb8366f2db9b52de337f5647ae0b0b821dc6d0e8da74999be5ea6c"},
]

[package.dependencies]
@@ -1750,14 +1691,14 @@ robotframework = ">=4.1.0"
[[package]]
name = "robotcode-plugin"
-version = "2.0.1"
+version = "2.0.4"
description = "Some classes for RobotCode plugin management"
optional = false
python-versions = ">=3.10"
groups = ["dev", "type-checking"]
files = [
- {file = "robotcode_plugin-2.0.1-py3-none-any.whl", hash = "sha256:85e239edfd8c4b6e28af2ee9139958eaef9f09e0aef395f2ed2bd2d5593db4f6"},
- {file = "robotcode_plugin-2.0.1.tar.gz", hash = "sha256:ceb89b663f8e017b1b20bf770113ab51074cf06516187f695e9917e8a84279b4"},
+ {file = "robotcode_plugin-2.0.4-py3-none-any.whl", hash = "sha256:a3cf6b73741a03feeb1e570010ba07519b4ecda05e3a65213388e8110c5b875a"},
+ {file = "robotcode_plugin-2.0.4.tar.gz", hash = "sha256:d58661344a0f4b547e93d611945032b01ae8a53f2e8b5db2d02d27aeaa3f09aa"},
]

[package.dependencies]
@@ -1768,39 +1709,39 @@ tomli-w = ">=1.0.0"
[[package]]
name = "robotcode-robot"
-version = "2.0.1"
+version = "2.0.4"
description = "Support classes for RobotCode for handling Robot Framework projects."
optional = false
python-versions = ">=3.10"
groups = ["dev", "type-checking"]
files = [
- {file = "robotcode_robot-2.0.1-py3-none-any.whl", hash = "sha256:628cafed3525a28928f95ae7f4a79dedd3983b042b443a8e0d10380cc8617b85"},
- {file = "robotcode_robot-2.0.1.tar.gz", hash = "sha256:bb7bba064bd4dbda240975fb9093388926daef5cc7faaa14ed366f89fc38f543"},
+ {file = "robotcode_robot-2.0.4-py3-none-any.whl", hash = "sha256:6df93aec89bd412e3c675044d0eec5d969c4c2392058dc41b170831296b229ac"},
+ {file = "robotcode_robot-2.0.4.tar.gz", hash = "sha256:6fd4a5dd41ca6133dadb71bdb0e94952203dca9d7f7055a3c8af99db68ef5dce"},
]

[package.dependencies]
platformdirs = ">=4.3"
-robotcode-core = "2.0.1"
+robotcode-core = "2.0.4"
robotframework = ">=4.1.0"
tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""}

[[package]]
name = "robotcode-runner"
-version = "2.0.1"
+version = "2.0.4"
description = "RobotCode runner for Robot Framework"
optional = false
python-versions = ">=3.10"
groups = ["dev"]
files = [
- {file = "robotcode_runner-2.0.1-py3-none-any.whl", hash = "sha256:7df0b67fa647fa75bea651c7caf1be6128d54836e5f7cc12d3117f77bd10d644"},
- {file = "robotcode_runner-2.0.1.tar.gz", hash = "sha256:aa350a2b0fe19d32c6efde223e4967d206f63ba085b6c444e914c7855dd15379"},
+ {file = "robotcode_runner-2.0.4-py3-none-any.whl", hash = "sha256:e835aad189c5702b1a2d061b96bb4e4e0acee9056ca453e8ced2572a9da58978"},
+ {file = "robotcode_runner-2.0.4.tar.gz", hash = "sha256:93c846f7524e0e00f72a38fcaa2ed7ad0c6d5d3b92a62ca0338d58866cd8cc15"},
]

[package.dependencies]
-robotcode = "2.0.1"
-robotcode-modifiers = "2.0.1"
-robotcode-plugin = "2.0.1"
-robotcode-robot = "2.0.1"
+robotcode = "2.0.4"
+robotcode-modifiers = "2.0.4"
+robotcode-plugin = "2.0.4"
+robotcode-robot = "2.0.4"
robotframework = ">=4.1.0"

[[package]]
@@ -1837,14 +1778,14 @@ xls = ["openpyxl", "pandas", "xlrd (>=1.2.0)"]
[[package]]
name = "robotframework-robocop"
-version = "6.9.2"
+version = "6.12.0"
description = "Static code analysis tool (linter) and code formatter for Robot Framework"
optional = false
python-versions = ">=3.9"
groups = ["lint-and-format"]
files = [
- {file = "robotframework_robocop-6.9.2-py3-none-any.whl", hash = "sha256:1b6111c614cce67af33998aa35cac60ccc8a1e495b0be44b6b8892a7cdcc7cf9"},
- {file = "robotframework_robocop-6.9.2.tar.gz", hash = "sha256:461b1ae8ad9a43ae1a29ba343ec9b626c65cd8615938e94b76c3f32c0eee39f6"},
+ {file = "robotframework_robocop-6.12.0-py3-none-any.whl", hash = "sha256:ee1146ff4fccf3bd01f98a7965947ec878fa6bb794dec137e49948726eb116e0"},
+ {file = "robotframework_robocop-6.12.0.tar.gz", hash = "sha256:b49a9677f5da514c40bb334a2cc97badab09754e5c09ee31df1e9b8c5d595852"},
]

[package.dependencies]
@@ -1875,127 +1816,127 @@ robotframework = ">=3.2"
[[package]]
name = "rpds-py"
-version = "0.28.0"
+version = "0.29.0"
description = "Python bindings to Rust's persistent data structures (rpds)"
optional = false
python-versions = ">=3.10"
groups = ["main"]
files = [
- {file = "rpds_py-0.28.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:7b6013db815417eeb56b2d9d7324e64fcd4fa289caeee6e7a78b2e11fc9b438a"},
- {file = "rpds_py-0.28.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a4c6b05c685c0c03f80dabaeb73e74218c49deea965ca63f76a752807397207"},
- {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4794c6c3fbe8f9ac87699b131a1f26e7b4abcf6d828da46a3a52648c7930eba"},
- {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2e8456b6ee5527112ff2354dd9087b030e3429e43a74f480d4a5ca79d269fd85"},
- {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:beb880a9ca0a117415f241f66d56025c02037f7c4efc6fe59b5b8454f1eaa50d"},
- {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6897bebb118c44b38c9cb62a178e09f1593c949391b9a1a6fe777ccab5934ee7"},
- {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1b553dd06e875249fd43efd727785efb57a53180e0fde321468222eabbeaafa"},
- {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:f0b2044fdddeea5b05df832e50d2a06fe61023acb44d76978e1b060206a8a476"},
- {file = "rpds_py-0.28.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:05cf1e74900e8da73fa08cc76c74a03345e5a3e37691d07cfe2092d7d8e27b04"},
- {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:efd489fec7c311dae25e94fe7eeda4b3d06be71c68f2cf2e8ef990ffcd2cd7e8"},
- {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:ada7754a10faacd4f26067e62de52d6af93b6d9542f0df73c57b9771eb3ba9c4"},
- {file = "rpds_py-0.28.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c2a34fd26588949e1e7977cfcbb17a9a42c948c100cab890c6d8d823f0586457"},
- {file = "rpds_py-0.28.0-cp310-cp310-win32.whl", hash = "sha256:f9174471d6920cbc5e82a7822de8dfd4dcea86eb828b04fc8c6519a77b0ee51e"},
- {file = "rpds_py-0.28.0-cp310-cp310-win_amd64.whl", hash = "sha256:6e32dd207e2c4f8475257a3540ab8a93eff997abfa0a3fdb287cae0d6cd874b8"},
- {file = "rpds_py-0.28.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:03065002fd2e287725d95fbc69688e0c6daf6c6314ba38bdbaa3895418e09296"},
- {file = "rpds_py-0.28.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:28ea02215f262b6d078daec0b45344c89e161eab9526b0d898221d96fdda5f27"},
- {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25dbade8fbf30bcc551cb352376c0ad64b067e4fc56f90e22ba70c3ce205988c"},
- {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3c03002f54cc855860bfdc3442928ffdca9081e73b5b382ed0b9e8efe6e5e205"},
- {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b9699fa7990368b22032baf2b2dce1f634388e4ffc03dfefaaac79f4695edc95"},
- {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b9b06fe1a75e05e0713f06ea0c89ecb6452210fd60e2f1b6ddc1067b990e08d9"},
- {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9f83e7b326a3f9ec3ef84cda98fb0a74c7159f33e692032233046e7fd15da2"},
- {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:0d3259ea9ad8743a75a43eb7819324cdab393263c91be86e2d1901ee65c314e0"},
- {file = "rpds_py-0.28.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a7548b345f66f6695943b4ef6afe33ccd3f1b638bd9afd0f730dd255c249c9e"},
- {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c9a40040aa388b037eb39416710fbcce9443498d2eaab0b9b45ae988b53f5c67"},
- {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8f60c7ea34e78c199acd0d3cda37a99be2c861dd2b8cf67399784f70c9f8e57d"},
- {file = "rpds_py-0.28.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1571ae4292649100d743b26d5f9c63503bb1fedf538a8f29a98dce2d5ba6b4e6"},
- {file = "rpds_py-0.28.0-cp311-cp311-win32.whl", hash = "sha256:5cfa9af45e7c1140af7321fa0bef25b386ee9faa8928c80dc3a5360971a29e8c"},
- {file = "rpds_py-0.28.0-cp311-cp311-win_amd64.whl", hash = "sha256:dd8d86b5d29d1b74100982424ba53e56033dc47720a6de9ba0259cf81d7cecaa"},
- {file = "rpds_py-0.28.0-cp311-cp311-win_arm64.whl", hash = "sha256:4e27d3a5709cc2b3e013bf93679a849213c79ae0573f9b894b284b55e729e120"},
- {file = "rpds_py-0.28.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:6b4f28583a4f247ff60cd7bdda83db8c3f5b05a7a82ff20dd4b078571747708f"},
- {file = "rpds_py-0.28.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d678e91b610c29c4b3d52a2c148b641df2b4676ffe47c59f6388d58b99cdc424"},
- {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e819e0e37a44a78e1383bf1970076e2ccc4dc8c2bbaa2f9bd1dc987e9afff628"},
- {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5ee514e0f0523db5d3fb171f397c54875dbbd69760a414dccf9d4d7ad628b5bd"},
- {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5f3fa06d27fdcee47f07a39e02862da0100cb4982508f5ead53ec533cd5fe55e"},
- {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:46959ef2e64f9e4a41fc89aa20dbca2b85531f9a72c21099a3360f35d10b0d5a"},
- {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8455933b4bcd6e83fde3fefc987a023389c4b13f9a58c8d23e4b3f6d13f78c84"},
- {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:ad50614a02c8c2962feebe6012b52f9802deec4263946cddea37aaf28dd25a66"},
- {file = "rpds_py-0.28.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e5deca01b271492553fdb6c7fd974659dce736a15bae5dad7ab8b93555bceb28"},
- {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:735f8495a13159ce6a0d533f01e8674cec0c57038c920495f87dcb20b3ddb48a"},
- {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:961ca621ff10d198bbe6ba4957decca61aa2a0c56695384c1d6b79bf61436df5"},
- {file = "rpds_py-0.28.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2374e16cc9131022e7d9a8f8d65d261d9ba55048c78f3b6e017971a4f5e6353c"},
- {file = "rpds_py-0.28.0-cp312-cp312-win32.whl", hash = "sha256:d15431e334fba488b081d47f30f091e5d03c18527c325386091f31718952fe08"},
- {file = "rpds_py-0.28.0-cp312-cp312-win_amd64.whl", hash = "sha256:a410542d61fc54710f750d3764380b53bf09e8c4edbf2f9141a82aa774a04f7c"},
- {file = "rpds_py-0.28.0-cp312-cp312-win_arm64.whl", hash = "sha256:1f0cfd1c69e2d14f8c892b893997fa9a60d890a0c8a603e88dca4955f26d1edd"},
- {file = "rpds_py-0.28.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:e9e184408a0297086f880556b6168fa927d677716f83d3472ea333b42171ee3b"},
- {file = "rpds_py-0.28.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:edd267266a9b0448f33dc465a97cfc5d467594b600fe28e7fa2f36450e03053a"},
- {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85beb8b3f45e4e32f6802fb6cd6b17f615ef6c6a52f265371fb916fae02814aa"},
- {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d2412be8d00a1b895f8ad827cc2116455196e20ed994bb704bf138fe91a42724"},
- {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cf128350d384b777da0e68796afdcebc2e9f63f0e9f242217754e647f6d32491"},
- {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a2036d09b363aa36695d1cc1a97b36865597f4478470b0697b5ee9403f4fe399"},
- {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8e1e9be4fa6305a16be628959188e4fd5cd6f1b0e724d63c6d8b2a8adf74ea6"},
- {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:0a403460c9dd91a7f23fc3188de6d8977f1d9603a351d5db6cf20aaea95b538d"},
- {file = "rpds_py-0.28.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d7366b6553cdc805abcc512b849a519167db8f5e5c3472010cd1228b224265cb"},
- {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b43c6a3726efd50f18d8120ec0551241c38785b68952d240c45ea553912ac41"},
- {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0cb7203c7bc69d7c1585ebb33a2e6074492d2fc21ad28a7b9d40457ac2a51ab7"},
- {file = "rpds_py-0.28.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7a52a5169c664dfb495882adc75c304ae1d50df552fbd68e100fdc719dee4ff9"},
- {file = "rpds_py-0.28.0-cp313-cp313-win32.whl", hash = "sha256:2e42456917b6687215b3e606ab46aa6bca040c77af7df9a08a6dcfe8a4d10ca5"},
- {file = "rpds_py-0.28.0-cp313-cp313-win_amd64.whl", hash = "sha256:e0a0311caedc8069d68fc2bf4c9019b58a2d5ce3cd7cb656c845f1615b577e1e"},
- {file = "rpds_py-0.28.0-cp313-cp313-win_arm64.whl", hash = "sha256:04c1b207ab8b581108801528d59ad80aa83bb170b35b0ddffb29c20e411acdc1"},
- {file = "rpds_py-0.28.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f296ea3054e11fc58ad42e850e8b75c62d9a93a9f981ad04b2e5ae7d2186ff9c"},
- {file = "rpds_py-0.28.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5a7306c19b19005ad98468fcefeb7100b19c79fc23a5f24a12e06d91181193fa"},
- {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5d9b86aa501fed9862a443c5c3116f6ead8bc9296185f369277c42542bd646b"},
- {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e5bbc701eff140ba0e872691d573b3d5d30059ea26e5785acba9132d10c8c31d"},
- {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9a5690671cd672a45aa8616d7374fdf334a1b9c04a0cac3c854b1136e92374fe"},
- {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9f1d92ecea4fa12f978a367c32a5375a1982834649cdb96539dcdc12e609ab1a"},
- {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d252db6b1a78d0a3928b6190156042d54c93660ce4d98290d7b16b5296fb7cc"},
- {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:d61b355c3275acb825f8777d6c4505f42b5007e357af500939d4a35b19177259"},
- {file = "rpds_py-0.28.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:acbe5e8b1026c0c580d0321c8aae4b0a1e1676861d48d6e8c6586625055b606a"},
- {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8aa23b6f0fc59b85b4c7d89ba2965af274346f738e8d9fc2455763602e62fd5f"},
- {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7b14b0c680286958817c22d76fcbca4800ddacef6f678f3a7c79a1fe7067fe37"},
- {file = "rpds_py-0.28.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:bcf1d210dfee61a6c86551d67ee1031899c0fdbae88b2d44a569995d43797712"},
- {file = "rpds_py-0.28.0-cp313-cp313t-win32.whl", hash = "sha256:3aa4dc0fdab4a7029ac63959a3ccf4ed605fee048ba67ce89ca3168da34a1342"},
- {file = "rpds_py-0.28.0-cp313-cp313t-win_amd64.whl", hash = "sha256:7b7d9d83c942855e4fdcfa75d4f96f6b9e272d42fffcb72cd4bb2577db2e2907"},
- {file = "rpds_py-0.28.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:dcdcb890b3ada98a03f9f2bb108489cdc7580176cb73b4f2d789e9a1dac1d472"},
- {file = "rpds_py-0.28.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f274f56a926ba2dc02976ca5b11c32855cbd5925534e57cfe1fda64e04d1add2"},
- {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fe0438ac4a29a520ea94c8c7f1754cdd8feb1bc490dfda1bfd990072363d527"},
- {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8a358a32dd3ae50e933347889b6af9a1bdf207ba5d1a3f34e1a38cd3540e6733"},
- {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e80848a71c78aa328fefaba9c244d588a342c8e03bda518447b624ea64d1ff56"},
- {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f586db2e209d54fe177e58e0bc4946bea5fb0102f150b1b2f13de03e1f0976f8"},
- {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae8ee156d6b586e4292491e885d41483136ab994e719a13458055bec14cf370"},
- {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:a805e9b3973f7e27f7cab63a6b4f61d90f2e5557cff73b6e97cd5b8540276d3d"},
- {file = "rpds_py-0.28.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5d3fd16b6dc89c73a4da0b4ac8b12a7ecc75b2864b95c9e5afed8003cb50a728"},
- {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6796079e5d24fdaba6d49bda28e2c47347e89834678f2bc2c1b4fc1489c0fb01"},
- {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:76500820c2af232435cbe215e3324c75b950a027134e044423f59f5b9a1ba515"},
- {file = "rpds_py-0.28.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bbdc5640900a7dbf9dd707fe6388972f5bbd883633eb68b76591044cfe346f7e"},
- {file = "rpds_py-0.28.0-cp314-cp314-win32.whl", hash = "sha256:adc8aa88486857d2b35d75f0640b949759f79dc105f50aa2c27816b2e0dd749f"},
- {file = "rpds_py-0.28.0-cp314-cp314-win_amd64.whl", hash = "sha256:66e6fa8e075b58946e76a78e69e1a124a21d9a48a5b4766d15ba5b06869d1fa1"},
- {file = "rpds_py-0.28.0-cp314-cp314-win_arm64.whl", hash = "sha256:a6fe887c2c5c59413353b7c0caff25d0e566623501ccfff88957fa438a69377d"},
- {file = "rpds_py-0.28.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:7a69df082db13c7070f7b8b1f155fa9e687f1d6aefb7b0e3f7231653b79a067b"},
- {file = "rpds_py-0.28.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b1cde22f2c30ebb049a9e74c5374994157b9b70a16147d332f89c99c5960737a"},
- {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5338742f6ba7a51012ea470bd4dc600a8c713c0c72adaa0977a1b1f4327d6592"},
- {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e1460ebde1bcf6d496d80b191d854adedcc619f84ff17dc1c6d550f58c9efbba"},
- {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e3eb248f2feba84c692579257a043a7699e28a77d86c77b032c1d9fbb3f0219c"},
- {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3bbba5def70b16cd1c1d7255666aad3b290fbf8d0fe7f9f91abafb73611a91"},
- {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3114f4db69ac5a1f32e7e4d1cbbe7c8f9cf8217f78e6e002cedf2d54c2a548ed"},
- {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:4b0cb8a906b1a0196b863d460c0222fb8ad0f34041568da5620f9799b83ccf0b"},
- {file = "rpds_py-0.28.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:cf681ac76a60b667106141e11a92a3330890257e6f559ca995fbb5265160b56e"},
- {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1e8ee6413cfc677ce8898d9cde18cc3a60fc2ba756b0dec5b71eb6eb21c49fa1"},
- {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b3072b16904d0b5572a15eb9d31c1954e0d3227a585fc1351aa9878729099d6c"},
- {file = "rpds_py-0.28.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b670c30fd87a6aec281c3c9896d3bae4b205fd75d79d06dc87c2503717e46092"},
- {file = "rpds_py-0.28.0-cp314-cp314t-win32.whl", hash = "sha256:8014045a15b4d2b3476f0a287fcc93d4f823472d7d1308d47884ecac9e612be3"},
- {file = "rpds_py-0.28.0-cp314-cp314t-win_amd64.whl", hash = "sha256:7a4e59c90d9c27c561eb3160323634a9ff50b04e4f7820600a2beb0ac90db578"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f5e7101145427087e493b9c9b959da68d357c28c562792300dd21a095118ed16"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:31eb671150b9c62409a888850aaa8e6533635704fe2b78335f9aaf7ff81eec4d"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:48b55c1f64482f7d8bd39942f376bfdf2f6aec637ee8c805b5041e14eeb771db"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24743a7b372e9a76171f6b69c01aedf927e8ac3e16c474d9fe20d552a8cb45c7"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:389c29045ee8bbb1627ea190b4976a310a295559eaf9f1464a1a6f2bf84dde78"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:23690b5827e643150cf7b49569679ec13fe9a610a15949ed48b85eb7f98f34ec"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f0c9266c26580e7243ad0d72fc3e01d6b33866cfab5084a6da7576bcf1c4f72"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:4c6c4db5d73d179746951486df97fd25e92396be07fc29ee8ff9a8f5afbdfb27"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3b695a8fa799dd2cfdb4804b37096c5f6dba1ac7f48a7fbf6d0485bcd060316"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:6aa1bfce3f83baf00d9c5fcdbba93a3ab79958b4c7d7d1f55e7fe68c20e63912"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:7b0f9dceb221792b3ee6acb5438eb1f02b0cb2c247796a72b016dcc92c6de829"},
- {file = "rpds_py-0.28.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:5d0145edba8abd3db0ab22b5300c99dc152f5c9021fab861be0f0544dc3cbc5f"},
- {file = "rpds_py-0.28.0.tar.gz", hash = "sha256:abd4df20485a0983e2ca334a216249b6186d6e3c1627e106651943dbdb791aea"},
+ {file = "rpds_py-0.29.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:4ae4b88c6617e1b9e5038ab3fccd7bac0842fdda2b703117b2aa99bc85379113"},
+ {file = "rpds_py-0.29.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7d9128ec9d8cecda6f044001fde4fb71ea7c24325336612ef8179091eb9596b9"},
+ {file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37812c3da8e06f2bb35b3cf10e4a7b68e776a706c13058997238762b4e07f4f"},
+ {file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:66786c3fb1d8de416a7fa8e1cb1ec6ba0a745b2b0eee42f9b7daa26f1a495545"},
+ {file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b58f5c77f1af888b5fd1876c9a0d9858f6f88a39c9dd7c073a88e57e577da66d"},
+ {file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:799156ef1f3529ed82c36eb012b5d7a4cf4b6ef556dd7cc192148991d07206ae"},
+ {file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:453783477aa4f2d9104c4b59b08c871431647cb7af51b549bbf2d9eb9c827756"},
+ {file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_31_riscv64.whl", hash = "sha256:24a7231493e3c4a4b30138b50cca089a598e52c34cf60b2f35cebf62f274fdea"},
+ {file = "rpds_py-0.29.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7033c1010b1f57bb44d8067e8c25aa6fa2e944dbf46ccc8c92b25043839c3fd2"},
+ {file = "rpds_py-0.29.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0248b19405422573621172ab8e3a1f29141362d13d9f72bafa2e28ea0cdca5a2"},
+ {file = "rpds_py-0.29.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f9f436aee28d13b9ad2c764fc273e0457e37c2e61529a07b928346b219fcde3b"},
+ {file = "rpds_py-0.29.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:24a16cb7163933906c62c272de20ea3c228e4542c8c45c1d7dc2b9913e17369a"},
+ {file = "rpds_py-0.29.0-cp310-cp310-win32.whl", hash = "sha256:1a409b0310a566bfd1be82119891fefbdce615ccc8aa558aff7835c27988cbef"},
+ {file = "rpds_py-0.29.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5523b0009e7c3c1263471b69d8da1c7d41b3ecb4cb62ef72be206b92040a950"},
+ {file = "rpds_py-0.29.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9b9c764a11fd637e0322a488560533112837f5334ffeb48b1be20f6d98a7b437"},
+ {file = "rpds_py-0.29.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3fd2164d73812026ce970d44c3ebd51e019d2a26a4425a5dcbdfa93a34abc383"},
+ {file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a097b7f7f7274164566ae90a221fd725363c0e9d243e2e9ed43d195ccc5495c"},
+ {file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cdc0490374e31cedefefaa1520d5fe38e82fde8748cbc926e7284574c714d6b"},
+ {file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:89ca2e673ddd5bde9b386da9a0aac0cab0e76f40c8f0aaf0d6311b6bbf2aa311"},
+ {file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a5d9da3ff5af1ca1249b1adb8ef0573b94c76e6ae880ba1852f033bf429d4588"},
+ {file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8238d1d310283e87376c12f658b61e1ee23a14c0e54c7c0ce953efdbdc72deed"},
+ {file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_31_riscv64.whl", hash = "sha256:2d6fb2ad1c36f91c4646989811e84b1ea5e0c3cf9690b826b6e32b7965853a63"},
+ {file = "rpds_py-0.29.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:534dc9df211387547267ccdb42253aa30527482acb38dd9b21c5c115d66a96d2"},
+ {file = "rpds_py-0.29.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d456e64724a075441e4ed648d7f154dc62e9aabff29bcdf723d0c00e9e1d352f"},
+ {file = "rpds_py-0.29.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a738f2da2f565989401bd6fd0b15990a4d1523c6d7fe83f300b7e7d17212feca"},
+ {file = "rpds_py-0.29.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a110e14508fd26fd2e472bb541f37c209409876ba601cf57e739e87d8a53cf95"},
+ {file = "rpds_py-0.29.0-cp311-cp311-win32.whl", hash = "sha256:923248a56dd8d158389a28934f6f69ebf89f218ef96a6b216a9be6861804d3f4"},
+ {file = "rpds_py-0.29.0-cp311-cp311-win_amd64.whl", hash = "sha256:539eb77eb043afcc45314d1be09ea6d6cafb3addc73e0547c171c6d636957f60"},
+ {file = "rpds_py-0.29.0-cp311-cp311-win_arm64.whl", hash = "sha256:bdb67151ea81fcf02d8f494703fb728d4d34d24556cbff5f417d74f6f5792e7c"},
+ {file = "rpds_py-0.29.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a0891cfd8db43e085c0ab93ab7e9b0c8fee84780d436d3b266b113e51e79f954"},
+ {file = "rpds_py-0.29.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3897924d3f9a0361472d884051f9a2460358f9a45b1d85a39a158d2f8f1ad71c"},
+ {file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2a21deb8e0d1571508c6491ce5ea5e25669b1dd4adf1c9d64b6314842f708b5d"},
+ {file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9efe71687d6427737a0a2de9ca1c0a216510e6cd08925c44162be23ed7bed2d5"},
+ {file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40f65470919dc189c833e86b2c4bd21bd355f98436a2cef9e0a9a92aebc8e57e"},
+ {file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:def48ff59f181130f1a2cb7c517d16328efac3ec03951cca40c1dc2049747e83"},
+ {file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad7bd570be92695d89285a4b373006930715b78d96449f686af422debb4d3949"},
+ {file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_31_riscv64.whl", hash = "sha256:5a572911cd053137bbff8e3a52d31c5d2dba51d3a67ad902629c70185f3f2181"},
+ {file = "rpds_py-0.29.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d583d4403bcbf10cffc3ab5cee23d7643fcc960dff85973fd3c2d6c86e8dbb0c"},
+ {file = "rpds_py-0.29.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:070befbb868f257d24c3bb350dbd6e2f645e83731f31264b19d7231dd5c396c7"},
+ {file = "rpds_py-0.29.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fc935f6b20b0c9f919a8ff024739174522abd331978f750a74bb68abd117bd19"},
+ {file = "rpds_py-0.29.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8c5a8ecaa44ce2d8d9d20a68a2483a74c07f05d72e94a4dff88906c8807e77b0"},
+ {file = "rpds_py-0.29.0-cp312-cp312-win32.whl", hash = "sha256:ba5e1aeaf8dd6d8f6caba1f5539cddda87d511331714b7b5fc908b6cfc3636b7"},
+ {file = "rpds_py-0.29.0-cp312-cp312-win_amd64.whl", hash = "sha256:b5f6134faf54b3cb83375db0f113506f8b7770785be1f95a631e7e2892101977"},
+ {file = "rpds_py-0.29.0-cp312-cp312-win_arm64.whl", hash = "sha256:b016eddf00dca7944721bf0cd85b6af7f6c4efaf83ee0b37c4133bd39757a8c7"},
+ {file = "rpds_py-0.29.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1585648d0760b88292eecab5181f5651111a69d90eff35d6b78aa32998886a61"},
+ {file = "rpds_py-0.29.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:521807963971a23996ddaf764c682b3e46459b3c58ccd79fefbe16718db43154"},
+ {file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8896986efaa243ab713c69e6491a4138410f0fe36f2f4c71e18bd5501e8014"},
+ {file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1d24564a700ef41480a984c5ebed62b74e6ce5860429b98b1fede76049e953e6"},
+ {file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e6596b93c010d386ae46c9fba9bfc9fc5965fa8228edeac51576299182c2e31c"},
+ {file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5cc58aac218826d054c7da7f95821eba94125d88be673ff44267bb89d12a5866"},
+ {file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de73e40ebc04dd5d9556f50180395322193a78ec247e637e741c1b954810f295"},
+ {file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_31_riscv64.whl", hash = "sha256:295ce5ac7f0cf69a651ea75c8f76d02a31f98e5698e82a50a5f4d4982fbbae3b"},
+ {file = "rpds_py-0.29.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1ea59b23ea931d494459c8338056fe7d93458c0bf3ecc061cd03916505369d55"},
+ {file = "rpds_py-0.29.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f49d41559cebd608042fdcf54ba597a4a7555b49ad5c1c0c03e0af82692661cd"},
+ {file = "rpds_py-0.29.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:05a2bd42768ea988294ca328206efbcc66e220d2d9b7836ee5712c07ad6340ea"},
+ {file = "rpds_py-0.29.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33ca7bdfedd83339ca55da3a5e1527ee5870d4b8369456b5777b197756f3ca22"},
+ {file = "rpds_py-0.29.0-cp313-cp313-win32.whl", hash = "sha256:20c51ae86a0bb9accc9ad4e6cdeec58d5ebb7f1b09dd4466331fc65e1766aae7"},
+ {file = "rpds_py-0.29.0-cp313-cp313-win_amd64.whl", hash = "sha256:6410e66f02803600edb0b1889541f4b5cc298a5ccda0ad789cc50ef23b54813e"},
+ {file = "rpds_py-0.29.0-cp313-cp313-win_arm64.whl", hash = "sha256:56838e1cd9174dc23c5691ee29f1d1be9eab357f27efef6bded1328b23e1ced2"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:37d94eadf764d16b9a04307f2ab1d7af6dc28774bbe0535c9323101e14877b4c"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:d472cf73efe5726a067dce63eebe8215b14beabea7c12606fd9994267b3cfe2b"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72fdfd5ff8992e4636621826371e3ac5f3e3b8323e9d0e48378e9c13c3dac9d0"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2549d833abdf8275c901313b9e8ff8fba57e50f6a495035a2a4e30621a2f7cc4"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4448dad428f28a6a767c3e3b80cde3446a22a0efbddaa2360f4bb4dc836d0688"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:115f48170fd4296a33938d8c11f697f5f26e0472e43d28f35624764173a60e4d"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e5bb73ffc029820f4348e9b66b3027493ae00bca6629129cd433fd7a76308ee"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_31_riscv64.whl", hash = "sha256:b1581fcde18fcdf42ea2403a16a6b646f8eb1e58d7f90a0ce693da441f76942e"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:16e9da2bda9eb17ea318b4c335ec9ac1818e88922cbe03a5743ea0da9ecf74fb"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:28fd300326dd21198f311534bdb6d7e989dd09b3418b3a91d54a0f384c700967"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:2aba991e041d031c7939e1358f583ae405a7bf04804ca806b97a5c0e0af1ea5e"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f437026dbbc3f08c99cc41a5b2570c6e1a1ddbe48ab19a9b814254128d4ea7a"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-win32.whl", hash = "sha256:6e97846e9800a5d0fe7be4d008f0c93d0feeb2700da7b1f7528dabafb31dfadb"},
+ {file = "rpds_py-0.29.0-cp313-cp313t-win_amd64.whl", hash = "sha256:f49196aec7c4b406495f60e6f947ad71f317a765f956d74bbd83996b9edc0352"},
+ {file = "rpds_py-0.29.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:394d27e4453d3b4d82bb85665dc1fcf4b0badc30fc84282defed71643b50e1a1"},
+ {file = "rpds_py-0.29.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:55d827b2ae95425d3be9bc9a5838b6c29d664924f98146557f7715e331d06df8"},
+ {file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc31a07ed352e5462d3ee1b22e89285f4ce97d5266f6d1169da1142e78045626"},
+ {file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c4695dd224212f6105db7ea62197144230b808d6b2bba52238906a2762f1d1e7"},
+ {file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcae1770b401167f8b9e1e3f566562e6966ffa9ce63639916248a9e25fa8a244"},
+ {file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90f30d15f45048448b8da21c41703b31c61119c06c216a1bf8c245812a0f0c17"},
+ {file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44a91e0ab77bdc0004b43261a4b8cd6d6b451e8d443754cfda830002b5745b32"},
+ {file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_31_riscv64.whl", hash = "sha256:4aa195e5804d32c682e453b34474f411ca108e4291c6a0f824ebdc30a91c973c"},
+ {file = "rpds_py-0.29.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:7971bdb7bf4ee0f7e6f67fa4c7fbc6019d9850cc977d126904392d363f6f8318"},
+ {file = "rpds_py-0.29.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8ae33ad9ce580c7a47452c3b3f7d8a9095ef6208e0a0c7e4e2384f9fc5bf8212"},
+ {file = "rpds_py-0.29.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:c661132ab2fb4eeede2ef69670fd60da5235209874d001a98f1542f31f2a8a94"},
+ {file = "rpds_py-0.29.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:bb78b3a0d31ac1bde132c67015a809948db751cb4e92cdb3f0b242e430b6ed0d"},
+ {file = "rpds_py-0.29.0-cp314-cp314-win32.whl", hash = "sha256:f475f103488312e9bd4000bc890a95955a07b2d0b6e8884aef4be56132adbbf1"},
+ {file = "rpds_py-0.29.0-cp314-cp314-win_amd64.whl", hash = "sha256:b9cf2359a4fca87cfb6801fae83a76aedf66ee1254a7a151f1341632acf67f1b"},
+ {file = "rpds_py-0.29.0-cp314-cp314-win_arm64.whl", hash = "sha256:9ba8028597e824854f0f1733d8b964e914ae3003b22a10c2c664cb6927e0feb9"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:e71136fd0612556b35c575dc2726ae04a1669e6a6c378f2240312cf5d1a2ab10"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:76fe96632d53f3bf0ea31ede2f53bbe3540cc2736d4aec3b3801b0458499ef3a"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9459a33f077130dbb2c7c3cea72ee9932271fb3126404ba2a2661e4fe9eb7b79"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5c9546cfdd5d45e562cc0444b6dddc191e625c62e866bf567a2c69487c7ad28a"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12597d11d97b8f7e376c88929a6e17acb980e234547c92992f9f7c058f1a7310"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28de03cf48b8a9e6ec10318f2197b83946ed91e2891f651a109611be4106ac4b"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd7951c964069039acc9d67a8ff1f0a7f34845ae180ca542b17dc1456b1f1808"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_31_riscv64.whl", hash = "sha256:c07d107b7316088f1ac0177a7661ca0c6670d443f6fe72e836069025e6266761"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1de2345af363d25696969befc0c1688a6cb5e8b1d32b515ef84fc245c6cddba3"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:00e56b12d2199ca96068057e1ae7f9998ab6e99cda82431afafd32f3ec98cca9"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:3919a3bbecee589300ed25000b6944174e07cd20db70552159207b3f4bbb45b8"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e7fa2ccc312bbd91e43aa5e0869e46bc03278a3dddb8d58833150a18b0f0283a"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-win32.whl", hash = "sha256:97c817863ffc397f1e6a6e9d2d89fe5408c0a9922dac0329672fb0f35c867ea5"},
+ {file = "rpds_py-0.29.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2023473f444752f0f82a58dfcbee040d0a1b3d1b3c2ec40e884bd25db6d117d2"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:acd82a9e39082dc5f4492d15a6b6c8599aa21db5c35aaf7d6889aea16502c07d"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:715b67eac317bf1c7657508170a3e011a1ea6ccb1c9d5f296e20ba14196be6b3"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3b1b87a237cb2dba4db18bcfaaa44ba4cd5936b91121b62292ff21df577fc43"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1c3c3e8101bb06e337c88eb0c0ede3187131f19d97d43ea0e1c5407ea74c0cbf"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2b8e54d6e61f3ecd3abe032065ce83ea63417a24f437e4a3d73d2f85ce7b7cfe"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3fbd4e9aebf110473a420dea85a238b254cf8a15acb04b22a5a6b5ce8925b760"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80fdf53d36e6c72819993e35d1ebeeb8e8fc688d0c6c2b391b55e335b3afba5a"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_31_riscv64.whl", hash = "sha256:ea7173df5d86f625f8dde6d5929629ad811ed8decda3b60ae603903839ac9ac0"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:76054d540061eda273274f3d13a21a4abdde90e13eaefdc205db37c05230efce"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_aarch64.whl", hash = "sha256:9f84c549746a5be3bc7415830747a3a0312573afc9f95785eb35228bb17742ec"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_i686.whl", hash = "sha256:0ea962671af5cb9a260489e311fa22b2e97103e3f9f0caaea6f81390af96a9ed"},
+ {file = "rpds_py-0.29.0-pp311-pypy311_pp73-musllinux_1_2_x86_64.whl", hash = "sha256:f7728653900035fb7b8d06e1e5900545d8088efc9d5d4545782da7df03ec803f"},
+ {file = "rpds_py-0.29.0.tar.gz", hash = "sha256:fe55fe686908f50154d1dc599232016e50c243b438c3b7432f24e2895b0e5359"},
]
[[package]]
@@ -2031,97 +1972,103 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"]
[[package]]
name = "ruamel-yaml-clib"
-version = "0.2.14"
+version = "0.2.15"
description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml"
optional = false
python-versions = ">=3.9"
groups = ["main"]
-markers = "python_version < \"3.14\" and platform_python_implementation == \"CPython\""
-files = [
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f8b2acb0ffdd2ce8208accbec2dca4a06937d556fdcaefd6473ba1b5daa7e3c4"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:aef953f3b8bd0b50bd52a2e52fb54a6a2171a1889d8dea4a5959d46c6624c451"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a0ac90efbc7a77b0d796c03c8cc4e62fd710b3f1e4c32947713ef2ef52e09543"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bf6b699223afe6c7fe9f2ef76e0bfa6dd892c21e94ce8c957478987ade76cd8"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d73a0187718f6eec5b2f729b0f98e4603f7bd9c48aa65d01227d1a5dcdfbe9e8"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:81f6d3b19bc703679a5705c6a16dabdc79823c71d791d73c65949be7f3012c02"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b28caeaf3e670c08cb7e8de221266df8494c169bd6ed8875493fab45be9607a4"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:94f3efb718f8f49b031f2071ec7a27dd20cbfe511b4dfd54ecee54c956da2b31"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-win32.whl", hash = "sha256:27c070cf3888e90d992be75dd47292ff9aa17dafd36492812a6a304a1aedc182"},
- {file = "ruamel.yaml.clib-0.2.14-cp310-cp310-win_amd64.whl", hash = "sha256:4f4a150a737fccae13fb51234d41304ff2222e3b7d4c8e9428ed1a6ab48389b8"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:5bae1a073ca4244620425cd3d3aa9746bde590992b98ee8c7c8be8c597ca0d4e"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:0a54e5e40a7a691a426c2703b09b0d61a14294d25cfacc00631aa6f9c964df0d"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:10d9595b6a19778f3269399eff6bab642608e5966183abc2adbe558a42d4efc9"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba72975485f2b87b786075e18a6e5d07dc2b4d8973beb2732b9b2816f1bad70"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29757bdb7c142f9595cc1b62ec49a3d1c83fab9cef92db52b0ccebaad4eafb98"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:557df28dbccf79b152fe2d1b935f6063d9cc431199ea2b0e84892f35c03bb0ee"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:26a8de280ab0d22b6e3ec745b4a5a07151a0f74aad92dd76ab9c8d8d7087720d"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e501c096aa3889133d674605ebd018471bc404a59cbc17da3c5924421c54d97c"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-win32.whl", hash = "sha256:915748cfc25b8cfd81b14d00f4bfdb2ab227a30d6d43459034533f4d1c207a2a"},
- {file = "ruamel.yaml.clib-0.2.14-cp311-cp311-win_amd64.whl", hash = "sha256:4ccba93c1e5a40af45b2f08e4591969fa4697eae951c708f3f83dcbf9f6c6bb1"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:6aeadc170090ff1889f0d2c3057557f9cd71f975f17535c26a5d37af98f19c27"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5e56ac47260c0eed992789fa0b8efe43404a9adb608608631a948cee4fc2b052"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:a911aa73588d9a8b08d662b9484bc0567949529824a55d3885b77e8dd62a127a"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a05ba88adf3d7189a974b2de7a9d56731548d35dc0a822ec3dc669caa7019b29"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb04c5650de6668b853623eceadcdb1a9f2fee381f5d7b6bc842ee7c239eeec4"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:df3ec9959241d07bc261f4983d25a1205ff37703faf42b474f15d54d88b4f8c9"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:fbc08c02e9b147a11dfcaa1ac8a83168b699863493e183f7c0c8b12850b7d259"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c099cafc1834d3c5dac305865d04235f7c21c167c8dd31ebc3d6bbc357e2f023"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-win32.whl", hash = "sha256:b5b0f7e294700b615a3bcf6d28b26e6da94e8eba63b079f4ec92e9ba6c0d6b54"},
- {file = "ruamel.yaml.clib-0.2.14-cp312-cp312-win_amd64.whl", hash = "sha256:a37f40a859b503304dd740686359fcf541d6fb3ff7fc10f539af7f7150917c68"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7e4f9da7e7549946e02a6122dcad00b7c1168513acb1f8a726b1aaf504a99d32"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-macosx_15_0_arm64.whl", hash = "sha256:dd7546c851e59c06197a7c651335755e74aa383a835878ca86d2c650c07a2f85"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:1c1acc3a0209ea9042cc3cfc0790edd2eddd431a2ec3f8283d081e4d5018571e"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2070bf0ad1540d5c77a664de07ebcc45eebd1ddcab71a7a06f26936920692beb"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bd8fe07f49c170e09d76773fb86ad9135e0beee44f36e1576a201b0676d3d1d"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ff86876889ea478b1381089e55cf9e345707b312beda4986f823e1d95e8c0f59"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:1f118b707eece8cf84ecbc3e3ec94d9db879d85ed608f95870d39b2d2efa5dca"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b30110b29484adc597df6bd92a37b90e63a8c152ca8136aad100a02f8ba6d1b6"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-win32.whl", hash = "sha256:f4e97a1cf0b7a30af9e1d9dad10a5671157b9acee790d9e26996391f49b965a2"},
- {file = "ruamel.yaml.clib-0.2.14-cp313-cp313-win_amd64.whl", hash = "sha256:090782b5fb9d98df96509eecdbcaffd037d47389a89492320280d52f91330d78"},
- {file = "ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:7df6f6e9d0e33c7b1d435defb185095386c469109de723d514142632a7b9d07f"},
- {file = "ruamel.yaml.clib-0.2.14-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:70eda7703b8126f5e52fcf276e6c0f40b0d314674f896fc58c47b0aef2b9ae83"},
- {file = "ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a0cb71ccc6ef9ce36eecb6272c81afdc2f565950cdcec33ae8e6cd8f7fc86f27"},
- {file = "ruamel.yaml.clib-0.2.14-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e7cb9ad1d525d40f7d87b6df7c0ff916a66bc52cb61b66ac1b2a16d0c1b07640"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:18c041b28f3456ddef1f1951d4492dbebe0f8114157c1b3c981a4611c2020792"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:d8354515ab62f95a07deaf7f845886cc50e2f345ceab240a3d2d09a9f7d77853"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:275f938692013a3883edbd848edde6d9f26825d65c9a2eb1db8baa1adc96a05d"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16a60d69f4057ad9a92f3444e2367c08490daed6428291aa16cefb445c29b0e9"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ac5ff9425d8acb8f59ac5b96bcb7fd3d272dc92d96a7c730025928ffcc88a7a"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e1d1735d97fd8a48473af048739379975651fab186f8a25a9f683534e6904179"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:83bbd8354f6abb3fdfb922d1ed47ad8d1db3ea72b0523dac8d07cdacfe1c0fcf"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:808c7190a0fe7ae7014c42f73897cf8e9ef14ff3aa533450e51b1e72ec5239ad"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-win32.whl", hash = "sha256:6d5472f63a31b042aadf5ed28dd3ef0523da49ac17f0463e10fda9c4a2773352"},
- {file = "ruamel.yaml.clib-0.2.14-cp39-cp39-win_amd64.whl", hash = "sha256:8dd3c2cc49caa7a8d64b67146462aed6723a0495e44bf0aa0a2e94beaa8432f6"},
- {file = "ruamel.yaml.clib-0.2.14.tar.gz", hash = "sha256:803f5044b13602d58ea378576dd75aa759f52116a0232608e8fdada4da33752e"},
+markers = "platform_python_implementation == \"CPython\" and python_version < \"3.14\""
+files = [
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:88eea8baf72f0ccf232c22124d122a7f26e8a24110a0273d9bcddcb0f7e1fa03"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9b6f7d74d094d1f3a4e157278da97752f16ee230080ae331fcc219056ca54f77"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4be366220090d7c3424ac2b71c90d1044ea34fca8c0b88f250064fd06087e614"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1f66f600833af58bea694d5892453f2270695b92200280ee8c625ec5a477eed3"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da3d6adadcf55a93c214d23941aef4abfd45652110aed6580e814152f385b862"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e9fde97ecb7bb9c41261c2ce0da10323e9227555c674989f8d9eb7572fc2098d"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:05c70f7f86be6f7bee53794d80050a28ae7e13e4a0087c1839dcdefd68eb36b6"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f1d38cbe622039d111b69e9ca945e7e3efebb30ba998867908773183357f3ed"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-win32.whl", hash = "sha256:fe239bdfdae2302e93bd6e8264bd9b71290218fff7084a9db250b55caaccf43f"},
+ {file = "ruamel_yaml_clib-0.2.15-cp310-cp310-win_amd64.whl", hash = "sha256:468858e5cbde0198337e6a2a78eda8c3fb148bdf4c6498eaf4bc9ba3f8e780bd"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c583229f336682b7212a43d2fa32c30e643d3076178fb9f7a6a14dde85a2d8bd"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56ea19c157ed8c74b6be51b5fa1c3aff6e289a041575f0556f66e5fb848bb137"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5fea0932358e18293407feb921d4f4457db837b67ec1837f87074667449f9401"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef71831bd61fbdb7aa0399d5c4da06bea37107ab5c79ff884cc07f2450910262"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:617d35dc765715fa86f8c3ccdae1e4229055832c452d4ec20856136acc75053f"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b45498cc81a4724a2d42273d6cfc243c0547ad7c6b87b4f774cb7bcc131c98d"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:def5663361f6771b18646620fca12968aae730132e104688766cf8a3b1d65922"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:014181cdec565c8745b7cbc4de3bf2cc8ced05183d986e6d1200168e5bb59490"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-win32.whl", hash = "sha256:d290eda8f6ada19e1771b54e5706b8f9807e6bb08e873900d5ba114ced13e02c"},
+ {file = "ruamel_yaml_clib-0.2.15-cp311-cp311-win_amd64.whl", hash = "sha256:bdc06ad71173b915167702f55d0f3f027fc61abd975bd308a0968c02db4a4c3e"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cb15a2e2a90c8475df45c0949793af1ff413acfb0a716b8b94e488ea95ce7cff"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:64da03cbe93c1e91af133f5bec37fd24d0d4ba2418eaf970d7166b0a26a148a2"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f6d3655e95a80325b84c4e14c080b2470fe4f33b6846f288379ce36154993fb1"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71845d377c7a47afc6592aacfea738cc8a7e876d586dfba814501d8c53c1ba60"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11e5499db1ccbc7f4b41f0565e4f799d863ea720e01d3e99fa0b7b5fcd7802c9"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4b293a37dc97e2b1e8a1aec62792d1e52027087c8eea4fc7b5abd2bdafdd6642"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:512571ad41bba04eac7268fe33f7f4742210ca26a81fe0c75357fa682636c690"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e5e9f630c73a490b758bf14d859a39f375e6999aea5ddd2e2e9da89b9953486a"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-win32.whl", hash = "sha256:f4421ab780c37210a07d138e56dd4b51f8642187cdfb433eb687fe8c11de0144"},
+ {file = "ruamel_yaml_clib-0.2.15-cp312-cp312-win_amd64.whl", hash = "sha256:2b216904750889133d9222b7b873c199d48ecbb12912aca78970f84a5aa1a4bc"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4dcec721fddbb62e60c2801ba08c87010bd6b700054a09998c4d09c08147b8fb"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:65f48245279f9bb301d1276f9679b82e4c080a1ae25e679f682ac62446fac471"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:46895c17ead5e22bea5e576f1db7e41cb273e8d062c04a6a49013d9f60996c25"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3eb199178b08956e5be6288ee0b05b2fb0b5c1f309725ad25d9c6ea7e27f962a"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4d1032919280ebc04a80e4fb1e93f7a738129857eaec9448310e638c8bccefcf"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ab0df0648d86a7ecbd9c632e8f8d6b21bb21b5fc9d9e095c796cacf32a728d2d"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:331fb180858dd8534f0e61aa243b944f25e73a4dae9962bd44c46d1761126bbf"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fd4c928ddf6bce586285daa6d90680b9c291cfd045fc40aad34e445d57b1bf51"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-win32.whl", hash = "sha256:bf0846d629e160223805db9fe8cc7aec16aaa11a07310c50c8c7164efa440aec"},
+ {file = "ruamel_yaml_clib-0.2.15-cp313-cp313-win_amd64.whl", hash = "sha256:45702dfbea1420ba3450bb3dd9a80b33f0badd57539c6aac09f42584303e0db6"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:753faf20b3a5906faf1fc50e4ddb8c074cb9b251e00b14c18b28492f933ac8ef"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:480894aee0b29752560a9de46c0e5f84a82602f2bc5c6cde8db9a345319acfdf"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:4d3b58ab2454b4747442ac76fab66739c72b1e2bb9bd173d7694b9f9dbc9c000"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bfd309b316228acecfa30670c3887dcedf9b7a44ea39e2101e75d2654522acd4"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2812ff359ec1f30129b62372e5f22a52936fac13d5d21e70373dbca5d64bb97c"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7e74ea87307303ba91073b63e67f2c667e93f05a8c63079ee5b7a5c8d0d7b043"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:713cd68af9dfbe0bb588e144a61aad8dcc00ef92a82d2e87183ca662d242f524"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:542d77b72786a35563f97069b9379ce762944e67055bea293480f7734b2c7e5e"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-win32.whl", hash = "sha256:424ead8cef3939d690c4b5c85ef5b52155a231ff8b252961b6516ed7cf05f6aa"},
+ {file = "ruamel_yaml_clib-0.2.15-cp314-cp314-win_amd64.whl", hash = "sha256:ac9b8d5fa4bb7fd2917ab5027f60d4234345fd366fe39aa711d5dca090aa1467"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:923816815974425fbb1f1bf57e85eca6e14d8adc313c66db21c094927ad01815"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:dcc7f3162d3711fd5d52e2267e44636e3e566d1e5675a5f0b30e98f2c4af7974"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:5d3c9210219cbc0f22706f19b154c9a798ff65a6beeafbf77fc9c057ec806f7d"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bb7b728fd9f405aa00b4a0b17ba3f3b810d0ccc5f77f7373162e9b5f0ff75d5"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3cb75a3c14f1d6c3c2a94631e362802f70e83e20d1f2b2ef3026c05b415c4900"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:badd1d7283f3e5894779a6ea8944cc765138b96804496c91812b2829f70e18a7"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ba6604bbc3dfcef844631932d06a1a4dcac3fee904efccf582261948431628a"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a8220fd4c6f98485e97aea65e1df76d4fed1678ede1fe1d0eed2957230d287c4"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-win32.whl", hash = "sha256:04d21dc9c57d9608225da28285900762befbb0165ae48482c15d8d4989d4af14"},
+ {file = "ruamel_yaml_clib-0.2.15-cp39-cp39-win_amd64.whl", hash = "sha256:27dc656e84396e6d687f97c6e65fb284d100483628f02d95464fd731743a4afe"},
+ {file = "ruamel_yaml_clib-0.2.15.tar.gz", hash = "sha256:46e4cc8c43ef6a94885f72512094e482114a8a706d3c555a34ed4b0d20200600"},
]
[[package]]
name = "ruff"
-version = "0.14.3"
+version = "0.14.6"
description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false
python-versions = ">=3.7"
groups = ["lint-and-format"]
files = [
- {file = "ruff-0.14.3-py3-none-linux_armv6l.whl", hash = "sha256:876b21e6c824f519446715c1342b8e60f97f93264012de9d8d10314f8a79c371"},
- {file = "ruff-0.14.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:b6fd8c79b457bedd2abf2702b9b472147cd860ed7855c73a5247fa55c9117654"},
- {file = "ruff-0.14.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:71ff6edca490c308f083156938c0c1a66907151263c4abdcb588602c6e696a14"},
- {file = "ruff-0.14.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:786ee3ce6139772ff9272aaf43296d975c0217ee1b97538a98171bf0d21f87ed"},
- {file = "ruff-0.14.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cd6291d0061811c52b8e392f946889916757610d45d004e41140d81fb6cd5ddc"},
- {file = "ruff-0.14.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a497ec0c3d2c88561b6d90f9c29f5ae68221ac00d471f306fa21fa4264ce5fcd"},
- {file = "ruff-0.14.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e231e1be58fc568950a04fbe6887c8e4b85310e7889727e2b81db205c45059eb"},
- {file = "ruff-0.14.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:469e35872a09c0e45fecf48dd960bfbce056b5db2d5e6b50eca329b4f853ae20"},
- {file = "ruff-0.14.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d6bc90307c469cb9d28b7cfad90aaa600b10d67c6e22026869f585e1e8a2db0"},
- {file = "ruff-0.14.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2f8a0bbcffcfd895df39c9a4ecd59bb80dca03dc43f7fb63e647ed176b741e"},
- {file = "ruff-0.14.3-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:678fdd7c7d2d94851597c23ee6336d25f9930b460b55f8598e011b57c74fd8c5"},
- {file = "ruff-0.14.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1ec1ac071e7e37e0221d2f2dbaf90897a988c531a8592a6a5959f0603a1ecf5e"},
- {file = "ruff-0.14.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:afcdc4b5335ef440d19e7df9e8ae2ad9f749352190e96d481dc501b753f0733e"},
- {file = "ruff-0.14.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:7bfc42f81862749a7136267a343990f865e71fe2f99cf8d2958f684d23ce3dfa"},
- {file = "ruff-0.14.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:a65e448cfd7e9c59fae8cf37f9221585d3354febaad9a07f29158af1528e165f"},
- {file = "ruff-0.14.3-py3-none-win32.whl", hash = "sha256:f3d91857d023ba93e14ed2d462ab62c3428f9bbf2b4fbac50a03ca66d31991f7"},
- {file = "ruff-0.14.3-py3-none-win_amd64.whl", hash = "sha256:d7b7006ac0756306db212fd37116cce2bd307e1e109375e1c6c106002df0ae5f"},
- {file = "ruff-0.14.3-py3-none-win_arm64.whl", hash = "sha256:26eb477ede6d399d898791d01961e16b86f02bc2486d0d1a7a9bb2379d055dc1"},
- {file = "ruff-0.14.3.tar.gz", hash = "sha256:4ff876d2ab2b161b6de0aa1f5bd714e8e9b4033dc122ee006925fbacc4f62153"},
+ {file = "ruff-0.14.6-py3-none-linux_armv6l.whl", hash = "sha256:d724ac2f1c240dbd01a2ae98db5d1d9a5e1d9e96eba999d1c48e30062df578a3"},
+ {file = "ruff-0.14.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:9f7539ea257aa4d07b7ce87aed580e485c40143f2473ff2f2b75aee003186004"},
+ {file = "ruff-0.14.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7f6007e55b90a2a7e93083ba48a9f23c3158c433591c33ee2e99a49b889c6332"},
+ {file = "ruff-0.14.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a8e7b9d73d8728b68f632aa8e824ef041d068d231d8dbc7808532d3629a6bef"},
+ {file = "ruff-0.14.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d50d45d4553a3ebcbd33e7c5e0fe6ca4aafd9a9122492de357205c2c48f00775"},
+ {file = "ruff-0.14.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:118548dd121f8a21bfa8ab2c5b80e5b4aed67ead4b7567790962554f38e598ce"},
+ {file = "ruff-0.14.6-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:57256efafbfefcb8748df9d1d766062f62b20150691021f8ab79e2d919f7c11f"},
+ {file = "ruff-0.14.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff18134841e5c68f8e5df1999a64429a02d5549036b394fafbe410f886e1989d"},
+ {file = "ruff-0.14.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:29c4b7ec1e66a105d5c27bd57fa93203637d66a26d10ca9809dc7fc18ec58440"},
+ {file = "ruff-0.14.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:167843a6f78680746d7e226f255d920aeed5e4ad9c03258094a2d49d3028b105"},
+ {file = "ruff-0.14.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:16a33af621c9c523b1ae006b1b99b159bf5ac7e4b1f20b85b2572455018e0821"},
+ {file = "ruff-0.14.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1432ab6e1ae2dc565a7eea707d3b03a0c234ef401482a6f1621bc1f427c2ff55"},
+ {file = "ruff-0.14.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:4c55cfbbe7abb61eb914bfd20683d14cdfb38a6d56c6c66efa55ec6570ee4e71"},
+ {file = "ruff-0.14.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:efea3c0f21901a685fff4befda6d61a1bf4cb43de16da87e8226a281d614350b"},
+ {file = "ruff-0.14.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:344d97172576d75dc6afc0e9243376dbe1668559c72de1864439c4fc95f78185"},
+ {file = "ruff-0.14.6-py3-none-win32.whl", hash = "sha256:00169c0c8b85396516fdd9ce3446c7ca20c2a8f90a77aa945ba6b8f2bfe99e85"},
+ {file = "ruff-0.14.6-py3-none-win_amd64.whl", hash = "sha256:390e6480c5e3659f8a4c8d6a0373027820419ac14fa0d2713bd8e6c3e125b8b9"},
+ {file = "ruff-0.14.6-py3-none-win_arm64.whl", hash = "sha256:d43c81fbeae52cfa8728d8766bbf46ee4298c888072105815b392da70ca836b2"},
+ {file = "ruff-0.14.6.tar.gz", hash = "sha256:6f0c742ca6a7783a736b867a263b9a7a80a45ce9bee391eeda296895f1b4e1cc"},
]
[[package]]
@@ -2171,14 +2118,14 @@ files = [
[[package]]
name = "starlette"
-version = "0.49.3"
+version = "0.50.0"
description = "The little ASGI library that shines."
optional = false
-python-versions = ">=3.9"
+python-versions = ">=3.10"
groups = ["dev"]
files = [
- {file = "starlette-0.49.3-py3-none-any.whl", hash = "sha256:b579b99715fdc2980cf88c8ec96d3bf1ce16f5a8051a7c2b84ef9b1cdecaea2f"},
- {file = "starlette-0.49.3.tar.gz", hash = "sha256:1c14546f299b5901a1ea0e34410575bc33bbd741377a10484a54445588d00284"},
+ {file = "starlette-0.50.0-py3-none-any.whl", hash = "sha256:9e5391843ec9b6e472eed1365a78c8098cfceb7a74bfd4d6b1c0c0095efb3bca"},
+ {file = "starlette-0.50.0.tar.gz", hash = "sha256:a2a17b22203254bcbc2e1f926d2d55f3f9497f769416b3190768befe598fa3ca"},
]
[package.dependencies]
@@ -2194,8 +2141,8 @@ version = "2.2.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
-groups = ["main", "dev", "lint-and-format", "type-checking"]
-markers = "python_version == \"3.10\""
+groups = ["dev", "lint-and-format", "type-checking"]
+markers = "python_version < \"3.11\""
files = [
{file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
{file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
@@ -2399,4 +2346,4 @@ watchdog = ["watchdog (>=2.3)"]
[metadata]
lock-version = "2.1"
python-versions = ">=3.10, <4"
-content-hash = "b7ea95a1a29ef5ce4aec7eb44e253601508ae6b67b5d1091db3e5dc0d4252e8c"
+content-hash = "1007af3ad0f6fd0278abe498547d74e90b5870a522754706c50f85673d11f88b"
diff --git a/pyproject.toml b/pyproject.toml
index 121ba48..0b3c0fe 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[project]
name="robotframework-openapitools"
-version = "1.0.5"
+version = "2.0.0b1"
description = "A set of Robot Framework libraries to test APIs for which the OAS is available."
authors = [
{name = "Robin Mackaij", email = "r.a.mackaij@gmail.com"},
@@ -28,12 +28,11 @@ dependencies = [
"robotframework >= 6.0.0, !=7.0.0",
"robotframework-datadriver >= 1.10.0",
"requests >= 2.31.0",
- "prance[CLI] >= 23",
- "Faker >= 23.1.0",
+ "prance[CLI] >= 25",
+ "Faker >= 38.0.0",
"rstr >= 3.2.0",
"openapi-core >= 0.19.0",
"rich_click >= 1.7.0",
- "black >= 24.1.0",
"Jinja2 >= 3.1.2",
"pydantic >= 2.11.0",
]
@@ -42,23 +41,23 @@ dependencies = [
dev = [
"invoke >= 2.2.0",
"robotframework-stacktrace >= 0.4.0",
- "uvicorn >= 0.27.0",
- "fastapi >= 0.109.0",
+ "uvicorn >= 0.38.0",
+ "fastapi >= 0.122.0",
"coverage[toml] >= 7.2.0",
- "robotcode-runner >= 1.0.3",
+ "robotcode-runner >= 2.0.0",
"genbadge[coverage] >= 1.1.2",
]
type-checking = [
"mypy >= 1.14.1",
"types-requests >= 2.31.0",
"types-invoke >= 2.0.0.0",
- "pyright >= 1.1.350",
- "robotcode-analyze >= 1.0.3",
+ "pyright >= 1.1.400",
+ "robotcode-analyze >= 2.0.0",
]
lint-and-format = [
- "ruff >= 0.9.0",
- "pylint >= 3.3.3",
- "robotframework-robocop >= 5.7.0",
+ "ruff >= 0.14.0",
+ "pylint >= 4.0.0",
+ "robotframework-robocop >= 6.0.0",
]
[project.urls]
@@ -108,7 +107,7 @@ build-backend = "poetry.core.masonry.api"
branch = true
parallel = true
source = ["src/OpenApiDriver", "src/OpenApiLibCore", "src/openapi_libgen"]
-omit = ["src/openapi_libgen/command_line.py"]
+omit = ["src/openapi_libgen/command_line.py", "src/OpenApiLibCore/protocols.py"]
[tool.coverage.report]
exclude_lines = [
@@ -126,6 +125,7 @@ disallow_untyped_defs = true
strict = true
show_error_codes = true
exclude = []
+follow_untyped_imports = true
[[tool.mypy.overrides]]
module = [
diff --git a/src/OpenApiDriver/__init__.py b/src/OpenApiDriver/__init__.py
index 7557256..be0c6b7 100644
--- a/src/OpenApiDriver/__init__.py
+++ b/src/OpenApiDriver/__init__.py
@@ -13,8 +13,12 @@
from importlib.metadata import version
from OpenApiDriver.openapidriver import OpenApiDriver
-from OpenApiLibCore.dto_base import (
+from OpenApiLibCore.data_constraints.dto_base import (
Dto,
+)
+from OpenApiLibCore.keyword_logic.validation import ValidationLevel
+from OpenApiLibCore.models import IGNORE
+from OpenApiLibCore.models.resource_relations import (
IdDependency,
IdReference,
PathPropertiesConstraint,
@@ -22,8 +26,6 @@
ResourceRelation,
UniquePropertyValueConstraint,
)
-from OpenApiLibCore.validation import ValidationLevel
-from OpenApiLibCore.value_utils import IGNORE
try:
__version__ = version("robotframework-openapidriver")
diff --git a/src/OpenApiDriver/openapi_executors.py b/src/OpenApiDriver/openapi_executors.py
index 3daa00d..b536d6e 100644
--- a/src/OpenApiDriver/openapi_executors.py
+++ b/src/OpenApiDriver/openapi_executors.py
@@ -6,6 +6,7 @@
from pathlib import Path
from random import choice
from types import MappingProxyType
+from typing import Literal, overload
from requests import Response
from requests.auth import AuthBase
@@ -25,6 +26,7 @@
ValidationLevel,
)
from OpenApiLibCore.annotations import JSON
+from OpenApiLibCore.models.oas_models import ObjectSchema
run_keyword = BuiltIn().run_keyword
default_str_mapping: Mapping[str, str] = MappingProxyType({})
@@ -38,6 +40,40 @@
]
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_valid_url"], *args: str
+) -> str: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["authorized_request"], *args: object
+) -> Response: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_request_data"], *args: str
+) -> RequestData: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_invalid_body_data"], *args: object
+) -> dict[str, JSON] | list[JSON]: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_invalidated_parameters"], *args: object
+) -> tuple[dict[str, JSON], dict[str, str]]: ... # pragma: no cover
+
+
+def _run_keyword(keyword_name: str, *args: object) -> object:
+ return run_keyword(keyword_name, *args)
+
+
@library(scope="SUITE", doc_format="ROBOT")
class OpenApiExecutors(OpenApiLibCore):
"""Main class providing the keywords and core logic to perform endpoint validations."""
@@ -50,7 +86,7 @@ def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
response_validation: ValidationLevel = ValidationLevel.WARN,
disable_server_validation: bool = True,
mappings_path: str | Path = "",
- invalid_property_default_response: int = 422,
+ invalid_data_default_response: int = 422,
default_id_property_name: str = "id",
faker_locale: str | list[str] = "",
require_body_for_invalid_url: bool = False,
@@ -74,7 +110,7 @@ def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
disable_server_validation=disable_server_validation,
mappings_path=mappings_path,
default_id_property_name=default_id_property_name,
- invalid_property_default_response=invalid_property_default_response,
+ invalid_data_default_response=invalid_data_default_response,
faker_locale=faker_locale,
require_body_for_invalid_url=require_body_for_invalid_url,
recursion_limit=recursion_limit,
@@ -102,7 +138,7 @@ def test_unauthorized(self, path: str, method: str) -> None:
> Note: No headers or (json) body are sent with the request. For security
reasons, the authorization validation should be checked first.
"""
- url: str = run_keyword("get_valid_url", path)
+ url = _run_keyword("get_valid_url", path)
response = self.session.request(
method=method,
url=url,
@@ -123,8 +159,8 @@ def test_forbidden(self, path: str, method: str) -> None:
> Note: No headers or (json) body are sent with the request. For security
reasons, the access rights validation should be checked first.
"""
- url: str = run_keyword("get_valid_url", path)
- response: Response = run_keyword("authorized_request", url, method)
+ url = _run_keyword("get_valid_url", path)
+ response = _run_keyword("authorized_request", url, method)
if response.status_code != int(HTTPStatus.FORBIDDEN):
raise AssertionError(f"Response {response.status_code} was not 403.")
@@ -148,12 +184,10 @@ def test_invalid_url(
parameters are sent with the request. The `require_body_for_invalid_url`
parameter can be set to `True` if needed.
"""
- valid_url: str = run_keyword("get_valid_url", path)
+ valid_url = _run_keyword("get_valid_url", path)
try:
- url = run_keyword(
- "get_invalidated_url", valid_url, path, expected_status_code
- )
+ url = run_keyword("get_invalidated_url", valid_url, expected_status_code)
except Exception as exception:
message = getattr(exception, "message", "")
if not message.startswith("ValueError"):
@@ -166,12 +200,11 @@ def test_invalid_url(
params, headers, json_data = None, None, None
if self.require_body_for_invalid_url:
- request_data: RequestData = run_keyword("get_request_data", path, method)
+ request_data = _run_keyword("get_request_data", path, method)
params = request_data.params
headers = request_data.headers
- dto = request_data.dto
- json_data = dto.as_dict()
- response: Response = run_keyword(
+ json_data = request_data.valid_data
+ response = _run_keyword(
"authorized_request", url, method, params, headers, json_data
)
if response.status_code != expected_status_code:
@@ -191,68 +224,63 @@ def test_endpoint(self, path: str, method: str, status_code: int) -> None:
The keyword calls other keywords to generate the necessary data to perform
the desired operation and validate the response against the openapi document.
"""
- json_data: dict[str, JSON] = {}
original_data = {}
- url: str = run_keyword("get_valid_url", path)
- request_data: RequestData = run_keyword("get_request_data", path, method)
+ url = _run_keyword("get_valid_url", path)
+ request_data = _run_keyword("get_request_data", path, method)
params = request_data.params
headers = request_data.headers
- if request_data.has_body:
- json_data = request_data.dto.as_dict()
+ json_data = request_data.valid_data
# when patching, get the original data to check that only the patched data has changed
if method == "PATCH":
original_data = self.get_original_data(url=url)
# in case of a status code indicating an error, ensure the error occurs
if status_code >= int(HTTPStatus.BAD_REQUEST):
- invalidation_keyword_data = {
- "get_invalid_body_data": [
- "get_invalid_body_data",
- url,
- method,
- status_code,
- request_data,
- ],
- "get_invalidated_parameters": [
- "get_invalidated_parameters",
- status_code,
- request_data,
- ],
- }
- invalidation_keywords = []
-
- if request_data.dto.get_body_relations_for_error_code(status_code):
+ invalidation_keywords: list[str] = []
+
+ if request_data.constraint_mapping.get_body_relations_for_error_code(
+ status_code
+ ):
invalidation_keywords.append("get_invalid_body_data")
- if request_data.dto.get_parameter_relations_for_error_code(status_code):
+ if request_data.constraint_mapping.get_parameter_relations_for_error_code(
+ status_code
+ ):
invalidation_keywords.append("get_invalidated_parameters")
if invalidation_keywords:
- if (
- invalidation_keyword := choice(invalidation_keywords)
- ) == "get_invalid_body_data":
- json_data = run_keyword(
- *invalidation_keyword_data[invalidation_keyword]
+ invalidation_keyword = choice(invalidation_keywords)
+ if invalidation_keyword == "get_invalid_body_data":
+ json_data = _run_keyword(
+ "get_invalid_body_data",
+ url,
+ method,
+ status_code,
+ request_data,
)
else:
- params, headers = run_keyword(
- *invalidation_keyword_data[invalidation_keyword]
+ params, headers = _run_keyword(
+ "get_invalidated_parameters", status_code, request_data
)
# if there are no relations to invalidate and the status_code is the default
# response_code for invalid properties, invalidate properties instead
- elif status_code == self.invalid_property_default_response:
+ elif status_code == self.invalid_data_default_response:
if (
request_data.params_that_can_be_invalidated
or request_data.headers_that_can_be_invalidated
):
- params, headers = run_keyword(
- *invalidation_keyword_data["get_invalidated_parameters"]
+ params, headers = _run_keyword(
+ "get_invalidated_parameters", status_code, request_data
)
if request_data.body_schema:
- json_data = run_keyword(
- *invalidation_keyword_data["get_invalid_body_data"]
+ json_data = _run_keyword(
+ "get_invalid_body_data",
+ url,
+ method,
+ status_code,
+ request_data,
)
elif request_data.body_schema:
- json_data = run_keyword(
- *invalidation_keyword_data["get_invalid_body_data"]
+ json_data = _run_keyword(
+ "get_invalid_body_data", url, method, status_code, request_data
)
else:
raise SkipExecution(
@@ -260,7 +288,7 @@ def test_endpoint(self, path: str, method: str, status_code: int) -> None:
)
else:
raise AssertionError(
- f"No Dto mapping found to cause status_code {status_code}."
+ f"No constraint mapping found to cause status_code {status_code}."
)
run_keyword(
"perform_validated_request",
@@ -281,13 +309,16 @@ def test_endpoint(self, path: str, method: str, status_code: int) -> None:
or request_data.has_optional_headers
):
logger.info("Performing request without optional properties and parameters")
- url = run_keyword("get_valid_url", path)
- request_data = run_keyword("get_request_data", path, method)
+ url = _run_keyword("get_valid_url", path)
+ request_data = _run_keyword("get_request_data", path, method)
params = request_data.get_required_params()
headers = request_data.get_required_headers()
- json_data = (
- request_data.get_minimal_body_dict() if request_data.has_body else {}
- )
+ if isinstance(request_data.body_schema, ObjectSchema):
+ json_data = (
+ request_data.get_minimal_body_dict()
+ if request_data.has_body
+ else {}
+ )
original_data = {}
if method == "PATCH":
original_data = self.get_original_data(url=url)
@@ -313,10 +344,10 @@ def get_original_data(self, url: str) -> dict[str, JSON]:
"""
original_data = {}
path = self.get_parameterized_path_from_url(url)
- get_request_data: RequestData = run_keyword("get_request_data", path, "GET")
+ get_request_data = _run_keyword("get_request_data", path, "GET")
get_params = get_request_data.params
get_headers = get_request_data.headers
- response: Response = run_keyword(
+ response = _run_keyword(
"authorized_request", url, "GET", get_params, get_headers
)
if response.ok:
@@ -327,5 +358,5 @@ def get_original_data(self, url: str) -> dict[str, JSON]:
def get_keyword_names() -> list[str]:
"""Curated keywords for libdoc and libspec."""
if getenv("HIDE_INHERITED_KEYWORDS") == "true":
- return KEYWORD_NAMES
+ return KEYWORD_NAMES # pragma: no cover
return KEYWORD_NAMES + LIBCORE_KEYWORD_NAMES
diff --git a/src/OpenApiDriver/openapi_reader.py b/src/OpenApiDriver/openapi_reader.py
index 90be78d..44d8afe 100644
--- a/src/OpenApiDriver/openapi_reader.py
+++ b/src/OpenApiDriver/openapi_reader.py
@@ -5,7 +5,7 @@
from DataDriver.AbstractReaderClass import AbstractReaderClass
from DataDriver.ReaderConfig import TestCaseData
-from OpenApiLibCore.models import PathItemObject
+from OpenApiLibCore.models.oas_models import PathItemObject
class Test:
@@ -45,7 +45,7 @@ def get_data_from_source(self) -> list[TestCaseData]:
ignored_tests = [Test(*test) for test in getattr(self, "ignored_testcases", [])]
for path, path_item in paths.items():
- path_operations = path_item.get_operations()
+ path_operations = path_item.operations
            # by reversing the items, post/put operations come before get and delete
for method, operation_data in reversed(path_operations.items()):
diff --git a/src/OpenApiDriver/openapidriver.py b/src/OpenApiDriver/openapidriver.py
index 5f3026d..ded7560 100644
--- a/src/OpenApiDriver/openapidriver.py
+++ b/src/OpenApiDriver/openapidriver.py
@@ -34,7 +34,7 @@ def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
response_validation: ValidationLevel = ValidationLevel.WARN,
disable_server_validation: bool = True,
mappings_path: str | Path = "",
- invalid_property_default_response: int = 422,
+ invalid_data_default_response: int = 422,
default_id_property_name: str = "id",
faker_locale: str | list[str] = "",
require_body_for_invalid_url: bool = False,
@@ -64,7 +64,7 @@ def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
response_validation=response_validation,
disable_server_validation=disable_server_validation,
mappings_path=mappings_path,
- invalid_property_default_response=invalid_property_default_response,
+ invalid_data_default_response=invalid_data_default_response,
default_id_property_name=default_id_property_name,
faker_locale=faker_locale,
require_body_for_invalid_url=require_body_for_invalid_url,
@@ -84,7 +84,7 @@ def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
read_paths_method = self.read_paths
DataDriver.__init__(
self,
- reader_class=OpenApiReader,
+ reader_class=OpenApiReader, # type: ignore[arg-type]
read_paths_method=read_paths_method,
included_paths=included_paths,
ignored_paths=ignored_paths,
diff --git a/src/OpenApiLibCore/__init__.py b/src/OpenApiLibCore/__init__.py
index b4d373e..928debb 100644
--- a/src/OpenApiLibCore/__init__.py
+++ b/src/OpenApiLibCore/__init__.py
@@ -13,8 +13,11 @@
from importlib.metadata import version
-from OpenApiLibCore.dto_base import (
- Dto,
+from OpenApiLibCore.data_constraints.dto_base import Dto
+from OpenApiLibCore.keyword_logic.validation import ValidationLevel
+from OpenApiLibCore.models import IGNORE, UNSET
+from OpenApiLibCore.models.request_data import RequestData, RequestValues
+from OpenApiLibCore.models.resource_relations import (
IdDependency,
IdReference,
PathPropertiesConstraint,
@@ -22,13 +25,7 @@
ResourceRelation,
UniquePropertyValueConstraint,
)
-from OpenApiLibCore.dto_utils import DefaultDto
-from OpenApiLibCore.openapi_libcore import (
- OpenApiLibCore,
-)
-from OpenApiLibCore.request_data import RequestData, RequestValues
-from OpenApiLibCore.validation import ValidationLevel
-from OpenApiLibCore.value_utils import IGNORE, UNSET
+from OpenApiLibCore.openapi_libcore import OpenApiLibCore
try:
__version__ = version("robotframework-openapi-libcore")
@@ -65,7 +62,6 @@
__all__ = [
"IGNORE",
"UNSET",
- "DefaultDto",
"Dto",
"IdDependency",
"IdReference",
diff --git a/src/OpenApiLibCore/annotations.py b/src/OpenApiLibCore/annotations.py
index fa70867..2cb6a5e 100644
--- a/src/OpenApiLibCore/annotations.py
+++ b/src/OpenApiLibCore/annotations.py
@@ -6,5 +6,5 @@
JSON = TypeAliasType(
"JSON",
- "Union[dict[str, JSON], list[JSON], str, bytes, int, float, bool, None]",
+ "Union[dict[str, JSON], list[JSON], str, int, float, bool, None]",
)
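
With bytes removed, the recursive alias now covers exactly the JSON-serializable Python values. A minimal illustration of a value that satisfies the alias (the field names are invented for the example):

from OpenApiLibCore.annotations import JSON

# Nested dicts and lists of str, int, float, bool and None all satisfy the alias;
# bytes values no longer do.
payload: JSON = {
    "name": "wagegroup",
    "hourly_rates": [42.0, 45.5],
    "employee_count": 3,
    "active": True,
    "parent_id": None,
}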
diff --git a/src/OpenApiLibCore/data_constraints/__init__.py b/src/OpenApiLibCore/data_constraints/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/OpenApiLibCore/data_constraints/dto_base.py b/src/OpenApiLibCore/data_constraints/dto_base.py
new file mode 100644
index 0000000..5cec1c2
--- /dev/null
+++ b/src/OpenApiLibCore/data_constraints/dto_base.py
@@ -0,0 +1,142 @@
+"""
+Module holding the (base) classes that can be used by the user of the OpenApiLibCore
+to implement custom mappings for dependencies between resources in the API under
+test and constraints / restrictions on properties of the resources.
+"""
+
+from abc import ABC
+from dataclasses import dataclass
+from importlib import import_module
+from typing import Callable
+
+from robot.api import logger
+
+from OpenApiLibCore.models.resource_relations import (
+ NOT_SET,
+ PathPropertiesConstraint,
+ ResourceRelation,
+)
+from OpenApiLibCore.protocols import (
+ ConstraintMappingType,
+ IGetIdPropertyName,
+)
+from OpenApiLibCore.utils.id_mapping import dummy_transformer
+
+
+@dataclass
+class Dto(ABC):
+ """Base class for the Dto class."""
+
+ @staticmethod
+ def get_path_relations() -> list[PathPropertiesConstraint]:
+ """Return the list of path-related Relations."""
+ return []
+
+ @staticmethod
+ def get_parameter_relations() -> list[ResourceRelation]:
+ """Return the list of Relations for the header and query parameters."""
+ return []
+
+ @classmethod
+ def get_parameter_relations_for_error_code(
+ cls, error_code: int
+ ) -> list[ResourceRelation]:
+ """Return the list of Relations associated with the given error_code."""
+ relations: list[ResourceRelation] = [
+ r
+ for r in cls.get_parameter_relations()
+ if r.error_code == error_code
+ or (
+ getattr(r, "invalid_value_error_code", None) == error_code
+ and getattr(r, "invalid_value", None) != NOT_SET
+ )
+ ]
+ return relations
+
+ @staticmethod
+ def get_relations() -> list[ResourceRelation]:
+ """Return the list of Relations for the (json) body."""
+ return []
+
+ @classmethod
+ def get_body_relations_for_error_code(
+ cls, error_code: int
+ ) -> list[ResourceRelation]:
+ """
+ Return the list of Relations associated with the given error_code that are
+ applicable to the body / payload of the request.
+ """
+ relations: list[ResourceRelation] = [
+ r
+ for r in cls.get_relations()
+ if r.error_code == error_code
+ or (
+ getattr(r, "invalid_value_error_code", None) == error_code
+ and getattr(r, "invalid_value", None) != NOT_SET
+ )
+ ]
+ return relations
+
+
+def get_constraint_mapping_dict(
+ mappings_module_name: str,
+) -> dict[tuple[str, str], ConstraintMappingType]:
+ try:
+ mappings_module = import_module(mappings_module_name)
+ return mappings_module.DTO_MAPPING # type: ignore[no-any-return]
+ except (ImportError, AttributeError, ValueError) as exception:
+ if mappings_module_name != "no mapping":
+ logger.error(f"DTO_MAPPING was not imported: {exception}")
+ return {}
+
+
+def get_path_mapping_dict(
+ mappings_module_name: str,
+) -> dict[str, ConstraintMappingType]:
+ try:
+ mappings_module = import_module(mappings_module_name)
+ return mappings_module.PATH_MAPPING # type: ignore[no-any-return]
+ except (ImportError, AttributeError, ValueError) as exception:
+ if mappings_module_name != "no mapping":
+ logger.error(f"PATH_MAPPING was not imported: {exception}")
+ return {}
+
+
+def get_id_property_name(
+ mappings_module_name: str, default_id_property_name: str
+) -> IGetIdPropertyName:
+ return GetIdPropertyName(
+ mappings_module_name=mappings_module_name,
+ default_id_property_name=default_id_property_name,
+ )
+
+
+class GetIdPropertyName:
+ """
+ Callable class to return the name of the property that uniquely identifies
+ the resource from user-implemented mappings file.
+ """
+
+ def __init__(
+ self, mappings_module_name: str, default_id_property_name: str
+ ) -> None:
+ self.default_id_property_name = default_id_property_name
+ try:
+ mappings_module = import_module(mappings_module_name)
+ self.id_mapping: dict[
+ str,
+ str | tuple[str, Callable[[str], str]],
+ ] = mappings_module.ID_MAPPING
+ except (ImportError, AttributeError, ValueError) as exception:
+ if mappings_module_name != "no mapping":
+ logger.error(f"ID_MAPPING was not imported: {exception}")
+ self.id_mapping = {}
+
+ def __call__(self, path: str) -> tuple[str, Callable[[str], str]]:
+ try:
+ value_or_mapping = self.id_mapping[path]
+ if isinstance(value_or_mapping, str):
+ return (value_or_mapping, dummy_transformer)
+ return value_or_mapping
+ except KeyError:
+ return (self.default_id_property_name, dummy_transformer)
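
A minimal sketch of the user mappings module these loaders expect, assuming the DTO_MAPPING, PATH_MAPPING and ID_MAPPING names read above and the relation classes re-exported from OpenApiLibCore; the paths and property names are invented for the example:

from dataclasses import dataclass

from OpenApiLibCore import Dto, IdDependency, PropertyValueConstraint, ResourceRelation


@dataclass
class EmployeeMapping(Dto):
    @staticmethod
    def get_relations() -> list[ResourceRelation]:
        # Constraints on body properties: an allowed-values constraint and an
        # id that has to be retrieved from another resource.
        return [
            PropertyValueConstraint(property_name="title", values=["Mr.", "Ms.", "Dr."]),
            IdDependency(property_name="wagegroup_id", get_path="/wagegroups"),
        ]


# Keyed by (path, method), with the method in lowercase.
DTO_MAPPING = {("/employees", "post"): EmployeeMapping}
PATH_MAPPING: dict[str, type[Dto]] = {}
# A plain string maps the path to its id property name; a (name, transformer)
# tuple is also accepted, as handled by GetIdPropertyName above.
ID_MAPPING = {"/employees": "identification"}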
diff --git a/src/OpenApiLibCore/data_generation/__init__.py b/src/OpenApiLibCore/data_generation/__init__.py
index be5f703..c41511c 100644
--- a/src/OpenApiLibCore/data_generation/__init__.py
+++ b/src/OpenApiLibCore/data_generation/__init__.py
@@ -2,9 +2,3 @@
Module holding the functions related to data generation
 for the requests made as part of keyword execution.
"""
-
-from .data_generation_core import get_request_data
-
-__all__ = [
- "get_request_data",
-]
diff --git a/src/OpenApiLibCore/data_generation/body_data_generation.py b/src/OpenApiLibCore/data_generation/body_data_generation.py
deleted file mode 100644
index 0ae0270..0000000
--- a/src/OpenApiLibCore/data_generation/body_data_generation.py
+++ /dev/null
@@ -1,250 +0,0 @@
-"""
-Module holding the functions related to (json) data generation
-for the body of requests made as part of keyword exection.
-"""
-
-from random import choice, randint, sample
-from typing import Any
-
-from robot.api import logger
-
-import OpenApiLibCore.path_functions as _path_functions
-from OpenApiLibCore.annotations import JSON
-from OpenApiLibCore.dto_base import (
- Dto,
- IdDependency,
- PropertyValueConstraint,
-)
-from OpenApiLibCore.dto_utils import DefaultDto
-from OpenApiLibCore.models import (
- ArraySchema,
- ObjectSchema,
- SchemaObjectTypes,
- UnionTypeSchema,
-)
-from OpenApiLibCore.parameter_utils import get_safe_name_for_oas_name
-from OpenApiLibCore.protocols import GetIdPropertyNameType
-from OpenApiLibCore.value_utils import IGNORE
-
-
-def get_json_data_for_dto_class(
- schema: SchemaObjectTypes,
- dto_class: type[Dto],
- get_id_property_name: GetIdPropertyNameType,
- operation_id: str | None = None,
-) -> JSON:
- if isinstance(schema, UnionTypeSchema):
- chosen_schema = choice(schema.resolved_schemas)
- return get_json_data_for_dto_class(
- schema=chosen_schema,
- dto_class=dto_class,
- get_id_property_name=get_id_property_name,
- operation_id=operation_id,
- )
-
- match schema:
- case ObjectSchema():
- return get_dict_data_for_dto_class(
- schema=schema,
- dto_class=dto_class,
- get_id_property_name=get_id_property_name,
- operation_id=operation_id,
- )
- case ArraySchema():
- return get_list_data_for_dto_class(
- schema=schema,
- dto_class=dto_class,
- get_id_property_name=get_id_property_name,
- operation_id=operation_id,
- )
- case _:
- return schema.get_valid_value()
-
-
-def get_dict_data_for_dto_class(
- schema: ObjectSchema,
- dto_class: type[Dto],
- get_id_property_name: GetIdPropertyNameType,
- operation_id: str | None = None,
-) -> dict[str, Any]:
- json_data: dict[str, Any] = {}
-
- property_names = get_property_names_to_process(schema=schema, dto_class=dto_class)
-
- for property_name in property_names:
- property_schema = schema.properties.root[property_name] # type: ignore[union-attr]
- if property_schema.readOnly:
- continue
-
- json_data[property_name] = get_data_for_property(
- property_name=property_name,
- property_schema=property_schema,
- get_id_property_name=get_id_property_name,
- dto_class=dto_class,
- operation_id=operation_id,
- )
-
- return json_data
-
-
-def get_list_data_for_dto_class(
- schema: ArraySchema,
- dto_class: type[Dto],
- get_id_property_name: GetIdPropertyNameType,
- operation_id: str | None = None,
-) -> list[JSON]:
- json_data: list[JSON] = []
- list_item_schema = schema.items
- min_items = schema.minItems if schema.minItems is not None else 0
- max_items = schema.maxItems if schema.maxItems is not None else 1
- number_of_items_to_generate = randint(min_items, max_items)
- for _ in range(number_of_items_to_generate):
- list_item_data = get_json_data_for_dto_class(
- schema=list_item_schema,
- dto_class=dto_class,
- get_id_property_name=get_id_property_name,
- operation_id=operation_id,
- )
- json_data.append(list_item_data)
- return json_data
-
-
-def get_data_for_property(
- property_name: str,
- property_schema: SchemaObjectTypes,
- get_id_property_name: GetIdPropertyNameType,
- dto_class: type[Dto],
- operation_id: str | None,
-) -> JSON:
- if constrained_values := get_constrained_values(
- dto_class=dto_class, property_name=property_name
- ):
- constrained_value = choice(constrained_values)
- # Check if the chosen value is a nested Dto; since a Dto is never
- # instantiated, we can use isinstance(..., type) for this.
- if isinstance(constrained_value, type):
- return get_value_constrained_by_nested_dto(
- property_schema=property_schema,
- nested_dto_class=constrained_value,
- get_id_property_name=get_id_property_name,
- operation_id=operation_id,
- )
- return constrained_value
-
- if (
- dependent_id := get_dependent_id(
- dto_class=dto_class,
- property_name=property_name,
- operation_id=operation_id,
- get_id_property_name=get_id_property_name,
- )
- ) is not None:
- return dependent_id
-
- return get_json_data_for_dto_class(
- schema=property_schema,
- dto_class=DefaultDto,
- get_id_property_name=get_id_property_name,
- )
-
-
-def get_value_constrained_by_nested_dto(
- property_schema: SchemaObjectTypes,
- nested_dto_class: type[Dto],
- get_id_property_name: GetIdPropertyNameType,
- operation_id: str | None,
-) -> JSON:
- nested_schema = get_schema_for_nested_dto(property_schema=property_schema)
- nested_value = get_json_data_for_dto_class(
- schema=nested_schema,
- dto_class=nested_dto_class,
- get_id_property_name=get_id_property_name,
- operation_id=operation_id,
- )
- return nested_value
-
-
-def get_schema_for_nested_dto(property_schema: SchemaObjectTypes) -> SchemaObjectTypes:
- if isinstance(property_schema, UnionTypeSchema):
- chosen_schema = choice(property_schema.resolved_schemas)
- return get_schema_for_nested_dto(chosen_schema)
-
- return property_schema
-
-
-def get_property_names_to_process(
- schema: ObjectSchema,
- dto_class: type[Dto],
-) -> list[str]:
- property_names = []
-
- for property_name in schema.properties.root: # type: ignore[union-attr]
- # register the oas_name
- _ = get_safe_name_for_oas_name(property_name)
- if constrained_values := get_constrained_values(
- dto_class=dto_class, property_name=property_name
- ):
- # do not add properties that are configured to be ignored
- if IGNORE in constrained_values: # type: ignore[comparison-overlap]
- continue
- property_names.append(property_name)
-
- max_properties = schema.maxProperties
- if max_properties and len(property_names) > max_properties:
- required_properties = schema.required
- number_of_optional_properties = max_properties - len(required_properties)
- optional_properties = [
- name for name in property_names if name not in required_properties
- ]
- selected_optional_properties = sample(
- optional_properties, number_of_optional_properties
- )
- property_names = required_properties + selected_optional_properties
-
- return property_names
-
-
-def get_constrained_values(
- dto_class: type[Dto], property_name: str
-) -> list[JSON | type[Dto]]:
- relations = dto_class.get_relations()
- values_list = [
- c.values
- for c in relations
- if (isinstance(c, PropertyValueConstraint) and c.property_name == property_name)
- ]
- # values should be empty or contain 1 list of allowed values
- return values_list.pop() if values_list else []
-
-
-def get_dependent_id(
- dto_class: type[Dto],
- property_name: str,
- operation_id: str | None,
- get_id_property_name: GetIdPropertyNameType,
-) -> str | int | float | None:
- relations = dto_class.get_relations()
- # multiple get paths are possible based on the operation being performed
- id_get_paths = [
- (d.get_path, d.operation_id)
- for d in relations
- if (isinstance(d, IdDependency) and d.property_name == property_name)
- ]
- if not id_get_paths:
- return None
- if len(id_get_paths) == 1:
- id_get_path, _ = id_get_paths.pop()
- else:
- try:
- [id_get_path] = [
- path for path, operation in id_get_paths if operation == operation_id
- ]
- # There could be multiple get_paths, but not one for the current operation
- except ValueError:
- return None
-
- valid_id = _path_functions.get_valid_id_for_path(
- path=id_get_path, get_id_property_name=get_id_property_name
- )
- logger.debug(f"get_dependent_id for {id_get_path} returned {valid_id}")
- return valid_id
diff --git a/src/OpenApiLibCore/data_generation/data_generation_core.py b/src/OpenApiLibCore/data_generation/data_generation_core.py
index 71f3ac6..b66ed87 100644
--- a/src/OpenApiLibCore/data_generation/data_generation_core.py
+++ b/src/OpenApiLibCore/data_generation/data_generation_core.py
@@ -10,68 +10,65 @@
from robot.api import logger
-import OpenApiLibCore.path_functions as _path_functions
+import OpenApiLibCore.keyword_logic.path_functions as _path_functions
from OpenApiLibCore.annotations import JSON
-from OpenApiLibCore.dto_base import (
- Dto,
- PropertyValueConstraint,
- ResourceRelation,
-)
-from OpenApiLibCore.dto_utils import DefaultDto
-from OpenApiLibCore.models import (
+from OpenApiLibCore.data_constraints.dto_base import Dto
+from OpenApiLibCore.models import IGNORE
+from OpenApiLibCore.models.oas_models import (
+ ArraySchema,
ObjectSchema,
OpenApiObject,
OperationObject,
ParameterObject,
+ ResolvedSchemaObjectTypes,
UnionTypeSchema,
)
-from OpenApiLibCore.parameter_utils import get_safe_name_for_oas_name
-from OpenApiLibCore.protocols import GetDtoClassType, GetIdPropertyNameType
-from OpenApiLibCore.request_data import RequestData
-from OpenApiLibCore.value_utils import IGNORE
-
-from .body_data_generation import (
- get_json_data_for_dto_class as _get_json_data_for_dto_class,
+from OpenApiLibCore.models.request_data import RequestData
+from OpenApiLibCore.models.resource_relations import (
+ PropertyValueConstraint,
+ ResourceRelation,
)
+from OpenApiLibCore.protocols import ConstraintMappingType
+from OpenApiLibCore.utils.parameter_utils import get_safe_name_for_oas_name
def get_request_data(
path: str,
method: str,
- get_dto_class: GetDtoClassType,
- get_id_property_name: GetIdPropertyNameType,
openapi_spec: OpenApiObject,
) -> RequestData:
method = method.lower()
- dto_cls_name = get_dto_cls_name(path=path, method=method)
+ mapping_cls_name = get_mapping_cls_name(path=path, method=method)
# The path can contain already resolved Ids that have to be matched
# against the parametrized paths in the paths section.
spec_path = _path_functions.get_parametrized_path(
path=path, openapi_spec=openapi_spec
)
- dto_class = get_dto_class(path=spec_path, method=method)
try:
path_item = openapi_spec.paths[spec_path]
operation_spec: OperationObject | None = getattr(path_item, method)
if operation_spec is None:
raise AttributeError
+ constraint_mapping = operation_spec.constraint_mapping
except AttributeError:
logger.info(
f"method '{method}' not supported on '{spec_path}, using empty spec."
)
operation_spec = OperationObject(operationId="")
+ constraint_mapping = None
parameters, params, headers = get_request_parameters(
- dto_class=dto_class, method_spec=operation_spec
+ constraint_mapping=constraint_mapping, method_spec=operation_spec
)
if operation_spec.requestBody is None:
- dto_instance = _get_dto_instance_for_empty_body(
- dto_class=dto_class,
- dto_cls_name=dto_cls_name,
+ constraint_mapping = _get_mapping_dataclass_for_empty_body(
+ constraint_mapping=constraint_mapping,
+ mapping_cls_name=mapping_cls_name,
method_spec=operation_spec,
)
return RequestData(
- dto=dto_instance,
+ valid_data=None,
+ constraint_mapping=constraint_mapping,
parameters=parameters,
params=params,
headers=headers,
@@ -85,92 +82,113 @@ def get_request_data(
f"No supported content schema found: {operation_spec.requestBody.content}"
)
- headers.update({"content-type": operation_spec.requestBody.mime_type})
-
- if isinstance(body_schema, UnionTypeSchema):
- resolved_schemas = body_schema.resolved_schemas
- body_schema = choice(resolved_schemas)
-
- if not isinstance(body_schema, ObjectSchema):
- raise ValueError(f"Selected schema is not an object schema: {body_schema}")
+ if operation_spec.requestBody.mime_type: # pragma: no branch
+ if "content-type" in headers: # pragma: no cover
+ key_value = "content-type"
+ else:
+ key_value = "Content-Type"
+ headers.update({key_value: operation_spec.requestBody.mime_type})
- dto_data = _get_json_data_for_dto_class(
- schema=body_schema,
- dto_class=dto_class,
- get_id_property_name=get_id_property_name,
- operation_id=operation_spec.operationId,
+ valid_data, schema_used_for_data_generation = body_schema.get_valid_value(
+ operation_id=operation_spec.operationId
)
- dto_instance = _get_dto_instance_from_dto_data(
- object_schema=body_schema,
- dto_class=dto_class,
- dto_data=dto_data,
+
+ constraint_mapping = _get_mapping_dataclass_from_valid_data(
+ schema=schema_used_for_data_generation,
+ constraint_mapping=constraint_mapping,
+ valid_data=valid_data,
method_spec=operation_spec,
- dto_cls_name=dto_cls_name,
+ mapping_cls_name=mapping_cls_name,
)
return RequestData(
- dto=dto_instance,
- body_schema=body_schema,
+ valid_data=valid_data,
+ constraint_mapping=constraint_mapping,
+ body_schema=schema_used_for_data_generation,
parameters=parameters,
params=params,
headers=headers,
)
-def _get_dto_instance_for_empty_body(
- dto_class: type[Dto],
- dto_cls_name: str,
+def _get_mapping_dataclass_for_empty_body(
+ constraint_mapping: ConstraintMappingType | None,
+ mapping_cls_name: str,
method_spec: OperationObject,
-) -> Dto:
- if dto_class == DefaultDto:
- dto_instance: Dto = DefaultDto()
- else:
- cls_name = method_spec.operationId if method_spec.operationId else dto_cls_name
- dto_class = make_dataclass(
- cls_name=cls_name,
- fields=[],
- bases=(dto_class,),
- )
- dto_instance = dto_class()
- return dto_instance
+) -> ConstraintMappingType:
+ cls_name = method_spec.operationId if method_spec.operationId else mapping_cls_name
+ base = constraint_mapping if constraint_mapping else Dto
+ mapping_class = make_dataclass(
+ cls_name=cls_name,
+ fields=[],
+ bases=(base,),
+ )
+ return mapping_class
-def _get_dto_instance_from_dto_data(
- object_schema: ObjectSchema,
- dto_class: type[Dto],
- dto_data: JSON,
+def _get_mapping_dataclass_from_valid_data(
+ schema: ResolvedSchemaObjectTypes,
+ constraint_mapping: ConstraintMappingType | None,
+ valid_data: JSON,
method_spec: OperationObject,
- dto_cls_name: str,
-) -> Dto:
- if not isinstance(dto_data, (dict, list)):
- return DefaultDto()
+ mapping_cls_name: str,
+) -> ConstraintMappingType:
+ if not isinstance(schema, (ObjectSchema, ArraySchema)):
+ return _get_mapping_dataclass_for_empty_body(
+ constraint_mapping=constraint_mapping,
+ mapping_cls_name=mapping_cls_name,
+ method_spec=method_spec,
+ )
- if isinstance(dto_data, list):
- raise NotImplementedError
+ if isinstance(schema, ArraySchema):
+ if not valid_data or not isinstance(valid_data, list):
+ return _get_mapping_dataclass_for_empty_body(
+ constraint_mapping=constraint_mapping,
+ mapping_cls_name=mapping_cls_name,
+ method_spec=method_spec,
+ )
+ first_item_data = valid_data[0]
+ item_object_schema = schema.items
+
+ if isinstance(item_object_schema, UnionTypeSchema):
+ resolved_schemas = item_object_schema.resolved_schemas
+ for resolved_schema in resolved_schemas:
+ matched_schema = resolved_schema
+ if isinstance(first_item_data, resolved_schema.python_type):
+ break
+ else:
+ matched_schema = item_object_schema
+
+ mapping_dataclass = _get_mapping_dataclass_from_valid_data(
+ schema=matched_schema,
+ constraint_mapping=constraint_mapping,
+ valid_data=first_item_data,
+ method_spec=method_spec,
+ mapping_cls_name=mapping_cls_name,
+ )
+ return mapping_dataclass
- fields = get_fields_from_dto_data(object_schema, dto_data)
- cls_name = method_spec.operationId if method_spec.operationId else dto_cls_name
- dto_class_ = make_dataclass(
+ assert isinstance(valid_data, dict), (
+ "Data consistency error: schema is of type ObjectSchema but valid_data is not a dict."
+ )
+ fields = get_dataclass_fields(object_schema=schema, valid_data=valid_data)
+ cls_name = method_spec.operationId if method_spec.operationId else mapping_cls_name
+ base = constraint_mapping if constraint_mapping else Dto
+ mapping_dataclass = make_dataclass(
cls_name=cls_name,
fields=fields,
- bases=(dto_class,),
+ bases=(base,),
)
- # dto_data = {get_safe_key(key): value for key, value in dto_data.items()}
- dto_data = {
- get_safe_name_for_oas_name(key): value for key, value in dto_data.items()
- }
- return cast(Dto, dto_class_(**dto_data))
+ return mapping_dataclass
-def get_fields_from_dto_data(
- object_schema: ObjectSchema, dto_data: dict[str, JSON]
+def get_dataclass_fields(
+ object_schema: ObjectSchema, valid_data: dict[str, JSON]
) -> list[tuple[str, type[object], Field[object]]]:
- """Get a dataclasses fields list based on the content_schema and dto_data."""
+ """Get a dataclasses fields list based on the object_schema and valid_data."""
fields: list[tuple[str, type[object], Field[object]]] = []
- for key, value in dto_data.items():
- # safe_key = get_safe_key(key)
+ for key, value in valid_data.items():
safe_key = get_safe_name_for_oas_name(key)
- # metadata = {"original_property_name": key}
if key in object_schema.required:
# The fields list is used to create a dataclass, so non-default fields
# must go before fields with a default
@@ -182,7 +200,7 @@ def get_fields_from_dto_data(
return fields
-def get_dto_cls_name(path: str, method: str) -> str:
+def get_mapping_cls_name(path: str, method: str) -> str:
method = method.capitalize()
path = path.translate({ord(i): None for i in "{}"})
path_parts = path.split("/")
@@ -192,11 +210,13 @@ def get_dto_cls_name(path: str, method: str) -> str:
def get_request_parameters(
- dto_class: Dto | type[Dto], method_spec: OperationObject
+ constraint_mapping: ConstraintMappingType | None, method_spec: OperationObject
) -> tuple[list[ParameterObject], dict[str, Any], dict[str, str]]:
"""Get the methods parameter spec and params and headers with valid data."""
parameters = method_spec.parameters if method_spec.parameters else []
- parameter_relations = dto_class.get_parameter_relations()
+ parameter_relations = (
+ constraint_mapping.get_parameter_relations() if constraint_mapping else []
+ )
query_params = [p for p in parameters if p.in_ == "query"]
header_params = [p for p in parameters if p.in_ == "header"]
params = get_parameter_data(query_params, parameter_relations)
@@ -229,7 +249,7 @@ def get_parameter_data(
continue
if parameter.schema_ is None:
- continue
- value = parameter.schema_.get_valid_value()
+ continue # pragma: no cover
+ value = parameter.schema_.get_valid_value()[0]
result[parameter_name] = value
return result
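
For reference, a self-contained illustration of the make_dataclass pattern used by _get_mapping_dataclass_for_empty_body and _get_mapping_dataclass_from_valid_data: a dataclass is generated at runtime with the user-supplied constraint mapping (or the Dto base) as its base class, so class-level relation lookups keep working on the generated class. The class and field names below are invented:

from dataclasses import field, fields, make_dataclass

from OpenApiLibCore import Dto

# Non-default fields must come before fields with a default, as noted in
# get_dataclass_fields above.
GeneratedMapping = make_dataclass(
    cls_name="PostEmployee",
    fields=[("name", object), ("title", object, field(default=None))],
    bases=(Dto,),
)

assert issubclass(GeneratedMapping, Dto)
assert GeneratedMapping.get_relations() == []  # default from the Dto base
assert [f.name for f in fields(GeneratedMapping)] == ["name", "title"]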
diff --git a/src/OpenApiLibCore/data_invalidation.py b/src/OpenApiLibCore/data_generation/data_invalidation.py
similarity index 62%
rename from src/OpenApiLibCore/data_invalidation.py
rename to src/OpenApiLibCore/data_generation/data_invalidation.py
index 63fa745..f93bb27 100644
--- a/src/OpenApiLibCore/data_invalidation.py
+++ b/src/OpenApiLibCore/data_generation/data_invalidation.py
@@ -5,80 +5,151 @@
from copy import deepcopy
from random import choice
-from typing import Any
+from typing import Any, Literal, overload
from requests import Response
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
from OpenApiLibCore.annotations import JSON
-from OpenApiLibCore.dto_base import (
- NOT_SET,
+from OpenApiLibCore.data_constraints.dto_base import (
Dto,
+)
+from OpenApiLibCore.models import IGNORE
+from OpenApiLibCore.models.oas_models import (
+ ArraySchema,
+ ObjectSchema,
+ ParameterObject,
+ UnionTypeSchema,
+)
+from OpenApiLibCore.models.request_data import RequestData
+from OpenApiLibCore.models.resource_relations import (
+ NOT_SET,
IdReference,
PropertyValueConstraint,
UniquePropertyValueConstraint,
)
-from OpenApiLibCore.models import ParameterObject, UnionTypeSchema
-from OpenApiLibCore.request_data import RequestData
-from OpenApiLibCore.value_utils import IGNORE, get_invalid_value
run_keyword = BuiltIn().run_keyword
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_json_data_with_conflict"], *args: object
+) -> dict[str, JSON]: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["ensure_in_use"], *args: object
+) -> None: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_request_data"], *args: str
+) -> RequestData: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["authorized_request"], *args: object
+) -> Response: ... # pragma: no cover
+
+
+def _run_keyword(keyword_name: str, *args: object) -> object:
+ return run_keyword(keyword_name, *args)
+
+
def get_invalid_body_data(
url: str,
method: str,
status_code: int,
request_data: RequestData,
- invalid_property_default_response: int,
-) -> dict[str, Any]:
+ invalid_data_default_response: int,
+) -> JSON:
method = method.lower()
- data_relations = request_data.dto.get_body_relations_for_error_code(status_code)
+ data_relations = request_data.constraint_mapping.get_body_relations_for_error_code(
+ status_code
+ )
if not data_relations:
if request_data.body_schema is None:
raise ValueError(
"Failed to invalidate: request_data does not contain a body_schema."
)
- json_data = request_data.dto.get_invalidated_data(
- schema=request_data.body_schema,
+
+ if not isinstance(request_data.body_schema, (ArraySchema, ObjectSchema)):
+ raise NotImplementedError("primitive types not supported for body data.")
+
+ if isinstance(request_data.body_schema, ArraySchema):
+ if not isinstance(request_data.valid_data, list):
+ raise ValueError("Type of valid_data does not match body_schema type.")
+ invalid_item_data: list[JSON] = request_data.body_schema.get_invalid_data(
+ valid_data=request_data.valid_data,
+ status_code=status_code,
+ invalid_property_default_code=invalid_data_default_response,
+ )
+ return [invalid_item_data]
+
+ if not isinstance(request_data.valid_data, dict):
+ raise ValueError("Type of valid_data does not match body_schema type.")
+ json_data = request_data.body_schema.get_invalid_data(
+ valid_data=request_data.valid_data,
status_code=status_code,
- invalid_property_default_code=invalid_property_default_response,
+ invalid_property_default_code=invalid_data_default_response,
)
return json_data
+
resource_relation = choice(data_relations)
if isinstance(resource_relation, UniquePropertyValueConstraint):
- json_data = run_keyword(
+ return _run_keyword(
"get_json_data_with_conflict",
url,
method,
- request_data.dto,
+ request_data.valid_data,
+ request_data.constraint_mapping,
status_code,
)
- elif isinstance(resource_relation, IdReference):
- run_keyword("ensure_in_use", url, resource_relation)
- json_data = request_data.dto.as_dict()
- else:
- if request_data.body_schema is None:
- raise ValueError(
- "Failed to invalidate: request_data does not contain a body_schema."
- )
- json_data = request_data.dto.get_invalidated_data(
- schema=request_data.body_schema,
+ if isinstance(resource_relation, IdReference):
+ _run_keyword("ensure_in_use", url, resource_relation)
+ return request_data.valid_data
+
+ if request_data.body_schema is None:
+ raise ValueError(
+ "Failed to invalidate: request_data does not contain a body_schema."
+ )
+ if not isinstance(request_data.body_schema, (ArraySchema, ObjectSchema)):
+ raise NotImplementedError("primitive types not supported for body data.")
+
+ if isinstance(request_data.body_schema, ArraySchema):
+ if not isinstance(request_data.valid_data, list):
+ raise ValueError("Type of valid_data does not match body_schema type.")
+ invalid_item_data = request_data.body_schema.get_invalid_data(
+ valid_data=request_data.valid_data,
status_code=status_code,
- invalid_property_default_code=invalid_property_default_response,
+ invalid_property_default_code=invalid_data_default_response,
)
- return json_data
+ return [invalid_item_data]
+
+ if not isinstance(request_data.valid_data, dict):
+ raise ValueError("Type of valid_data does not match body_schema type.")
+ return request_data.body_schema.get_invalid_data(
+ valid_data=request_data.valid_data,
+ status_code=status_code,
+ invalid_property_default_code=invalid_data_default_response,
+ )
def get_invalidated_parameters(
- status_code: int, request_data: RequestData, invalid_property_default_response: int
-) -> tuple[dict[str, JSON], dict[str, JSON]]:
+ status_code: int, request_data: RequestData, invalid_data_default_response: int
+) -> tuple[dict[str, JSON], dict[str, str]]:
if not request_data.parameters:
raise ValueError("No params or headers to invalidate.")
# ensure the status_code can be triggered
- relations = request_data.dto.get_parameter_relations_for_error_code(status_code)
+ relations = request_data.constraint_mapping.get_parameter_relations_for_error_code(
+ status_code
+ )
relations_for_status_code = [
r
for r in relations
@@ -92,14 +163,14 @@ def get_invalidated_parameters(
}
relation_property_names = {r.property_name for r in relations_for_status_code}
if not relation_property_names:
- if status_code != invalid_property_default_response:
+ if status_code != invalid_data_default_response:
raise ValueError(f"No relations to cause status_code {status_code} found.")
# ensure we're not modifying mutable properties
params = deepcopy(request_data.params)
headers = deepcopy(request_data.headers)
- if status_code == invalid_property_default_response:
+ if status_code == invalid_data_default_response:
# take the params and headers that can be invalidated based on data type
        # and expand the set with properties that can be invalidated by relations
parameter_names = set(request_data.params_that_can_be_invalidated).union(
@@ -114,8 +185,8 @@ def get_invalidated_parameters(
# non-default status_codes can only be the result of a Relation
parameter_names = relation_property_names
- # Dto mappings may contain generic mappings for properties that are not present
- # in this specific schema
+ # Constraint mappings may contain generic mappings for properties that are
+ # not present in this specific schema
request_data_parameter_names = [p.name for p in request_data.parameters]
additional_relation_property_names = {
n for n in relation_property_names if n not in request_data_parameter_names
@@ -197,19 +268,17 @@ def get_invalidated_parameters(
raise ValueError(f"No schema defined for parameter: {parameter_data}.")
if isinstance(value_schema, UnionTypeSchema):
- # FIXME: extra handling may be needed in case of values_from_constraint
value_schema = choice(value_schema.resolved_schemas)
- invalid_value = get_invalid_value(
- value_schema=value_schema,
- current_value=valid_value,
+ invalid_value = value_schema.get_invalid_value(
+ valid_value=valid_value, # type: ignore[arg-type]
values_from_constraint=values_from_constraint,
)
logger.debug(f"{parameter_to_invalidate} changed to {invalid_value}")
# update the params / headers and return
if parameter_to_invalidate in params.keys():
- params[parameter_to_invalidate] = invalid_value
+ params[parameter_to_invalidate] = invalid_value # pyright: ignore[reportArgumentType]
else:
headers[parameter_to_invalidate] = str(invalid_value)
return params, headers
@@ -218,10 +287,10 @@ def get_invalidated_parameters(
def ensure_parameter_in_parameters(
parameter_to_invalidate: str,
params: dict[str, JSON],
- headers: dict[str, JSON],
+ headers: dict[str, str],
parameter_data: ParameterObject,
values_from_constraint: list[JSON],
-) -> tuple[dict[str, JSON], dict[str, JSON]]:
+) -> tuple[dict[str, JSON], dict[str, str]]:
"""
Returns the params, headers tuple with parameter_to_invalidate with a valid
value to params or headers if not originally present.
@@ -239,7 +308,7 @@ def ensure_parameter_in_parameters(
if isinstance(value_schema, UnionTypeSchema):
value_schema = choice(value_schema.resolved_schemas)
- valid_value = value_schema.get_valid_value()
+ valid_value = value_schema.get_valid_value()[0]
if (
parameter_data.in_ == "query"
and parameter_to_invalidate not in params.keys()
@@ -254,12 +323,18 @@ def ensure_parameter_in_parameters(
def get_json_data_with_conflict(
- url: str, base_url: str, method: str, dto: Dto, conflict_status_code: int
+ url: str,
+ base_url: str,
+ method: str,
+ json_data: dict[str, JSON],
+ constraint_mapping: type[Dto],
+ conflict_status_code: int,
) -> dict[str, Any]:
method = method.lower()
- json_data = dto.as_dict()
unique_property_value_constraints = [
- r for r in dto.get_relations() if isinstance(r, UniquePropertyValueConstraint)
+ r
+ for r in constraint_mapping.get_relations()
+ if isinstance(r, UniquePropertyValueConstraint)
]
for relation in unique_property_value_constraints:
json_data[relation.property_name] = relation.value
@@ -267,21 +342,22 @@ def get_json_data_with_conflict(
if method in ["patch", "put"]:
post_url_parts = url.split("/")[:-1]
post_url = "/".join(post_url_parts)
- # the PATCH or PUT may use a different dto than required for POST
- # so a valid POST dto must be constructed
+ # the PATCH or PUT may use a different constraint_mapping than required for
+ # POST so valid POST data must be constructed
path = post_url.replace(base_url, "")
- request_data: RequestData = run_keyword("get_request_data", path, "post")
- post_json = request_data.dto.as_dict()
- for key in post_json.keys():
- if key in json_data:
- post_json[key] = json_data.get(key)
+ request_data = _run_keyword("get_request_data", path, "post")
+ post_json = request_data.valid_data
+ if isinstance(post_json, dict):
+ for key in post_json.keys():
+ if key in json_data:
+ post_json[key] = json_data.get(key)
else:
post_url = url
post_json = json_data
path = post_url.replace(base_url, "")
- request_data = run_keyword("get_request_data", path, "post")
+ request_data = _run_keyword("get_request_data", path, "post")
- response: Response = run_keyword(
+ response = _run_keyword(
"authorized_request",
post_url,
"post",
@@ -295,5 +371,6 @@ def get_json_data_with_conflict(
)
return json_data
raise ValueError(
- f"No UniquePropertyValueConstraint in the get_relations list on dto {dto}."
+ f"No UniquePropertyValueConstraint in the get_relations list on "
+ f"constraint_mapping {constraint_mapping}."
)
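
The typed _run_keyword wrapper above follows a small overload-on-Literal pattern: the overloads give type checkers a per-keyword return type, while a single runtime implementation delegates to Robot Framework's run_keyword. A stand-alone sketch of the idea, with a stubbed runtime body instead of BuiltIn().run_keyword:

from typing import Literal, overload


@overload
def _call(keyword_name: Literal["get_valid_url"], *args: object) -> str: ...
@overload
def _call(keyword_name: Literal["get_ids_from_url"], *args: object) -> list[str]: ...
def _call(keyword_name: str, *args: object) -> object:
    # Stub standing in for BuiltIn().run_keyword(keyword_name, *args).
    stub_results: dict[str, object] = {
        "get_valid_url": "http://localhost:8000/employees/1",
        "get_ids_from_url": ["1", "2"],
    }
    return stub_results[keyword_name]


url: str = _call("get_valid_url")           # checker infers str
ids: list[str] = _call("get_ids_from_url")  # checker infers list[str]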
diff --git a/src/OpenApiLibCore/localized_faker.py b/src/OpenApiLibCore/data_generation/localized_faker.py
similarity index 100%
rename from src/OpenApiLibCore/localized_faker.py
rename to src/OpenApiLibCore/data_generation/localized_faker.py
diff --git a/src/OpenApiLibCore/data_generation/value_utils.py b/src/OpenApiLibCore/data_generation/value_utils.py
new file mode 100644
index 0000000..81dc28c
--- /dev/null
+++ b/src/OpenApiLibCore/data_generation/value_utils.py
@@ -0,0 +1,39 @@
+"""Utility module with functions to handle OpenAPI value types and restrictions."""
+
+
+def json_type_name_of_python_type(python_type: type) -> str:
+ """Return the JSON type name for supported Python types."""
+ if python_type == str:
+ return "string"
+ if python_type == bool:
+ return "boolean"
+ if python_type == int:
+ return "integer"
+ if python_type == float:
+ return "number"
+ if python_type == list:
+ return "array"
+ if python_type == dict:
+ return "object"
+ if python_type == type(None):
+ return "null"
+ raise ValueError(f"No json type mapping for Python type {python_type} available.")
+
+
+def python_type_by_json_type_name(type_name: str) -> type:
+ """Return the Python type based on the JSON type name."""
+ if type_name == "string":
+ return str
+ if type_name == "boolean":
+ return bool
+ if type_name == "integer":
+ return int
+ if type_name == "number":
+ return float
+ if type_name == "array":
+ return list
+ if type_name == "object":
+ return dict
+ if type_name == "null":
+ return type(None)
+ raise ValueError(f"No Python type mapping for JSON type '{type_name}' available.")
diff --git a/src/OpenApiLibCore/dto_base.py b/src/OpenApiLibCore/dto_base.py
deleted file mode 100644
index b646103..0000000
--- a/src/OpenApiLibCore/dto_base.py
+++ /dev/null
@@ -1,260 +0,0 @@
-"""
-Module holding the (base) classes that can be used by the user of the OpenApiLibCore
-to implement custom mappings for dependencies between resources in the API under
-test and constraints / restrictions on properties of the resources.
-"""
-
-from abc import ABC
-from dataclasses import dataclass, fields
-from random import choice, shuffle
-from typing import Any
-from uuid import uuid4
-
-from robot.api import logger
-
-from OpenApiLibCore import value_utils
-from OpenApiLibCore.models import NullSchema, ObjectSchema, UnionTypeSchema
-from OpenApiLibCore.parameter_utils import get_oas_name_from_safe_name
-
-NOT_SET = object()
-SENTINEL = object()
-
-
-class ResourceRelation(ABC):
- """ABC for all resource relations or restrictions within the API."""
-
- property_name: str
- error_code: int
-
-
-@dataclass
-class PathPropertiesConstraint(ResourceRelation):
- """The value to be used as the ``path`` for related requests."""
-
- path: str
- property_name: str = "id"
- invalid_value: Any = NOT_SET
- invalid_value_error_code: int = 422
- error_code: int = 404
-
-
-@dataclass
-class PropertyValueConstraint(ResourceRelation):
- """The allowed values for property_name."""
-
- property_name: str
- values: list[Any]
- invalid_value: Any = NOT_SET
- invalid_value_error_code: int = 422
- error_code: int = 422
- treat_as_mandatory: bool = False
-
-
-@dataclass
-class IdDependency(ResourceRelation):
- """The path where a valid id for the property_name can be gotten (using GET)."""
-
- property_name: str
- get_path: str
- operation_id: str = ""
- error_code: int = 422
-
-
-@dataclass
-class IdReference(ResourceRelation):
- """The path where a resource that needs this resource's id can be created (using POST)."""
-
- property_name: str
- post_path: str
- error_code: int = 422
-
-
-@dataclass
-class UniquePropertyValueConstraint(ResourceRelation):
- """The value of the property must be unique within the resource scope."""
-
- property_name: str
- value: Any
- error_code: int = 422
-
-
-@dataclass
-class Dto(ABC):
- """Base class for the Dto class."""
-
- @staticmethod
- def get_path_relations() -> list[PathPropertiesConstraint]:
- """Return the list of Relations for the header and query parameters."""
- return []
-
- def get_path_relations_for_error_code(
- self, error_code: int
- ) -> list[PathPropertiesConstraint]:
- """Return the list of Relations associated with the given error_code."""
- relations: list[PathPropertiesConstraint] = [
- r
- for r in self.get_path_relations()
- if r.error_code == error_code
- or (
- getattr(r, "invalid_value_error_code", None) == error_code
- and getattr(r, "invalid_value", None) != NOT_SET
- )
- ]
- return relations
-
- @staticmethod
- def get_parameter_relations() -> list[ResourceRelation]:
- """Return the list of Relations for the header and query parameters."""
- return []
-
- def get_parameter_relations_for_error_code(
- self, error_code: int
- ) -> list[ResourceRelation]:
- """Return the list of Relations associated with the given error_code."""
- relations: list[ResourceRelation] = [
- r
- for r in self.get_parameter_relations()
- if r.error_code == error_code
- or (
- getattr(r, "invalid_value_error_code", None) == error_code
- and getattr(r, "invalid_value", None) != NOT_SET
- )
- ]
- return relations
-
- @staticmethod
- def get_relations() -> list[ResourceRelation]:
- """Return the list of Relations for the (json) body."""
- return []
-
- def get_body_relations_for_error_code(
- self, error_code: int
- ) -> list[ResourceRelation]:
- """
- Return the list of Relations associated with the given error_code that are
- applicable to the body / payload of the request.
- """
- relations: list[ResourceRelation] = [
- r
- for r in self.get_relations()
- if r.error_code == error_code
- or (
- getattr(r, "invalid_value_error_code", None) == error_code
- and getattr(r, "invalid_value", None) != NOT_SET
- )
- ]
- return relations
-
- def get_invalidated_data(
- self,
- schema: ObjectSchema,
- status_code: int,
- invalid_property_default_code: int,
- ) -> dict[str, Any]:
- """Return a data set with one of the properties set to an invalid value or type."""
- properties: dict[str, Any] = self.as_dict()
-
- relations = self.get_body_relations_for_error_code(error_code=status_code)
- property_names = [r.property_name for r in relations]
- if status_code == invalid_property_default_code:
- # add all properties defined in the schema, including optional properties
- property_names.extend((schema.properties.root.keys())) # type: ignore[union-attr]
- if not property_names:
- raise ValueError(
- f"No property can be invalidated to cause status_code {status_code}"
- )
- # Remove duplicates, then shuffle the property_names so different properties on
- # the Dto are invalidated when rerunning the test.
- shuffle(list(set(property_names)))
- for property_name in property_names:
- # if possible, invalidate a constraint but send otherwise valid data
- id_dependencies = [
- r
- for r in relations
- if isinstance(r, IdDependency) and r.property_name == property_name
- ]
- if id_dependencies:
- invalid_id = uuid4().hex
- logger.debug(
- f"Breaking IdDependency for status_code {status_code}: setting "
- f"{property_name} to {invalid_id}"
- )
- properties[property_name] = invalid_id
- return properties
-
- invalid_value_from_constraint = [
- r.invalid_value
- for r in relations
- if isinstance(r, PropertyValueConstraint)
- and r.property_name == property_name
- and r.invalid_value_error_code == status_code
- ]
- if (
- invalid_value_from_constraint
- and invalid_value_from_constraint[0] is not NOT_SET
- ):
- properties[property_name] = invalid_value_from_constraint[0]
- logger.debug(
- f"Using invalid_value {invalid_value_from_constraint[0]} to "
- f"invalidate property {property_name}"
- )
- return properties
-
- value_schema = schema.properties.root[property_name] # type: ignore[union-attr]
- if isinstance(value_schema, UnionTypeSchema):
- # Filter "type": "null" from the possible types since this indicates an
- # optional / nullable property that can only be invalidated by sending
- # invalid data of a non-null type
- non_null_schemas = [
- s
- for s in value_schema.resolved_schemas
- if not isinstance(s, NullSchema)
- ]
- value_schema = choice(non_null_schemas)
-
- # there may not be a current_value when invalidating an optional property
- current_value = properties.get(property_name, SENTINEL)
- if current_value is SENTINEL:
- # the current_value isn't very relevant as long as the type is correct
- # so no logic to handle Relations / objects / arrays here
- property_type = value_schema.type
- if property_type == "object":
- current_value = {}
- elif property_type == "array":
- current_value = []
- else:
- current_value = value_schema.get_valid_value()
-
- values_from_constraint = [
- r.values[0]
- for r in relations
- if isinstance(r, PropertyValueConstraint)
- and r.property_name == property_name
- ]
-
- invalid_value = value_utils.get_invalid_value(
- value_schema=value_schema,
- current_value=current_value,
- values_from_constraint=values_from_constraint,
- )
- properties[property_name] = invalid_value
- logger.debug(
- f"Property {property_name} changed to {invalid_value!r} (received from "
- f"get_invalid_value)"
- )
- return properties
- logger.warn("get_invalidated_data returned unchanged properties")
- return properties # pragma: no cover
-
- def as_dict(self) -> dict[Any, Any]:
- """Return the dict representation of the Dto."""
- result = {}
-
- for field in fields(self):
- field_name = field.name
- if field_name not in self.__dict__:
- continue
- original_name = get_oas_name_from_safe_name(field_name)
- result[original_name] = getattr(self, field_name)
-
- return result
diff --git a/src/OpenApiLibCore/dto_utils.py b/src/OpenApiLibCore/dto_utils.py
deleted file mode 100644
index 4f8c8c9..0000000
--- a/src/OpenApiLibCore/dto_utils.py
+++ /dev/null
@@ -1,125 +0,0 @@
-"""Module for helper methods and classes used by the openapi_executors module."""
-
-from dataclasses import dataclass
-from importlib import import_module
-from typing import Any, Callable, Type, overload
-
-from robot.api import logger
-
-from OpenApiLibCore.dto_base import Dto
-from OpenApiLibCore.protocols import (
- GetDtoClassType,
- GetIdPropertyNameType,
- GetPathDtoClassType,
-)
-
-
-@dataclass
-class _DefaultIdPropertyName:
- id_property_name: str = "id"
-
-
-DEFAULT_ID_PROPERTY_NAME = _DefaultIdPropertyName()
-
-
-@dataclass
-class DefaultDto(Dto):
- """A default Dto that can be instantiated."""
-
-
-def get_dto_class(mappings_module_name: str) -> GetDtoClassType:
- return GetDtoClass(mappings_module_name=mappings_module_name)
-
-
-class GetDtoClass:
- """Callable class to return Dtos from user-implemented mappings file."""
-
- def __init__(self, mappings_module_name: str) -> None:
- try:
- mappings_module = import_module(mappings_module_name)
- self.dto_mapping: dict[tuple[str, str], Type[Dto]] = (
- mappings_module.DTO_MAPPING
- )
- except (ImportError, AttributeError, ValueError) as exception:
- if mappings_module_name != "no mapping":
- logger.error(f"DTO_MAPPING was not imported: {exception}")
- self.dto_mapping = {}
-
- def __call__(self, path: str, method: str) -> Type[Dto]:
- try:
- return self.dto_mapping[(path, method.lower())]
- except KeyError:
- logger.debug(f"No Dto mapping for {path} {method}.")
- return DefaultDto
-
-
-def get_path_dto_class(mappings_module_name: str) -> GetPathDtoClassType:
- return GetPathDtoClass(mappings_module_name=mappings_module_name)
-
-
-class GetPathDtoClass:
- """Callable class to return Dtos from user-implemented mappings file."""
-
- def __init__(self, mappings_module_name: str) -> None:
- try:
- mappings_module = import_module(mappings_module_name)
- self.dto_mapping: dict[str, Type[Dto]] = mappings_module.PATH_MAPPING
- except (ImportError, AttributeError, ValueError) as exception:
- if mappings_module_name != "no mapping":
- logger.error(f"PATH_MAPPING was not imported: {exception}")
- self.dto_mapping = {}
-
- def __call__(self, path: str) -> Type[Dto]:
- try:
- return self.dto_mapping[path]
- except KeyError:
- logger.debug(f"No Dto mapping for {path}.")
- return DefaultDto
-
-
-def get_id_property_name(mappings_module_name: str) -> GetIdPropertyNameType:
- return GetIdPropertyName(mappings_module_name=mappings_module_name)
-
-
-class GetIdPropertyName:
- """
- Callable class to return the name of the property that uniquely identifies
- the resource from user-implemented mappings file.
- """
-
- def __init__(self, mappings_module_name: str) -> None:
- try:
- mappings_module = import_module(mappings_module_name)
- self.id_mapping: dict[
- str,
- str | tuple[str, Callable[[str], str] | Callable[[int], int]],
- ] = mappings_module.ID_MAPPING
- except (ImportError, AttributeError, ValueError) as exception:
- if mappings_module_name != "no mapping":
- logger.error(f"ID_MAPPING was not imported: {exception}")
- self.id_mapping = {}
-
- def __call__(
- self, path: str
- ) -> tuple[str, Callable[[str], str] | Callable[[int], int]]:
- try:
- value_or_mapping = self.id_mapping[path]
- if isinstance(value_or_mapping, str):
- return (value_or_mapping, dummy_transformer)
- return value_or_mapping
- except KeyError:
- default_id_name = DEFAULT_ID_PROPERTY_NAME.id_property_name
- logger.debug(f"No id mapping for {path} ('{default_id_name}' will be used)")
- return (default_id_name, dummy_transformer)
-
-
-@overload
-def dummy_transformer(valid_id: str) -> str: ... # pragma: no cover
-
-
-@overload
-def dummy_transformer(valid_id: int) -> int: ... # pragma: no cover
-
-
-def dummy_transformer(valid_id: Any) -> Any:
- return valid_id
diff --git a/src/OpenApiLibCore/keyword_logic/__init__.py b/src/OpenApiLibCore/keyword_logic/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/OpenApiLibCore/path_functions.py b/src/OpenApiLibCore/keyword_logic/path_functions.py
similarity index 75%
rename from src/OpenApiLibCore/path_functions.py
rename to src/OpenApiLibCore/keyword_logic/path_functions.py
index d104d7b..3ac3f97 100644
--- a/src/OpenApiLibCore/path_functions.py
+++ b/src/OpenApiLibCore/keyword_logic/path_functions.py
@@ -3,18 +3,57 @@
import json as _json
from itertools import zip_longest
from random import choice
-from typing import Any
+from typing import Any, Literal, overload
from requests import Response
from robot.libraries.BuiltIn import BuiltIn
-from OpenApiLibCore.models import OpenApiObject
-from OpenApiLibCore.protocols import GetIdPropertyNameType, GetPathDtoClassType
-from OpenApiLibCore.request_data import RequestData
+from OpenApiLibCore.models import oas_models
+from OpenApiLibCore.models.request_data import RequestData
run_keyword = BuiltIn().run_keyword
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_valid_id_for_path"], *args: str
+) -> str: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_ids_from_url"], *args: str
+) -> list[str]: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_valid_url"], *args: str
+) -> str: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_parameterized_path_from_url"], *args: str
+) -> str: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_request_data"], *args: str
+) -> RequestData: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["authorized_request"], *args: object
+) -> Response: ... # pragma: no cover
+
+
+def _run_keyword(keyword_name: str, *args: object) -> object:
+ return run_keyword(keyword_name, *args)
+
+
def match_parts(parts: list[str], spec_parts: list[str]) -> bool:
for part, spec_part in zip_longest(parts, spec_parts, fillvalue="Filler"):
if part == "Filler" or spec_part == "Filler":
@@ -24,14 +63,14 @@ def match_parts(parts: list[str], spec_parts: list[str]) -> bool:
return True
-def get_parametrized_path(path: str, openapi_spec: OpenApiObject) -> str:
+def get_parametrized_path(path: str, openapi_spec: oas_models.OpenApiObject) -> str:
path_parts = path.split("/")
# if the last part is empty, the path has a trailing `/` that
# should be ignored during matching
if path_parts[-1] == "":
_ = path_parts.pop(-1)
- spec_paths: list[str] = list(openapi_spec.paths.keys())
+ spec_paths = list(openapi_spec.paths.keys())
candidates: list[str] = []
@@ -63,19 +102,19 @@ def get_parametrized_path(path: str, openapi_spec: OpenApiObject) -> str:
def get_valid_url(
path: str,
base_url: str,
- get_path_dto_class: GetPathDtoClassType,
- openapi_spec: OpenApiObject,
+ openapi_spec: oas_models.OpenApiObject,
) -> str:
try:
# path can be partially resolved or provided by a PathPropertiesConstraint
parametrized_path = get_parametrized_path(path=path, openapi_spec=openapi_spec)
- _ = openapi_spec.paths[parametrized_path]
+ path_item = openapi_spec.paths[parametrized_path]
except KeyError:
raise ValueError(
f"{path} not found in paths section of the OpenAPI document."
) from None
- dto_class = get_path_dto_class(path=path)
- relations = dto_class.get_path_relations()
+
+ constraint_mapping = path_item.constraint_mapping
+ relations = constraint_mapping.get_path_relations() if constraint_mapping else []
paths = [p.path for p in relations]
if paths:
url = f"{base_url}{choice(paths)}"
@@ -85,9 +124,7 @@ def get_valid_url(
if part.startswith("{") and part.endswith("}"):
type_path_parts = path_parts[slice(index)]
type_path = "/".join(type_path_parts)
- existing_id: str | int | float = run_keyword(
- "get_valid_id_for_path", type_path
- )
+ existing_id = _run_keyword("get_valid_id_for_path", type_path)
path_parts[index] = str(existing_id)
resolved_path = "/".join(path_parts)
url = f"{base_url}{resolved_path}"
@@ -96,14 +133,14 @@ def get_valid_url(
def get_valid_id_for_path(
path: str,
- get_id_property_name: GetIdPropertyNameType,
-) -> str | int:
- url: str = run_keyword("get_valid_url", path)
+ openapi_spec: oas_models.OpenApiObject,
+) -> str:
+ url = _run_keyword("get_valid_url", path)
# Try to create a new resource to prevent conflicts caused by
# operations performed on the same resource by other test cases
- request_data: RequestData = run_keyword("get_request_data", path, "post")
+ request_data = _run_keyword("get_request_data", path, "post")
- response: Response = run_keyword(
+ response = _run_keyword(
"authorized_request",
url,
"post",
@@ -112,13 +149,14 @@ def get_valid_id_for_path(
request_data.get_required_properties_dict(),
)
- id_property, id_transformer = get_id_property_name(path=path)
+ path_item = openapi_spec.paths[path]
+ id_property, id_transformer = path_item.id_mapper
if not response.ok:
# If a new resource cannot be created using POST, try to retrieve a
# valid id using a GET request.
try:
- valid_id = choice(run_keyword("get_ids_from_url", url))
+ valid_id = choice(_run_keyword("get_ids_from_url", url))
return id_transformer(valid_id)
except Exception as exception:
raise AssertionError(
@@ -172,11 +210,11 @@ def get_valid_id_for_path(
def get_ids_from_url(
url: str,
- get_id_property_name: GetIdPropertyNameType,
+ openapi_spec: oas_models.OpenApiObject,
) -> list[str]:
- path: str = run_keyword("get_parameterized_path_from_url", url)
- request_data: RequestData = run_keyword("get_request_data", path, "get")
- response = run_keyword(
+ path = _run_keyword("get_parameterized_path_from_url", url)
+ request_data = _run_keyword("get_request_data", path, "get")
+ response = _run_keyword(
"authorized_request",
url,
"get",
@@ -187,11 +225,8 @@ def get_ids_from_url(
response_data: dict[str, Any] | list[dict[str, Any]] = response.json()
# determine the property name to use
- mapping = get_id_property_name(path=path)
- if isinstance(mapping, str):
- id_property = mapping
- else:
- id_property, _ = mapping
+ path_item = openapi_spec.paths[path]
+ id_property, _ = path_item.id_mapper
if isinstance(response_data, list):
valid_ids: list[str] = [item[id_property] for item in response_data]
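
The _run_keyword wrappers introduced above use typing.overload with Literal keyword names so that a type checker can infer a concrete return type from Robot Framework's otherwise untyped run_keyword. A minimal, self-contained sketch of the same pattern (the keyword names and return types below are made up for illustration):

from typing import Literal, overload

@overload
def dispatch(name: Literal["get_count"]) -> int: ...
@overload
def dispatch(name: Literal["get_label"]) -> str: ...
def dispatch(name: str) -> object:
    # the runtime implementation stays a plain dynamic lookup; only the
    # static view is narrowed by the Literal overloads above
    registry = {"get_count": lambda: 42, "get_label": lambda: "example"}
    return registry[name]()

count: int = dispatch("get_count")  # type-checks without a cast
label: str = dispatch("get_label")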
diff --git a/src/OpenApiLibCore/path_invalidation.py b/src/OpenApiLibCore/keyword_logic/path_invalidation.py
similarity index 57%
rename from src/OpenApiLibCore/path_invalidation.py
rename to src/OpenApiLibCore/keyword_logic/path_invalidation.py
index 31cd042..83c7449 100644
--- a/src/OpenApiLibCore/path_invalidation.py
+++ b/src/OpenApiLibCore/keyword_logic/path_invalidation.py
@@ -1,24 +1,43 @@
"""Module holding functions related to invalidation of paths and urls."""
from random import choice
+from typing import Literal, overload
from uuid import uuid4
from robot.libraries.BuiltIn import BuiltIn
-from OpenApiLibCore.protocols import GetPathDtoClassType
+from OpenApiLibCore.models import oas_models
run_keyword = BuiltIn().run_keyword
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_parameterized_path_from_url"], *args: str
+) -> str: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["overload_default"], *args: object
+) -> object: ... # pragma: no cover
+
+
+def _run_keyword(keyword_name: str, *args: object) -> object:
+ return run_keyword(keyword_name, *args)
+
+
def get_invalidated_url(
valid_url: str,
- path: str,
base_url: str,
- get_path_dto_class: GetPathDtoClassType,
+ openapi_spec: oas_models.OpenApiObject,
expected_status_code: int,
) -> str:
- dto_class = get_path_dto_class(path=path)
- relations = dto_class.get_path_relations()
+ path = _run_keyword("get_parameterized_path_from_url", valid_url)
+ path_item = openapi_spec.paths[path]
+
+ constraint_mapping = path_item.constraint_mapping
+ relations = constraint_mapping.get_path_relations() if constraint_mapping else []
paths = [
p.invalid_value
for p in relations
@@ -27,7 +46,7 @@ def get_invalidated_url(
if paths:
url = f"{base_url}{choice(paths)}"
return url
- parameterized_path: str = run_keyword("get_parameterized_path_from_url", valid_url)
+ parameterized_path = _run_keyword("get_parameterized_path_from_url", valid_url)
parameterized_url = base_url + parameterized_path
valid_url_parts = list(reversed(valid_url.split("/")))
parameterized_parts = reversed(parameterized_url.split("/"))
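
The rest of get_invalidated_url falls outside this hunk; assuming it continues along the lines of the reversed-parts comparison above, the general idea is to line up the concrete URL with its parameterized template and replace a segment that maps to a {parameter} with a value that cannot resolve. A hedged sketch of that idea:

from uuid import uuid4

def invalidate_url(valid_url: str, parameterized_url: str) -> str:
    # substitute a random UUID for every segment that corresponds to a
    # {parameter} in the template; such an id should not exist server-side
    valid_parts = valid_url.split("/")
    template_parts = parameterized_url.split("/")
    invalid_parts = [
        str(uuid4()) if t.startswith("{") and t.endswith("}") else v
        for v, t in zip(valid_parts, template_parts)
    ]
    return "/".join(invalid_parts)

# invalidate_url("https://api/users/42", "https://api/users/{user_id}")
# -> "https://api/users/<random uuid>", which is expected to return a 404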
diff --git a/src/OpenApiLibCore/resource_relations.py b/src/OpenApiLibCore/keyword_logic/resource_relations.py
similarity index 50%
rename from src/OpenApiLibCore/resource_relations.py
rename to src/OpenApiLibCore/keyword_logic/resource_relations.py
index 600ff03..3605f4f 100644
--- a/src/OpenApiLibCore/resource_relations.py
+++ b/src/OpenApiLibCore/keyword_logic/resource_relations.py
@@ -1,17 +1,41 @@
"""Module holding the functions related to relations between resources."""
+from typing import Literal, overload
+
from requests import Response
from robot.api import logger
from robot.libraries.BuiltIn import BuiltIn
-import OpenApiLibCore.path_functions as _path_functions
-from OpenApiLibCore.dto_base import IdReference
-from OpenApiLibCore.models import OpenApiObject
-from OpenApiLibCore.request_data import RequestData
+import OpenApiLibCore.keyword_logic.path_functions as _path_functions
+from OpenApiLibCore.models.oas_models import OpenApiObject
+from OpenApiLibCore.models.request_data import RequestData
+from OpenApiLibCore.models.resource_relations import IdReference
run_keyword = BuiltIn().run_keyword
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_request_data"], *args: str
+) -> RequestData: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_valid_url"], *args: str
+) -> str: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["authorized_request"], *args: object
+) -> Response: ... # pragma: no cover
+
+
+def _run_keyword(keyword_name: str, *args: object) -> object:
+ return run_keyword(keyword_name, *args)
+
+
def ensure_in_use(
url: str,
base_url: str,
@@ -34,13 +58,13 @@ def ensure_in_use(
break
if not resource_id:
raise ValueError(f"The provided url ({url}) does not contain an id.")
- request_data: RequestData = run_keyword(
- "get_request_data", resource_relation.post_path, "post"
- )
- json_data = request_data.dto.as_dict()
- json_data[resource_relation.property_name] = resource_id
- post_url: str = run_keyword("get_valid_url", resource_relation.post_path)
- response: Response = run_keyword(
+ request_data = _run_keyword("get_request_data", resource_relation.post_path, "post")
+ json_data = request_data.valid_data if request_data.valid_data else {}
+ # FIXME: currently only works for object / dict data
+ if isinstance(json_data, dict):
+ json_data[resource_relation.property_name] = resource_id
+ post_url = _run_keyword("get_valid_url", resource_relation.post_path)
+ response = _run_keyword(
"authorized_request",
post_url,
"post",
diff --git a/src/OpenApiLibCore/validation.py b/src/OpenApiLibCore/keyword_logic/validation.py
similarity index 83%
rename from src/OpenApiLibCore/validation.py
rename to src/OpenApiLibCore/keyword_logic/validation.py
index 4df33b5..a84238c 100644
--- a/src/OpenApiLibCore/validation.py
+++ b/src/OpenApiLibCore/keyword_logic/validation.py
@@ -3,7 +3,7 @@
import json as _json
from enum import Enum
from http import HTTPStatus
-from typing import Any, Mapping
+from typing import Any, Literal, Mapping, overload
from openapi_core.contrib.requests import (
RequestsOpenAPIRequest,
@@ -18,17 +18,52 @@
from robot.api.exceptions import Failure
from robot.libraries.BuiltIn import BuiltIn
-from OpenApiLibCore.models import (
+from OpenApiLibCore.annotations import JSON
+from OpenApiLibCore.models.oas_models import (
OpenApiObject,
ResponseObject,
UnionTypeSchema,
)
-from OpenApiLibCore.protocols import ResponseValidatorType
-from OpenApiLibCore.request_data import RequestData, RequestValues
+from OpenApiLibCore.models.request_data import RequestData, RequestValues
+from OpenApiLibCore.protocols import IResponseValidator
run_keyword = BuiltIn().run_keyword
+@overload
+def _run_keyword(
+ keyword_name: Literal["validate_response"], *args: object
+) -> None: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["authorized_request"], *args: object
+) -> Response: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_request_data"], *args: str
+) -> RequestData: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["validate_send_response"], *args: Response | JSON
+) -> None: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["assert_href_to_resource_is_valid"], *args: str | JSON
+) -> None: ... # pragma: no cover
+
+
+def _run_keyword(keyword_name: str, *args: object) -> object:
+ return run_keyword(keyword_name, *args)
+
+
class ValidationLevel(str, Enum):
"""The available levels for the response_validation parameter."""
@@ -44,7 +79,7 @@ def perform_validated_request(
request_values: RequestValues,
original_data: Mapping[str, Any],
) -> None:
- response = run_keyword(
+ response = _run_keyword(
"authorized_request",
request_values.url,
request_values.method,
@@ -78,13 +113,13 @@ def perform_validated_request(
f"Response status_code {response.status_code} was not {status_code}."
)
- run_keyword("validate_response", path, response, original_data)
+ _run_keyword("validate_response", path, response, original_data)
if request_values.method == "DELETE":
- request_data: RequestData = run_keyword("get_request_data", path, "GET")
+ request_data = _run_keyword("get_request_data", path, "GET")
get_params = request_data.params
get_headers = request_data.headers
- get_response = run_keyword(
+ get_response = _run_keyword(
"authorized_request", request_values.url, "GET", get_params, get_headers
)
if response.ok:
@@ -109,13 +144,13 @@ def perform_validated_request(
def validate_response(
path: str,
response: Response,
- response_validator: ResponseValidatorType,
+ response_validator: IResponseValidator,
server_validation_warning_logged: bool,
disable_server_validation: bool,
- invalid_property_default_response: int,
+ invalid_data_default_response: int,
response_validation: str,
openapi_spec: OpenApiObject,
- original_data: Mapping[str, Any],
+ original_data: JSON,
) -> None:
if response.status_code == int(HTTPStatus.NO_CONTENT):
assert not response.content
@@ -127,7 +162,7 @@ def validate_response(
response_validator=response_validator,
server_validation_warning_logged=server_validation_warning_logged,
disable_server_validation=disable_server_validation,
- invalid_property_default_response=invalid_property_default_response,
+ invalid_data_default_response=invalid_data_default_response,
response_validation=response_validation,
)
except OpenAPIError as exception:
@@ -188,24 +223,24 @@ def validate_response(
# ensure the href is valid if the response is an object that contains a href
if isinstance(json_response, dict):
if href := json_response.get("href"):
- run_keyword("assert_href_to_resource_is_valid", href, json_response)
+ _run_keyword("assert_href_to_resource_is_valid", href, json_response)
# every property that was successfully sent and that is in the response
# schema must have the value that was sent
if response.ok and response.request.method in ["POST", "PUT", "PATCH"]:
- run_keyword("validate_send_response", response, original_data)
+ _run_keyword("validate_send_response", response, original_data)
return None
def assert_href_to_resource_is_valid(
- href: str, origin: str, base_url: str, referenced_resource: dict[str, Any]
+ href: str, origin: str, base_url: str, referenced_resource: JSON
) -> None:
url = f"{origin}{href}"
path = url.replace(base_url, "")
- request_data: RequestData = run_keyword("get_request_data", path, "GET")
+ request_data = _run_keyword("get_request_data", path, "GET")
params = request_data.params
headers = request_data.headers
- get_response = run_keyword("authorized_request", url, "GET", params, headers)
+ get_response = _run_keyword("authorized_request", url, "GET", params, headers)
assert get_response.json() == referenced_resource, (
f"{get_response.json()} not equal to original {referenced_resource}"
)
@@ -299,6 +334,9 @@ def validate_dict_response(
if original_data:
for send_property_name, send_value in original_data.items():
if send_property_name not in send_json.keys():
+ if send_property_name not in response_data:
+ logger.debug(f"'{send_property_name}' not found in response data.")
+ continue
assert send_value == response_data[send_property_name], (
f"Received value for {send_property_name} '{response_data[send_property_name]}' does not "
f"match '{send_value}' in the pre-patch data"
@@ -310,7 +348,7 @@ def validate_dict_response(
def validate_response_using_validator(
response: Response,
- response_validator: ResponseValidatorType,
+ response_validator: IResponseValidator,
) -> None:
openapi_request = RequestsOpenAPIRequest(response.request)
openapi_response = RequestsOpenAPIResponse(response)
@@ -319,13 +357,23 @@ def validate_response_using_validator(
def _validate_response(
response: Response,
- response_validator: ResponseValidatorType,
+ response_validator: IResponseValidator,
server_validation_warning_logged: bool,
disable_server_validation: bool,
- invalid_property_default_response: int,
+ invalid_data_default_response: int,
response_validation: str,
) -> None:
try:
+ content_type = response.headers.get("Content-Type", "")
+ if content_type:
+ key_value = "Content-Type"
+ else:
+ content_type = response.headers.get("content-type", "")
+ if content_type:
+ key_value = "content-type"
+ if "json" in content_type.lower():
+ content_type, _, _ = content_type.partition(";")
+ response.headers.update({key_value: content_type}) # pyright: ignore[reportPossiblyUnboundVariable]
validate_response_using_validator(
response=response,
response_validator=response_validator,
@@ -354,7 +402,7 @@ def _validate_response(
if disable_server_validation:
return
- if response.status_code == invalid_property_default_response:
+ if response.status_code == invalid_data_default_response:
logger.debug(error_message)
return
if response_validation == ValidationLevel.STRICT:
@@ -372,7 +420,7 @@ def _get_response_object(
method = method.lower()
status = str(status_code)
path_item = openapi_spec.paths[path]
- path_operations = path_item.get_operations()
+ path_operations = path_item.operations
operation_data = path_operations.get(method)
if operation_data is None:
raise ValueError(f"method '{method}' not supported for {path}.")
diff --git a/src/OpenApiLibCore/models.py b/src/OpenApiLibCore/models.py
deleted file mode 100644
index e37d9c0..0000000
--- a/src/OpenApiLibCore/models.py
+++ /dev/null
@@ -1,759 +0,0 @@
-import base64
-from abc import abstractmethod
-from collections import ChainMap
-from functools import cached_property
-from random import choice, randint, uniform
-from sys import float_info
-from typing import (
- Generator,
- Generic,
- Literal,
- Mapping,
- TypeAlias,
- TypeVar,
-)
-
-import rstr
-from pydantic import BaseModel, Field, RootModel
-from robot.api import logger
-
-from OpenApiLibCore.annotations import JSON
-from OpenApiLibCore.localized_faker import FAKE, fake_string
-
-EPSILON = float_info.epsilon
-
-O = TypeVar("O")
-
-
-class SchemaBase(BaseModel, Generic[O], frozen=True):
- readOnly: bool = False
- writeOnly: bool = False
-
- @abstractmethod
- def get_valid_value(self) -> JSON: ...
-
- @abstractmethod
- def get_values_out_of_bounds(self, current_value: O) -> list[O]: ...
-
- @abstractmethod
- def get_invalid_value_from_const_or_enum(self) -> O: ...
-
-
-class NullSchema(SchemaBase[None], frozen=True):
- type: Literal["null"] = "null"
-
- def get_valid_value(self) -> None:
- return None
-
- def get_values_out_of_bounds(self, current_value: None) -> list[None]:
- raise ValueError
-
- def get_invalid_value_from_const_or_enum(self) -> None:
- raise ValueError
-
- @property
- def can_be_invalidated(self) -> bool:
- return False
-
- @property
- def annotation_string(self) -> str:
- return "None"
-
-
-class BooleanSchema(SchemaBase[bool], frozen=True):
- type: Literal["boolean"] = "boolean"
- const: bool | None = None
- nullable: bool = False
-
- def get_valid_value(self) -> bool:
- if self.const is not None:
- return self.const
- return choice([True, False])
-
- def get_values_out_of_bounds(self, current_value: bool) -> list[bool]:
- raise ValueError
-
- def get_invalid_value_from_const_or_enum(self) -> bool:
- if self.const is not None:
- return not self.const
- raise ValueError
-
- @property
- def can_be_invalidated(self) -> bool:
- return True
-
- @property
- def annotation_string(self) -> str:
- return "bool"
-
-
-class StringSchema(SchemaBase[str], frozen=True):
- type: Literal["string"] = "string"
- format: str = ""
- pattern: str = ""
- maxLength: int | None = None
- minLength: int | None = None
- const: str | None = None
- enum: list[str] | None = None
- nullable: bool = False
-
- def get_valid_value(self) -> bytes | str:
- """Generate a random string within the min/max length in the schema, if specified."""
- if self.const is not None:
- return self.const
- if self.enum is not None:
- return choice(self.enum)
- # if a pattern is provided, format and min/max length can be ignored
- if pattern := self.pattern:
- try:
- return rstr.xeger(pattern)
- except Exception as exception:
- logger.warn(
- f"An error occured trying to generate a string matching the "
- f"pattern defined in the specification. To ensure a valid value "
- f"is generated for this property, a PropertyValueConstraint can be "
- f"configured. See the Advanced Use section of the OpenApiTools "
- f"documentation for more details."
- f"\nThe exception was: {exception}\nThe pattern was: {pattern}"
- )
- minimum = self.minLength if self.minLength is not None else 0
- maximum = self.maxLength if self.maxLength is not None else 36
- maximum = max(minimum, maximum)
- format_ = self.format if self.format else "uuid"
- # byte is a special case due to the required encoding
- if format_ == "byte":
- data = FAKE.uuid()
- return base64.b64encode(data.encode("utf-8"))
- value = fake_string(string_format=format_)
- while len(value) < minimum:
- # use fake.name() to ensure the returned string uses the provided locale
- value = value + FAKE.name()
- if len(value) > maximum:
- value = value[:maximum]
- return value
-
- def get_values_out_of_bounds(self, current_value: str) -> list[str]:
- invalid_values: list[str] = []
- if self.minLength:
- invalid_values.append(current_value[0 : self.minLength - 1])
- # if there is a maximum length, send 1 character more
- if self.maxLength:
- invalid_string_value = current_value if current_value else "x"
- # add random characters from the current value to prevent adding new characters
- while len(invalid_string_value) <= self.maxLength:
- invalid_string_value += choice(invalid_string_value)
- invalid_values.append(invalid_string_value)
- if invalid_values:
- return invalid_values
- raise ValueError
-
- def get_invalid_value_from_const_or_enum(self) -> str:
- valid_values = []
- if self.const is not None:
- valid_values = [self.const]
- if self.enum is not None:
- valid_values = self.enum
-
- if not valid_values:
- raise ValueError
-
- invalid_value = ""
- for value in valid_values:
- invalid_value += value + value
-
- return invalid_value
-
- @property
- def can_be_invalidated(self) -> bool:
- if (
- self.maxLength is not None
- or self.minLength is not None
- or self.const is not None
- or self.enum is not None
- ):
- return True
- return False
-
- @property
- def annotation_string(self) -> str:
- return "str"
-
-
-class IntegerSchema(SchemaBase[int], frozen=True):
- type: Literal["integer"] = "integer"
- format: str = "int32"
- maximum: int | None = None
- exclusiveMaximum: int | bool | None = None
- minimum: int | None = None
- exclusiveMinimum: int | bool | None = None
- multipleOf: int | None = None # TODO: implement support
- const: int | None = None
- enum: list[int] | None = None
- nullable: bool = False
-
- @cached_property
- def _max_int(self) -> int:
- if self.format == "int64":
- return 9223372036854775807
- return 2147483647
-
- @cached_property
- def _min_int(self) -> int:
- if self.format == "int64":
- return -9223372036854775808
- return -2147483648
-
- @cached_property
- def _max_value(self) -> int:
- # OAS 3.0: exclusiveMinimum/Maximum is a bool in combination with minimum/maximum
- # OAS 3.1: exclusiveMinimum/Maximum is an integer
- if isinstance(self.exclusiveMaximum, int) and not isinstance(
- self.exclusiveMaximum, bool
- ):
- return self.exclusiveMaximum - 1
-
- if isinstance(self.maximum, int):
- if self.exclusiveMaximum is True:
- return self.maximum - 1
- return self.maximum
-
- return self._max_int
-
- @cached_property
- def _min_value(self) -> int:
- # OAS 3.0: exclusiveMinimum/Maximum is a bool in combination with minimum/maximum
- # OAS 3.1: exclusiveMinimum/Maximum is an integer
- if isinstance(self.exclusiveMinimum, int) and not isinstance(
- self.exclusiveMinimum, bool
- ):
- return self.exclusiveMinimum + 1
-
- if isinstance(self.minimum, int):
- if self.exclusiveMinimum is True:
- return self.minimum + 1
- return self.minimum
-
- return self._min_int
-
- def get_valid_value(self) -> int:
- """Generate a random int within the min/max range of the schema, if specified."""
- if self.const is not None:
- return self.const
- if self.enum is not None:
- return choice(self.enum)
-
- return randint(self._min_value, self._max_value)
-
- def get_values_out_of_bounds(self, current_value: int) -> list[int]: # pylint: disable=unused-argument
- invalid_values: list[int] = []
-
- if self._min_value > self._min_int:
- invalid_values.append(self._min_value - 1)
-
- if self._max_value < self._max_int:
- invalid_values.append(self._max_value + 1)
-
- if invalid_values:
- return invalid_values
-
- raise ValueError
-
- def get_invalid_value_from_const_or_enum(self) -> int:
- valid_values = []
- if self.const is not None:
- valid_values = [self.const]
- if self.enum is not None:
- valid_values = self.enum
-
- if not valid_values:
- raise ValueError
-
- invalid_value = 0
- for value in valid_values:
- invalid_value += abs(value) + abs(value)
-
- return invalid_value
-
- @property
- def can_be_invalidated(self) -> bool:
- return True
-
- @property
- def annotation_string(self) -> str:
- return "int"
-
-
-class NumberSchema(SchemaBase[float], frozen=True):
- type: Literal["number"] = "number"
- maximum: int | float | None = None
- exclusiveMaximum: int | float | bool | None = None
- minimum: int | float | None = None
- exclusiveMinimum: int | float | bool | None = None
- multipleOf: int | None = None # TODO: implement support
- const: int | float | None = None
- enum: list[int | float] | None = None
- nullable: bool = False
-
- @cached_property
- def _max_float(self) -> float:
- return 9223372036854775807.0
-
- @cached_property
- def _min_float(self) -> float:
- return -9223372036854775808.0
-
- @cached_property
- def _max_value(self) -> float:
- # OAS 3.0: exclusiveMinimum/Maximum is a bool in combination with minimum/maximum
- # OAS 3.1: exclusiveMinimum/Maximum is an integer or a float
- if isinstance(self.exclusiveMaximum, (int, float)) and not isinstance(
- self.exclusiveMaximum, bool
- ):
- return self.exclusiveMaximum - 0.0000000001
-
- if isinstance(self.maximum, (int, float)):
- if self.exclusiveMaximum is True:
- return self.maximum - 0.0000000001
- return self.maximum
-
- return self._max_float
-
- @cached_property
- def _min_value(self) -> float:
- # OAS 3.0: exclusiveMinimum/Maximum is a bool in combination with minimum/maximum
- # OAS 3.1: exclusiveMinimum/Maximum is an integer or a float
- if isinstance(self.exclusiveMinimum, (int, float)) and not isinstance(
- self.exclusiveMinimum, bool
- ):
- return self.exclusiveMinimum + 0.0000000001
-
- if isinstance(self.minimum, (int, float)):
- if self.exclusiveMinimum is True:
- return self.minimum + 0.0000000001
- return self.minimum
-
- return self._min_float
-
- def get_valid_value(self) -> float:
- """Generate a random float within the min/max range of the schema, if specified."""
- if self.const is not None:
- return self.const
- if self.enum is not None:
- return choice(self.enum)
-
- return uniform(self._min_value, self._max_value)
-
- def get_values_out_of_bounds(self, current_value: float) -> list[float]: # pylint: disable=unused-argument
- invalid_values: list[float] = []
-
- if self._min_value > self._min_float:
- invalid_values.append(self._min_value - 0.000000001)
-
- if self._max_value < self._max_float:
- invalid_values.append(self._max_value + 0.000000001)
-
- if invalid_values:
- return invalid_values
-
- raise ValueError
-
- def get_invalid_value_from_const_or_enum(self) -> float:
- valid_values = []
- if self.const is not None:
- valid_values = [self.const]
- if self.enum is not None:
- valid_values = self.enum
-
- if not valid_values:
- raise ValueError
-
- invalid_value = 0.0
- for value in valid_values:
- invalid_value += abs(value) + abs(value)
-
- return invalid_value
-
- @property
- def can_be_invalidated(self) -> bool:
- return True
-
- @property
- def annotation_string(self) -> str:
- return "float"
-
-
-class ArraySchema(SchemaBase[list[JSON]], frozen=True):
- type: Literal["array"] = "array"
- items: "SchemaObjectTypes"
- maxItems: int | None = None
- minItems: int | None = None
- uniqueItems: bool = False
- const: list[JSON] | None = None
- enum: list[list[JSON]] | None = None
- nullable: bool = False
-
- def get_valid_value(self) -> list[JSON]:
- if self.const is not None:
- return self.const
-
- if self.enum is not None:
- return choice(self.enum)
-
- minimum = self.minItems if self.minItems is not None else 0
- maximum = self.maxItems if self.maxItems is not None else 1
- maximum = max(minimum, maximum)
-
- value: list[JSON] = []
- for _ in range(maximum):
- item_value = self.items.get_valid_value()
- value.append(item_value)
- return value
-
- def get_values_out_of_bounds(self, current_value: list[JSON]) -> list[list[JSON]]:
- invalid_values: list[list[JSON]] = []
-
- if self.minItems:
- invalid_value = current_value[0 : self.minItems - 1]
- invalid_values.append(invalid_value)
-
- if self.maxItems is not None:
- invalid_value = []
- if not current_value:
- current_value = self.get_valid_value()
-
- if not current_value:
- current_value = [self.items.get_valid_value()]
-
- while len(invalid_value) <= self.maxItems:
- invalid_value.append(choice(current_value))
- invalid_values.append(invalid_value)
-
- if invalid_values:
- return invalid_values
-
- raise ValueError
-
- def get_invalid_value_from_const_or_enum(self) -> list[JSON]:
- valid_values = []
- if self.const is not None:
- valid_values = [self.const]
- if self.enum is not None:
- valid_values = self.enum
-
- if not valid_values:
- raise ValueError
-
- invalid_value = []
- for value in valid_values:
- invalid_value.extend(value)
- invalid_value.extend(value)
-
- return invalid_value
-
- @property
- def can_be_invalidated(self) -> bool:
- if (
- self.maxItems is not None
- or self.minItems is not None
- or self.uniqueItems
- or self.const is not None
- or self.enum is not None
- ):
- return True
- if isinstance(self.items, (BooleanSchema, IntegerSchema, NumberSchema)):
- return True
- return False
-
- @property
- def annotation_string(self) -> str:
- return f"list[{self.items.annotation_string}]"
-
-
-# NOTE: Workaround for cyclic PropertiesMapping / SchemaObjectTypes annotations
-def _get_properties_mapping_default() -> "PropertiesMapping":
- return _get_empty_properties_mapping()
-
-
-class ObjectSchema(SchemaBase[dict[str, JSON]], frozen=True):
- type: Literal["object"] = "object"
- properties: "PropertiesMapping" = Field(
- default_factory=_get_properties_mapping_default
- )
- additionalProperties: "bool | SchemaObjectTypes" = True
- required: list[str] = []
- maxProperties: int | None = None
- minProperties: int | None = None
- const: dict[str, JSON] | None = None
- enum: list[dict[str, JSON]] | None = None
- nullable: bool = False
-
- def get_valid_value(self) -> dict[str, JSON]:
- raise NotImplementedError
-
- def get_values_out_of_bounds(
- self, current_value: Mapping[str, JSON]
- ) -> list[dict[str, JSON]]:
- raise ValueError
-
- def get_invalid_value_from_const_or_enum(self) -> dict[str, JSON]:
- valid_values = []
- if self.const is not None:
- valid_values = [self.const]
- if self.enum is not None:
- valid_values = self.enum
-
- if not valid_values:
- raise ValueError
-
- # This invalidation will not work for a const and may not work for
- # an enum. In that case a different invalidation approach will be used.
- invalid_value = {**valid_values[0]}
- for value in valid_values:
- for key in invalid_value.keys():
- invalid_value[key] = value.get(key)
- if invalid_value not in valid_values:
- return invalid_value
-
- raise ValueError
-
- @property
- def can_be_invalidated(self) -> bool:
- if (
- self.required
- or self.maxProperties is not None
- or self.minProperties is not None
- or self.const is not None
- or self.enum is not None
- ):
- return True
- return False
-
- @property
- def annotation_string(self) -> str:
- return "dict[str, Any]"
-
-
-ResolvedSchemaObjectTypes: TypeAlias = (
- NullSchema
- | BooleanSchema
- | StringSchema
- | IntegerSchema
- | NumberSchema
- | ArraySchema
- | ObjectSchema
-)
-
-
-class UnionTypeSchema(SchemaBase[JSON], frozen=True):
- allOf: list["SchemaObjectTypes"] = []
- anyOf: list["SchemaObjectTypes"] = []
- oneOf: list["SchemaObjectTypes"] = []
-
- def get_valid_value(self) -> JSON:
- chosen_schema = choice(self.resolved_schemas)
- return chosen_schema.get_valid_value()
-
- def get_values_out_of_bounds(self, current_value: JSON) -> list[JSON]:
- raise ValueError
-
- @property
- def resolved_schemas(self) -> list[ResolvedSchemaObjectTypes]:
- return list(self._get_resolved_schemas())
-
- def _get_resolved_schemas(self) -> Generator[ResolvedSchemaObjectTypes, None, None]:
- if self.allOf:
- properties_list: list[PropertiesMapping] = []
- additional_properties_list = []
- required_list = []
- max_properties_list = []
- min_properties_list = []
- nullable_list = []
-
- for schema in self.allOf:
- if not isinstance(schema, ObjectSchema):
- raise NotImplementedError("allOf only supported for ObjectSchemas")
-
- if schema.const is not None:
- raise ValueError("allOf and models with a const are not compatible")
-
- if schema.enum:
- raise ValueError("allOf and models with enums are not compatible")
-
- if schema.properties:
- properties_list.append(schema.properties)
- additional_properties_list.append(schema.additionalProperties)
- required_list += schema.required
- max_properties_list.append(schema.maxProperties)
- min_properties_list.append(schema.minProperties)
- nullable_list.append(schema.nullable)
-
- properties_dicts = [mapping.root for mapping in properties_list]
- properties = dict(ChainMap(*properties_dicts))
-
- if True in additional_properties_list:
- additional_properties_value: bool | SchemaObjectTypes = True
- else:
- additional_properties_types = []
- for additional_properties_item in additional_properties_list:
- if isinstance(
- additional_properties_item, ResolvedSchemaObjectTypes
- ):
- additional_properties_types.append(additional_properties_item)
- if not additional_properties_types:
- additional_properties_value = False
- else:
- additional_properties_value = UnionTypeSchema(
- anyOf=additional_properties_types,
- )
-
- max_properties = [max for max in max_properties_list if max is not None]
- min_properties = [min for min in min_properties_list if min is not None]
- max_propeties_value = max(max_properties) if max_properties else None
- min_propeties_value = min(min_properties) if min_properties else None
-
- merged_schema = ObjectSchema(
- type="object",
- properties=properties,
- additionalProperties=additional_properties_value,
- required=required_list,
- maxProperties=max_propeties_value,
- minProperties=min_propeties_value,
- nullable=all(nullable_list),
- )
- yield merged_schema
- else:
- for schema in self.anyOf + self.oneOf:
- if isinstance(schema, ResolvedSchemaObjectTypes):
- yield schema
- else:
- yield from schema.resolved_schemas
-
- def get_invalid_value_from_const_or_enum(self) -> JSON:
- raise ValueError
-
- @property
- def annotation_string(self) -> str:
- unique_annotations = {s.annotation_string for s in self.resolved_schemas}
- return " | ".join(unique_annotations)
-
-
-SchemaObjectTypes: TypeAlias = ResolvedSchemaObjectTypes | UnionTypeSchema
-
-
-class PropertiesMapping(RootModel[dict[str, "SchemaObjectTypes"]], frozen=True): ...
-
-
-def _get_empty_properties_mapping() -> PropertiesMapping:
- return PropertiesMapping(root={})
-
-
-class ParameterObject(BaseModel):
- name: str
- in_: str = Field(..., alias="in")
- required: bool = False
- description: str = ""
- schema_: SchemaObjectTypes | None = Field(None, alias="schema")
-
-
-class MediaTypeObject(BaseModel):
- schema_: SchemaObjectTypes | None = Field(None, alias="schema")
-
-
-class RequestBodyObject(BaseModel):
- content: dict[str, MediaTypeObject]
- required: bool = False
- description: str = ""
-
- @cached_property
- def schema_(self) -> SchemaObjectTypes | None:
- if not self.mime_type:
- return None
-
- if len(self._json_schemas) > 1:
- logger.info(
- f"Multiple JSON media types defined for requestBody, "
- f"using the first candidate from {self.content}"
- )
- return self._json_schemas[self.mime_type]
-
- @cached_property
- def mime_type(self) -> str | None:
- if not self._json_schemas:
- return None
-
- return next(iter(self._json_schemas))
-
- @cached_property
- def _json_schemas(self) -> dict[str, SchemaObjectTypes]:
- json_schemas = {
- mime_type: media_type.schema_
- for mime_type, media_type in self.content.items()
- if "json" in mime_type and media_type.schema_ is not None
- }
- return json_schemas
-
-
-class HeaderObject(BaseModel): ...
-
-
-class LinkObject(BaseModel): ...
-
-
-class ResponseObject(BaseModel):
- description: str
- content: dict[str, MediaTypeObject] = {}
- headers: dict[str, HeaderObject] = {}
- links: dict[str, LinkObject] = {}
-
-
-class OperationObject(BaseModel):
- operationId: str | None = None
- summary: str = ""
- description: str = ""
- tags: list[str] = []
- parameters: list[ParameterObject] = []
- requestBody: RequestBodyObject | None = None
- responses: dict[str, ResponseObject] = {}
-
- def update_parameters(self, parameters: list[ParameterObject]) -> None:
- self.parameters.extend(parameters)
-
-
-class PathItemObject(BaseModel):
- get: OperationObject | None = None
- post: OperationObject | None = None
- patch: OperationObject | None = None
- put: OperationObject | None = None
- delete: OperationObject | None = None
- summary: str = ""
- description: str = ""
- parameters: list[ParameterObject] = []
-
- def get_operations(self) -> dict[str, OperationObject]:
- return {
- k: v for k, v in self.__dict__.items() if isinstance(v, OperationObject)
- }
-
- def update_operation_parameters(self) -> None:
- if not self.parameters:
- return
-
- operations_to_update = self.get_operations()
- for operation_object in operations_to_update.values():
- operation_object.update_parameters(self.parameters)
-
-
-class InfoObject(BaseModel):
- title: str
- version: str
- summary: str = ""
- description: str = ""
-
-
-class OpenApiObject(BaseModel):
- info: InfoObject
- paths: dict[str, PathItemObject]
-
- def model_post_init(self, context: object) -> None:
- for path_object in self.paths.values():
- path_object.update_operation_parameters()
diff --git a/src/OpenApiLibCore/models/__init__.py b/src/OpenApiLibCore/models/__init__.py
new file mode 100644
index 0000000..dd35889
--- /dev/null
+++ b/src/OpenApiLibCore/models/__init__.py
@@ -0,0 +1,17 @@
+class Ignore:
+ """Helper class to flag properties to be ignored in data generation."""
+
+ def __str__(self) -> str:
+ return "IGNORE" # pragma: no cover
+
+
+class UnSet:
+ """Helper class to flag arguments that have not been set in a keyword call."""
+
+ def __str__(self) -> str:
+ return "UNSET" # pragma: no cover
+
+
+IGNORE = Ignore()
+
+UNSET = UnSet()
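
The new models package root defines IGNORE and UNSET as module-level sentinel instances; UNSET in particular lets keyword code distinguish "argument not provided" from an explicit falsy value such as 0 or None. An illustrative sketch of that pattern (the build_params helper is invented for the example):

class UnSet:
    def __str__(self) -> str:
        return "UNSET"

UNSET = UnSet()

def build_params(limit: int | UnSet = UNSET) -> dict[str, int]:
    # only include the parameter when the caller actually set it
    return {} if isinstance(limit, UnSet) else {"limit": limit}

assert build_params() == {}
assert build_params(limit=0) == {"limit": 0}  # 0 is kept, unlike a falsy check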
diff --git a/src/OpenApiLibCore/models/oas_models.py b/src/OpenApiLibCore/models/oas_models.py
new file mode 100644
index 0000000..1014594
--- /dev/null
+++ b/src/OpenApiLibCore/models/oas_models.py
@@ -0,0 +1,1443 @@
+from __future__ import annotations
+
+import builtins
+from abc import abstractmethod
+from collections import ChainMap
+from copy import deepcopy
+from functools import cached_property
+from random import choice, randint, sample, shuffle, uniform
+from sys import float_info
+from typing import (
+ Annotated,
+ Any,
+ Callable,
+ Generator,
+ Generic,
+ Iterable,
+ Literal,
+ Mapping,
+ TypeAlias,
+ TypeGuard,
+ TypeVar,
+ Union,
+ cast,
+)
+from uuid import uuid4
+
+import rstr
+from pydantic import BaseModel, Field, RootModel
+from robot.api import logger
+from robot.libraries.BuiltIn import BuiltIn
+
+from OpenApiLibCore.annotations import JSON
+from OpenApiLibCore.data_constraints.dto_base import Dto
+from OpenApiLibCore.data_generation.localized_faker import FAKE, fake_string
+from OpenApiLibCore.data_generation.value_utils import (
+ json_type_name_of_python_type,
+ python_type_by_json_type_name,
+)
+from OpenApiLibCore.models import IGNORE, Ignore
+from OpenApiLibCore.models.resource_relations import (
+ NOT_SET,
+ IdDependency,
+ PropertyValueConstraint,
+)
+from OpenApiLibCore.protocols import ConstraintMappingType
+from OpenApiLibCore.utils.id_mapping import dummy_transformer
+from OpenApiLibCore.utils.parameter_utils import get_safe_name_for_oas_name
+
+run_keyword = BuiltIn().run_keyword
+
+EPSILON = float_info.epsilon
+
+SENTINEL = object()
+
+O = TypeVar("O")
+AI = TypeVar("AI", bound=JSON)
+
+
+def is_object_schema(schema: SchemaObjectTypes) -> TypeGuard[ObjectSchema]:
+ return isinstance(schema, ObjectSchema)
+
+
+class SchemaBase(BaseModel, Generic[O], frozen=True):
+ readOnly: bool = False
+ writeOnly: bool = False
+ constraint_mapping: ConstraintMappingType = Dto # type: ignore[assignment]
+
+ @abstractmethod
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[O, SchemaObjectTypes]: ...
+
+ @abstractmethod
+ def get_values_out_of_bounds(self, current_value: O) -> list[O]: ...
+
+ @abstractmethod
+ def get_invalid_value_from_const_or_enum(self) -> O: ...
+
+ @abstractmethod
+ def get_invalid_value_from_constraint(self, values_from_constraint: list[O]) -> O:
+ """
+ Return a value of the same type as the values in values_from_constraint that
+ is not in values_from_constraint, if possible. Otherwise raise ValueError.
+ """
+
+ def get_invalid_value(
+ self,
+ valid_value: O,
+ values_from_constraint: Iterable[O] = tuple(),
+ ) -> O | str | list[JSON] | Ignore:
+ """Return a random value that violates the provided value_schema."""
+ invalid_values: list[O | str | list[JSON] | Ignore] = []
+ value_type = getattr(self, "type")
+
+ if not isinstance(valid_value, python_type_by_json_type_name(value_type)):
+ valid_value = self.get_valid_value()[0]
+
+ if values_from_constraint:
+ # if IGNORE is in values_from_constraint, the parameter needs to be
+ # ignored for an OK response, so leaving the value at its original value
+ # should result in the specified error response
+ if any(map(lambda x: isinstance(x, Ignore), values_from_constraint)):
+ return IGNORE
+ try:
+ return self.get_invalid_value_from_constraint(
+ values_from_constraint=list(values_from_constraint),
+ )
+ except ValueError:
+ pass
+
+ # For schemas with a const or enum, add invalidated values from those
+ try:
+ invalid_value = self.get_invalid_value_from_const_or_enum()
+ invalid_values.append(invalid_value)
+ except ValueError:
+ pass
+
+ # Violate min / max values or length if possible
+ try:
+ values_out_of_bounds = self.get_values_out_of_bounds(
+ current_value=valid_value
+ )
+ invalid_values += values_out_of_bounds
+ except ValueError:
+ pass
+
+ # No value constraints or min / max ranges to violate, so change the data type
+ if value_type == "string":
+ # Since int / float / bool can always be cast to string, change
+ # the string to a nested object.
+ # An array gets exploded in query strings; "null" is then often invalid
+ invalid_values.append([{"invalid": [None, False]}, "null", None, True])
+ else:
+ invalid_values.append(FAKE.uuid())
+
+ return choice(invalid_values)
+
+ def attach_constraint_mapping(
+ self, constraint_mapping: ConstraintMappingType
+ ) -> None:
+ # NOTE: https://github.com/pydantic/pydantic/issues/11495
+ self.__dict__["constraint_mapping"] = constraint_mapping
+
+
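
attach_constraint_mapping (a few lines up) writes through self.__dict__ because the schema models are frozen pydantic models, where normal attribute assignment raises; that is the workaround the NOTE link refers to. A minimal sketch of the same trick, with an invented model:

from pydantic import BaseModel

class FrozenThing(BaseModel, frozen=True):
    label: str = "default"

thing = FrozenThing()
try:
    thing.label = "changed"  # frozen model: assignment raises a ValidationError
except Exception:
    pass
thing.__dict__["label"] = "changed"  # bypasses validation, as in the workaround above
assert thing.label == "changed"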
+class NullSchema(SchemaBase[None], frozen=True):
+ type: Literal["null"] = "null"
+ nullable: bool = False
+
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[None, NullSchema]:
+ return None, self
+
+ def get_values_out_of_bounds(self, current_value: None) -> list[None]:
+ raise ValueError
+
+ def get_invalid_value_from_const_or_enum(self) -> None:
+ raise ValueError
+
+ def get_invalid_value_from_constraint(
+ self, values_from_constraint: list[None]
+ ) -> None:
+ raise ValueError
+
+ @property
+ def can_be_invalidated(self) -> bool:
+ return False
+
+ @property
+ def annotation_string(self) -> str:
+ return "None"
+
+ @property
+ def python_type(self) -> builtins.type:
+ return type(None)
+
+
+class BooleanSchema(SchemaBase[bool], frozen=True):
+ type: Literal["boolean"] = "boolean"
+ const: bool | None = None
+ nullable: bool = False
+
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[bool, BooleanSchema]:
+ if self.const is not None:
+ return self.const, self
+ return choice([True, False]), self
+
+ def get_values_out_of_bounds(self, current_value: bool) -> list[bool]:
+ raise ValueError
+
+ def get_invalid_value_from_const_or_enum(self) -> bool:
+ if self.const is not None:
+ return not self.const
+ raise ValueError
+
+ def get_invalid_value_from_constraint(
+ self, values_from_constraint: list[bool]
+ ) -> bool:
+ if len(values_from_constraint) == 1:
+ return not values_from_constraint[0]
+ raise ValueError
+
+ @property
+ def can_be_invalidated(self) -> bool:
+ return True
+
+ @property
+ def annotation_string(self) -> str:
+ return "bool"
+
+ @property
+ def python_type(self) -> builtins.type:
+ return bool
+
+
+class StringSchema(SchemaBase[str], frozen=True):
+ type: Literal["string"] = "string"
+ format: str = ""
+ pattern: str = ""
+ maxLength: int | None = None
+ minLength: int | None = None
+ const: str | None = None
+ enum: list[str] | None = None
+ nullable: bool = False
+
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[str, StringSchema]:
+ """Generate a random string within the min/max length in the schema, if specified."""
+ if self.const is not None:
+ return self.const, self
+ if self.enum is not None:
+ return choice(self.enum), self
+ # if a pattern is provided, format and min/max length can be ignored
+ if pattern := self.pattern:
+ try:
+ return rstr.xeger(pattern), self
+ except Exception as exception:
+ logger.warn(
+ f"An error occured trying to generate a string matching the "
+ f"pattern defined in the specification. To ensure a valid value "
+ f"is generated for this property, a PropertyValueConstraint can be "
+ f"configured. See the Advanced Use section of the OpenApiTools "
+ f"documentation for more details."
+ f"\nThe exception was: {exception}\nThe pattern was: {pattern}"
+ )
+ minimum = self.minLength if self.minLength is not None else 0
+ maximum = self.maxLength if self.maxLength is not None else 36
+ maximum = max(minimum, maximum)
+
+ format_ = self.format if self.format else "uuid"
+ value = fake_string(string_format=format_)
+ while len(value) < minimum:
+ value = value + fake_string(string_format=format_)
+ if len(value) > maximum:
+ value = value[:maximum]
+ return value, self
+
+ def get_values_out_of_bounds(self, current_value: str) -> list[str]:
+ invalid_values: list[str] = []
+ if self.minLength:
+ invalid_values.append(current_value[0 : self.minLength - 1])
+ # if there is a maximum length, send 1 character more
+ if self.maxLength:
+ invalid_value = current_value if current_value else "x"
+ # repeat characters from the current value so no new characters are introduced
+ while len(invalid_value) <= self.maxLength:
+ invalid_value += choice(invalid_value)
+ invalid_values.append(invalid_value)
+ if invalid_values:
+ return invalid_values
+ raise ValueError
+
+ def get_invalid_value_from_const_or_enum(self) -> str:
+ valid_values = []
+ if self.const is not None:
+ valid_values = [self.const]
+ if self.enum is not None:
+ valid_values = self.enum
+
+ if not valid_values:
+ raise ValueError
+
+ invalid_value = ""
+ for value in valid_values:
+ invalid_value += value + value
+
+ return invalid_value
+
+ def get_invalid_value_from_constraint(
+ self, values_from_constraint: list[str]
+ ) -> str:
+ invalid_values = 2 * values_from_constraint
+ invalid_value = invalid_values.pop()
+ for value in invalid_values:
+ invalid_value = invalid_value + value
+
+ if not invalid_value:
+ raise ValueError("Value invalidation yielded an empty string.")
+ return invalid_value
+
+ @property
+ def can_be_invalidated(self) -> bool:
+ if (
+ self.maxLength is not None
+ or self.minLength is not None
+ or self.const is not None
+ or self.enum is not None
+ ):
+ return True
+ return False
+
+ @property
+ def annotation_string(self) -> str:
+ return "str"
+
+ @property
+ def python_type(self) -> builtins.type:
+ return str
+
+
+class IntegerSchema(SchemaBase[int], frozen=True):
+ type: Literal["integer"] = "integer"
+ format: str = "int32"
+ maximum: int | None = None
+ exclusiveMaximum: int | bool | None = None
+ minimum: int | None = None
+ exclusiveMinimum: int | bool | None = None
+ multipleOf: int | None = None # TODO: implement support
+ const: int | None = None
+ enum: list[int] | None = None
+ nullable: bool = False
+
+ @cached_property
+ def _max_int(self) -> int:
+ if self.format == "int64":
+ return 9223372036854775807
+ return 2147483647
+
+ @cached_property
+ def _min_int(self) -> int:
+ if self.format == "int64":
+ return -9223372036854775808
+ return -2147483648
+
+ @cached_property
+ def _max_value(self) -> int:
+ # OAS 3.0: exclusiveMinimum/Maximum is a bool in combination with minimum/maximum
+ # OAS 3.1: exclusiveMinimum/Maximum is an integer
+ if isinstance(self.exclusiveMaximum, int) and not isinstance(
+ self.exclusiveMaximum, bool
+ ):
+ return self.exclusiveMaximum - 1
+
+ if isinstance(self.maximum, int):
+ if self.exclusiveMaximum is True:
+ return self.maximum - 1
+ return self.maximum
+
+ return self._max_int
+
+ @cached_property
+ def _min_value(self) -> int:
+ # OAS 3.0: exclusiveMinimum/Maximum is a bool in combination with minimum/maximum
+ # OAS 3.1: exclusiveMinimum/Maximum is an integer
+ if isinstance(self.exclusiveMinimum, int) and not isinstance(
+ self.exclusiveMinimum, bool
+ ):
+ return self.exclusiveMinimum + 1
+
+ if isinstance(self.minimum, int):
+ if self.exclusiveMinimum is True:
+ return self.minimum + 1
+ return self.minimum
+
+ return self._min_int
+
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[int, IntegerSchema]:
+ """Generate a random int within the min/max range of the schema, if specified."""
+ if self.const is not None:
+ return self.const, self
+ if self.enum is not None:
+ return choice(self.enum), self
+
+ return randint(self._min_value, self._max_value), self
+
+ def get_values_out_of_bounds(self, current_value: int) -> list[int]: # pylint: disable=unused-argument
+ invalid_values: list[int] = []
+
+ if self._min_value > self._min_int:
+ invalid_values.append(self._min_value - 1)
+
+ if self._max_value < self._max_int:
+ invalid_values.append(self._max_value + 1)
+
+ if invalid_values:
+ return invalid_values
+
+ raise ValueError
+
+ def get_invalid_value_from_const_or_enum(self) -> int:
+ valid_values = []
+ if self.const is not None:
+ valid_values = [self.const]
+ if self.enum is not None:
+ valid_values = self.enum
+
+ if not valid_values:
+ raise ValueError
+
+ invalid_value = 0
+ for value in valid_values:
+ invalid_value += abs(value) + abs(value)
+
+ return invalid_value
+
+ def get_invalid_value_from_constraint(
+ self, values_from_constraint: list[int]
+ ) -> int:
+ invalid_values = 2 * values_from_constraint
+ invalid_value = invalid_values.pop()
+ for value in invalid_values:
+ invalid_value = abs(invalid_value) + abs(value)
+ if not invalid_value:
+ invalid_value += 1
+ return invalid_value
+
+ @property
+ def can_be_invalidated(self) -> bool:
+ return True
+
+ @property
+ def annotation_string(self) -> str:
+ return "int"
+
+ @property
+ def python_type(self) -> builtins.type:
+ return int
+
+
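
IntegerSchema.get_values_out_of_bounds above invalidates a value by stepping one unit outside the tightest declared bound. A stand-alone sketch of that idea (the helper name is illustrative):

def out_of_bounds(minimum: int | None, maximum: int | None) -> list[int]:
    # one step below the minimum and one step above the maximum are the
    # smallest violations that a schema validator must still reject
    candidates: list[int] = []
    if minimum is not None:
        candidates.append(minimum - 1)
    if maximum is not None:
        candidates.append(maximum + 1)
    if not candidates:
        raise ValueError("no bounds to violate")
    return candidates

assert out_of_bounds(minimum=0, maximum=10) == [-1, 11]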
+class NumberSchema(SchemaBase[float], frozen=True):
+ type: Literal["number"] = "number"
+ maximum: int | float | None = None
+ exclusiveMaximum: int | float | bool | None = None
+ minimum: int | float | None = None
+ exclusiveMinimum: int | float | bool | None = None
+ multipleOf: int | None = None # TODO: implement support
+ const: int | float | None = None
+ enum: list[int | float] | None = None
+ nullable: bool = False
+
+ @cached_property
+ def _max_float(self) -> float:
+ return 9223372036854775807.0
+
+ @cached_property
+ def _min_float(self) -> float:
+ return -9223372036854775808.0
+
+ @cached_property
+ def _max_value(self) -> float:
+ # OAS 3.0: exclusiveMinimum/Maximum is a bool in combination with minimum/maximum
+ # OAS 3.1: exclusiveMinimum/Maximum is an integer or a float
+ if isinstance(self.exclusiveMaximum, (int, float)) and not isinstance(
+ self.exclusiveMaximum, bool
+ ):
+ return self.exclusiveMaximum - 0.0000000001
+
+ if isinstance(self.maximum, (int, float)):
+ if self.exclusiveMaximum is True:
+ return self.maximum - 0.0000000001
+ return self.maximum
+
+ return self._max_float
+
+ @cached_property
+ def _min_value(self) -> float:
+ # OAS 3.0: exclusiveMinimum/Maximum is a bool in combination with minimum/maximum
+ # OAS 3.1: exclusiveMinimum/Maximum is an integer or a float
+ if isinstance(self.exclusiveMinimum, (int, float)) and not isinstance(
+ self.exclusiveMinimum, bool
+ ):
+ return self.exclusiveMinimum + 0.0000000001
+
+ if isinstance(self.minimum, (int, float)):
+ if self.exclusiveMinimum is True:
+ return self.minimum + 0.0000000001
+ return self.minimum
+
+ return self._min_float
+
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[float, NumberSchema]:
+ """Generate a random float within the min/max range of the schema, if specified."""
+ if self.const is not None:
+ return self.const, self
+ if self.enum is not None:
+ return choice(self.enum), self
+
+ return uniform(self._min_value, self._max_value), self
+
+ def get_values_out_of_bounds(self, current_value: float) -> list[float]: # pylint: disable=unused-argument
+ invalid_values: list[float] = []
+
+ if self._min_value > self._min_float:
+ invalid_values.append(self._min_value - 0.000000001)
+
+ if self._max_value < self._max_float:
+ invalid_values.append(self._max_value + 0.000000001)
+
+ if invalid_values:
+ return invalid_values
+
+ raise ValueError
+
+ def get_invalid_value_from_const_or_enum(self) -> float:
+ valid_values = []
+ if self.const is not None:
+ valid_values = [self.const]
+ if self.enum is not None:
+ valid_values = self.enum
+
+ if not valid_values:
+ raise ValueError
+
+ invalid_value = 0.0
+ for value in valid_values:
+ invalid_value += abs(value) + abs(value)
+
+ return invalid_value
+
+ def get_invalid_value_from_constraint(
+ self, values_from_constraint: list[float]
+ ) -> float:
+ invalid_values = 2 * values_from_constraint
+ invalid_value = invalid_values.pop()
+ for value in invalid_values:
+ invalid_value = abs(invalid_value) + abs(value)
+ if not invalid_value:
+ invalid_value += 1
+ return invalid_value
+
+ @property
+ def can_be_invalidated(self) -> bool:
+ return True
+
+ @property
+ def annotation_string(self) -> str:
+ return "float"
+
+ @property
+ def python_type(self) -> builtins.type:
+ return float
+
+
+class ArraySchema(SchemaBase[list[AI]], frozen=True):
+ type: Literal["array"] = "array"
+ items: SchemaObjectTypes
+ maxItems: int | None = None
+ minItems: int | None = None
+ uniqueItems: bool = False
+ const: list[AI] | None = None
+ enum: list[list[AI]] | None = None
+ nullable: bool = False
+
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[list[AI], ArraySchema[AI]]:
+ if self.const is not None:
+ return self.const, self
+
+ if self.enum is not None:
+ return choice(self.enum), self
+
+ minimum = self.minItems if self.minItems is not None else 0
+ maximum = self.maxItems if self.maxItems is not None else 1
+ maximum = max(minimum, maximum)
+
+ value: list[AI] = []
+ number_of_items_to_generate = randint(minimum, maximum)
+ for _ in range(number_of_items_to_generate):
+ item_value = cast("AI", self.items.get_valid_value()[0])
+ value.append(item_value)
+ return value, self
+
+ def get_values_out_of_bounds(self, current_value: list[AI]) -> list[list[AI]]:
+ invalid_values: list[list[AI]] = []
+
+ if self.minItems:
+ invalid_value = current_value[0 : self.minItems - 1]
+ invalid_values.append(invalid_value)
+
+ if self.maxItems is not None:
+ invalid_value = []
+ if not current_value:
+ current_value = self.get_valid_value()[0]
+
+ if not current_value:
+ current_value = [self.items.get_valid_value()[0]] # type: ignore[list-item]
+
+ while len(invalid_value) <= self.maxItems:
+ invalid_value.append(choice(current_value))
+ invalid_values.append(invalid_value)
+
+ if invalid_values:
+ return invalid_values
+
+ raise ValueError
+
+ def get_invalid_value_from_const_or_enum(self) -> list[AI]:
+ valid_values = []
+ if self.const is not None:
+ valid_values = [self.const]
+ if self.enum is not None:
+ valid_values = self.enum
+
+ if not valid_values:
+ raise ValueError
+
+ invalid_value = []
+ for value in valid_values:
+ invalid_value.extend(value)
+ invalid_value.extend(value)
+
+ return invalid_value
+
+ def get_invalid_value_from_constraint(
+ self, values_from_constraint: list[list[AI]]
+ ) -> list[AI]:
+ values_from_constraint = deepcopy(values_from_constraint)
+
+ valid_array = values_from_constraint.pop()
+ invalid_array: list[AI] = []
+ for value in valid_array:
+ invalid_value = self.items.get_invalid_value_from_constraint(
+ values_from_constraint=[value], # type: ignore[list-item]
+ )
+ invalid_array.append(invalid_value) # type: ignore[arg-type]
+ return invalid_array
+
+ def get_invalid_data(
+ self,
+ valid_data: list[AI],
+ status_code: int,
+ invalid_property_default_code: int,
+ ) -> list[AI]:
+ """Return a data set with one of the properties set to an invalid value or type."""
+ invalid_values: list[list[AI]] = []
+
+ relations = self.constraint_mapping.get_body_relations_for_error_code(
+ error_code=status_code
+ )
+ # TODO: handle relations applicable to arrays / lists
+
+ if status_code == invalid_property_default_code:
+ try:
+ values_out_of_bounds = self.get_values_out_of_bounds(
+ current_value=valid_data
+ )
+ invalid_values.extend(values_out_of_bounds)
+ except ValueError:
+ pass
+ try:
+ invalid_const_or_enum = self.get_invalid_value_from_const_or_enum()
+ invalid_values.append(invalid_const_or_enum)
+ except ValueError:
+ pass
+ if is_object_schema(self.items):
+ data_to_invalidate = deepcopy(valid_data)
+ valid_item = (
+ data_to_invalidate.pop()
+ if valid_data
+ else self.items.get_valid_value()[0]
+ )
+ invalid_item = self.items.get_invalid_data(
+ valid_data=valid_item, # type: ignore[arg-type]
+ status_code=status_code,
+ invalid_property_default_code=invalid_property_default_code,
+ )
+ invalid_data = [*data_to_invalidate, invalid_item]
+ invalid_values.append(invalid_data)
+
+ if not invalid_values:
+ raise ValueError(
+ f"No constraint can be broken to cause status_code {status_code}"
+ )
+ return choice(invalid_values)
+
+ @property
+ def can_be_invalidated(self) -> bool:
+ if (
+ self.maxItems is not None
+ or self.minItems is not None
+ or self.uniqueItems
+ or self.const is not None
+ or self.enum is not None
+ ):
+ return True
+ if isinstance(self.items, (BooleanSchema, IntegerSchema, NumberSchema)):
+ return True
+ return False
+
+ @property
+ def annotation_string(self) -> str:
+ return f"list[{self.items.annotation_string}]"
+
+ @property
+ def python_type(self) -> builtins.type:
+ return list
+
+
+# NOTE: Workaround for cyclic PropertiesMapping / SchemaObjectTypes annotations
+def _get_properties_mapping_default() -> PropertiesMapping:
+ return _get_empty_properties_mapping()
+
+
+class ObjectSchema(SchemaBase[dict[str, JSON]], frozen=True):
+ type: Literal["object"] = "object"
+ properties: PropertiesMapping = Field(
+ default_factory=_get_properties_mapping_default
+ )
+ additionalProperties: SchemaObjectTypes | bool = True
+ required: list[str] = []
+ maxProperties: int | None = None
+ minProperties: int | None = None
+ const: dict[str, JSON] | None = None
+ enum: list[dict[str, JSON]] | None = None
+ nullable: bool = False
+
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[dict[str, JSON], ObjectSchema]:
+ if self.const is not None:
+ return self.const, self
+
+ if self.enum is not None:
+ return choice(self.enum), self
+
+ json_data: dict[str, Any] = {}
+
+ property_names = self._get_property_names_to_process()
+
+ for property_name in property_names:
+ property_schema = self.properties.root[property_name]
+ if property_schema.readOnly:
+ continue
+
+ json_data[property_name] = self._get_data_for_property(
+ property_name=property_name,
+ property_schema=property_schema,
+ operation_id=operation_id,
+ )
+
+ return json_data, self
+
+ def _get_property_names_to_process(self) -> list[str]:
+ property_names = []
+
+ properties = {} if self.properties is None else self.properties.root
+ for property_name in properties:
+ # register the oas_name
+ _ = get_safe_name_for_oas_name(property_name)
+ if constrained_values := self._get_constrained_values(
+ property_name=property_name
+ ):
+ # do not add properties that are configured to be ignored
+ if IGNORE in constrained_values: # type: ignore[comparison-overlap]
+ continue
+ property_names.append(property_name)
+
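+ # When maxProperties would be exceeded, keep all required properties and fill up with a random sample of the optional ones.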
+ max_properties = self.maxProperties
+ if max_properties and len(property_names) > max_properties:
+ required_properties = self.required
+ number_of_optional_properties = max_properties - len(required_properties)
+ optional_properties = [
+ name for name in property_names if name not in required_properties
+ ]
+ selected_optional_properties = sample(
+ optional_properties, number_of_optional_properties
+ )
+ property_names = required_properties + selected_optional_properties
+
+ return property_names
+
+ def _get_data_for_property(
+ self,
+ property_name: str,
+ property_schema: SchemaObjectTypes,
+ operation_id: str | None,
+ ) -> JSON:
+ if constrained_values := self._get_constrained_values(
+ property_name=property_name
+ ):
+ constrained_value = choice(constrained_values)
+ # Check if the chosen value is a nested constraint_mapping; since a
+ # mapping is never instantiated, we can use isinstance(..., type) for this.
+ if isinstance(constrained_value, type):
+ property_schema.attach_constraint_mapping(constrained_value)
+ valid_value, _ = property_schema.get_valid_value(
+ operation_id=operation_id
+ )
+ return valid_value
+
+ return constrained_value
+
+ if (
+ dependent_id := get_dependent_id(
+ constraint_mapping=self.constraint_mapping,
+ property_name=property_name,
+ operation_id=operation_id,
+ )
+ ) is not None:
+ return dependent_id
+
+ # Constraints are mapped to endpoints; they are not attached to the property
+ # value schemas, so update the schema before generating a value.
+ property_schema.attach_constraint_mapping(self.constraint_mapping)
+ return property_schema.get_valid_value(operation_id=operation_id)[0]
+
+ def _get_constrained_values(
+ self, property_name: str
+ ) -> list[JSON | ConstraintMappingType]:
+ relations = self.constraint_mapping.get_relations()
+ values_list = [
+ c.values
+ for c in relations
+ if (
+ isinstance(c, PropertyValueConstraint)
+ and c.property_name == property_name
+ )
+ ]
+ # values should be empty or contain 1 list of allowed values
+ return values_list.pop() if values_list else []
+
+ def get_values_out_of_bounds(
+ self, current_value: Mapping[str, JSON]
+ ) -> list[dict[str, JSON]]:
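+ # Bounds invalidation is not supported for object schemas; raising ValueError lets the caller fall back to other invalidation strategies.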
+ raise ValueError
+
+ def get_invalid_value_from_const_or_enum(self) -> dict[str, JSON]:
+ valid_values = []
+ if self.const is not None:
+ valid_values = [self.const]
+ if self.enum is not None:
+ valid_values = self.enum
+
+ if not valid_values:
+ raise ValueError
+
+ # This invalidation will not work for a const and may not work for
+ # an enum. In that case a different invalidation approach will be used.
+ invalid_value = {**valid_values[0]}
+ for value in valid_values:
+ for key in invalid_value.keys():
+ invalid_value[key] = value.get(key)
+ if invalid_value not in valid_values:
+ return invalid_value
+
+ raise ValueError
+
+ def get_invalid_value_from_constraint(
+ self, values_from_constraint: list[dict[str, JSON]]
+ ) -> dict[str, JSON]:
+ values_from_constraint = deepcopy(values_from_constraint)
+
+ valid_object = values_from_constraint.pop()
+ invalid_object: dict[str, JSON] = {}
+ for key, value in valid_object.items():
+ python_type_of_value = type(value)
+ json_type_of_value = json_type_name_of_python_type(python_type_of_value)
+ schema = MediaTypeObject(schema={"type": json_type_of_value}).schema_ # pyright: ignore[reportArgumentType]
+ invalid_value = schema.get_invalid_value_from_constraint( # type: ignore[union-attr]
+ values_from_constraint=[value], # type: ignore[list-item]
+ )
+ invalid_object[key] = invalid_value
+ return invalid_object
+
+ def get_invalid_data(
+ self,
+ valid_data: dict[str, JSON],
+ status_code: int,
+ invalid_property_default_code: int,
+ ) -> dict[str, JSON]:
+ """Return a data set with one of the properties set to an invalid value or type."""
+ properties: dict[str, JSON] = deepcopy(valid_data)
+
+ relations = self.constraint_mapping.get_body_relations_for_error_code(
+ error_code=status_code
+ )
+ property_names = [r.property_name for r in relations]
+
+ if status_code == invalid_property_default_code:
+ # add all properties defined in the schema, including optional properties
+ property_names.extend(self.properties.root.keys())
+ if not property_names:
+ raise ValueError(
+ f"No property can be invalidated to cause status_code {status_code}"
+ )
+ # Remove duplicates, then shuffle the property_names so different properties in
+ # the data dict are invalidated when rerunning the test.
+ property_names = list(set(property_names))
+ shuffle(property_names)
+ # Only one property will be invalidated; since the names are shuffled, take the first.
+ property_name = property_names[0]
+ # if possible, invalidate a constraint but send otherwise valid data
+ id_dependencies = [
+ r
+ for r in relations
+ if isinstance(r, IdDependency) and r.property_name == property_name
+ ]
+ if id_dependencies:
+ invalid_id = uuid4().hex
+ logger.debug(
+ f"Breaking IdDependency for status_code {status_code}: setting "
+ f"{property_name} to {invalid_id}"
+ )
+ properties[property_name] = invalid_id
+ return properties
+
+ invalid_value_from_constraint = [
+ r.invalid_value
+ for r in relations
+ if isinstance(r, PropertyValueConstraint)
+ and r.property_name == property_name
+ and r.invalid_value_error_code == status_code
+ ]
+ if (
+ invalid_value_from_constraint
+ and invalid_value_from_constraint[0] is not NOT_SET
+ ):
+ invalid_value = invalid_value_from_constraint[0]
+ if isinstance(invalid_value, Ignore):
+ properties.pop(property_name)
+ logger.debug(
+ f"Property {property_name} removed since the invalid_value "
+ f"was IGNORE (received from get_invalid_value)"
+ )
+ else:
+ properties[property_name] = invalid_value
+ logger.debug(
+ f"Using invalid_value {invalid_value_from_constraint[0]} to "
+ f"invalidate property {property_name}"
+ )
+ return properties
+
+ value_schema = self.properties.root[property_name]
+ if isinstance(value_schema, UnionTypeSchema):
+ # Filter "type": "null" from the possible types since this indicates an
+ # optional / nullable property that can only be invalidated by sending
+ # invalid data of a non-null type
+ non_null_schemas = [
+ s
+ for s in value_schema.resolved_schemas
+ if not isinstance(s, NullSchema)
+ ]
+ value_schema = choice(non_null_schemas)
+
+ # there may not be a current_value when invalidating an optional property
+ current_value = properties.get(property_name, SENTINEL)
+ if current_value is SENTINEL:
+ current_value = value_schema.get_valid_value()[0]
+
+ values_from_constraint = [
+ r.values[0]
+ for r in relations
+ if isinstance(r, PropertyValueConstraint)
+ and r.property_name == property_name
+ ]
+
+ invalid_value = value_schema.get_invalid_value(
+ valid_value=current_value, # type: ignore[arg-type]
+ values_from_constraint=values_from_constraint,
+ )
+ if isinstance(invalid_value, Ignore):
+ properties.pop(property_name)
+ logger.debug(
+ f"Property {property_name} removed since the invalid_value "
+ f"was IGNORE (received from get_invalid_value)"
+ )
+ else:
+ properties[property_name] = invalid_value
+ logger.debug(
+ f"Property {property_name} changed to {invalid_value} "
+ f"(received from get_invalid_value)"
+ )
+ return properties
+
+ def contains_properties(self, property_names: list[str]) -> bool:
+ if self.properties is None:
+ return False # pragma: no cover
+ for property_name in property_names:
+ if property_name not in self.properties.root:
+ return False
+ return True
+
+ @property
+ def can_be_invalidated(self) -> bool:
+ if (
+ self.required
+ or self.maxProperties is not None
+ or self.minProperties is not None
+ or self.const is not None
+ or self.enum is not None
+ ):
+ return True
+ return False
+
+ @property
+ def annotation_string(self) -> str:
+ return "dict[str, JSON]"
+
+ @property
+ def python_type(self) -> builtins.type:
+ return dict
+
+
+ResolvedSchemaObjectTypes = Annotated[
+ Union[
+ ArraySchema, # type: ignore[type-arg]
+ BooleanSchema,
+ IntegerSchema,
+ NullSchema,
+ NumberSchema,
+ ObjectSchema,
+ StringSchema,
+ ],
+ Field(discriminator="type"),
+]
+
+RESOLVED_SCHEMA_CLASS_TUPLE = (
+ NullSchema,
+ BooleanSchema,
+ StringSchema,
+ IntegerSchema,
+ NumberSchema,
+ ArraySchema,
+ ObjectSchema,
+)
+
+
+class UnionTypeSchema(SchemaBase[JSON], frozen=True):
+ allOf: list["SchemaObjectTypes"] = []
+ anyOf: list["SchemaObjectTypes"] = []
+ oneOf: list["SchemaObjectTypes"] = []
+ nullable: bool = False
+
+ def get_valid_value(
+ self,
+ operation_id: str | None = None,
+ ) -> tuple[JSON, ResolvedSchemaObjectTypes]:
+ relations = (
+ self.constraint_mapping.get_relations()
+ + self.constraint_mapping.get_parameter_relations()
+ )
+ constrained_property_names = [relation.property_name for relation in relations]
+
+ if not constrained_property_names:
+ resolved_schemas = self.resolved_schemas
+ chosen_schema = choice(resolved_schemas)
+ return chosen_schema.get_valid_value(operation_id=operation_id)
+
+ valid_values = []
+ valid_schemas = []
+ for candidate in self.resolved_schemas:
+ if isinstance(candidate, ObjectSchema):
+ if candidate.contains_properties(constrained_property_names):
+ valid_schemas.append(candidate)
+
+ if isinstance(candidate, UnionTypeSchema):
+ candidate.attach_constraint_mapping(self.constraint_mapping)
+ try:
+ valid_value = candidate.get_valid_value(operation_id=operation_id)
+ valid_values.append(valid_value)
+ except ValueError:
+ pass
+ for valid_schema in valid_schemas:
+ valid_value = valid_schema.get_valid_value(operation_id=operation_id)
+ valid_values.append(valid_value)
+
+ if valid_values:
+ return choice(valid_values)
+
+ # The constraints from the parent may not be applicable, resulting in no
+ # valid_values being generated. In that case, generate a random value as normal.
+ chosen_schema = choice(self.resolved_schemas)
+ return chosen_schema.get_valid_value(operation_id=operation_id)
+
+ def get_values_out_of_bounds(self, current_value: JSON) -> list[JSON]:
+ raise ValueError
+
+ @cached_property
+ def resolved_schemas(self) -> list[ResolvedSchemaObjectTypes]:
+ schemas_to_return: list[ResolvedSchemaObjectTypes] = []
+ null_schema = None
+
+ resolved_schemas = list(self._get_resolved_schemas())
+ for schema in resolved_schemas:
+ # Prevent duplication of NullSchema when handling nullable models.
+ if isinstance(schema, NullSchema):
+ null_schema = schema
+ else:
+ schemas_to_return.append(schema)
+ if null_schema is not None:
+ schemas_to_return.append(null_schema)
+ return schemas_to_return
+
+ def _get_resolved_schemas(self) -> Generator[ResolvedSchemaObjectTypes, None, None]:
+ if self.allOf:
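+ # allOf: merge the schemas into a single ObjectSchema so one consistent value can be generated; only object schemas are supported.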
+ properties_list: list[PropertiesMapping] = []
+ additional_properties_list = []
+ required_list = []
+ max_properties_list = []
+ min_properties_list = []
+ nullable_list = []
+
+ schemas_to_process = []
+ for schema in self.allOf:
+ if isinstance(schema, UnionTypeSchema):
+ schemas_to_process.extend(schema.resolved_schemas)
+ else:
+ schemas_to_process.append(schema)
+
+ for schema in schemas_to_process:
+ if not isinstance(schema, ObjectSchema):
+ raise ValueError("allOf is only supported for ObjectSchemas")
+
+ if schema.const is not None:
+ raise ValueError("allOf and models with a const are not compatible")
+
+ if schema.enum:
+ raise ValueError("allOf and models with enums are not compatible")
+
+ if schema.properties.root:
+ properties_list.append(schema.properties)
+ additional_properties_list.append(schema.additionalProperties)
+ required_list += schema.required
+ max_properties_list.append(schema.maxProperties)
+ min_properties_list.append(schema.minProperties)
+ nullable_list.append(schema.nullable)
+
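+ # Merge the property mappings; for duplicate property names, the first schema in the allOf list takes precedence (ChainMap semantics).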
+ properties_dicts = [mapping.root for mapping in properties_list]
+ merged_properties = dict(ChainMap(*properties_dicts))
+
+ if True in additional_properties_list:
+ additional_properties_value: bool | SchemaObjectTypes = True
+ else:
+ additional_properties_types = []
+ for additional_properties_item in additional_properties_list:
+ if isinstance(
+ additional_properties_item, RESOLVED_SCHEMA_CLASS_TUPLE
+ ):
+ additional_properties_types.append(additional_properties_item)
+ if isinstance(additional_properties_item, UnionTypeSchema):
+ additional_properties_types.extend(
+ additional_properties_item.resolved_schemas
+ )
+ if not additional_properties_types:
+ additional_properties_value = False
+ else:
+ additional_properties_value = UnionTypeSchema(
+ anyOf=additional_properties_types,
+ )
+
+ max_properties = [value for value in max_properties_list if value is not None]
+ min_properties = [value for value in min_properties_list if value is not None]
+ max_properties_value = max(max_properties) if max_properties else None
+ min_properties_value = min(min_properties) if min_properties else None
+
+ merged_schema = ObjectSchema(
+ type="object",
+ properties=PropertiesMapping(root=merged_properties),
+ additionalProperties=additional_properties_value,
+ required=required_list,
+ maxProperties=max_properties_value,
+ minProperties=min_properties_value,
+ nullable=False,
+ )
+ merged_schema.attach_constraint_mapping(self.constraint_mapping)
+ yield merged_schema
+ # If all schemas are nullable the merged schema is treated as nullable.
+ if all(nullable_list):
+ null_schema = NullSchema()
+ null_schema.attach_constraint_mapping(self.constraint_mapping)
+ yield null_schema
+ else:
+ for schema in self.anyOf + self.oneOf:
+ if isinstance(schema, RESOLVED_SCHEMA_CLASS_TUPLE):
+ if schema.nullable:
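+ # The schema models are frozen, so clear the nullable flag via __dict__ and yield a separate NullSchema before the schema itself.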
+ schema.__dict__["nullable"] = False
+ null_schema = NullSchema()
+ null_schema.attach_constraint_mapping(self.constraint_mapping)
+ yield null_schema
+ yield schema
+ else:
+ yield from schema.resolved_schemas
+
+ def get_invalid_value_from_const_or_enum(self) -> JSON:
+ raise ValueError
+
+ def get_invalid_value_from_constraint(
+ self, values_from_constraint: list[JSON]
+ ) -> JSON:
+ raise ValueError
+
+ @property
+ def annotation_string(self) -> str:
+ unique_annotations = {s.annotation_string for s in self.resolved_schemas}
+ return " | ".join(unique_annotations)
+
+
+SchemaObjectTypes: TypeAlias = ResolvedSchemaObjectTypes | UnionTypeSchema
+
+
+class PropertiesMapping(RootModel[dict[str, SchemaObjectTypes]], frozen=True): ...
+
+
+def _get_empty_properties_mapping() -> PropertiesMapping:
+ return PropertiesMapping(root={})
+
+
+class ParameterObject(BaseModel):
+ name: str
+ in_: str = Field(..., alias="in")
+ required: bool = False
+ description: str = ""
+ schema_: SchemaObjectTypes | None = Field(None, alias="schema")
+ constraint_mapping: ConstraintMappingType | None = None
+
+ def attach_constraint_mapping(
+ self, constraint_mapping: ConstraintMappingType
+ ) -> None:
+ if self.schema_: # pragma: no branch
+ self.schema_.attach_constraint_mapping(constraint_mapping)
+
+ def replace_nullable_with_union(self) -> None:
+ if self.schema_: # pragma: no branch
+ processed_schema = nullable_schema_to_union_schema(self.schema_)
+ self.schema_ = processed_schema
+
+
+class MediaTypeObject(BaseModel):
+ schema_: SchemaObjectTypes | None = Field(None, alias="schema")
+
+
+class RequestBodyObject(BaseModel):
+ content: dict[str, MediaTypeObject]
+ required: bool = False
+ description: str = ""
+
+ @cached_property
+ def schema_(self) -> SchemaObjectTypes | None:
+ if not self.mime_type:
+ return None
+
+ if len(self._json_schemas) > 1:
+ logger.info(
+ f"Multiple JSON media types defined for requestBody, "
+ f"using the first candidate from {self.content}"
+ )
+ return self._json_schemas[self.mime_type]
+
+ @cached_property
+ def mime_type(self) -> str | None:
+ if not self._json_schemas:
+ return None
+
+ return next(iter(self._json_schemas))
+
+ @cached_property
+ def _json_schemas(self) -> dict[str, SchemaObjectTypes]:
+ json_schemas = {
+ mime_type: media_type.schema_
+ for mime_type, media_type in self.content.items()
+ if "json" in mime_type and media_type.schema_ is not None
+ }
+ return json_schemas
+
+ def attach_constraint_mapping(
+ self, constraint_mapping: ConstraintMappingType
+ ) -> None:
+ for media_object_type in self.content.values():
+ if media_object_type and media_object_type.schema_: # pragma: no branch
+ media_object_type.schema_.attach_constraint_mapping(constraint_mapping)
+
+ def replace_nullable_with_union(self) -> None:
+ for media_object_type in self.content.values():
+ if media_object_type and media_object_type.schema_: # pragma: no branch
+ processed_schema = nullable_schema_to_union_schema(
+ media_object_type.schema_
+ )
+ media_object_type.schema_ = processed_schema
+
+
+class HeaderObject(BaseModel): ...
+
+
+class LinkObject(BaseModel): ...
+
+
+class ResponseObject(BaseModel):
+ description: str
+ content: dict[str, MediaTypeObject] = {}
+ headers: dict[str, HeaderObject] = {}
+ links: dict[str, LinkObject] = {}
+
+
+class OperationObject(BaseModel):
+ operationId: str | None = None
+ summary: str = ""
+ description: str = ""
+ tags: list[str] = []
+ parameters: list[ParameterObject] = []
+ requestBody: RequestBodyObject | None = None
+ responses: dict[str, ResponseObject] = {}
+ constraint_mapping: ConstraintMappingType | None = None
+
+ def update_parameters(self, parameters: list[ParameterObject]) -> None:
+ self.parameters.extend(parameters)
+
+ def attach_constraint_mappings(self) -> None:
+ if not self.constraint_mapping:
+ return
+
+ if self.requestBody:
+ self.requestBody.attach_constraint_mapping(self.constraint_mapping)
+
+ for parameter_object in self.parameters:
+ parameter_object.attach_constraint_mapping(self.constraint_mapping)
+
+ def replace_nullable_with_union(self) -> None:
+ if self.requestBody:
+ self.requestBody.replace_nullable_with_union()
+
+ for parameter_object in self.parameters:
+ parameter_object.replace_nullable_with_union()
+
+
+class PathItemObject(BaseModel):
+ get: OperationObject | None = None
+ post: OperationObject | None = None
+ patch: OperationObject | None = None
+ put: OperationObject | None = None
+ delete: OperationObject | None = None
+ summary: str = ""
+ description: str = ""
+ parameters: list[ParameterObject] = []
+ constraint_mapping: ConstraintMappingType | None = None
+ id_mapper: tuple[str, Callable[[str], str]] = (
+ "id",
+ dummy_transformer,
+ )
+
+ @property
+ def operations(self) -> dict[str, OperationObject]:
+ return {
+ k: v for k, v in self.__dict__.items() if isinstance(v, OperationObject)
+ }
+
+ def update_operation_parameters(self) -> None:
+ if not self.parameters:
+ return
+
+ operations_to_update = self.operations
+ for operation_object in operations_to_update.values():
+ operation_object.update_parameters(self.parameters)
+
+ def attach_constraint_mappings(self) -> None:
+ for operation_object in self.operations.values():
+ operation_object.attach_constraint_mappings()
+
+ def replace_nullable_with_union(self) -> None:
+ for operation_object in self.operations.values():
+ operation_object.attach_constraint_mappings()
+ operation_object.replace_nullable_with_union()
+
+
+class InfoObject(BaseModel):
+ title: str
+ version: str
+ summary: str = ""
+ description: str = ""
+
+
+class OpenApiObject(BaseModel):
+ info: InfoObject
+ paths: dict[str, PathItemObject]
+
+
+def nullable_schema_to_union_schema(schema: SchemaObjectTypes) -> SchemaObjectTypes:
+ if not schema.nullable:
+ return schema
+
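+ # The schema models are frozen, so reset the nullable flag via __dict__ before wrapping the schema in a union with a NullSchema.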
+ schema.__dict__["nullable"] = False
+ null_schema = NullSchema()
+ null_schema.attach_constraint_mapping(schema.constraint_mapping)
+ union_schema = UnionTypeSchema(oneOf=[schema, null_schema])
+ union_schema.attach_constraint_mapping(schema.constraint_mapping)
+ return union_schema
+
+
+# TODO: move to keyword_logic?
+def get_dependent_id(
+ constraint_mapping: ConstraintMappingType | None,
+ property_name: str,
+ operation_id: str | None,
+) -> str | int | float | None:
+ relations = constraint_mapping.get_relations() if constraint_mapping else []
+ # multiple get paths are possible based on the operation being performed
+ id_get_paths = [
+ (d.get_path, d.operation_id)
+ for d in relations
+ if (isinstance(d, IdDependency) and d.property_name == property_name)
+ ]
+ if not id_get_paths:
+ return None
+ if len(id_get_paths) == 1:
+ id_get_path, _ = id_get_paths.pop()
+ else:
+ try:
+ [id_get_path] = [
+ path for path, operation in id_get_paths if operation == operation_id
+ ]
+ # There could be multiple get_paths, but not one for the current operation
+ except ValueError:
+ return None
+
+ valid_id = cast(
+ str | int | float, run_keyword("get_valid_id_for_path", id_get_path)
+ )
+ logger.debug(f"get_dependent_id for {id_get_path} returned {valid_id}")
+ return valid_id
diff --git a/src/OpenApiLibCore/request_data.py b/src/OpenApiLibCore/models/request_data.py
similarity index 78%
rename from src/OpenApiLibCore/request_data.py
rename to src/OpenApiLibCore/models/request_data.py
index 707539d..b21711b 100644
--- a/src/OpenApiLibCore/request_data.py
+++ b/src/OpenApiLibCore/models/request_data.py
@@ -4,17 +4,15 @@
from dataclasses import dataclass, field
from functools import cached_property
from random import sample
-from typing import Any
from OpenApiLibCore.annotations import JSON
-from OpenApiLibCore.dto_base import Dto
-from OpenApiLibCore.dto_utils import DefaultDto
-from OpenApiLibCore.models import (
+from OpenApiLibCore.models.oas_models import (
ObjectSchema,
ParameterObject,
ResolvedSchemaObjectTypes,
UnionTypeSchema,
)
+from OpenApiLibCore.protocols import ConstraintMappingType
@dataclass
@@ -24,14 +22,15 @@ class RequestValues:
url: str
method: str
params: dict[str, JSON] = field(default_factory=dict)
- headers: dict[str, JSON] = field(default_factory=dict)
- json_data: dict[str, JSON] = field(default_factory=dict)
+ headers: dict[str, str] = field(default_factory=dict)
+ json_data: JSON = None
def override_body_value(self, name: str, value: JSON) -> None:
- if name in self.json_data:
+ # TODO: add support for overriding list body items
+ if isinstance(self.json_data, dict) and name in self.json_data:
self.json_data[name] = value
- def override_header_value(self, name: str, value: JSON) -> None:
+ def override_header_value(self, name: str, value: str) -> None:
if name in self.headers:
self.headers[name] = value
@@ -41,25 +40,27 @@ def override_param_value(self, name: str, value: JSON) -> None:
def override_request_value(self, name: str, value: JSON) -> None:
self.override_body_value(name=name, value=value)
- self.override_header_value(name=name, value=value)
+ self.override_header_value(name=name, value=str(value))
self.override_param_value(name=name, value=value)
def remove_parameters(self, parameters: list[str]) -> None:
for parameter in parameters:
_ = self.params.pop(parameter, None)
_ = self.headers.pop(parameter, None)
- _ = self.json_data.pop(parameter, None)
+ if isinstance(self.json_data, dict):
+ _ = self.json_data.pop(parameter, None)
@dataclass
class RequestData:
"""Helper class to manage parameters used when making requests."""
- dto: Dto | DefaultDto = field(default_factory=DefaultDto)
- body_schema: ObjectSchema | None = None
+ valid_data: JSON
+ constraint_mapping: ConstraintMappingType
+ body_schema: ResolvedSchemaObjectTypes | None = None
parameters: list[ParameterObject] = field(default_factory=list)
params: dict[str, JSON] = field(default_factory=dict)
- headers: dict[str, JSON] = field(default_factory=dict)
+ headers: dict[str, str] = field(default_factory=dict)
has_body: bool = True
def __post_init__(self) -> None:
@@ -69,17 +70,20 @@ def __post_init__(self) -> None:
@property
def has_optional_properties(self) -> bool:
- """Whether or not the dto data (json data) contains optional properties."""
+ """Whether or not the json data contains optional properties."""
def is_required_property(property_name: str) -> bool:
return property_name in self.required_property_names
- properties = (self.dto.as_dict()).keys()
+ if not isinstance(self.valid_data, dict):
+ return False
+
+ properties = self.valid_data.keys()
return not all(map(is_required_property, properties))
@property
def required_property_names(self) -> list[str]:
- if self.body_schema:
+ if isinstance(self.body_schema, ObjectSchema):
return self.body_schema.required
return []
@@ -165,28 +169,38 @@ def headers_that_can_be_invalidated(self) -> set[str]:
return result
- def get_required_properties_dict(self) -> dict[str, Any]:
- """Get the json-compatible dto data containing only the required properties."""
- relations = self.dto.get_relations()
+ def get_required_properties_dict(self) -> dict[str, JSON]:
+ """Get the json data containing only the required properties."""
+ relations = self.constraint_mapping.get_relations()
mandatory_properties = [
relation.property_name
for relation in relations
if getattr(relation, "treat_as_mandatory", False)
]
- required_properties = self.body_schema.required if self.body_schema else []
+ required_properties = (
+ self.body_schema.required
+ if isinstance(self.body_schema, ObjectSchema)
+ else []
+ )
required_properties.extend(mandatory_properties)
- required_properties_dict: dict[str, Any] = {}
- for key, value in (self.dto.as_dict()).items():
+ required_properties_dict: dict[str, JSON] = {}
+ if not isinstance(self.valid_data, dict):
+ return required_properties_dict
+
+ for key, value in self.valid_data.items():
if key in required_properties:
required_properties_dict[key] = value
return required_properties_dict
- def get_minimal_body_dict(self) -> dict[str, Any]:
+ def get_minimal_body_dict(self) -> dict[str, JSON]:
required_properties_dict = self.get_required_properties_dict()
min_properties = 0
- if self.body_schema and self.body_schema.minProperties is not None:
+ if (
+ isinstance(self.body_schema, ObjectSchema)
+ and self.body_schema.minProperties is not None
+ ):
min_properties = self.body_schema.minProperties
number_of_optional_properties_to_add = min_properties - len(
@@ -196,9 +210,12 @@ def get_minimal_body_dict(self) -> dict[str, Any]:
if number_of_optional_properties_to_add < 1:
return required_properties_dict
+ if not isinstance(self.valid_data, dict):
+ return required_properties_dict
+
optional_properties_dict = {
k: v
- for k, v in self.dto.as_dict().items()
+ for k, v in self.valid_data.items()
if k not in required_properties_dict
}
optional_properties_to_keep = sample(
@@ -218,7 +235,7 @@ def get_required_params(self) -> dict[str, JSON]:
k: v for k, v in self.params.items() if k in self.required_parameter_names
}
- def get_required_headers(self) -> dict[str, JSON]:
+ def get_required_headers(self) -> dict[str, str]:
"""Get the headers dict containing only the required headers."""
return {
k: v for k, v in self.headers.items() if k in self.required_parameter_names
@@ -230,7 +247,7 @@ def required_parameter_names(self) -> list[str]:
The names of the mandatory parameters, including the parameters configured to be
treated as mandatory using a PropertyValueConstraint.
"""
- relations = self.dto.get_parameter_relations()
+ relations = self.constraint_mapping.get_parameter_relations()
mandatory_property_names = [
relation.property_name
for relation in relations
diff --git a/src/OpenApiLibCore/models/resource_relations.py b/src/OpenApiLibCore/models/resource_relations.py
new file mode 100644
index 0000000..9f38d36
--- /dev/null
+++ b/src/OpenApiLibCore/models/resource_relations.py
@@ -0,0 +1,63 @@
+from abc import ABC
+from dataclasses import dataclass
+from typing import Any
+
+NOT_SET = object()
+
+
+class ResourceRelation(ABC):
+ """ABC for all resource relations or restrictions within the API."""
+
+ property_name: str
+ error_code: int
+
+
+@dataclass
+class PathPropertiesConstraint(ResourceRelation):
+ """The value to be used as the ``path`` for related requests."""
+
+ path: str
+ property_name: str = "id"
+ invalid_value: Any = NOT_SET
+ invalid_value_error_code: int = 422
+ error_code: int = 404
+
+
+@dataclass
+class PropertyValueConstraint(ResourceRelation):
+ """The allowed values for property_name."""
+
+ property_name: str
+ values: list[Any]
+ invalid_value: Any = NOT_SET
+ invalid_value_error_code: int = 422
+ error_code: int = 422
+ treat_as_mandatory: bool = False
+
+
+@dataclass
+class IdDependency(ResourceRelation):
+ """The path where a valid id for the property_name can be gotten (using GET)."""
+
+ property_name: str
+ get_path: str
+ operation_id: str = ""
+ error_code: int = 422
+
+
+@dataclass
+class IdReference(ResourceRelation):
+ """The path where a resource that needs this resource's id can be created (using POST)."""
+
+ property_name: str
+ post_path: str
+ error_code: int = 422
+
+
+@dataclass
+class UniquePropertyValueConstraint(ResourceRelation):
+ """The value of the property must be unique within the resource scope."""
+
+ property_name: str
+ value: Any
+ error_code: int = 422
diff --git a/src/OpenApiLibCore/oas_cache.py b/src/OpenApiLibCore/oas_cache.py
deleted file mode 100644
index a9c4d24..0000000
--- a/src/OpenApiLibCore/oas_cache.py
+++ /dev/null
@@ -1,18 +0,0 @@
-"""Module holding the (global) parser cache."""
-
-from dataclasses import dataclass
-
-from openapi_core import Spec
-from prance import ResolvingParser
-
-from OpenApiLibCore.protocols import ResponseValidatorType
-
-
-@dataclass
-class CachedParser:
- parser: ResolvingParser
- validation_spec: Spec
- response_validator: ResponseValidatorType
-
-
-PARSER_CACHE: dict[str, CachedParser] = {}
diff --git a/src/OpenApiLibCore/openapi_libcore.py b/src/OpenApiLibCore/openapi_libcore.py
index 36ce83c..68ecabe 100644
--- a/src/OpenApiLibCore/openapi_libcore.py
+++ b/src/OpenApiLibCore/openapi_libcore.py
@@ -1,13 +1,15 @@
import json as _json
import sys
+import tempfile
from collections.abc import Mapping, MutableMapping
from copy import deepcopy
from functools import cached_property
from pathlib import Path
from types import MappingProxyType
-from typing import Any, Generator
+from typing import Any, Callable, Generator, Literal, overload
-from openapi_core import Config, OpenAPI, Spec
+from jsonschema_path import SchemaPath
+from openapi_core import Config, OpenAPI
from openapi_core.validation.exceptions import ValidationError
from prance import ResolvingParser
from prance.util.url import ResolutionError
@@ -19,32 +21,33 @@
from robot.api.exceptions import FatalError
from robot.libraries.BuiltIn import BuiltIn
-import OpenApiLibCore.data_generation as _data_generation
-import OpenApiLibCore.data_invalidation as _data_invalidation
-import OpenApiLibCore.path_functions as _path_functions
-import OpenApiLibCore.path_invalidation as _path_invalidation
-import OpenApiLibCore.resource_relations as _resource_relations
-import OpenApiLibCore.validation as _validation
+import OpenApiLibCore.data_generation.data_generation_core as _data_generation
+import OpenApiLibCore.data_generation.data_invalidation as _data_invalidation
+import OpenApiLibCore.keyword_logic.path_functions as _path_functions
+import OpenApiLibCore.keyword_logic.path_invalidation as _path_invalidation
+import OpenApiLibCore.keyword_logic.resource_relations as _resource_relations
+import OpenApiLibCore.keyword_logic.validation as _validation
from OpenApiLibCore.annotations import JSON
-from OpenApiLibCore.dto_base import Dto, IdReference
-from OpenApiLibCore.dto_utils import (
- DEFAULT_ID_PROPERTY_NAME,
- get_dto_class,
+from OpenApiLibCore.data_constraints.dto_base import (
+ Dto,
+ get_constraint_mapping_dict,
get_id_property_name,
- get_path_dto_class,
+ get_path_mapping_dict,
)
-from OpenApiLibCore.localized_faker import FAKE
-from OpenApiLibCore.models import (
+from OpenApiLibCore.data_generation.localized_faker import FAKE
+from OpenApiLibCore.models.oas_models import (
OpenApiObject,
+ ParameterObject,
PathItemObject,
)
-from OpenApiLibCore.oas_cache import PARSER_CACHE, CachedParser
-from OpenApiLibCore.parameter_utils import (
+from OpenApiLibCore.models.request_data import RequestData, RequestValues
+from OpenApiLibCore.models.resource_relations import IdReference
+from OpenApiLibCore.protocols import IResponseValidator
+from OpenApiLibCore.utils.oas_cache import SPEC_CACHE, CachedSpec
+from OpenApiLibCore.utils.parameter_utils import (
get_oas_name_from_safe_name,
- register_path_parameters,
+ get_safe_name_for_oas_name,
)
-from OpenApiLibCore.protocols import ResponseValidatorType
-from OpenApiLibCore.request_data import RequestData, RequestValues
from openapitools_docs.docstrings import (
OPENAPILIBCORE_INIT_DOCSTRING,
OPENAPILIBCORE_LIBRARY_DOCSTRING,
@@ -55,6 +58,22 @@
default_json_mapping: Mapping[str, JSON] = MappingProxyType({})
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_valid_url"], *args: str
+) -> str: ... # pragma: no cover
+
+
+@overload
+def _run_keyword(
+ keyword_name: Literal["get_request_data"], *args: str
+) -> RequestData: ... # pragma: no cover
+
+
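+# Typed wrapper around Robot Framework's run_keyword; the overloads above narrow the return type based on the keyword name.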
+def _run_keyword(keyword_name: str, *args: object) -> object:
+ return run_keyword(keyword_name, *args)
+
+
@library(scope="SUITE", doc_format="HTML")
class OpenApiLibCore: # pylint: disable=too-many-public-methods
def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
@@ -65,7 +84,7 @@ def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
response_validation: _validation.ValidationLevel = _validation.ValidationLevel.WARN,
disable_server_validation: bool = True,
mappings_path: str | Path = "",
- invalid_property_default_response: int = 422,
+ invalid_data_default_response: int = 422,
default_id_property_name: str = "id",
faker_locale: str | list[str] = "",
require_body_for_invalid_url: bool = False,
@@ -104,7 +123,12 @@ def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
self.extra_headers = extra_headers
self.cookies = cookies
self.proxies = proxies
- self.invalid_property_default_response = invalid_property_default_response
+ self.invalid_data_default_response = invalid_data_default_response
+ if faker_locale:
+ FAKE.set_locale(locale=faker_locale)
+ self.require_body_for_invalid_url = require_body_for_invalid_url
+ self._server_validation_warning_logged = False
+
if mappings_path and str(mappings_path) != ".":
mappings_path = Path(mappings_path)
if not mappings_path.is_file():
@@ -114,30 +138,28 @@ def __init__( # noqa: PLR0913, pylint: disable=dangerous-default-value
mappings_folder = str(mappings_path.parent)
sys.path.append(mappings_folder)
mappings_module_name = mappings_path.stem
- self.get_dto_class = get_dto_class(
+ self.constraint_mapping_dict = get_constraint_mapping_dict(
mappings_module_name=mappings_module_name
)
- self.get_path_dto_class = get_path_dto_class(
+ self.path_mapping_dict = get_path_mapping_dict(
mappings_module_name=mappings_module_name
)
self.get_id_property_name = get_id_property_name(
- mappings_module_name=mappings_module_name
+ mappings_module_name=mappings_module_name,
+ default_id_property_name=default_id_property_name,
)
sys.path.pop()
else:
- self.get_dto_class = get_dto_class(mappings_module_name="no mapping")
- self.get_path_dto_class = get_path_dto_class(
+ self.constraint_mapping_dict = get_constraint_mapping_dict(
mappings_module_name="no mapping"
)
- self.get_id_property_name = get_id_property_name(
+ self.path_mapping_dict = get_path_mapping_dict(
mappings_module_name="no mapping"
)
- if faker_locale:
- FAKE.set_locale(locale=faker_locale)
- self.require_body_for_invalid_url = require_body_for_invalid_url
- # update the globally available DEFAULT_ID_PROPERTY_NAME to the provided value
- DEFAULT_ID_PROPERTY_NAME.id_property_name = default_id_property_name
- self._server_validation_warning_logged = False
+ self.get_id_property_name = get_id_property_name(
+ mappings_module_name="no mapping",
+ default_id_property_name=default_id_property_name,
+ )
# region: library configuration keywords
@keyword
@@ -205,14 +227,14 @@ def get_request_values(
overrides: Mapping[str, JSON] = default_json_mapping,
) -> RequestValues:
"""Return an object with all (valid) request values needed to make a request."""
- json_data: dict[str, JSON] = {}
+ json_data: JSON = {}
- url: str = run_keyword("get_valid_url", path)
- request_data: RequestData = run_keyword("get_request_data", path, method)
+ url = _run_keyword("get_valid_url", path)
+ request_data = _run_keyword("get_request_data", path, method)
params = request_data.params
headers = request_data.headers
if request_data.has_body:
- json_data = request_data.dto.as_dict()
+ json_data = request_data.valid_data
request_values = RequestValues(
url=url,
@@ -229,7 +251,9 @@ def get_request_values(
if location == "body":
request_values.override_body_value(name=oas_name, value=value)
if location == "header":
- request_values.override_header_value(name=oas_name, value=value)
+ request_values.override_header_value(
+ name=oas_name, value=str(value)
+ )
if location == "query":
request_values.override_param_value(name=oas_name, value=str(value))
else:
@@ -244,8 +268,6 @@ def get_request_data(self, path: str, method: str) -> RequestData:
return _data_generation.get_request_data(
path=path,
method=method,
- get_dto_class=self.get_dto_class,
- get_id_property_name=self.get_id_property_name,
openapi_spec=self.openapi_spec,
)
@@ -256,10 +278,10 @@ def get_invalid_body_data(
method: str,
status_code: int,
request_data: RequestData,
- ) -> dict[str, JSON]:
+ ) -> JSON:
"""
- Return `json_data` based on the `dto` on the `request_data` that will cause
- the provided `status_code` for the `method` operation on the `url`.
+ Return `json_data` based on the `constraint_mapping` on the `request_data` that
+ will cause the provided `status_code` for the `method` operation on the `url`.
> Note: applicable UniquePropertyValueConstraint and IdReference Relations are
considered before changes to `json_data` are made.
@@ -269,7 +291,7 @@ def get_invalid_body_data(
method=method,
status_code=status_code,
request_data=request_data,
- invalid_property_default_response=self.invalid_property_default_response,
+ invalid_data_default_response=self.invalid_data_default_response,
)
@keyword
@@ -277,7 +299,7 @@ def get_invalidated_parameters(
self,
status_code: int,
request_data: RequestData,
- ) -> tuple[dict[str, JSON], dict[str, JSON]]:
+ ) -> tuple[dict[str, JSON], dict[str, str]]:
"""
Returns a version of `params, headers` as present on `request_data` that has
been modified to cause the provided `status_code`.
@@ -285,23 +307,29 @@ def get_invalidated_parameters(
return _data_invalidation.get_invalidated_parameters(
status_code=status_code,
request_data=request_data,
- invalid_property_default_response=self.invalid_property_default_response,
+ invalid_data_default_response=self.invalid_data_default_response,
)
@keyword
def get_json_data_with_conflict(
- self, url: str, method: str, dto: Dto, conflict_status_code: int
+ self,
+ url: str,
+ method: str,
+ json_data: dict[str, JSON],
+ constraint_mapping: type[Dto],
+ conflict_status_code: int,
) -> dict[str, JSON]:
"""
Return `json_data` based on the `UniquePropertyValueConstraint` that must be
- returned by the `get_relations` implementation on the `dto` for the given
- `conflict_status_code`.
+ returned by the `get_relations` implementation on the `constraint_mapping` for
+ the given `conflict_status_code`.
"""
return _data_invalidation.get_json_data_with_conflict(
url=url,
base_url=self.base_url,
method=method,
- dto=dto,
+ json_data=json_data,
+ constraint_mapping=constraint_mapping, # FIXME: the model should have this information
conflict_status_code=conflict_status_code,
)
@@ -322,7 +350,6 @@ def get_valid_url(self, path: str) -> str:
return _path_functions.get_valid_url(
path=path,
base_url=self.base_url,
- get_path_dto_class=self.get_path_dto_class,
openapi_spec=self.openapi_spec,
)
@@ -335,7 +362,7 @@ def get_valid_id_for_path(self, path: str) -> str | int | float:
(by a POST operation) if possible.
"""
return _path_functions.get_valid_id_for_path(
- path=path, get_id_property_name=self.get_id_property_name
+ path=path, openapi_spec=self.openapi_spec
)
@keyword
@@ -358,30 +385,26 @@ def get_ids_from_url(self, url: str) -> list[str]:
Perform a GET request on the `url` and return the list of resource
`ids` from the response.
"""
- return _path_functions.get_ids_from_url(
- url=url, get_id_property_name=self.get_id_property_name
- )
+ return _path_functions.get_ids_from_url(url=url, openapi_spec=self.openapi_spec)
@keyword
def get_invalidated_url(
self,
valid_url: str,
- path: str = "",
expected_status_code: int = 404,
) -> str:
"""
Return an url with all the path parameters in the `valid_url` replaced by a
random UUID if no PathPropertiesConstraint is mapped for the `"get"` operation
- on the mapped `path` and `expected_status_code`.
+ on the related `path` and `expected_status_code`.
If a PathPropertiesConstraint is mapped, the `invalid_value` is returned.
Raises: ValueError if the valid_url cannot be invalidated.
"""
return _path_invalidation.get_invalidated_url(
valid_url=valid_url,
- path=path,
base_url=self.base_url,
- get_path_dto_class=self.get_path_dto_class,
+ openapi_spec=self.openapi_spec,
expected_status_code=expected_status_code,
)
@@ -474,7 +497,7 @@ def perform_validated_request(
@keyword
def validate_response_using_validator(self, response: Response) -> None:
"""
- Validate the `response` against the OpenAPI Spec that is
+ Validate the `response` against the OpenAPI spec that is
loaded during library initialization.
"""
_validation.validate_response_using_validator(
@@ -484,11 +507,11 @@ def validate_response_using_validator(self, response: Response) -> None:
@keyword
def assert_href_to_resource_is_valid(
- self, href: str, referenced_resource: dict[str, JSON]
+ self, href: str, referenced_resource: JSON
) -> None:
"""
Attempt to GET the resource referenced by the `href` and validate it's equal
- to the provided `referenced_resource` object / dictionary.
+ to the provided `referenced_resource`.
"""
_validation.assert_href_to_resource_is_valid(
href=href,
@@ -502,7 +525,7 @@ def validate_response(
self,
path: str,
response: Response,
- original_data: Mapping[str, JSON] = default_json_mapping,
+ original_data: JSON = default_json_mapping, # type: ignore[assignment]
) -> None:
"""
Validate the `response` by performing the following validations:
@@ -520,7 +543,7 @@ def validate_response(
response_validator=self.response_validator,
server_validation_warning_logged=self._server_validation_warning_logged,
disable_server_validation=self.disable_server_validation,
- invalid_property_default_response=self.invalid_property_default_response,
+ invalid_data_default_response=self.invalid_data_default_response,
response_validation=self.response_validation,
openapi_spec=self.openapi_spec,
original_data=original_data,
@@ -552,11 +575,6 @@ def origin(self) -> str:
def base_url(self) -> str:
return f"{self.origin}{self._base_path}"
- @cached_property
- def validation_spec(self) -> Spec:
- _, validation_spec, _ = self._load_specs_and_validator()
- return validation_spec
-
@property
def openapi_spec(self) -> OpenApiObject:
"""Return a deepcopy of the parsed openapi document."""
@@ -565,19 +583,65 @@ def openapi_spec(self) -> OpenApiObject:
@cached_property
def _openapi_spec(self) -> OpenApiObject:
- parser, _, _ = self._load_specs_and_validator()
- spec_model = OpenApiObject.model_validate(parser.specification)
- register_path_parameters(spec_model.paths)
+ specification, _ = self._load_specs_and_validator()
+ spec_model = OpenApiObject.model_validate(specification)
+ spec_model = self._perform_post_init_model_updates(spec_model=spec_model)
+ self._register_path_parameters(spec_model.paths)
+ return spec_model
+
+ def _register_path_parameters(self, paths_data: dict[str, PathItemObject]) -> None:
+ def _register_path_parameter(parameter_object: ParameterObject) -> None:
+ if parameter_object.in_ == "path":
+ _ = get_safe_name_for_oas_name(parameter_object.name)
+
+ for path_item in paths_data.values():
+ operations = path_item.operations
+ for operation in operations.values():
+ if parameters := operation.parameters:
+ for parameter in parameters:
+ _register_path_parameter(parameter_object=parameter)
+
+ def _perform_post_init_model_updates(
+ self, spec_model: OpenApiObject
+ ) -> OpenApiObject:
+ for (
+ path,
+ operation,
+ ), data_constraint in self.constraint_mapping_dict.items():
+ try:
+ operation_item = getattr(spec_model.paths[path], operation.lower())
+ operation_item.constraint_mapping = data_constraint
+ except KeyError:
+ logger.warn(
+ f"The DTO_MAPPING contains a path that is not found in the OpenAPI spec: {path}"
+ )
+
+ for path, path_constraint in self.path_mapping_dict.items():
+ try:
+ path_item = spec_model.paths[path]
+ path_item.constraint_mapping = path_constraint
+ except KeyError:
+ logger.warn(
+ f"The PATH_MAPPING contains a path that is not found in the OpenAPI spec: {path}"
+ )
+
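+ # For every path: set the id mapper, push path-level parameters down to the operations, attach the constraint mappings and replace nullable schemas with unions.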
+ for path, path_item in spec_model.paths.items():
+ mapper = self.get_id_property_name(path)
+ path_item.id_mapper = mapper
+ path_item.update_operation_parameters()
+ path_item.attach_constraint_mappings()
+ path_item.replace_nullable_with_union()
+
return spec_model
@cached_property
def response_validator(
self,
- ) -> ResponseValidatorType:
- _, _, response_validator = self._load_specs_and_validator()
+ ) -> IResponseValidator:
+ _, response_validator = self._load_specs_and_validator()
return response_validator
- def _get_json_types_from_spec(self, spec: dict[str, JSON]) -> set[str]:
+ def _get_json_types_from_spec(self, spec: Mapping[str, JSON]) -> set[str]:
json_types: set[str] = set(self._get_json_types(spec))
return {json_type for json_type in json_types if json_type is not None}
@@ -601,9 +665,8 @@ def _get_json_types(self, item: object) -> Generator[str, None, None]:
def _load_specs_and_validator(
self,
) -> tuple[
- ResolvingParser,
- Spec,
- ResponseValidatorType,
+ Mapping[str, JSON],
+ IResponseValidator,
]:
def recursion_limit_handler(
limit: int, # pylint: disable=unused-argument
@@ -613,50 +676,38 @@ def recursion_limit_handler(
return self._recursion_default # pragma: no cover
try:
- # Since parsing of the OAS and creating the Spec can take a long time,
+ # Since loading the OAS and building the response validator can take a long time,
# they are cached. This is done by storing them in an imported module that
# will have a global scope due to how the Python import system works. This
# ensures that in a Suite of Suites where multiple Suites use the same
# `source`, that OAS is only parsed / loaded once.
- cached_parser = PARSER_CACHE.get(self._source, None)
- if cached_parser:
+ cached_spec = SPEC_CACHE.get(self._source, None)
+ if cached_spec:
return (
- cached_parser.parser,
- cached_parser.validation_spec,
- cached_parser.response_validator,
+ cached_spec.specification,
+ cached_spec.response_validator,
)
- parser = ResolvingParser(
- self._source,
- backend="openapi-spec-validator",
- recursion_limit=self._recursion_limit,
- recursion_limit_handler=recursion_limit_handler,
- )
+ specification = self._get_specification(recursion_limit_handler)
- if parser.specification is None: # pragma: no cover
- raise FatalError(
- "Source was loaded, but no specification was present after parsing."
- )
-
- validation_spec = Spec.from_dict(parser.specification) # pyright: ignore[reportArgumentType]
+ validation_spec = SchemaPath.from_dict(specification) # type: ignore[arg-type]
json_types_from_spec: set[str] = self._get_json_types_from_spec(
- parser.specification
+ specification
)
extra_deserializers = {
json_type: _json.loads for json_type in json_types_from_spec
}
config = Config(extra_media_type_deserializers=extra_deserializers) # type: ignore[arg-type]
openapi = OpenAPI(spec=validation_spec, config=config)
- response_validator: ResponseValidatorType = openapi.validate_response # type: ignore[assignment]
+ response_validator: IResponseValidator = openapi.validate_response
- PARSER_CACHE[self._source] = CachedParser(
- parser=parser,
- validation_spec=validation_spec,
+ SPEC_CACHE[self._source] = CachedSpec(
+ specification=specification,
response_validator=response_validator,
)
- return parser, validation_spec, response_validator
+ return specification, response_validator
except ResolutionError as exception: # pragma: no cover
raise FatalError(
@@ -667,6 +718,62 @@ def recursion_limit_handler(
f"ValidationError while trying to load openapi spec: {exception}"
) from exception
+ def _get_specification(
+ self, recursion_limit_handler: Callable[[int, str, JSON], JSON]
+ ) -> Mapping[str, JSON]:
+ if Path(self._source).is_file():
+ return self._load_specification(
+ filepath=self._source, recursion_limit_handler=recursion_limit_handler
+ )
+
+ try:
+ response = self.authorized_request(url=self._source, method="GET")
+ response.raise_for_status()
+ except Exception as exception: # pragma: no cover
+ raise FatalError(
+ f"Failed to download the OpenAPI spec using an authorized request."
+ f"\nThis download attempt was made since the provided `source` "
+ f"does not point to a file.\nPlease verify the source path is "
+ f"correct if you intent to reference a local file. "
+ f"\nMake sure the source url is correct and reachable if "
+ f"referencing a web resource."
+ f"\nThe exception was: {exception}"
+ ) from exception
+
+ _, _, filename = self._source.rpartition("/")
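+ # Write the downloaded spec to a temporary file so it can be parsed from disk like a locally provided source.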
+ with tempfile.TemporaryDirectory() as tempdir:
+ filepath = Path(tempdir, filename)
+ with open(file=filepath, mode="w", encoding="UTF-8") as spec_file:
+ spec_file.write(response.text)
+
+ return self._load_specification(
+ filepath=filepath.as_posix(),
+ recursion_limit_handler=recursion_limit_handler,
+ )
+
+ def _load_specification(
+ self, filepath: str, recursion_limit_handler: Callable[[int, str, JSON], JSON]
+ ) -> Mapping[str, JSON]:
+ try:
+ parser = ResolvingParser(
+ filepath,
+ backend="openapi-spec-validator",
+ recursion_limit=self._recursion_limit,
+ recursion_limit_handler=recursion_limit_handler,
+ ) # type: ignore[no-untyped-call]
+ except Exception as exception: # pragma: no cover
+ raise FatalError(
+ f"Failed to parse the OpenAPI spec downloaded to {filepath}"
+ f"\nThe exception was: {exception}"
+ ) from exception
+
+ if parser.specification is None: # pragma: no cover
+ raise FatalError(
+ "Source was loaded, but no specification was present after parsing."
+ )
+
+ return parser.specification # type: ignore[no-any-return]
+
def read_paths(self) -> dict[str, PathItemObject]:
return self.openapi_spec.paths
diff --git a/src/OpenApiLibCore/protocols.py b/src/OpenApiLibCore/protocols.py
index 40958f7..c050414 100644
--- a/src/OpenApiLibCore/protocols.py
+++ b/src/OpenApiLibCore/protocols.py
@@ -1,38 +1,72 @@
"""A module holding Protcols."""
-from typing import Callable, Protocol, Type
+from __future__ import annotations
+
+import builtins
+from typing import Any, Callable, Protocol
from openapi_core.contrib.requests import (
RequestsOpenAPIRequest,
RequestsOpenAPIResponse,
)
+from pydantic import GetCoreSchemaHandler
+from pydantic_core import CoreSchema, core_schema
-from OpenApiLibCore.dto_base import Dto
+from OpenApiLibCore.models.resource_relations import (
+ PathPropertiesConstraint,
+ ResourceRelation,
+)
-class ResponseValidatorType(Protocol):
+class IResponseValidator(Protocol):
def __call__(
self, request: RequestsOpenAPIRequest, response: RequestsOpenAPIResponse
- ) -> None: ... # pragma: no cover
+ ) -> None: ...
-class GetDtoClassType(Protocol):
- def __init__(self, mappings_module_name: str) -> None: ... # pragma: no cover
+class IGetIdPropertyName(Protocol):
+ def __init__(
+ self, mappings_module_name: str, default_id_property_name: str
+ ) -> None: ...
- def __call__(self, path: str, method: str) -> Type[Dto]: ... # pragma: no cover
+ def __call__(self, path: str) -> tuple[str, Callable[[str], str]]: ...
+ @property
+ def default_id_property_name(self) -> str: ...
-class GetIdPropertyNameType(Protocol):
- def __init__(self, mappings_module_name: str) -> None: ... # pragma: no cover
+ @property
+ def id_mapping(
+ self,
+ ) -> dict[str, str | tuple[str, Callable[[str], str]]]: ...
- def __call__(
- self, path: str
- ) -> tuple[
- str, Callable[[str], str] | Callable[[int], int]
- ]: ... # pragma: no cover
+class IConstraintMapping(Protocol):
+ # NOTE: This Protocol is used as annotation in a number of the oas_models, which
+ # requires this method to prevent a PydanticSchemaGenerationError.
+ @classmethod
+ def __get_pydantic_core_schema__(
+ cls, source_type: Any, handler: GetCoreSchemaHandler
+ ) -> CoreSchema:
+ return core_schema.no_info_after_validator_function(cls, handler(str))
+
+ @staticmethod
+ def get_path_relations() -> list[PathPropertiesConstraint]: ...
+
+ @staticmethod
+ def get_parameter_relations() -> list[ResourceRelation]: ...
+
+ @classmethod
+ def get_parameter_relations_for_error_code(
+ cls, error_code: int
+ ) -> list[ResourceRelation]: ...
+
+ @staticmethod
+ def get_relations() -> list[ResourceRelation]: ...
+
+ @classmethod
+ def get_body_relations_for_error_code(
+ cls, error_code: int
+ ) -> list[ResourceRelation]: ...
-class GetPathDtoClassType(Protocol):
- def __init__(self, mappings_module_name: str) -> None: ... # pragma: no cover
- def __call__(self, path: str) -> Type[Dto]: ... # pragma: no cover
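+# The constraint mapping classes are used as types and never instantiated, so the alias refers to the class object itself.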
+ConstraintMappingType = builtins.type[IConstraintMapping]
diff --git a/src/OpenApiLibCore/utils/__init__.py b/src/OpenApiLibCore/utils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/OpenApiLibCore/utils/id_mapping.py b/src/OpenApiLibCore/utils/id_mapping.py
new file mode 100644
index 0000000..e2f12ab
--- /dev/null
+++ b/src/OpenApiLibCore/utils/id_mapping.py
@@ -0,0 +1,2 @@
+def dummy_transformer(valid_id: str) -> str:
+ return valid_id
diff --git a/src/OpenApiLibCore/utils/oas_cache.py b/src/OpenApiLibCore/utils/oas_cache.py
new file mode 100644
index 0000000..25bbd11
--- /dev/null
+++ b/src/OpenApiLibCore/utils/oas_cache.py
@@ -0,0 +1,16 @@
+"""Module holding the (global) spec cache."""
+
+from dataclasses import dataclass
+from typing import Mapping
+
+from OpenApiLibCore.annotations import JSON
+from OpenApiLibCore.protocols import IResponseValidator
+
+
+@dataclass
+class CachedSpec:
+ specification: Mapping[str, JSON]
+ response_validator: IResponseValidator
+
+
+SPEC_CACHE: dict[str, CachedSpec] = {}
diff --git a/src/OpenApiLibCore/parameter_utils.py b/src/OpenApiLibCore/utils/parameter_utils.py
similarity index 78%
rename from src/OpenApiLibCore/parameter_utils.py
rename to src/OpenApiLibCore/utils/parameter_utils.py
index 4ff258c..5b2d6fe 100644
--- a/src/OpenApiLibCore/parameter_utils.py
+++ b/src/OpenApiLibCore/utils/parameter_utils.py
@@ -5,8 +5,6 @@
from typing import Generator
-from OpenApiLibCore.models import ParameterObject, PathItemObject
-
PARAMETER_REGISTRY: dict[str, str] = {
"body": "body",
}
@@ -72,22 +70,9 @@ def _convert_string_to_python_identifier() -> Generator[str, None, None]:
yield f"_{ascii_code}_"
if _is_python_safe(string):
- return string
+ return string # pragma: no cover
converted_string = "".join(_convert_string_to_python_identifier())
- if not _is_python_safe(converted_string):
+ if not _is_python_safe(converted_string): # pragma: no cover
raise ValueError(f"Failed to convert '{string}' to Python identifier.")
return converted_string
-
-
-def register_path_parameters(paths_data: dict[str, PathItemObject]) -> None:
- def _register_path_parameter(parameter_object: ParameterObject) -> None:
- if parameter_object.in_ == "path":
- _ = get_safe_name_for_oas_name(parameter_object.name)
-
- for path_item in paths_data.values():
- operations = path_item.get_operations()
- for operation in operations.values():
- if parameters := operation.parameters:
- for parameter in parameters:
- _register_path_parameter(parameter_object=parameter)
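A standalone sketch of the identifier-conversion idea in `parameter_utils`: characters that are not Python-safe are replaced by their ASCII code wrapped in underscores. This is a simplified re-implementation for illustration; the library's `get_safe_name_for_oas_name` also records results in `PARAMETER_REGISTRY`, so its exact output may differ.

```python
from typing import Iterator


def to_python_identifier(name: str) -> str:
    def converted_chars() -> Iterator[str]:
        for char in name:
            if char.isalnum() or char == "_":
                yield char
            else:
                # Non-safe characters become their ASCII code wrapped in underscores.
                yield f"_{ord(char)}_"

    candidate = "".join(converted_chars())
    if candidate and candidate[0].isdigit():
        candidate = f"_{candidate}"
    if not candidate.isidentifier():
        raise ValueError(f"Failed to convert '{name}' to Python identifier.")
    return candidate


print(to_python_identifier("user-id"))    # user_45_id
print(to_python_identifier("X-Api-Key"))  # X_45_Api_45_Key
```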
diff --git a/src/OpenApiLibCore/value_utils.py b/src/OpenApiLibCore/value_utils.py
deleted file mode 100644
index ec133f8..0000000
--- a/src/OpenApiLibCore/value_utils.py
+++ /dev/null
@@ -1,216 +0,0 @@
-# mypy: disable-error-code=no-any-return
-"""Utility module with functions to handle OpenAPI value types and restrictions."""
-
-from copy import deepcopy
-from random import choice
-from typing import Any, Iterable, cast, overload
-
-from OpenApiLibCore.annotations import JSON
-from OpenApiLibCore.localized_faker import FAKE
-from OpenApiLibCore.models import ResolvedSchemaObjectTypes
-
-
-class Ignore:
- """Helper class to flag properties to be ignored in data generation."""
-
- def __str__(self) -> str:
- return "IGNORE"
-
-
-class UnSet:
- """Helper class to flag arguments that have not been set in a keyword call."""
-
- def __str__(self) -> str:
- return "UNSET"
-
-
-IGNORE = Ignore()
-
-UNSET = UnSet()
-
-
-def json_type_name_of_python_type(python_type: Any) -> str:
- """Return the JSON type name for supported Python types."""
- if python_type == str:
- return "string"
- if python_type == bool:
- return "boolean"
- if python_type == int:
- return "integer"
- if python_type == float:
- return "number"
- if python_type == list:
- return "array"
- if python_type == dict:
- return "object"
- if python_type == type(None):
- return "null"
- raise ValueError(f"No json type mapping for Python type {python_type} available.")
-
-
-def python_type_by_json_type_name(type_name: str) -> type:
- """Return the Python type based on the JSON type name."""
- if type_name == "string":
- return str
- if type_name == "boolean":
- return bool
- if type_name == "integer":
- return int
- if type_name == "number":
- return float
- if type_name == "array":
- return list
- if type_name == "object":
- return dict
- if type_name == "null":
- return type(None)
- raise ValueError(f"No Python type mapping for JSON type '{type_name}' available.")
-
-
-def get_invalid_value(
- value_schema: ResolvedSchemaObjectTypes,
- current_value: JSON,
- values_from_constraint: Iterable[JSON] = tuple(),
-) -> JSON | Ignore:
- """Return a random value that violates the provided value_schema."""
- invalid_values: list[JSON | Ignore] = []
- value_type = value_schema.type
-
- if not isinstance(current_value, python_type_by_json_type_name(value_type)):
- current_value = value_schema.get_valid_value()
-
- if values_from_constraint:
- try:
- return get_invalid_value_from_constraint(
- values_from_constraint=list(values_from_constraint),
- value_type=value_type,
- )
- except ValueError:
- pass
-
- # For schemas with a const or enum, add invalidated values from those
- try:
- invalid_value = value_schema.get_invalid_value_from_const_or_enum()
- invalid_values.append(invalid_value)
- except ValueError:
- pass
-
- # Violate min / max values or length if possible
- try:
- values_out_of_bounds = value_schema.get_values_out_of_bounds(
- current_value=current_value # type: ignore[arg-type]
- )
- invalid_values += values_out_of_bounds
- except ValueError:
- pass
-
- # No value constraints or min / max ranges to violate, so change the data type
- if value_type == "string":
- # Since int / float / bool can always be cast to string, change
- # the string to a nested object.
- # An array gets exploded in query strings, "null" is then often invalid
- invalid_values.append([{"invalid": [None, False]}, "null", None, True])
- else:
- invalid_values.append(FAKE.uuid())
-
- return choice(invalid_values)
-
-
-def get_invalid_value_from_constraint(
- values_from_constraint: list[JSON | Ignore], value_type: str
-) -> JSON | Ignore:
- """
- Return a value of the same type as the values in values_from_constraint that
- is not in values_from_constraint, if possible. Otherwise raise a ValueError.
- """
- # if IGNORE is in the values_from_constraints, the parameter needs to be
- # ignored for an OK response so leaving the value at its original value
- # should result in the specified error response
- if any(map(lambda x: isinstance(x, Ignore), values_from_constraint)):
- return IGNORE
- # if the value is forced True or False, return the opposite to invalidate
- if len(values_from_constraint) == 1 and value_type == "boolean":
- return not values_from_constraint[0]
- # for unsupported types or empty constraints lists raise a ValueError
- if (
- value_type not in ["string", "integer", "number", "array", "object"]
- or not values_from_constraint
- ):
- raise ValueError(
- f"Cannot get invalid value for {value_type} from {values_from_constraint}"
- )
-
- values_from_constraint = deepcopy(values_from_constraint)
- # for objects, keep the keys intact but update the values
- if value_type == "object":
- valid_object = cast(dict[str, JSON], values_from_constraint.pop())
- invalid_object: dict[str, JSON] = {}
- for key, value in valid_object.items():
- python_type_of_value = type(value)
- json_type_of_value = json_type_name_of_python_type(python_type_of_value)
- invalid_value = cast(
- JSON,
- get_invalid_value_from_constraint(
- values_from_constraint=[value],
- value_type=json_type_of_value,
- ),
- )
- invalid_object[key] = invalid_value
- return invalid_object
-
- # for arrays, update each value in the array to a value of the same type
- if value_type == "array":
- valid_array = cast(list[JSON], values_from_constraint.pop())
- invalid_array: list[JSON] = []
- for value in valid_array:
- python_type_of_value = type(value)
- json_type_of_value = json_type_name_of_python_type(python_type_of_value)
- invalid_value = cast(
- JSON,
- get_invalid_value_from_constraint(
- values_from_constraint=[value],
- value_type=json_type_of_value,
- ),
- )
- invalid_array.append(invalid_value)
- return invalid_array
-
- if value_type in ["integer", "number"]:
- int_or_number_list = cast(list[int | float], values_from_constraint)
- return get_invalid_int_or_number(values_from_constraint=int_or_number_list)
-
- str_or_bytes_list = cast(list[str] | list[bytes], values_from_constraint)
- invalid_value = get_invalid_str_or_bytes(values_from_constraint=str_or_bytes_list)
- if not invalid_value:
- raise ValueError("Value invalidation yielded an empty string.")
- return invalid_value
-
-
-def get_invalid_int_or_number(values_from_constraint: list[int | float]) -> int | float:
- invalid_values = 2 * values_from_constraint
- invalid_value = invalid_values.pop()
- for value in invalid_values:
- invalid_value = abs(invalid_value) + abs(value)
- if not invalid_value:
- invalid_value += 1
- return invalid_value
-
-
-@overload
-def get_invalid_str_or_bytes(
- values_from_constraint: list[str],
-) -> str: ... # pragma: no cover
-
-
-@overload
-def get_invalid_str_or_bytes(
- values_from_constraint: list[bytes],
-) -> bytes: ... # pragma: no cover
-
-
-def get_invalid_str_or_bytes(values_from_constraint: list[Any]) -> Any:
- invalid_values = 2 * values_from_constraint
- invalid_value = invalid_values.pop()
- for value in invalid_values:
- invalid_value = invalid_value + value
- return invalid_value
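For reference, the JSON/Python type mapping that the removed `value_utils` module implemented as if-chains can be expressed as two small dicts. This is only an illustrative equivalent of the deleted `json_type_name_of_python_type` / `python_type_by_json_type_name` helpers, not code from the new module layout.

```python
JSON_TYPE_BY_PYTHON_TYPE: dict[type, str] = {
    str: "string",
    bool: "boolean",
    int: "integer",
    float: "number",
    list: "array",
    dict: "object",
    type(None): "null",
}
PYTHON_TYPE_BY_JSON_TYPE: dict[str, type] = {
    name: python_type for python_type, name in JSON_TYPE_BY_PYTHON_TYPE.items()
}


def json_type_name_of_python_type(python_type: type) -> str:
    try:
        return JSON_TYPE_BY_PYTHON_TYPE[python_type]
    except KeyError:
        raise ValueError(
            f"No json type mapping for Python type {python_type} available."
        ) from None
```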
diff --git a/src/openapi_libgen/command_line.py b/src/openapi_libgen/command_line.py
index 38f8039..c675419 100644
--- a/src/openapi_libgen/command_line.py
+++ b/src/openapi_libgen/command_line.py
@@ -72,7 +72,7 @@ def main() -> None:
default_module_name,
)
- use_summary = getenv("USE_SUMMARY_AS_KEYWORD_NAME")
+ use_summary: str | bool | None = getenv("USE_SUMMARY_AS_KEYWORD_NAME")
if use_summary is None:
if args.use_summary_as_keyword_name is None:
use_summary = input(
@@ -80,7 +80,7 @@ def main() -> None:
)
use_summary = True if use_summary.lower().startswith("y") else False
- expand_body = getenv("EXPAND_BODY_ARGUMENTS")
+ expand_body: str | bool | None = getenv("EXPAND_BODY_ARGUMENTS")
if expand_body is None:
if args.expand_body_arguments is None:
expand_body = input(
@@ -93,6 +93,6 @@ def main() -> None:
output_folder=path,
library_name=safe_library_name,
module_name=safe_module_name,
- use_summary=is_truthy(use_summary),
- expand_body=is_truthy(expand_body),
+ use_summary=is_truthy(use_summary), # type: ignore[no-untyped-call]
+ expand_body=is_truthy(expand_body), # type: ignore[no-untyped-call]
)
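The `str | bool | None` widening above reflects that `use_summary` and `expand_body` can come from an environment variable, a CLI argument, or an interactive prompt before `is_truthy` normalises them. A hedged sketch of that resolution order as a small helper; `resolve_flag` and its parameters are illustrative, and the tool's actual precedence and prompt text may differ.

```python
from os import getenv

from robot.utils import is_truthy


def resolve_flag(env_var: str, cli_value: bool | None, prompt: str) -> bool:
    """Resolve a boolean option: environment variable, then CLI argument, then prompt."""
    env_value = getenv(env_var)
    if env_value is not None:
        return bool(is_truthy(env_value))  # type: ignore[no-untyped-call]
    if cli_value is not None:
        return cli_value
    answer = input(f"{prompt} (y/n): ")
    return answer.lower().startswith("y")
```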
diff --git a/src/openapi_libgen/generator.py b/src/openapi_libgen/generator.py
index 2114c2a..82e8434 100644
--- a/src/openapi_libgen/generator.py
+++ b/src/openapi_libgen/generator.py
@@ -7,7 +7,7 @@
from robot.utils import is_truthy
from openapi_libgen.spec_parser import get_keyword_data
-from OpenApiLibCore.models import OpenApiObject
+from OpenApiLibCore.models.oas_models import OpenApiObject
HERE = Path(__file__).parent.resolve()
@@ -25,7 +25,7 @@ def recursion_limit_handler(
backend="openapi-spec-validator",
recursion_limit=recursion_limit,
recursion_limit_handler=recursion_limit_handler,
- )
+ ) # type: ignore[no-untyped-call]
assert parser.specification is not None, (
"Source was loaded, but no specification was present after parsing."
)
@@ -81,11 +81,11 @@ def generate(
use_summary = getenv("USE_SUMMARY_AS_KEYWORD_NAME")
use_summary = use_summary if use_summary is not None else sys.argv[5]
- use_summary = is_truthy(use_summary)
+ use_summary = is_truthy(use_summary) # type: ignore[no-untyped-call]
expand_body = getenv("EXPAND_BODY_ARGUMENTS")
expand_body = expand_body if expand_body is not None else sys.argv[6]
- expand_body = is_truthy(expand_body)
+ expand_body = is_truthy(expand_body) # type: ignore[no-untyped-call]
spec = load_openapi_spec(source=source, recursion_limit=1, recursion_default={})
diff --git a/src/openapi_libgen/spec_parser.py b/src/openapi_libgen/spec_parser.py
index e8b8c7b..8b4ad8f 100644
--- a/src/openapi_libgen/spec_parser.py
+++ b/src/openapi_libgen/spec_parser.py
@@ -2,14 +2,14 @@
from typing import Generator
from openapi_libgen.parsing_utils import remove_unsafe_characters_from_string
-from OpenApiLibCore.models import (
+from OpenApiLibCore.models.oas_models import (
ObjectSchema,
OpenApiObject,
OperationObject,
PathItemObject,
SchemaObjectTypes,
)
-from OpenApiLibCore.parameter_utils import get_safe_name_for_oas_name
+from OpenApiLibCore.utils.parameter_utils import get_safe_name_for_oas_name
KEYWORD_TEMPLATE = r"""@keyword
{signature}
@@ -47,7 +47,7 @@ def get_path_items(
paths: dict[str, PathItemObject],
) -> Generator[OperationDetails, None, None]:
for path, path_item_object in paths.items():
- operations = path_item_object.get_operations()
+ operations = path_item_object.operations
for method, operation_object in operations.items():
operation_details = OperationDetails(
path=path,
diff --git a/src/openapi_libgen/templates/library.jinja b/src/openapi_libgen/templates/library.jinja
index 5dd486c..303987e 100644
--- a/src/openapi_libgen/templates/library.jinja
+++ b/src/openapi_libgen/templates/library.jinja
@@ -6,7 +6,8 @@ from robot.api.deco import keyword, library
from robot.libraries.BuiltIn import BuiltIn
from OpenApiLibCore import UNSET, OpenApiLibCore, RequestValues
-from OpenApiLibCore.path_functions import substitute_path_parameters
+from OpenApiLibCore.annotations import JSON
+from OpenApiLibCore.keyword_logic.path_functions import substitute_path_parameters
run_keyword = BuiltIn().run_keyword
diff --git a/src/openapitools_docs/docstrings.py b/src/openapitools_docs/docstrings.py
index 20e9bff..66db529 100644
--- a/src/openapitools_docs/docstrings.py
+++ b/src/openapitools_docs/docstrings.py
@@ -170,7 +170,7 @@