diff --git a/.gitignore b/.gitignore index b7faf40..3a12def 100644 --- a/.gitignore +++ b/.gitignore @@ -1,207 +1,222 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[codz] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. -*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py.cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# UV -# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. 
-#uv.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock -#poetry.toml - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. -# https://pdm-project.org/en/latest/usage/project/#working-with-version-control -#pdm.lock -#pdm.toml -.pdm-python -.pdm-build/ - -# pixi -# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. -#pixi.lock -# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one -# in the .venv directory. It is recommended not to include this directory in version control. -.pixi - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - -# Environments -.env -.envrc -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
-#.idea/ - -# Abstra -# Abstra is an AI-powered process automation framework. -# Ignore directories containing user credentials, local state, and settings. -# Learn more at https://abstra.io/docs -.abstra/ - -# Visual Studio Code -# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore -# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore -# and can be added to the global gitignore or merged into this file. However, if you prefer, -# you could uncomment the following to ignore the entire vscode folder -# .vscode/ - -# Ruff stuff: -.ruff_cache/ - -# PyPI configuration file -.pypirc - -# Cursor -# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to -# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data -# refer to https://docs.cursor.com/context/ignore-files -.cursorignore -.cursorindexingignore - -# Marimo -marimo/_static/ -marimo/_lsp/ -__marimo__/ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[codz] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py.cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock +#poetry.toml + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. 
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control +#pdm.lock +#pdm.toml +.pdm-python +.pdm-build/ + +# pixi +# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. +#pixi.lock +# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one +# in the .venv directory. It is recommended not to include this directory in version control. +.pixi + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.envrc +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Abstra +# Abstra is an AI-powered process automation framework. +# Ignore directories containing user credentials, local state, and settings. +# Learn more at https://abstra.io/docs +.abstra/ + +# Visual Studio Code +# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore +# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore +# and can be added to the global gitignore or merged into this file. 
However, if you prefer, +# you could uncomment the following to ignore the entire vscode folder +# .vscode/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc + +# Cursor +# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to +# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data +# refer to https://docs.cursor.com/context/ignore-files +.cursorignore +.cursorindexingignore + +# Marimo +marimo/_static/ +marimo/_lsp/ +__marimo__/ + +.venv/ +*.py[cod] + +# IDE +.idea/ +.vscode/ +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db + +clouds.yaml \ No newline at end of file diff --git a/LICENSE b/LICENSE index 261eeb9..29f81d8 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,201 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/README.md b/README.md index 70d7132..f7f304c 100644 --- a/README.md +++ b/README.md @@ -1 +1,27 @@ -# python-t-cloud \ No newline at end of file +# python-t-cloud + +Python SDK for T-Cloud. + +> **Status:** Early development. Not ready for production use. + +## Quick Start + +```bash +uv sync # install dependencies +uv run pytest # run tests +``` + +## Development + +Requires [uv](https://docs.astral.sh/uv/) and Python 3.11+. + +```bash +uv sync --group dev # install with dev dependencies +uv run ruff check src/ # lint +uv run mypy src/ # type check +uv run pytest -v # test +``` + +## License + +Apache-2.0 \ No newline at end of file diff --git a/conftest.py b/conftest.py new file mode 100644 index 0000000..53ee88f --- /dev/null +++ b/conftest.py @@ -0,0 +1,18 @@ +# conftest.py at project root — registers the 'acceptance' marker +# and separates acceptance tests from unit tests. 
+ +import pytest + + +def pytest_collection_modifyitems(config, items): + """Auto-mark tests under acceptance/ directory.""" + for item in items: + if "acceptance" in str(item.fspath): + item.add_marker(pytest.mark.acceptance) + + +def pytest_configure(config): + config.addinivalue_line( + "markers", + "acceptance: marks tests that hit real OTC API (deselect with '-m \"not acceptance\"')", + ) diff --git a/docs/api/core.rst b/docs/api/core.rst new file mode 100644 index 0000000..0624870 --- /dev/null +++ b/docs/api/core.rst @@ -0,0 +1,26 @@ +Core +==== + +Authentication +-------------- + +.. automodule:: sdk.core.auth + :members: + :show-inheritance: + :exclude-members: model_config, model_fields, model_computed_fields + +Exceptions +---------- + +.. automodule:: sdk.core.exceptions + :members: + :undoc-members: + :show-inheritance: + +Logging +------- + +.. automodule:: sdk.core.log + :members: + :undoc-members: + :show-inheritance: diff --git a/docs/api/index.rst b/docs/api/index.rst new file mode 100644 index 0000000..10ad122 --- /dev/null +++ b/docs/api/index.rst @@ -0,0 +1,7 @@ +API Reference +============= + +.. 
toctree:: + :maxdepth: 2 + + core diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 0000000..bbd7004 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,51 @@ +"""Sphinx configuration for the SDK documentation.""" + +import os +import sys + +# Add src/ to path so autodoc can find the package +sys.path.insert(0, os.path.abspath("../src")) + +# -- Project information ----------------------------------------------------- + +project = "SDK" +copyright = "2026, T Cloud Public" +author = "T Cloud Public" +release = "0.1.0" + +# -- General configuration --------------------------------------------------- + +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.napoleon", # Google-style docstrings + "sphinx_autodoc_typehints", # type hints in docs + "sphinx.ext.viewcode", # [source] links + "sphinx.ext.intersphinx", # link to Python stdlib docs +] + +# Napoleon settings (Google-style) +napoleon_google_docstring = True +napoleon_numpy_docstring = False +napoleon_include_init_with_doc = True +napoleon_include_private_with_doc = False +napoleon_use_param = True +napoleon_use_rtype = True + +# Autodoc settings +autodoc_member_order = "bysource" +autodoc_typehints = "description" +autodoc_class_signature = "separated" +autodoc_pydantic_model_show_field_summary = False + +# Intersphinx — link to Python docs +intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), + "pydantic": ("https://docs.pydantic.dev/latest/", None), +} + +# -- Options for HTML output ------------------------------------------------- + +html_theme = "sphinx_rtd_theme" +html_theme_options = { + "navigation_depth": 3, +} diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..6821194 --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,9 @@ +SDK Documentation +================= + +.. 
toctree:: + :maxdepth: 2 + :caption: Contents + + quickstart + api/index diff --git a/docs/new_arch.md b/docs/new_arch.md new file mode 100644 index 0000000..45acc2c --- /dev/null +++ b/docs/new_arch.md @@ -0,0 +1,616 @@ +# New Python SDK Architecture for OpenTelekomCloud + +**Status:** Proposal for review + +--- + +## 1. Problems with Current python-otcextensions + +The current Python SDK is built on top of openstacksdk and inherits its architectural decisions, causing systemic issues: + +- **Heavy dependencies.** openstacksdk, keystoneauth1, os-service-types and the entire OpenStack ecosystem pull in dozens of transitive dependencies. Updating or debugging any of them affects the entire SDK. +- **Auth model incompatibility.** AK/SK authentication (AWS Signature V4) does not fit well into keystoneauth — SigV4 requires signing an already-formed HTTP request, while keystoneauth provides headers before request formation. Each new service with AK/SK requires individual workarounds. +- **Implicit contracts.** Request and response models are spread across proxy classes and resources with no clear boundary between input parameters and API responses. + +--- + +## 2. Go SDK Architecture Analysis (gophertelekomcloud) + +### 2.1. 
Overall Structure + + The Go SDK has a minimalistic structure with **3 dependencies** (testify, golang.org/x/crypto, yaml.v2) and a clean layered organization: + + ``` + gophertelekomcloud/ + ├── golangsdk (root package) + │ ├── auth_options.go # AuthOptions — token/password auth + │ ├── auth_aksk_options.go # AKSKAuthOptions — AK/SK auth + │ ├── auth_option_provider.go # AuthOptionsProvider — unified interface + │ ├── provider_client.go # ProviderClient — HTTP client with auth + │ ├── service_client.go # ServiceClient — base service client + │ ├── endpoint_search.go # EndpointOpts — endpoint discovery + │ ├── results.go # Result — base response type + │ ├── params.go # Parameter serialization utilities + │ └── signer_helper.go # AK/SK signing (AWS SigV4) + │ + ├── internal/ + │ ├── build/ # Request body, query strings, headers + │ └── extract/ # JSON response deserialization + │ + ├── openstack/ + │ ├── client.go # Factories: NewDNSV2(), NewComputeV2(), etc. + │ ├── common/ # Shared utilities (tags, metadata, pointerto) + │ │ + │ ├── dns/v2/ # ← Typical service + │ │ ├── zones/ + │ │ │ ├── requests.go # CreateOpts, ListOpts + Create(), Get(), List(), Delete() + │ │ │ ├── results.go # Zone, CreateResult, GetResult + │ │ │ └── urls.go # Pure URL construction + │ │ ├── ptrrecords/ + │ │ │ ├── requests.go + │ │ │ └── ... + │ │ ├── recordsets/ + │ │ └── ... + │ │ + │ ├── vpc/v1/ # Each service is isolated + │ ├── cce/v3/ + │ ├── elb/v3/ + │ └── ... (59+ services) + │ +└── pagination/ # Pagination (linked, marker, offset, single) + ``` + + ### 2.2. Key Architectural Patterns + + #### Pattern 1: Unified Auth Interface + + A minimal `AuthOptionsProvider` interface with a single method `GetIdentityEndpoint()`. Two auth types — `AuthOptions` (token/password) and `AKSKAuthOptions` (AK/SK) — both implement this interface.
Dispatch in `Authenticate()` determines the auth type via type assertion and calls the appropriate strategy: + +``` +AuthOptionsProvider (interface) + ├── AuthOptions → v3auth() or v3authWithAgency() + └── AKSKAuthOptions → v3AKSKAuth() or authWithAgencyByAKSK() +``` + +AK/SK signing is applied transparently at the `ProviderClient.Request()` level — if `AKSKAuthOptions.AccessKey` is set, the request is signed via `Sign()` before sending. + +#### Pattern 2: Two-Level Client System + +- **ProviderClient** — a single HTTP client that holds auth state (token, project ID, domain ID), reauth logic, retry/backoff. All requests go through its `Request()`. +- **ServiceClient** — a lightweight wrapper that adds endpoint and convenience methods (`Get`, `Post`, `Put`, `Patch`, `Delete`). Created via factories in `client.go` (e.g. `NewDNSV2(provider, endpointOpts)`). + +#### Pattern 3: Each Resource Is an Isolated Package + +Each resource (zones, recordsets, publicips, ...) is a separate package with three files: + +| File | Contents | +|------|----------| +| `requests.go` | CRUD functions (free functions, not methods). Input parameter types (`CreateOpts`, `ListOpts`) with builder interfaces (`CreateOptsBuilder`). Validation via struct tags. | +| `results.go` | Response models (`Zone`, `CreateResult`, `GetResult`). Inherit from `golangsdk.Result` for lazy extraction via `Extract()`. | +| `urls.go` | Pure URL construction functions using `ServiceClient.ServiceURL()`. | + +Functions take `*ServiceClient` as their first argument — no magic proxies or resource classes. + +#### Pattern 4: Minimal External Dependencies + +The Go SDK deliberately avoids OpenStack-specific libraries. Everything, including AK/SK signing, is implemented inside the repository. This provides full control and eliminates breaking changes from upstream. + +--- + +## 3. Target Architecture for New Python SDK + +### 3.1. 
Package Structure + + ``` + otc-sdk-python/ + ├── pyproject.toml # Minimal deps: httpx, pydantic + │ + ├── src/sdk/ + │ ├── __init__.py + │ │ + │ ├── core/ # ← Analogue of root golangsdk package + │ │ ├── auth.py # AuthConfig, AuthMode — unified auth config + │ │ ├── signer.py # AK/SK signing (SigV4) — own implementation + │ │ ├── provider.py # ProviderClient — HTTP client + auth + │ │ ├── service_client.py # ServiceClient — base client for services + │ │ ├── endpoint.py # EndpointOpts, endpoint discovery + │ │ ├── result.py # Base result types + │ │ ├── exceptions.py # Exception hierarchy + │ │ └── pagination.py # Pagination strategies (linked, marker, offset) + │ │ + │ ├── services/ # ← Analogue of openstack/ + │ │ ├── __init__.py + │ │ │ + │ │ ├── dns/ # Each service is a subpackage + │ │ │ ├── __init__.py + │ │ │ ├── v2/ + │ │ │ │ ├── __init__.py + │ │ │ │ ├── client.py # DnsV2Client with factory methods + │ │ │ │ ├── zones/ + │ │ │ │ │ ├── __init__.py + │ │ │ │ │ ├── models.py # CreateZoneOpts, Zone, ListZonesOpts + │ │ │ │ │ ├── requests.py # create(), get(), list_zones(), delete() + │ │ │ │ │ └── urls.py # base_url(), zone_url() + │ │ │ │ ├── ptrrecords/ + │ │ │ │ │ ├── __init__.py + │ │ │ │ │ └── ... + │ │ │ │ ├── recordsets/ + │ │ │ │ └── ... + │ │ │ └── ... + │ │ │ + │ │ ├── vpc/ + │ │ ├── cce/ + │ │ ├── elb/ + │ │ └── ... + │ │ + │ └── common/ # Shared utilities + │ ├── tags.py + │ └── metadata.py + │ + ├── tests/ + │ ├── unit/ + │ │ ├── core/ + │ │ └── services/ + │ └── acceptance/ + │ └── services/ + │ + └── docs/ + ``` + + ### 3.2. Core Abstractions + + #### AuthConfig + + A single config model that accepts all possible auth parameters.
The provider auto-detects which auth strategy to use based on what fields are provided: + +- `access_key` + `secret_key` present → **AK/SK** (AWS Signature V4) +- `password` present → **Token** (Keystone V3 password auth) +- `token_id` present → **Token** (Keystone V3 token auth) + +```python +from pydantic import BaseModel, model_validator + +class AuthConfig(BaseModel): + """Single auth config. Provider auto-selects strategy based on provided fields.""" + + identity_endpoint: str + + # Token/Password auth fields + username: str | None = None + user_id: str | None = None + password: str | None = None + token_id: str | None = None + domain_id: str | None = None + domain_name: str | None = None + tenant_id: str | None = None + tenant_name: str | None = None + allow_reauth: bool = False + + # AK/SK auth fields + access_key: str | None = None + secret_key: str | None = None + security_token: str | None = None + + # Common fields + project_id: str | None = None + project_name: str | None = None + region: str | None = None + + # Agency delegation + agency_name: str | None = None + agency_domain_name: str | None = None + delegated_project: str | None = None + + @property + def auth_mode(self) -> str: + """Auto-detect auth strategy from provided fields.""" + if self.access_key and self.secret_key: + return "aksk" + if self.password: + return "password" + if self.token_id: + return "token" + raise ValueError("Cannot determine auth mode: provide access_key+secret_key, password, or token_id") + + @model_validator(mode="after") + def _validate_fields(self): + # Ensure minimum required fields per strategy + self.auth_mode # triggers ValueError if nothing matches + return self +``` + +The user never picks a strategy class — they just pass whatever credentials they have. + +#### ProviderClient + +```python +import httpx + +class ProviderClient: + """Central HTTP client. 
Manages auth, retry, reauth.""" + + def __init__(self, auth: AuthConfig): + self.auth = auth + self.identity_endpoint: str = auth.identity_endpoint + self.token_id: str | None = None + self.project_id: str | None = None + self.domain_id: str | None = None + self._http: httpx.Client = httpx.Client() + self._reauth_func: Callable | None = None + + def authenticate(self) -> None: + """Auto-select and execute auth strategy.""" + match self.auth.auth_mode: + case "aksk": + self._aksk_auth() + case "password": + self._token_auth() + case "token": + self._token_reuse() + + def request(self, method: str, url: str, **kwargs) -> httpx.Response: + """Send request with auth, retry, reauth.""" + # 1. Add auth headers: + # - aksk mode → sign request with AK/SK (SigV4) + # - token mode → add X-Auth-Token header + # 2. Send request + # 3. Handle 401 → reauth → retry + # 4. Handle 429 → backoff → retry + # 5. Handle errors → typed exceptions + ... +``` + +#### ServiceClient + +```python +class ServiceClient: + """Base client for a specific service.""" + + def __init__(self, provider: ProviderClient, endpoint: str, + resource_base: str | None = None): + self.provider = provider + self.endpoint = endpoint + self.resource_base = resource_base or endpoint + + def service_url(self, *parts: str) -> str: + return self.resource_base + "/".join(parts) + + def get(self, url: str, **kwargs) -> httpx.Response: + return self.provider.request("GET", url, **kwargs) + + def post(self, url: str, **kwargs) -> httpx.Response: + return self.provider.request("POST", url, **kwargs) + + # put, patch, delete similarly +``` + +### 3.3. 
Service Implementation Example (DNS Zones) + +#### models.py + +```python +from pydantic import BaseModel + +class CreateZoneOpts(BaseModel): + name: str + email: str | None = None + description: str | None = None + ttl: int | None = None + zone_type: str | None = None + +class Zone(BaseModel): + id: str + name: str + email: str | None = None + description: str | None = None + ttl: int | None = None + status: str | None = None + zone_type: str | None = None + record_num: int | None = None + pool_id: str | None = None + project_id: str | None = None + created_at: str | None = None + updated_at: str | None = None + +class ListZonesOpts(BaseModel): + limit: int | None = None + marker: str | None = None + name: str | None = None + status: str | None = None + type: str | None = None +``` + +#### urls.py + +```python +from otc_sdk.core.service_client import ServiceClient + +ROOT = "zones" + +def base_url(client: ServiceClient) -> str: + return client.service_url(ROOT) + +def zone_url(client: ServiceClient, zone_id: str) -> str: + return client.service_url(ROOT, zone_id) +``` + +#### requests.py + +```python +from typing import Iterator +from otc_sdk.core.service_client import ServiceClient +from .models import CreateZoneOpts, Zone, ListZonesOpts +from . 
import urls + +def create(client: ServiceClient, opts: CreateZoneOpts) -> Zone: + resp = client.post( + urls.base_url(client), + json=opts.model_dump(exclude_none=True), + ) + return Zone.model_validate(resp.json()) + +def get(client: ServiceClient, zone_id: str) -> Zone: + resp = client.get(urls.zone_url(client, zone_id)) + return Zone.model_validate(resp.json()) + +def list_zones(client: ServiceClient, opts: ListZonesOpts | None = None) -> Iterator[Zone]: + """Iterator that automatically walks through all pages.""" + url = urls.base_url(client) + params = opts.model_dump(exclude_none=True) if opts else {} + while url: + resp = client.get(url, params=params) + data = resp.json() + for z in data["zones"]: + yield Zone.model_validate(z) + url = data.get("links", {}).get("next") + params = {} # params already embedded in next URL + +def delete(client: ServiceClient, zone_id: str) -> None: + client.delete(urls.zone_url(client, zone_id)) +``` + +> **Proposal: Generator-based pagination.** In Go, pagination uses `pagination.Pager` with callbacks. In Python, the natural approach is an iterator with `yield` that automatically fetches subsequent pages. The user should never have to think about markers: +> +> ```python +> for zone in zones.list_zones(client): +> print(zone.name) +> ``` + +### 3.4. Client Factory + +```python +# otc_sdk/client.py — main entry point + +class OTCClient: + """Main entry point. Creates ProviderClient and service factories.""" + + def __init__(self, **kwargs): + """Accept auth params directly. Provider auto-detects strategy. 
+ + Usage: + OTCClient(identity_endpoint="...", username="...", password="...") + OTCClient(identity_endpoint="...", access_key="...", secret_key="...") + """ + auth = AuthConfig(**kwargs) + self.provider = ProviderClient(auth) + self.provider.authenticate() + + def dns_v2(self, region: str | None = None) -> ServiceClient: + endpoint = self.provider.find_endpoint("dns", region=region) + return ServiceClient(self.provider, endpoint, + resource_base=endpoint + "v2/") + + def vpc_v1(self, region: str | None = None) -> ServiceClient: + ... +``` + +> **Proposal: Lazy imports for services.** Eagerly importing all 50+ services in `__init__.py` would slow down `import otc_sdk`. Instead, use lazy properties that import a service only on first access: +> +> ```python +> class OTCClient: +> @property +> def dns(self): +> from otc_sdk.services.dns.v2 import client as dns_client +> return dns_client.DnsV2Client(self.provider) +> ``` +> +> This ensures fast application startup — only services that are actually used get imported. No entry point or plugin magic needed. + +### 3.5. Usage Example + +```python +from otc_sdk import OTCClient +from otc_sdk.services.dns.v2 import zones + +# Token authentication — just pass credentials, provider figures out the rest +client = OTCClient( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="domain", + tenant_name="eu-de", +) + +# Or AK/SK — same constructor, different fields +client = OTCClient( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK...", + secret_key="SK...", + project_id="...", + region="eu-de", +) + +# API works identically regardless of auth type +dns = client.dns_v2() +zone = zones.create(dns, zones.CreateZoneOpts(name="example.com.", email="admin@example.com")) + +for z in zones.list_zones(dns): + print(z.name) +``` + +--- + +## 4. 
Go → Python Mapping + +| Go SDK | Python SDK | Notes | +|--------|-----------|-------| +| `AuthOptionsProvider` (interface) | `AuthConfig` (single pydantic model) | Auto-detects strategy from fields | +| `AuthOptions` + `AKSKAuthOptions` (separate structs) | `AuthConfig.auth_mode` property | User never picks strategy manually | +| `ProviderClient` | `ProviderClient` | httpx instead of net/http | +| `ServiceClient` | `ServiceClient` | Thin wrapper | +| `Sign()` | `sign_request()` | Own SigV4 implementation | +| `openstack/client.go` (factories) | `OTCClient` | Factory methods | +| `openstack/dns/v2/zones/` package | `services/dns/v2/zones/` package | 1:1 mapping | +| `requests.go` (free functions) | `requests.py` (free functions) | Not class methods | +| `results.go` (struct + Extract) | `models.py` (pydantic BaseModel) | model_validate instead of Extract | +| `urls.go` | `urls.py` | Pure functions | +| `CreateOptsBuilder` (interface) | pydantic `BaseModel` | Validation via pydantic | +| struct tags (`json:`, `q:`, `required:`) | pydantic Field + model_dump | exclude_none for optionals | +| `golangsdk.Result.ExtractInto()` | `pydantic.BaseModel.model_validate()` | Automatic deserialization | +| `pagination.Pager` | Iterator/generator | Pythonic approach | +| `go.mod` (3 dependencies) | `pyproject.toml` (httpx + pydantic) | Minimal dependencies | + +--- + +## 5. Dependencies + +| Dependency | Purpose | Notes | +|------------|---------|-------| +| `httpx` | HTTP client | Sync + async out of the box. MVP is sync-only, architecture is async-ready | +| `pydantic` | Model validation | Replaces Go struct tags | + +Everything else (SigV4 signing, retry, pagination) is **implemented internally**. No openstacksdk, keystoneauth, or os-service-types. + +### 5.1. Dev Tooling: uv + +We use [uv](https://docs.astral.sh/uv/) as the project manager. uv is a Rust-based drop-in replacement for pip, virtualenv, and poetry — 10–50x faster, single binary, no Python required to bootstrap. 
+ +`pyproject.toml` remains the standard project config file. uv simply reads it and handles everything else: + +```toml +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "otc-sdk" +version = "0.1.0" +requires-python = ">=3.11" +dependencies = [ + "httpx>=0.27", + "pydantic>=2.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0", + "pytest-httpx>=0.30", + "ruff>=0.5", + "mypy>=1.10", +] +``` + +Daily workflow: + +```bash +# Setup (replaces python -m venv + pip install -e .) +uv sync # creates .venv + installs everything from pyproject.toml +uv sync --group dev # includes dev dependencies + +# Running +uv run pytest # runs in the correct venv automatically +uv run mypy src/ # type checking +uv run ruff check src/ # linting + +# Dependency management +uv add httpx # adds to pyproject.toml + installs +uv remove some-package # removes from pyproject.toml + uninstalls + +# Python version management (optional) +uv python install 3.12 # installs Python 3.12 if not present +uv python pin 3.12 # pins project to 3.12 +``` + +uv generates a `uv.lock` file (replaces `poetry.lock` / `pip-compile` output) — deterministic, cross-platform lock file that should be committed to the repository. + +Why uv over poetry/pip: +- **Speed.** Cold install of the project takes ~1s instead of 15–30s. +- **Standards-based.** Uses standard `pyproject.toml`, no vendor lock-in. The project works with plain `pip install -e .` for anyone who doesn't want uv. +- **Single tool.** Replaces pip + virtualenv + pip-tools + pyenv. Simplifies CI and onboarding. + +--- + +## 6. Principles + +1. **Zero service coupling.** Each service is an isolated subpackage. Depends only on `core/`. +2. **Explicit contracts.** Typed pydantic models for every request and response. No `dict` or `**kwargs` in the public API. +3. **Own auth implementation.** Single `AuthConfig` model — provider auto-detects strategy (AK/SK, password, token) from the fields provided. 
SigV4 signing implemented inside the SDK. The user never needs to know which auth class to use. +4. **Free functions for operations.** `zones.create(client, opts)` instead of `client.zones.create(opts)`. Follows the Go pattern — easier to test and generate. +5. **Minimal dependencies.** Only httpx + pydantic. Full control over the codebase. +6. **Type hinting & IDE support.** 100% type hint coverage thanks to pydantic and explicit function signatures. + +> **Proposal: Functional style justification.** The functional approach may look unusual to Python developers accustomed to boto3 or azure-sdk (`client.zones.create(opts)`). However, free functions are stateless — `create`, `list` are pure and take a client as a dependency. This simplifies mocking in tests, eliminates circular imports, and dramatically simplifies code generation. We keep the functional approach. + +> **Proposal: Type hinting as a selling point.** In the current SDK (dynamic proxies from openstacksdk), autocomplete in VS Code and PyCharm barely works. In the new SDK — pydantic models with typed fields + explicit function signatures mean IDEs will suggest `CreateZoneOpts` fields and `Zone` response field types. This is a significant developer experience improvement. + +--- + +## 7. Code Generation Benefits (gen-sdk-tooling) + +This architecture is well suited for automatic SDK generation from RST documentation: + +- **Uniform structure** for every service → Jinja2 templates for `models.py`, `requests.py`, `urls.py`. +- **Pydantic models** are generated directly from request/response specs found in RST. +- **Free functions** instead of classes → simpler templates, less inheritance. +- **No OpenStack dependency** → no need to maintain compatibility with external code. + +--- + +## 8. 
Implementation Plan + +### Phase 1: Core (2–3 weeks) + +- `core/auth.py` — AuthConfig with auto-detection (AK/SK, password, token) +- `core/signer.py` — AK/SK signing (ported from Go) +- `core/provider.py` — ProviderClient with auth, retry, reauth +- `core/service_client.py` — ServiceClient +- `core/exceptions.py` — exception hierarchy +- `core/pagination.py` — pagination strategies + +### Phase 2: Pilot Service (1–2 weeks) + +- Implement DNS v2 manually as a reference +- Write acceptance tests against real OTC +- Debug auth flow for both token and AK/SK + +### Phase 3: Generation (parallel with gen-sdk-tooling) + +- Jinja2 templates for models.py, requests.py, urls.py +- Generate SDK for 2–3 services, compare with reference +- Iterate on generation quality + +### Phase 4: Scaling + +- Generate remaining 50+ services +- CI/CD pipeline for automatic regeneration + +--- + +## 9. Decisions on Open Questions + +> **Proposal:** Close the open questions with the following decisions so this section reads as an action plan rather than uncertainty. + +1. **Async support.** + *Decision:* MVP (Phases 1–2) implements sync API only (`httpx.Client`). The architecture is async-ready: httpx has an identical API for sync and async, and free functions allow adding `async def create(...)` + `httpx.AsyncClient` later with minimal generator changes (template swap). + +2. **Package naming.** + *Decision:* `otc-sdk` (PyPI) / `import otc_sdk`. Short and clear. `otcextensions` is a bad legacy name. + +3. **Service discovery.** + *Decision:* Lazy properties in `OTCClient` (see proposal in section 3.4). Only services that are actually used get imported. No entry points or plugin magic. + +4. **Backward compatibility.** + *Decision:* Full replacement (major version). Maintaining compatibility with the openstacksdk architecture is impossible and counterproductive — it is the root of the problems. 
The old and new SDKs can be installed side by side (`pip install otc-sdk` alongside `pip install python-otcextensions`). + +5. **Paginators.** + *Decision:* Python iterators with `yield` (see proposal in section 3.3). `for zone in zones.list_zones(client)` — automatic traversal of all pages. \ No newline at end of file diff --git a/docs/quickstart.rst b/docs/quickstart.rst new file mode 100644 index 0000000..ec56e9e --- /dev/null +++ b/docs/quickstart.rst @@ -0,0 +1,4 @@ +Quick Start +=========== + +Coming soon. diff --git a/git b/git new file mode 100644 index 0000000..e69de29 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..7e0bbe9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,51 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "sdk" +version = "0.1.0" +description = "Python SDK for Open Telekom Cloud" +readme = "README.md" +license = "Apache-2.0" +requires-python = ">=3.13" +dependencies = [ + "httpx>=0.28,<1", + "pydantic>=2.12,<3", + "pyyaml>=6.0.2,<7", +] + +[dependency-groups] +dev = [ + "pytest>=9.0.2,<10", + "pytest-httpx>=0.36", + "ruff>=0.15.2", + "mypy>=1.19", +] + +docs = [ + "sphinx>=8.0", + "sphinx-rtd-theme>=3.0", + "sphinx-autodoc-typehints>=2.0", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/sdk"] + +[tool.ruff] +line-length = 88 +target-version = "py313" + +[tool.ruff.lint] +select = ["E", "F", "I", "UP", "B", "SIM"] + +[tool.mypy] +python_version = "3.13" +strict = true + +[tool.pytest.ini_options] +testpaths = ["tests"] +log_cli = true +log_cli_level = "INFO" +log_cli_format = "%(asctime)s [%(levelname)s] %(message)s" +log_cli_date_format = "%H:%M:%S" \ No newline at end of file diff --git a/src/sdk/__init__.py b/src/sdk/__init__.py new file mode 100644 index 0000000..33dbe38 --- /dev/null +++ b/src/sdk/__init__.py @@ -0,0 +1,5 @@ +"""OTC SDK — Python SDK for Open Telekom Cloud.""" + +from ._version import __version__ + +__all__ = ["__version__"] \ No 
newline at end of file diff --git a/src/sdk/_version.py b/src/sdk/_version.py new file mode 100644 index 0000000..3f5c4a7 --- /dev/null +++ b/src/sdk/_version.py @@ -0,0 +1 @@ +__version__ = "0.1.0" diff --git a/src/sdk/common/__init__.py b/src/sdk/common/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sdk/core/__init__.py b/src/sdk/core/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sdk/core/auth.py b/src/sdk/core/auth.py new file mode 100644 index 0000000..377fe57 --- /dev/null +++ b/src/sdk/core/auth.py @@ -0,0 +1,225 @@ +"""Authentication configuration. + +Combines Go SDK's ``AuthOptions`` and ``AKSKAuthOptions`` into a single +``AuthConfig`` model. The provider auto-detects the auth strategy +based on which fields are populated: + +- ``access_key`` + ``secret_key`` → AK/SK (AWS Signature V4) +- ``password`` → Token (Keystone V3 password) +- ``token_id`` → Token (Keystone V3 token reuse) + +The user never picks a strategy class — they just pass credentials. +Validation is delegated to per-strategy validator functions. + +Example:: + + from sdk.core.auth import AuthConfig + + # Password — detected automatically + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="secret", + domain_name="my_domain", + ) + cfg.auth_mode # AuthMode.PASSWORD + + # AK/SK — same class, different fields + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK...", + secret_key="SK...", + ) + cfg.auth_mode # AuthMode.AKSK +""" + +from __future__ import annotations + +from collections.abc import Callable +from enum import StrEnum + +from pydantic import (BaseModel, SecretStr, model_validator, + computed_field, ConfigDict) + +from sdk.core.exceptions import MissingCredentialsError + +from typing import Any + + +class AuthMode(StrEnum): + """Authentication strategy identifier. + + Used by ``ProviderClient`` to select the correct auth flow. 
+ """ + + AKSK = "aksk" + PASSWORD = "password" + TOKEN = "token" + + +# --- Strategy validators --- +def _validate_aksk(cfg: AuthConfig) -> None: + """Validate AK/SK auth fields. + + AK/SK only requires ``access_key`` and ``secret_key``, + which are already guaranteed present by mode detection. + """ + if cfg.access_key is None or cfg.secret_key is None: + raise MissingCredentialsError( + "AK/SK auth requires BOTH access_key and secret_key" + ) + +def _validate_password(cfg: AuthConfig) -> None: + """Validate password auth fields. + + - Exactly one of ``username`` or ``user_id`` must be provided. + - If ``username`` is provided, exactly one of ``domain_id`` or + ``domain_name`` is required. + + Raises: + MissingCredentialsError: If required fields are missing + or incompatible fields are present. + """ + if cfg.password is None: + raise MissingCredentialsError( + "Password is required for password authentication" + ) + + # Corresponds to Go: ErrUsernameOrUserID + if not cfg.username and not cfg.user_id: + raise MissingCredentialsError( + "Password auth requires username or user_id" + ) + + # Corresponds to Go: ErrUsernameOrUserID (second check) + if cfg.username and cfg.user_id: + raise MissingCredentialsError( + "Provide either username or user_id, not both" + ) + + if cfg.username: + # Corresponds to Go: ErrDomainIDOrDomainName + if not cfg.domain_id and not cfg.domain_name: + raise MissingCredentialsError( + "Username auth requires domain_id or domain_name" + ) + if cfg.domain_id and cfg.domain_name: + raise MissingCredentialsError( + "Provide either domain_id or domain_name, not both" + ) + +def _validate_token(cfg: AuthConfig) -> None: + """Validate token reuse fields. + + Token auth should not be mixed with password-based fields. + Mirrors Go SDK's ``ErrUsernameWithToken`` / ``ErrUserIDWithToken``. + + Raises: + MissingCredentialsError: If incompatible fields are present. 
+ """ + if not cfg.token_id: + raise MissingCredentialsError("token_id is required") + + +_STRATEGY_VALIDATORS: dict[AuthMode, Callable[[AuthConfig], None]] = { + AuthMode.AKSK: _validate_aksk, + AuthMode.PASSWORD: _validate_password, + AuthMode.TOKEN: _validate_token, +} + + +class AuthConfig(BaseModel): + """Single auth config for all authentication strategies. + + This model provides a unified interface for all authentication strategies. + The SDK automatically detects the correct authentication mode (AK/SK, + Password, or Token) based on the provided fields. The model is frozen + after instantiation to guarantee consistency between the credentials and + the detected ``auth_mode``. + + Attributes: + identity_endpoint: IAM endpoint URL (e.g., ``OS_AUTH_URL``). + username: Keystone username (used for V3 password auth). + user_id: Keystone user ID (alternative to ``username``). + password: Keystone password (stored securely as ``SecretStr``). + passcode: MFA TOTP verification code. + token_id: Existing Keystone token for direct authentication. + access_key: AK/SK access key (AWS Signature V4). + secret_key: AK/SK secret key (stored securely as ``SecretStr``). + domain_id: Domain ID (required when using ``username``). + domain_name: Domain name (alternative to ``domain_id``). + project_id: Project ID for scoping (common across all strategies). + project_name: Project name for scoping. + region: Target region for endpoint discovery (e.g., ``eu-de``). + tenant_id: Project ID for scoping (alias: ``project_id``). + tenant_name: Project name for scoping. + security_token: Temporary security token (STS) used with temporary AK/SK. + allow_reauth: Whether the SDK should cache and automatically refresh tokens. + agency_name: Agency name for cross-account delegated access. + agency_domain_name: Domain name that owns the target agency. + delegated_project: Specific project to access via the agency delegation. + auth_mode: The strictly detected authentication strategy. 
Computed + automatically based on the provided credentials. + """ + model_config = ConfigDict(frozen=True) + identity_endpoint: str + + # --- Credentials --- + username: str | None = None + user_id: str | None = None + password: SecretStr | None = None + passcode: str | None = None + token_id: str | None = None + access_key: str | None = None + secret_key: SecretStr | None = None + + # --- Context / Scoping --- + domain_id: str | None = None + domain_name: str | None = None + project_id: str | None = None + project_name: str | None = None + region: str | None = None + tenant_id: str | None = None + tenant_name: str | None = None + + # --- Advanced --- + security_token: str | None = None + allow_reauth: bool = False + agency_name: str | None = None + agency_domain_name: str | None = None + delegated_project: str | None = None + + @model_validator(mode="before") + @classmethod + def _sync_tenant_and_project(cls, data: dict[str, Any]) -> dict[str, Any]: + if isinstance(data, dict): + if "tenant_id" in data and "project_id" not in data: + data["project_id"] = data["tenant_id"] + if "tenant_name" in data and "project_name" not in data: + data["project_name"] = data["tenant_name"] + return data + + @computed_field + @property + def auth_mode(self) -> AuthMode: + """Determines auth strategy based on populated fields. + + Strictly enforces that only one credential type is provided. 
+ """ + if self.token_id is not None: + return AuthMode.TOKEN + if self.access_key is not None or self.secret_key is not None: + return AuthMode.AKSK + if self.password is not None or self.passcode is not None: + return AuthMode.PASSWORD + + raise MissingCredentialsError( + "Incomplete credentials: provide AK/SK, password, or token_id" + ) + + @model_validator(mode="after") + def _validate_credentials(self) -> AuthConfig: + """Detect mode and delegate to the appropriate strategy validator.""" + mode = self.auth_mode + _STRATEGY_VALIDATORS[mode](self) + return self diff --git a/src/sdk/core/config.py b/src/sdk/core/config.py new file mode 100644 index 0000000..cee7399 --- /dev/null +++ b/src/sdk/core/config.py @@ -0,0 +1,123 @@ +"""Configuration loader for clouds.yaml.""" + +from __future__ import annotations + +import logging +import os +from pathlib import Path +from typing import Any + +import yaml + +from sdk.core.auth import AuthConfig, AuthMode + +logger = logging.getLogger(__name__) + + +def load_from_yaml(cloud_name: str = "otc", file_path: str | Path | None = None) -> AuthConfig: + """Load authentication configuration from a clouds.yaml file. + + Follows the standard OpenStack search path order if no explicit + path is provided: + 1. Current directory (./clouds.yaml) + 2. User config (~/.config/openstack/clouds.yaml) + 3. 
def load_from_yaml(cloud_name: str = "otc", file_path: str | Path | None = None) -> AuthConfig:
    """Load authentication configuration from a clouds.yaml file.

    Follows the standard OpenStack search path order if no explicit
    path is provided:
      1. Current directory (./clouds.yaml)
      2. User config (~/.config/openstack/clouds.yaml)
      3. System config (/etc/openstack/clouds.yaml)

    Args:
        cloud_name: Key under ``clouds:`` to load. Default: ``"otc"``.
        file_path: Explicit path to a clouds.yaml. If given and missing,
            ``FileNotFoundError`` is raised without falling back.

    Returns:
        A validated ``AuthConfig`` built from the selected cloud entry.

    Raises:
        FileNotFoundError: If no clouds.yaml can be located.
        ValueError: If the YAML is malformed or the cloud is missing.
    """
    path_to_load = _find_clouds_yaml(file_path)
    logger.debug("Loading cloud config from: %s", path_to_load)

    with open(path_to_load, "r", encoding="utf-8") as f:
        try:
            # FIX: an empty file parses to None; normalize to an empty
            # mapping so the .get() calls below do not raise.
            data = yaml.safe_load(f) or {}
        except yaml.YAMLError as e:
            raise ValueError(f"Failed to parse YAML file at {path_to_load}: {e}") from e

    clouds = data.get("clouds", {})
    if cloud_name not in clouds:
        raise ValueError(f"Cloud '{cloud_name}' not found in {path_to_load}")

    cloud_config = clouds[cloud_name]
    auth_data = cloud_config.get("auth", {})

    logger.debug("Loaded cloud config: '%s'", cloud_name)
    return _map_to_auth_config(cloud_config, auth_data)


def _find_clouds_yaml(explicit_path: str | Path | None) -> Path:
    """Locate the clouds.yaml file in standard locations.

    Raises:
        FileNotFoundError: If the explicit path does not exist, or no
            file is found in any standard location.
    """
    if explicit_path:
        p = Path(explicit_path)
        if p.exists():
            return p
        raise FileNotFoundError(f"Explicit config file not found: {explicit_path}")

    search_paths = [
        Path.cwd() / "clouds.yaml",
        Path.home() / ".config" / "openstack" / "clouds.yaml",
        Path("/etc/openstack/clouds.yaml"),
    ]

    for p in search_paths:
        if p.is_file():
            return p

    raise FileNotFoundError(
        "Could not find 'clouds.yaml' in standard locations "
        "(./, ~/.config/openstack/, /etc/openstack/)."
    )


def _resolve_env(value: Any) -> Any:
    """Resolve ``${ENV_VAR}`` or ``$ENV_VAR`` syntax in strings."""
    if isinstance(value, str) and "$" in value:
        # os.path.expandvars substitutes environment variables in place;
        # unknown variables are left untouched.
        return os.path.expandvars(value)
    return value


def _map_to_auth_config(cloud_config: dict[str, Any], auth_data: dict[str, Any]) -> AuthConfig:
    """Map OpenStack clouds.yaml fields to our AuthConfig Pydantic model.

    Detection precedence mirrors ``AuthConfig.auth_mode``: a password
    selects PASSWORD, any AK/SK key selects AKSK, otherwise TOKEN.

    Raises:
        MissingCredentialsError: Propagated from ``AuthConfig`` when the
            mapped fields do not form a complete credential set.
    """
    auth = {k: _resolve_env(v) for k, v in auth_data.items()}
    cloud = {k: _resolve_env(v) for k, v in cloud_config.items()}

    if auth.get("password"):
        mode = AuthMode.PASSWORD
    elif any(k in auth for k in ("access_key", "ak", "secret_key", "sk")):
        mode = AuthMode.AKSK
    else:
        mode = AuthMode.TOKEN

    logger.debug("Auth mode resolved: %s", mode)

    raw_url = auth.get("auth_url") or auth.get("identity_endpoint") or ""
    auth_url = raw_url.rstrip("/")
    if auth_url and not auth_url.endswith("/v3"):
        auth_url += "/v3"

    # NOTE: ``auth_mode`` is a computed field on AuthConfig and must not
    # be passed to the constructor; it is derived from the credentials.
    config_kwargs: dict[str, Any] = {
        "identity_endpoint": auth_url,
        "region": cloud.get("region_name", ""),
    }

    config_kwargs["project_name"] = auth.get("project_name") or auth.get("tenant_name")
    config_kwargs["project_id"] = auth.get("project_id") or auth.get("tenant_id")

    config_kwargs["domain_name"] = (
        auth.get("domain_name")
        or auth.get("user_domain_name")
        or auth.get("project_domain_name")
    )
    config_kwargs["domain_id"] = auth.get("domain_id") or auth.get("user_domain_id")

    if mode == AuthMode.AKSK:
        config_kwargs["access_key"] = auth.get("access_key") or auth.get("ak")
        config_kwargs["secret_key"] = auth.get("secret_key") or auth.get("sk")
    elif mode == AuthMode.PASSWORD:
        config_kwargs["username"] = auth.get("username")
        config_kwargs["password"] = auth.get("password")
    else:
        # FIX: TOKEN mode previously mapped username/password (always
        # absent here) and never the token itself, so token auth from
        # clouds.yaml could not work. Map the standard ``token`` key
        # (clouds.yaml convention) with ``token_id`` as a fallback.
        config_kwargs["token_id"] = auth.get("token") or auth.get("token_id")

    # Drop unset/empty values so AuthConfig defaults apply.
    clean_kwargs = {k: v for k, v in config_kwargs.items() if v}

    return AuthConfig(**clean_kwargs)
"""Endpoint discovery from the IAM service catalog.

Extracts the endpoint lookup logic into a reusable module.

The ``EndpointOpts`` dataclass specifies search criteria, and
``find_endpoint()`` searches a service catalog for a matching URL.
``build_endpoint_locator()`` returns a closure that captures the
catalog and default region, ready to be stored on ``ProviderClient``.

Example::

    from sdk.core.endpoint import EndpointOpts, find_endpoint

    catalog = [...]  # from IAM auth response
    opts = EndpointOpts(service_type="compute", region="eu-de")
    url = find_endpoint(catalog, opts)
    # → "https://ecs.eu-de.otc.t-systems.com/v2.1/"
"""

from __future__ import annotations

from collections.abc import Callable
from enum import StrEnum

from pydantic import BaseModel, Field, AliasChoices

from sdk.core.exceptions import EndpointNotFoundError, ServiceNotFoundError


class Availability(StrEnum):
    """Endpoint visibility level (catalog ``interface`` value)."""

    PUBLIC = "public"
    INTERNAL = "internal"
    ADMIN = "admin"


class EndpointOpts(BaseModel):
    """Search criteria for locating a service endpoint.

    At minimum, ``service_type`` must be provided.

    Attributes:
        service_type: Catalog service type (e.g. ``compute``, ``dns``).
        name: Optional service name filter (e.g. ``nova``).
        region: Region to match. Empty means accept any region.
        availability: Endpoint interface visibility.
    """
    model_config = {"frozen": True}

    service_type: str
    name: str = ""
    region: str = ""
    availability: Availability = Availability.PUBLIC


class CatalogEndpoint(BaseModel):
    """One endpoint record inside a catalog entry.

    ``region_id`` also accepts the alternate key ``region`` as sent by
    some IAM responses (see ``AliasChoices``).
    """
    interface: str
    region_id: str = Field(default="",
        validation_alias=AliasChoices("region_id", "region")
    )
    url: str


class CatalogEntry(BaseModel):
    """One service entry of the IAM service catalog."""
    type: str
    name: str = ""
    endpoints: list[CatalogEndpoint] = Field(default_factory=list)


def find_endpoint(
    catalog: list[CatalogEntry],
    opts: EndpointOpts,
) -> str:
    """Find a single endpoint URL from the service catalog.

    Searches catalog entries for a match on ``service_type``,
    optional ``name``, ``region``, and ``availability``.
    Falls back to wildcard (``*``) region entries if no exact
    match is found.

    Args:
        catalog: Service catalog entries from IAM response.
        opts: Search criteria.

    Returns:
        Endpoint URL string (always ends with ``/``).

    Raises:
        ServiceNotFoundError: No catalog entry matches the type.
        EndpointNotFoundError: Entry found but no endpoint matches
            region/availability.
    """
    matched: list[str] = []
    wildcard: list[str] = []
    service_found = False

    for entry in catalog:
        if entry.type != opts.service_type:
            continue
        if opts.name and entry.name != opts.name:
            continue

        service_found = True

        for ep in entry.endpoints:
            # Availability is a StrEnum, so it compares equal to the
            # plain string stored in the catalog.
            if ep.interface != opts.availability:
                continue

            if not ep.url:
                continue

            url = _normalize_url(ep.url)

            if not opts.region or ep.region_id == opts.region:
                matched.append(url)
            elif ep.region_id == "*":
                # Region-agnostic entry; used only when no exact
                # region match exists.
                wildcard.append(url)

    if not matched:
        matched = wildcard

    if matched:
        # First match wins; catalog order is preserved.
        return matched[0]

    if not service_found:
        raise ServiceNotFoundError(service=opts.service_type)

    raise EndpointNotFoundError(
        service=opts.service_type, region=opts.region,
    )


EndpointLocator = Callable[[EndpointOpts], str]
"""Callable type that resolves an ``EndpointOpts`` → URL string."""


def build_endpoint_locator(
    catalog: list[CatalogEntry],
    default_region: str = "",
) -> EndpointLocator:
    """Build an endpoint locator closure from a service catalog.

    Returns a callable that accepts ``EndpointOpts`` and resolves the
    endpoint URL. If the opts have no region set, ``default_region``
    is used.

    This is the Python equivalent of the Go SDK pattern where
    ``ProviderClient.EndpointLocator`` is a ``func(EndpointOpts) string``.

    Args:
        catalog: Service catalog from IAM.
        default_region: Fallback region from auth config.

    Returns:
        Callable ``(EndpointOpts) → str``.
    """

    def locator(opts: EndpointOpts) -> str:
        # EndpointOpts is frozen, so a copy (not mutation) injects
        # the default region.
        if not opts.region and default_region:
            opts = opts.model_copy(update={"region": default_region})
        return find_endpoint(catalog, opts)

    return locator


def _normalize_url(url: str) -> str:
    """Ensure URL ends with ``/``."""
    return url if url.endswith("/") else url + "/"
"InvalidInputError", + # auth + "AuthError", + "MissingCredentialsError", + "ReauthError", + "PostReauthError", + # endpoint + "EndpointError", + "ServiceNotFoundError", + "EndpointNotFoundError", + # response + "HttpError", + "BadRequestError", + "UnauthorizedError", + "ForbiddenError", + "NotFoundError", + "MethodNotAllowedError", + "RequestTimeoutError", + "ConflictError", + "TooManyRequestsError", + "InternalServerError", + "ServiceUnavailableError", + "raise_for_status", + # lookup + "ResourceNotFoundError", + "MultipleResourcesFoundError", + # timeout + "SDKTimeoutError", +] diff --git a/src/sdk/core/exceptions/auth.py b/src/sdk/core/exceptions/auth.py new file mode 100644 index 0000000..4e7e1bd --- /dev/null +++ b/src/sdk/core/exceptions/auth.py @@ -0,0 +1,58 @@ +"""Authentication-related exceptions. +""" + +from __future__ import annotations + +from .base import SDKError + +# Corresponds to Go SDK's ``ErrUnableToReauthenticate`` and +# ``ErrErrorAfterReauthentication``. + +class AuthError(SDKError): + """Authentication-related error.""" + + +class MissingCredentialsError(AuthError): + """Required credentials were not provided. + + Raised when ``AuthConfig`` cannot determine an auth strategy + from the provided fields. + """ + + +class ReauthError(AuthError): + """Re-authentication failed. + + Args: + original: The underlying exception that caused the failure. + """ + # Corresponds to Go SDK's ``ErrUnableToReauthenticate``. + def __init__(self, original: Exception | None = None) -> None: + self.original = original + msg = ( + f"Unable to re-authenticate: {original}" + if original + else "Unable to re-authenticate" + ) + super().__init__(msg) + + +class PostReauthError(AuthError): + """Request failed after successful re-authentication. + + Raised when the token was refreshed successfully, but the + subsequent request still failed (usually an HTTP error). + + Args: + original: The underlying exception from the failed request. 
+ """ + # Corresponds to Go SDK's ``ErrErrorAfterReauthentication``. + def __init__(self, original: Exception | None = None) -> None: + self.original = original + msg = ( + f"Successfully re-authenticated, but got error " + f"executing request: {original}" + if original + else "Successfully re-authenticated, but got error executing request" + ) + super().__init__(msg) diff --git a/src/sdk/core/exceptions/base.py b/src/sdk/core/exceptions/base.py new file mode 100644 index 0000000..785685b --- /dev/null +++ b/src/sdk/core/exceptions/base.py @@ -0,0 +1,45 @@ +"""Base exception types for the SDK. +""" + +# Corresponds to Go SDK's ``BaseError``, ``ErrMissingInput``, +# and ``ErrInvalidInput``. + +from __future__ import annotations + +from typing import Any + + +class SDKError(Exception): + """Base exception for all SDK errors. + All SDK exceptions inherit from this class, so + ``except SDKError`` catches every SDK-related error. + """ + # Corresponds to Go SDK's ``BaseError``. + +class MissingInputError(SDKError): + """Required input argument was not provided. + + Args: + argument: Name of the missing argument. + """ + # Corresponds to Go SDK's ``ErrMissingInput``. + def __init__(self, argument: str) -> None: + self.argument = argument + super().__init__(f"Missing input for argument [{argument}]") + + +class InvalidInputError(SDKError): + """Invalid value provided for an input argument. + + Args: + argument: Name of the argument. + value: The invalid value that was provided. + """ + # Corresponds to Go SDK's ``ErrInvalidInput``. + + def __init__(self, argument: str, value: Any) -> None: + self.argument = argument + self.value = value + super().__init__( + f"Invalid input provided for argument [{argument}]: [{value!r}]" + ) diff --git a/src/sdk/core/exceptions/endpoint.py b/src/sdk/core/exceptions/endpoint.py new file mode 100644 index 0000000..33573e0 --- /dev/null +++ b/src/sdk/core/exceptions/endpoint.py @@ -0,0 +1,52 @@ +"""Endpoint discovery exceptions. 
+""" + +# Corresponds to Go SDK's ``ErrServiceNotFound`` and +# ``ErrEndpointNotFound``. + +from __future__ import annotations + +from .base import SDKError + + +class EndpointError(SDKError): + """Endpoint discovery error.""" + + +class ServiceNotFoundError(EndpointError): + """No matching service found in the service catalog. + + Args: + service: Name of the service that was not found. + """ + # Corresponds to Go SDK's ``ErrServiceNotFound``. + + def __init__(self, service: str = "") -> None: + self.service = service + msg = ( + f"No suitable service could be found in the " + f"service catalog: {service}" + if service + else "No suitable service could be found in the service catalog" + ) + super().__init__(msg) + + +class EndpointNotFoundError(EndpointError): + """No matching endpoint found for the service. + + Args: + service: Name of the service. + region: Region where the endpoint was expected. + """ + # Corresponds to Go SDK's ``ErrEndpointNotFound``. + + def __init__(self, service: str = "", region: str = "") -> None: + self.service = service + self.region = region + parts = ["No suitable endpoint could be found in the service catalog"] + if service: + parts.append(f"for service '{service}'") + if region: + parts.append(f"in region '{region}'") + super().__init__(" ".join(parts)) diff --git a/src/sdk/core/exceptions/lookup.py b/src/sdk/core/exceptions/lookup.py new file mode 100644 index 0000000..21b9562 --- /dev/null +++ b/src/sdk/core/exceptions/lookup.py @@ -0,0 +1,46 @@ +"""Resource lookup exceptions. +""" + +# Corresponds to Go SDK's ``ErrResourceNotFound`` and +# ``ErrMultipleResourcesFound``. + +from __future__ import annotations + +from .base import SDKError + + +class ResourceNotFoundError(SDKError): + """Resource not found during lookup by name. + + Raised when a find-by-name operation returns no results. + + Args: + resource_type: Type of resource (e.g. ``"VPC"``, ``"Subnet"``). + name: Name that was searched for. 
+ """ + # Corresponds to Go SDK's ``ErrResourceNotFound``. + def __init__(self, resource_type: str, name: str) -> None: + self.resource_type = resource_type + self.name = name + super().__init__(f"Unable to find {resource_type} with name {name}") + + +class MultipleResourcesFoundError(SDKError): + """Multiple resources found during lookup by name. + + Raised when a find-by-name operation returns more than + one result and a single match was expected. + + Args: + resource_type: Type of resource (e.g. ``"VPC"``, ``"Subnet"``). + name: Name that was searched for. + count: Number of matching resources found. + """ + # Corresponds to Go SDK's ``ErrMultipleResourcesFound``. + def __init__(self, resource_type: str, name: str, count: int) -> None: + self.resource_type = resource_type + self.name = name + self.count = count + super().__init__( + f"Found {count} {resource_type}s matching {name}" + ) diff --git a/src/sdk/core/exceptions/response.py b/src/sdk/core/exceptions/response.py new file mode 100644 index 0000000..5bb00a5 --- /dev/null +++ b/src/sdk/core/exceptions/response.py @@ -0,0 +1,237 @@ +"""HTTP response exceptions. +""" + +# Corresponds to Go SDK's ``ErrUnexpectedResponseCode`` and all +# ``ErrDefaultNNN`` types. + +from __future__ import annotations + +from typing import Any + +from .base import SDKError + + +class HttpError(SDKError): + """HTTP response error. + + Stores full request context for debuggability. + + Args: + method: HTTP method (GET, POST, etc.). + url: Request URL. + body: Response body text. + expected: List of expected HTTP status codes. + headers: Response headers. + status_code: HTTP status code. Overrides the class-level + default when constructing a generic ``HttpError``. + + Attributes: + status_code: HTTP status code. Set as a class variable + on subclasses (e.g. ``BadRequestError.status_code == 400``). + request_id: Value of the ``X-Request-Id`` response header, + extracted automatically for OTC request tracing. 
+ """ + # Corresponds to Go SDK's ``ErrUnexpectedResponseCode``. + status_code: int = 0 + + def __init__( + self, + *, + method: str, + url: str, + body: str, + expected: list[int] | None = None, + headers: dict[str, Any] | None = None, + status_code: int | None = None, + ) -> None: + self.method = method + self.url = url + self.body = body + self.expected = expected or [] + self.headers = headers or {} + self.request_id: str = self.headers.get("x-request-id", "") + if status_code is not None: + self.status_code = status_code + super().__init__(self._format_message()) + + def _format_message(self) -> str: + msg = ( + f"Expected HTTP response code {self.expected} when accessing " + f"[{self.method} {self.url}], but got {self.status_code} instead" + ) + if self.body: + msg += f"\n{self.body}" + return msg + + +# --- Status-code specific errors --- + + +class BadRequestError(HttpError): + """400 Bad Request. + """ + # Corresponds to Go SDK's ``ErrDefault400``. + status_code = 400 + + def _format_message(self) -> str: + return ( + f"Bad request with: [{self.method} {self.url}], " + f"error message: {self.body}" + ) + + +class UnauthorizedError(HttpError): + """401 Unauthorized. + """ + # Corresponds to Go SDK's ``ErrDefault401``. + status_code = 401 + + def _format_message(self) -> str: + return f"Authentication failed, error message: {self.body}" + + +class ForbiddenError(HttpError): + """403 Forbidden. + """ + # Corresponds to Go SDK's ``ErrDefault403``. + + status_code = 403 + + def _format_message(self) -> str: + return f"Action forbidden, error message: {self.body}" + + +class NotFoundError(HttpError): + """404 Not Found. + """ + # Corresponds to Go SDK's ``ErrDefault404``. + status_code = 404 + + def _format_message(self) -> str: + return ( + f"Resource not found: [{self.method} {self.url}], " + f"error message: {self.body}" + ) + + +class MethodNotAllowedError(HttpError): + """405 Method Not Allowed. + """ + # Corresponds to Go SDK's ``ErrDefault405``. 
+ status_code = 405 + + def _format_message(self) -> str: + return "Method not allowed" + + +class RequestTimeoutError(HttpError): + """408 Request Timeout. + """ + # Corresponds to Go SDK's ``ErrDefault408``. + status_code = 408 + + def _format_message(self) -> str: + return "The server timed out waiting for the request" + + +class ConflictError(HttpError): + """409 Conflict. + """ + # Corresponds to Go SDK's ``ErrDefault409``. + status_code = 409 + + +class TooManyRequestsError(HttpError): + """429 Too Many Requests. + Checks ``Retry-After`` response header when present. + """ + # Corresponds to Go SDK's ``ErrDefault429``. + status_code = 429 + + def _format_message(self) -> str: + retry_after = self.headers.get("Retry-After", "") + msg = ( + "Too many requests have been sent in a given amount of time." + ) + if retry_after: + msg += f" Retry after {retry_after}s." + else: + msg += " Pause requests, wait up to one minute, and try again." + return msg + + +class InternalServerError(HttpError): + """500 Internal Server Error. + """ + # Corresponds to Go SDK's ``ErrDefault500``. + status_code = 500 + + def _format_message(self) -> str: + return "Internal Server Error" + + +class ServiceUnavailableError(HttpError): + """503 Service Unavailable. + """ + # Corresponds to Go SDK's ``ErrDefault503``. + status_code = 503 + + def _format_message(self) -> str: + return ( + "The service is currently unable to handle the request due to " + "a temporary overloading or maintenance. This is a temporary " + "condition. Try again later." 
+ ) + + +# --- Mapping from status code to exception class --- + +HTTP_ERROR_MAP: dict[int, type[HttpError]] = { + 400: BadRequestError, + 401: UnauthorizedError, + 403: ForbiddenError, + 404: NotFoundError, + 405: MethodNotAllowedError, + 408: RequestTimeoutError, + 409: ConflictError, + 429: TooManyRequestsError, + 500: InternalServerError, + 503: ServiceUnavailableError, +} + + +def raise_for_status( + status_code: int, + *, + method: str, + url: str, + body: str, + expected: list[int] | None = None, + headers: dict[str, Any] | None = None, +) -> None: + """Raise the appropriate ``HttpError`` for a non-2xx status code. + + Looks up the status code in ``HTTP_ERROR_MAP`` and raises the + matching exception. Falls back to a generic ``HttpError`` for + unmapped codes. + + Args: + status_code: HTTP response status code. + method: HTTP method (GET, POST, etc.). + url: Request URL. + body: Response body text. + expected: List of expected HTTP status codes. + headers: Response headers. + + Raises: + HttpError: Always raised (specific subclass when possible). + """ + exc_class = HTTP_ERROR_MAP.get(status_code, HttpError) + raise exc_class( + method=method, + url=url, + body=body, + expected=expected, + headers=headers, + status_code=status_code, + ) diff --git a/src/sdk/core/exceptions/timeout.py b/src/sdk/core/exceptions/timeout.py new file mode 100644 index 0000000..477a853 --- /dev/null +++ b/src/sdk/core/exceptions/timeout.py @@ -0,0 +1,18 @@ +"""Client-side timeout exception. + +Corresponds to Go SDK's ``ErrTimeOut``. +""" + +from __future__ import annotations + +from .base import SDKError + + +class SDKTimeoutError(SDKError): + """Client-side operation timeout. + + Distinct from ``RequestTimeoutError`` (HTTP 408) which is a + server response. This error is raised when the SDK's own + timeout is exceeded, e.g. waiting for a resource to become active. + """ + # Corresponds to Go SDK's ``ErrTimeOut``. 
"""Pagination strategies for list operations.

Each strategy is a generator function that yields items one by one,
automatically fetching the next page when needed.

Three strategies are supported:

- **Marker**: Next page determined by ``marker`` query param set to
  the last item's ID (e.g. CCE clusters, ECS servers).
- **Offset**: Next page determined by ``offset`` + ``limit`` query
  params (e.g. SMN topics).
- **Linked**: Next page URL extracted from response body
  (e.g. ``links.next`` field — Keystone-style pagination).

Additionally, ``single_page`` is a trivial helper for non-paginated
list endpoints that return all items at once.

Example::

    from sdk.core.pagination import marker_paginate

    # yields individual cluster dicts, fetching pages automatically
    for cluster in marker_paginate(
        client=service_client,
        path="clusters",
        items_key="items",
    ):
        print(cluster["metadata"]["name"])
"""

from __future__ import annotations
# NOTE(review): ``typing`` is imported twice below; merge when convenient.
from pydantic import BaseModel
from typing import TypeVar
from collections.abc import Generator
from typing import Any
from urllib.parse import parse_qs, urlencode, urlparse, urlunparse, urljoin

from sdk.core.service_client import ServiceClient

# Items may be parsed into a caller-supplied Pydantic model, or returned raw.
T = TypeVar("T", bound=BaseModel)
PaginatedItem = T | dict[str, Any]

def marker_paginate(
    client: ServiceClient,
    path: str,
    *,
    items_key: str,
    model: type[T] | None = None,
    marker_key: str = "id",
    limit: int = 0,
    params: dict[str, str] | None = None,
) -> Generator[PaginatedItem, None, None]:
    """Paginate using marker-based strategy.

    Fetches pages by setting ``marker`` query param to the last
    item's ``marker_key`` value. Stops when a page returns
    fewer items than ``limit`` or an empty list.

    Args:
        client: Service client to send requests through.
        path: Relative resource path (e.g. ``"servers/detail"``).
        items_key: JSON key containing the items list
            (e.g. ``"servers"``, ``"items"``).
        model: Optional Pydantic model class. If provided, raw JSON
            items will be validated and parsed into instances of this
            class. If omitted, raw dicts are returned.
        marker_key: Field name on each item used as the marker.
            Default: ``"id"``.
        limit: Page size. If 0, the server default is used.
        params: Additional query parameters.

    Yields:
        Parsed Pydantic model instances (if ``model`` is provided),
        otherwise raw resource dicts.
    """
    query: dict[str, str] = dict(params) if params else {}
    if limit:
        query["limit"] = str(limit)

    while True:
        url = _build_url(path, query)
        _, items = _fetch_page(client, url, items_key)
        if not items:
            return

        for item in items:
            yield model.model_validate(item) if model else item

        # A short page means the server has no more items.
        if limit and len(items) < limit:
            return
        last = items[-1]
        raw_marker = last.get(marker_key)

        if raw_marker is None or raw_marker == "":
            return

        marker_str = str(raw_marker)
        # Identical marker twice in a row would loop forever; stop.
        if query.get("marker") == marker_str:
            return

        query["marker"] = marker_str

def offset_paginate(
    client: ServiceClient,
    path: str,
    *,
    items_key: str,
    model: type[T] | None = None,
    limit: int,
    start_offset: int = 0,
    params: dict[str, str] | None = None,
) -> Generator[PaginatedItem, None, None]:
    """Paginate using offset-based strategy.

    Increments ``offset`` by ``limit`` on each page. Stops when
    a page returns an empty list or fewer items than ``limit``.

    This mirrors Go SDK's ``OffsetPageBase`` behavior.

    Args:
        client: Service client to send requests through.
        path: Relative resource path.
        items_key: JSON key containing the items list.
        model: Optional Pydantic model class. If provided, raw JSON
            items will be validated and parsed into instances of this
            class. If omitted, raw dicts are returned.
        limit: Page size (required for offset pagination).
        start_offset: Starting offset. Default: 0.
        params: Additional query parameters.

    Yields:
        Parsed Pydantic model instances (if ``model`` is provided),
        otherwise raw resource dicts.

    Raises:
        ValueError: If ``limit`` is not strictly positive.
    """
    if limit <= 0:
        raise ValueError("Limit must be strictly positive for offset pagination.")
    query: dict[str, str] = dict(params) if params else {}
    query["limit"] = str(limit)
    offset = start_offset

    while True:
        query["offset"] = str(offset)
        url = _build_url(path, query)
        _, items = _fetch_page(client, url, items_key)

        if not items:
            return

        for item in items:
            yield model.model_validate(item) if model else item

        # A short page means the server has no more items.
        if len(items) < limit:
            return

        offset += limit


def linked_paginate(
    client: ServiceClient,
    path: str,
    *,
    items_key: str,
    model: type[T] | None = None,
    link_path: list[str] | None = None,
    params: dict[str, str] | None = None,
) -> Generator[PaginatedItem, None, None]:
    """Paginate using linked (next URL) strategy.

    Follows a ``next`` link embedded in the response body.
    The link path defaults to ``["links", "next"]`` (Keystone
    convention) but can be customized.

    This mirrors Go SDK's ``LinkedPageBase`` behavior.

    Args:
        client: Service client to send requests through.
        path: Relative resource path for the first page.
        items_key: JSON key containing the items list.
        model: Optional Pydantic model class. If provided, raw JSON
            items will be validated and parsed into instances of this
            class. If omitted, raw dicts are returned.
        link_path: List of keys to traverse in the response
            to find the next page URL. Default: ``["links", "next"]``.
        params: Additional query parameters for the first request.

    Yields:
        Parsed Pydantic model instances (if ``model`` is provided),
        otherwise raw resource dicts.
    """
    if link_path is None:
        link_path = ["links", "next"]

    url = _build_url(path, params) if params else path
    # Track visited URLs to guard against a server returning a next
    # link that cycles back to an earlier page.
    seen_urls: set[str] = set()

    while url:
        if url in seen_urls:
            break
        seen_urls.add(url)

        data, items = _fetch_page(client, url, items_key)

        if not items:
            return

        for item in items:
            yield model.model_validate(item) if model else item

        next_url = _extract_link(data, link_path)
        if not next_url:
            return
        # The next link may be relative; resolve against the current URL.
        url = urljoin(url, next_url)


def single_page(
    client: ServiceClient,
    path: str,
    *,
    items_key: str,
    model: type[T] | None = None,
    params: dict[str, str] | None = None,
) -> list[PaginatedItem]:
    """Fetch a single (non-paginated) list response.

    Convenience wrapper for endpoints that return all items
    at once. Returns a plain list instead of a generator.

    This mirrors Go SDK's ``SinglePageBase``.

    Args:
        client: Service client to send requests through.
        path: Relative resource path.
        items_key: JSON key containing the items list.
        model: Optional Pydantic model class. If provided, raw JSON
            items will be validated and parsed into instances of this
            class. If omitted, raw dicts are returned.
        params: Additional query parameters.

    Returns:
        Parsed Pydantic model instances (if ``model`` is provided),
        otherwise raw resource dicts.
    """
    url = _build_url(path, params) if params else path
    _, items = _fetch_page(client, url, items_key)

    if model:
        return [model.model_validate(item) for item in items]
    return items


# ======================================================================
# Internal helpers
# ======================================================================


def _build_url(path: str, params: dict[str, str] | None) -> str:
    """Append query parameters to a path.

    If the path already contains query params, they are merged
    (new params override existing ones).

    Args:
        path: Base path, possibly with existing query string.
        params: Query parameters to add.

    Returns:
        Path with query string.
    """
    if not params:
        return path

    parsed = urlparse(path)
    existing = parse_qs(parsed.query, keep_blank_values=True)
    # parse_qs wraps every value in a list; unwrap singletons so the
    # merged mapping round-trips cleanly through urlencode.
    merged = {k: v[0] if len(v) == 1 else v for k, v in existing.items()}
    merged.update(params)

    new_query = urlencode(merged, doseq=True)
    return urlunparse(parsed._replace(query=new_query))


def _extract_link(data: dict[str, Any], path: list[str]) -> str:
    """Traverse nested dict to extract a link URL.

    Args:
        data: Response body dict.
        path: Key path to traverse (e.g. ``["links", "next"]``).

    Returns:
        URL string, or empty string if not found.
    """
    if not path:
        return ""

    current: Any = data
    for key in path:
        if not isinstance(current, dict):
            return ""
        current = current.get(key)
        if current is None:
            return ""
    return str(current) if current else ""


def _fetch_page(
    client: ServiceClient,
    url: str,
    items_key: str
) -> tuple[dict[str, Any], list[dict[str, Any]]]:
    """Fetch a page, parse JSON, and strictly validate the items key.

    Raises:
        ValueError: If ``items_key`` is absent from the response body.
    """
    resp = client.get(url)
    data = resp.json()

    if items_key not in data:
        raise ValueError(f"Expected key '{items_key}' not found in API response")

    return data, data[items_key]
→ _aksk_auth_with_agency() + └── else → _aksk_auth() + +Example:: + + from sdk.core.auth import AuthConfig + from sdk.core.provider import ProviderClient + + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="secret", + domain_name="my_domain", + tenant_name="eu-de", + ) + client = ProviderClient(cfg) + client.authenticate() + # client.token_id is now set, endpoint_locator is ready +""" + +from __future__ import annotations + +import logging +import re +import time +from typing import Any + +import httpx + +from sdk.core.auth import AuthConfig, AuthMode +from sdk.core.endpoint import (EndpointLocator, + build_endpoint_locator, + CatalogEntry) +from sdk.core.exceptions import ( + ReauthError, + UnauthorizedError, + raise_for_status +) +from sdk.core.signer import SignOptions, sign_request + +logger = logging.getLogger(__name__) + +USER_AGENT = "python-t-cloud/0.1.0" +"""Default User-Agent header value.""" + +_DEFAULT_OK_CODES: dict[str, list[int]] = { + "GET": [200], + "POST": [200, 201, 202], + "PUT": [200, 201, 202], + "PATCH": [200, 204], + "DELETE": [200, 202, 204], + "HEAD": [204, 206], +} + +_DEFAULT_MAX_BACKOFF_RETRIES = 20 +"""Maximum number of retries on 429 (Too Many Requests).""" + +_DEFAULT_BACKOFF_TIMEOUT = 60.0 +"""Seconds to wait before retrying on 429.""" + +_DEFAULT_RETRY_COUNT = 1 +"""Number of retries on gateway errors (502, 504).""" + +_DEFAULT_RETRY_TIMEOUT = 0.5 +"""Seconds to wait before retrying on gateway errors.""" + +_VERSION_SUFFIX = re.compile(r"/v\d+(\.\d+)?$") + + +class ProviderClient: + """Central HTTP client for OTC API interaction. + + Holds authentication state (token, AK/SK credentials), + project/domain context, and an endpoint locator built from + the IAM service catalog. All service clients reference a single + ``ProviderClient`` instance. + + .. note:: + + This implementation is not thread-safe. 
If thread safety is needed, + add external synchronization around ``authenticate()`` and + ``request()``. + + Args: + auth_config: Authentication configuration. + http_client: Optional pre-configured httpx client. + Created automatically if not provided. + max_backoff_retries: Max retries on 429 responses. + backoff_timeout: Wait time (seconds) per 429 retry. + + Attributes: + token_id: Current Keystone token. + project_id: Scoped project ID from auth response. + user_id: Authenticated user ID. + domain_id: Domain ID from auth response. + region_id: Region derived from auth config. + endpoint_locator: Callable to resolve service endpoints. + """ + + def __init__( + self, + auth_config: AuthConfig, + *, + http_client: httpx.Client | None = None, + max_backoff_retries: int = _DEFAULT_MAX_BACKOFF_RETRIES, + backoff_timeout: float = _DEFAULT_BACKOFF_TIMEOUT, + ) -> None: + self.auth_config = auth_config + self._owns_http_client = http_client is None + self._http = http_client or httpx.Client( + headers={"User-Agent": USER_AGENT}, + timeout=httpx.Timeout(30.0), + ) + + self.token_id: str = "" + self.project_id: str = "" + self.user_id: str = "" + self.domain_id: str = "" + self.region_id: str = auth_config.region or "" + self.endpoint_locator: EndpointLocator | None = None + + self.max_backoff_retries = max_backoff_retries + self.backoff_timeout = backoff_timeout + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + + @property + def identity_base(self) -> str: + """IAM base URL (without version path). + + Strips ``/v3``, ``/v3.0``, etc. from the identity endpoint. 
+ """ + endpoint = self.auth_config.identity_endpoint.rstrip("/") + endpoint = _VERSION_SUFFIX.sub("", endpoint) + return endpoint + "/" + + @property + def identity_v3_endpoint(self) -> str: + """IAM v3 endpoint URL (always ends with ``/``).""" + return self.identity_base + "v3/" + + def authenticate(self) -> None: + """Run the appropriate auth flow based on ``AuthConfig``. + + Dispatches to one of four internal methods depending on + ``auth_mode`` and presence of ``agency_name``. + + Raises: + UnauthorizedError: If the IAM request fails. + MissingCredentialsError: If auth mode cannot be determined. + """ + mode = self.auth_config.auth_mode + has_agency = bool( + self.auth_config.agency_name + and self.auth_config.agency_domain_name + ) + + if mode in (AuthMode.PASSWORD, AuthMode.TOKEN): + if has_agency: + self._v3_auth_with_agency() + else: + self._v3_auth() + else: + if has_agency: + self._aksk_auth_with_agency() + else: + self._aksk_auth() + if self.endpoint_locator is None: + raise RuntimeError( + "Endpoint locator not initialized after authentication" + ) + + def request( + self, + method: str, + url: str, + *, + service_name: str = "", + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + retry_count: int | None = None, + retry_timeout: float | None = None, + ) -> httpx.Response: + """Execute an authenticated HTTP request with retry logic. + + Handles: + - Auth header injection (token or AK/SK signing) + - 401 → re-authenticate and retry once + - 429 → backoff retry (up to ``max_backoff_retries``) + - 502/504 → gateway retry (up to ``retry_count``) + + Args: + method: HTTP method (GET, POST, etc.). + url: Full request URL. + service_name: Service name. + json: JSON-serializable body. + content: Raw bytes body (mutually exclusive with ``json``). + headers: Additional request headers. + ok_codes: Acceptable status codes. Defaults per HTTP method. 
+ retry_count: Gateway error retries. Default: 1. + retry_timeout: Gateway retry wait (seconds). Default: 0.5. + + Returns: + httpx.Response on success. + + Raises: + HttpError: On non-OK status codes after exhausting retries. + """ + if ok_codes is None: + ok_codes = _DEFAULT_OK_CODES.get(method.upper(), [200]) + if retry_count is None: + retry_count = _DEFAULT_RETRY_COUNT + if retry_timeout is None: + retry_timeout = _DEFAULT_RETRY_TIMEOUT + + return self._do_request( + method=method, + url=url, + service_name=service_name, + json=json, + content=content, + headers=headers, + ok_codes=ok_codes, + retry_count=retry_count, + retry_timeout=retry_timeout, + backoff_remaining=self.max_backoff_retries, + _is_retry=False, + ) + + def close(self) -> None: + """Close the underlying HTTP client.""" + if self._owns_http_client: + self._http.close() + + def __enter__(self) -> ProviderClient: + return self + + def __exit__(self, *_: object) -> None: + self.close() + + # ------------------------------------------------------------------ + # Internal: HTTP request engine + # ------------------------------------------------------------------ + + def _do_request( + self, + *, + method: str, + url: str, + service_name: str, + json: Any | None, + content: bytes | None, + headers: dict[str, str] | None, + ok_codes: list[int], + retry_count: int, + retry_timeout: float, + backoff_remaining: int, + _is_retry: bool = False, + ) -> httpx.Response: + """Core request logic with retry/reauth handling.""" + reauthed = _is_retry + + while True: + req = self._build_request( + method=method, + url=url, + json=json, + content=content, + headers=headers, + ) + + self._apply_auth(req, service_name=service_name) + + t0 = time.monotonic() + resp = self._http.send(req) + duration_ms = (time.monotonic() - t0) * 1000 + + _log_response( + logger, method, url, resp.status_code, + duration_ms, resp.headers.get("x-request-id", ""), + ) + + if resp.status_code in ok_codes: + return resp + + body = 
resp.text + + if (resp.status_code == 401 + and self.auth_config.allow_reauth + and not reauthed): + logger.debug("Got 401, attempting re-authentication") + try: + self.authenticate() + except Exception as exc: + raise ReauthError(original=exc) from exc + reauthed = True + continue + + if resp.status_code == 429 and backoff_remaining > 0: + logger.warning( + "Rate limited (429), waiting %.1fs (%d retries left)", + self.backoff_timeout, + backoff_remaining, + ) + time.sleep(self.backoff_timeout) + backoff_remaining -= 1 + continue + + if resp.status_code in (502, 504) and retry_count > 0: + logger.warning( + "Gateway error (%d), retrying in %.1fs (%d left)", + resp.status_code, + retry_timeout, + retry_count, + ) + time.sleep(retry_timeout) + retry_count -= 1 + continue + + raise_for_status( + resp.status_code, + method=method, + url=url, + body=body, + expected=ok_codes, + headers=dict(resp.headers), + ) + + def _build_request( + self, + *, + method: str, + url: str, + json: Any | None, + content: bytes | None, + headers: dict[str, str] | None, + ) -> httpx.Request: + """Build a httpx.Request with correct content type.""" + req_headers = dict(self._http.headers) + req_headers["Accept"] = "application/json" + if headers: + req_headers.update(headers) + req_headers.setdefault("User-Agent", USER_AGENT) + + if json is not None: + req_headers.setdefault("Content-Type", "application/json") + return self._http.build_request( + method, url, json=json, headers=req_headers, + ) + if content is not None: + return self._http.build_request( + method, url, content=content, headers=req_headers, + ) + return self._http.build_request( + method, url, headers=req_headers, + ) + + def _apply_auth(self, request: httpx.Request, + service_name: str) -> None: + """Apply auth headers to a request. + + Returns the pre-request token for reauth comparison + (mirrors Go SDK's ``prereqtok`` pattern). 
+ """ + if self.auth_config.auth_mode == AuthMode.AKSK and self.auth_config.access_key: + sign_request( + request, + SignOptions( + access_key=self.auth_config.access_key, + secret_key=_secret_value(self.auth_config.secret_key), + region_name=self.region_id, + service_name=service_name, + ), + ) + # Set project/domain scope headers + if self.project_id and not self.domain_id: + request.headers["x-project-id"] = self.project_id + if self.domain_id: + request.headers["x-domain-id"] = self.domain_id + if self.auth_config.security_token: + request.headers["x-security-token"] = ( + self.auth_config.security_token + ) + elif self.token_id: + request.headers["x-auth-token"] = self.token_id + + # ------------------------------------------------------------------ + # Internal: Auth flows + # ------------------------------------------------------------------ + + def _v3_auth(self) -> None: + """Keystone V3 password/token authentication. + + POST /v3/auth/tokens → extracts token, project, user, catalog. + Sets ``_reauth_func`` for automatic token refresh on 401. + """ + cfg = self.auth_config + + if cfg.token_id: + self.token_id = _secret_value(cfg.token_id) + resp = self._iam_request( + "GET", + self.identity_v3_endpoint + "auth/tokens", + headers={"x-subject-token": self.token_id}, + ) + else: + body = _build_v3_auth_body(cfg) + resp = self._iam_request( + "POST", + self.identity_v3_endpoint + "auth/tokens", + json=body, + ) + self.token_id = resp.headers.get("x-subject-token", "") + + self._extract_auth_result(resp.json()) + + def _v3_auth_with_agency(self) -> None: + """Keystone V3 auth + agency assume_role. + + First authenticates normally (password/token), then issues + a second POST with ``assume_role`` identity method to get + a delegated token. 
+ """ + cfg = self.auth_config + + if not cfg.token_id: + self._v3_auth() + else: + self.token_id = _secret_value(cfg.token_id) + + body = _build_agency_auth_body(cfg) + resp = self._iam_request( + "POST", + self.identity_v3_endpoint + "auth/tokens", + json=body, + ) + self.token_id = resp.headers.get("x-subject-token", "") + + self._extract_auth_result(resp.json()) + + def _aksk_auth(self) -> None: + """AK/SK authentication. + + Does not create a token. Instead, stores AK/SK credentials + for signing future requests and fetches the service catalog + via ``GET /v3/auth/catalog``. + """ + cfg = self.auth_config + self.project_id = self._resolve_project_id( + cfg.project_name) if not cfg.project_id and cfg.project_name \ + else cfg.project_id or "" + self.domain_id = self._resolve_domain_id( + cfg.domain_name) if not cfg.domain_id and cfg.domain_name \ + else cfg.domain_id or "" + self.region_id = cfg.region or "" + + catalog = self._fetch_catalog() + self.endpoint_locator = build_endpoint_locator(catalog, self.region_id) + + def _aksk_auth_with_agency(self) -> None: + """AK/SK auth + agency assume_role. + + First sets up AK/SK signing, then issues a token request + with ``assume_role`` to get a delegated token. After this, + subsequent requests use the token (not AK/SK). 
+ """ + cfg = self.auth_config + self._aksk_auth() + + if not self.domain_id: + raise UnauthorizedError( + method="POST", + url=self.identity_v3_endpoint + "auth/tokens", + body="Agency auth requires domain_id or domain_name", + ) + + body = _build_agency_auth_body(cfg) + resp = self._iam_request( + "POST", + self.identity_v3_endpoint + "auth/tokens", + json=body, + ) + self.token_id = resp.headers.get("x-subject-token", "") + self._extract_auth_result(resp.json()) + + # ------------------------------------------------------------------ + # Internal: IAM helpers + # ------------------------------------------------------------------ + + def _iam_request( + self, + method: str, + url: str, + *, + json: Any | None = None, + headers: dict[str, str] | None = None, + ) -> httpx.Response: + """Send a request to the IAM service. + + AK/SK-signed if in AKSK mode, otherwise uses current token. + Raises on non-2xx. + + Args: + method: HTTP method. + url: Full IAM URL. + json: JSON body. + headers: Extra headers. + + Returns: + httpx.Response. + + Raises: + HttpError: On non-2xx response. + """ + req = self._build_request( + method=method, + url=url, + json=json, + content=None, + headers=headers, + ) + self._apply_auth(req, service_name="iam") + + t0 = time.monotonic() + resp = self._http.send(req) + duration_ms = (time.monotonic() - t0) * 1000 + + _log_response(logger, method, url, resp.status_code, + duration_ms, resp.headers.get("x-request-id", "")) + + if resp.status_code >= 400: + raise_for_status( + resp.status_code, + method=method, + url=url, + body=resp.text, + headers=dict(resp.headers), + ) + return resp + + def _extract_auth_result(self, data: dict[str, Any]) -> None: + """Extract project, user, domain, and catalog from auth response. + + Populates ``project_id``, ``user_id``, ``domain_id``, + ``region_id``, and ``endpoint_locator``. + + Args: + data: Parsed JSON from IAM auth response. 
+ """ + token_data = data.get("token", {}) + + project = token_data.get("project") + if project: + self.project_id = project.get("id", "") + domain = project.get("domain", {}) + if domain: + self.domain_id = domain.get("id", "") + + user = token_data.get("user") + if user: + self.user_id = user.get("id", "") + if not self.domain_id: + domain = user.get("domain", {}) + if domain: + self.domain_id = domain.get("id", "") + + if not self.region_id: + cfg = self.auth_config + self.region_id = cfg.region or cfg.tenant_name or "" + + catalog = token_data.get("catalog", []) + if catalog: + parsed_catalog = [CatalogEntry.model_validate(c) for c in catalog] + self.endpoint_locator = build_endpoint_locator(parsed_catalog, + self.region_id) + + def _fetch_catalog(self) -> list[CatalogEntry]: + """Fetch the service catalog via ``GET /v3/auth/catalog``. + + Used by AK/SK auth where the catalog is not embedded in + a token response. + + Returns: + List of catalog entries. + """ + resp = self._iam_request("GET", + self.identity_v3_endpoint + "auth/catalog") + raw_catalog = resp.json().get("catalog", []) + return [CatalogEntry.model_validate(entry) for entry in raw_catalog] + + def _resolve_project_id(self, name: str) -> str: + """Look up project ID by name via IAM API. + + Args: + name: Project name. + + Returns: + Project ID string. + + Raises: + EndpointNotFoundError: If no project is found. + """ + resp = self._iam_request( + "GET", + self.identity_v3_endpoint + f"projects?name={name}", + ) + data = resp.json() + projects = data.get("projects", []) + if not projects: + raise ValueError(f"Project with name '{name}' not found") + return projects[0]["id"] + + def _resolve_domain_id(self, name: str) -> str: + """Look up domain ID by name via IAM API. + + Args: + name: Domain name. + + Returns: + Domain ID string, or empty string if not found. 
+ """ + resp = self._iam_request("GET", self.identity_v3_endpoint + f"auth/domains?name={name}") + domains = resp.json().get("domains", []) + if not domains: + raise ValueError(f"Domain with name '{name}' not found") + return domains[0]["id"] + +# ====================================================================== +# Module-level helpers +# ====================================================================== + + +def _log_response( + log: logging.Logger, + method: str, + url: str, + status_code: int, + duration_ms: float, + request_id: str, +) -> None: + """Log an HTTP response at the appropriate level. + + - 2xx → DEBUG + - 4xx → WARNING + - 5xx → ERROR + """ + rid = f" [{request_id}]" if request_id else "" + msg = f"{method} {url} → {status_code} ({duration_ms:.0f}ms){rid}" + + if status_code >= 500: + log.error(msg) + elif status_code >= 400: + log.warning(msg) + else: + log.debug(msg) + + +def _secret_value(value: Any) -> str: + """Extract the plain string from a value that may be ``SecretStr``. + + Works transparently with both ``str`` and ``pydantic.SecretStr``, + so callers don't need to know which type ``AuthConfig`` uses + for sensitive fields. + + Args: + value: A ``str`` or ``SecretStr`` instance. + + Returns: + Plain string. + """ + if value is None: + return "" + if hasattr(value, "get_secret_value"): + return value.get_secret_value() + return str(value) + + +def _build_v3_auth_body(cfg: AuthConfig) -> dict[str, Any]: + """Build the JSON body for ``POST /v3/auth/tokens``. + + Constructs the identity and scope sections based on + available credentials (password or token). + + Args: + cfg: Auth configuration. + + Returns: + JSON-serializable dict for the request body. 
+ """ + auth: dict[str, Any] = {"identity": {}} + + if cfg.password: + user: dict[str, Any] = {"password": _secret_value(cfg.password)} + if cfg.user_id: + user["id"] = cfg.user_id + else: + user["name"] = cfg.username + domain: dict[str, str] = {} + if cfg.domain_id: + domain["id"] = cfg.domain_id + elif cfg.domain_name: + domain["name"] = cfg.domain_name + user["domain"] = domain + + auth["identity"]["methods"] = ["password"] + auth["identity"]["password"] = {"user": user} + + if cfg.passcode: + auth["identity"]["methods"].append("totp") + totp_user: dict[str, str] = { + "passcode": _secret_value(cfg.passcode), + } + if cfg.user_id: + totp_user["id"] = cfg.user_id + if cfg.username: + totp_user["name"] = cfg.username + auth["identity"]["totp"] = {"user": totp_user} + + elif cfg.token_id: + auth["identity"]["methods"] = ["token"] + auth["identity"]["token"] = {"id": _secret_value(cfg.token_id)} + + scope = _build_scope(cfg) + if scope: + auth["scope"] = scope + + return {"auth": auth} + + +def _build_agency_auth_body(cfg: AuthConfig) -> dict[str, Any]: + """Build the JSON body for agency ``assume_role`` auth. + + Args: + cfg: Auth configuration with agency fields populated. + + Returns: + JSON-serializable dict for the request body. + """ + auth: dict[str, Any] = { + "identity": { + "methods": ["assume_role"], + "assume_role": { + "domain_name": cfg.agency_domain_name, + "xrole_name": cfg.agency_name, + }, + }, + } + + if cfg.delegated_project and cfg.agency_domain_name: + auth["scope"] = { + "project": { + "name": cfg.delegated_project, + "domain": {"name": cfg.agency_domain_name}, + }, + } + + return {"auth": auth} + + +def _build_scope(cfg: AuthConfig) -> dict[str, Any] | None: + """Build the ``scope`` section of a V3 auth request. + + Args: + cfg: Auth configuration. + + Returns: + Scope dict or None if no scoping fields are set. 
+ """ + project_id = cfg.tenant_id or cfg.project_id + project_name = cfg.tenant_name or cfg.project_name + + if project_id: + return {"project": {"id": project_id}} + + if project_name: + domain: dict[str, str] = {} + if cfg.domain_id: + domain["id"] = cfg.domain_id + elif cfg.domain_name: + domain["name"] = cfg.domain_name + scope: dict[str, Any] = {"project": {"name": project_name}} + if domain: + scope["project"]["domain"] = domain + return scope + + if cfg.domain_id: + return {"domain": {"id": cfg.domain_id}} + if cfg.domain_name: + return {"domain": {"name": cfg.domain_name}} + + return None diff --git a/src/sdk/core/service_client.py b/src/sdk/core/service_client.py new file mode 100644 index 0000000..8967f13 --- /dev/null +++ b/src/sdk/core/service_client.py @@ -0,0 +1,342 @@ +"""Service client — per-service wrapper over ``ProviderClient``. + +Mirrors the Go SDK's ``ServiceClient`` struct. Each OTC service +(compute, DNS, CCE, etc.) gets its own ``ServiceClient`` with +a resolved endpoint and convenience HTTP methods. + +A ``ServiceClient`` delegates all HTTP work to the underlying +``ProviderClient``, adding service-level headers and URL +construction via ``service_url()``. 
+ +Example:: + + from sdk.core.provider import ProviderClient + from sdk.core.service_client import ServiceClient + + provider = ProviderClient(auth_config) + provider.authenticate() + + # Resolve endpoint from catalog + compute = ServiceClient( + provider, + service_type="compute", + region="eu-de", + ) + resp = compute.get("servers/detail") + + # With project-scoped resource base + cce = ServiceClient( + provider, + service_type="ccev2.0", + resource_base=endpoint + "api/v1/projects/" + project_id + "/", + ) + url = cce.service_url("clusters") +""" + +from __future__ import annotations + +from typing import Any + +import httpx + +from sdk.core.endpoint import EndpointOpts +from sdk.core.provider import ProviderClient + + +class ServiceClient: + """Client for a specific OTC service API. + + Wraps a ``ProviderClient`` and holds the resolved service + endpoint. Provides ``service_url()`` for building resource URLs + and convenience HTTP methods that mirror the Go SDK's + ``Get``, ``Post``, ``Put``, ``Patch``, ``Delete``, ``Head``. + + Args: + provider: Authenticated ``ProviderClient``. + service_type: Catalog service type (e.g. ``compute``, ``dns``). + region: Region override. Falls back to ``provider.region_id``. + endpoint_override: Bypass catalog lookup and use this URL + directly. + resource_base: Custom base URL for resource paths. Some + services need a project-scoped base that differs from the + catalog endpoint (see CCE example above). If not set, + ``endpoint`` is used. + extra_headers: Headers merged into every request from this + service client (Go SDK ``MoreHeaders``). + + Attributes: + provider: Reference to the parent ``ProviderClient``. + endpoint: Resolved service endpoint URL (always ends with ``/``). + resource_base: Base URL for ``service_url()`` path building. + service_type: Service type string. + extra_headers: Service-wide headers. 
+ """ + + def __init__( + self, + provider: ProviderClient, + service_type: str = "", + *, + region: str = "", + endpoint_override: str = "", + resource_base: str = "", + extra_headers: dict[str, str] | None = None, + microversion: str = "", + ) -> None: + self.provider = provider + self.service_type = service_type + self.extra_headers: dict[str, str] = extra_headers or {} + self.microversion = microversion + # Resolve endpoint + if endpoint_override: + self.endpoint = _ensure_trailing_slash(endpoint_override) + elif provider.endpoint_locator and service_type: + opts = EndpointOpts(service_type=service_type, region=region) + self.endpoint = provider.endpoint_locator(opts) + else: + self.endpoint = "" + + # Resource base — defaults to endpoint + self.resource_base = ( + _ensure_trailing_slash(resource_base) + if resource_base + else self.endpoint + ) + + # ------------------------------------------------------------------ + # URL building + # ------------------------------------------------------------------ + + def service_url(self, *parts: str) -> str: + """Build a full URL from the resource base and path segments. + + Joins ``resource_base`` with the given path parts using ``/``. + + Example:: + + client.service_url("servers", server_id, "action") + # → "https://ecs.eu-de.../v2.1/servers/{id}/action" + + Args: + *parts: URL path segments to join. + + Returns: + Full URL string. + """ + return self.resource_base + "/".join(parts) + + # ------------------------------------------------------------------ + # HTTP convenience methods + # ------------------------------------------------------------------ + + def get( + self, + path: str, + *, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """GET request. Default ok: 200. + + Args: + path: Relative path appended to ``resource_base``. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. 
+ """ + return self._request("GET", path, headers=headers, ok_codes=ok_codes) + + def post( + self, + path: str, + *, + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """POST request. Default ok: 200, 201, 202. + + Args: + path: Relative path. + json: JSON-serializable body. + content: Raw bytes body. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request( + "POST", path, json=json, content=content, + headers=headers, ok_codes=ok_codes, + ) + + def put( + self, + path: str, + *, + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """PUT request. Default ok: 200, 201, 202. + + Args: + path: Relative path. + json: JSON-serializable body. + content: Raw bytes body. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request( + "PUT", path, json=json, content=content, + headers=headers, ok_codes=ok_codes, + ) + + def patch( + self, + path: str, + *, + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """PATCH request. Default ok: 200, 204. + + Args: + path: Relative path. + json: JSON-serializable body. + content: Raw bytes body. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request( + "PATCH", path, json=json, content=content, + headers=headers, ok_codes=ok_codes, + ) + + def delete( + self, + path: str, + *, + json: Any | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """DELETE request. Default ok: 200, 202, 204. 
+ + Supports optional JSON body for APIs that require + delete-with-body (Go SDK ``DeleteWithBody``). + + Args: + path: Relative path. + json: Optional JSON body. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request( + "DELETE", path, json=json, + headers=headers, ok_codes=ok_codes, + ) + + def head( + self, + path: str, + *, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """HEAD request. Default ok: 204, 206. + + Args: + path: Relative path. + headers: Extra request headers. + ok_codes: Acceptable status codes. + + Returns: + httpx.Response. + """ + return self._request("HEAD", path, headers=headers, ok_codes=ok_codes) + + # ------------------------------------------------------------------ + # Internal + # ------------------------------------------------------------------ + + def _set_microversion_header(self, headers: dict[str, str]) -> None: + """Set microversion headers based on service type. + + Corresponds to Go SDK's ``setMicroversionHeader``. + """ + if not self.microversion: + return + + mv_header_map = { + "compute": "X-OpenStack-Nova-API-Version", + "sharev2": "X-OpenStack-Manila-API-Version", + "volume": "X-OpenStack-Volume-API-Version", + } + + specific = mv_header_map.get(self.service_type) + if specific: + headers[specific] = self.microversion + + if self.service_type: + headers["OpenStack-API-Version"] = ( + f"{self.service_type} {self.microversion}" + ) + + def _request( + self, + method: str, + path: str, + *, + json: Any | None = None, + content: bytes | None = None, + headers: dict[str, str] | None = None, + ok_codes: list[int] | None = None, + ) -> httpx.Response: + """Build full URL, merge service headers, delegate to provider. + + Args: + method: HTTP method. + path: Relative resource path. + json: JSON body. + content: Raw body. + headers: Per-request headers. + ok_codes: Acceptable status codes. 
+ + Returns: + httpx.Response. + """ + url = self.service_url(path) + merged: dict[str, str] = {**self.extra_headers} + if headers: + merged.update(headers) + self._set_microversion_header(merged) + return self.provider.request( + method, + url, + json=json, + content=content, + headers=merged or None, + ok_codes=ok_codes, + ) + + +def _ensure_trailing_slash(url: str) -> str: + """Ensure a URL ends with ``/``.""" + return url if url.endswith("/") else url + "/" diff --git a/src/sdk/core/signer.py b/src/sdk/core/signer.py new file mode 100644 index 0000000..cdb9780 --- /dev/null +++ b/src/sdk/core/signer.py @@ -0,0 +1,470 @@ +"""AK/SK request signing. + +Signs HTTP requests using the AK/SK authentication scheme compatible +with services. + +The signing process follows these steps: + +1. Build a **canonical request** from method, path, query, headers, body. +2. Build a **string to sign** from algorithm, timestamp, scope, and + the hash of the canonical request. +3. **Derive a signing key** from the secret key, date, region, and service. +4. **Compute the signature** and set the ``Authorization`` header + on the request. 
+ +Example:: + + import httpx + from t_cloud.core.signer import sign_request, SignOptions + + opts = SignOptions( + access_key="AK...", + secret_key="SK...", + region_name="eu-de", + service_name="dns", + ) + request = httpx.Request("GET", "https://dns.eu-de.otc.t-systems.com/v2/zones") + sign_request(request, opts) + # request now has Authorization and X-Sdk-Date headers +""" + +from __future__ import annotations + +import hashlib +import hmac +import re +import logging +import threading +from collections import OrderedDict +from datetime import UTC, datetime, timedelta +from urllib.parse import quote + +import httpx +from pydantic import BaseModel, ConfigDict, computed_field, SecretStr + +logger = logging.getLogger(__name__) + +SIGN_ALGORITHM_HMAC_SHA256 = "SDK-HMAC-SHA256" + +_SUPPORTED_ALGORITHMS = frozenset({ + SIGN_ALGORITHM_HMAC_SHA256, +}) + +_CONTENT_SHA256_HEADER = "x-sdk-content-sha256" + +_SPACE_RE = re.compile(r"\s+") + +_SIGNED_HEADERS_WHITELIST = frozenset({ + "host", + "content-type", + "x-sdk-date", + "x-sdk-content-sha256", +}) + +# === 1. TYPES & CACHE === + + +class _SignKeyCache: + """Thread-safe LRU-like cache for derived signing keys. + + Evicts the oldest entry when ``max_count`` is reached. + + Args: + max_count: Maximum number of cached entries. + """ + + def __init__(self, max_count: int = 300) -> None: + self._max_count = max_count + self._lock = threading.Lock() + self._store: OrderedDict[str, _SignKeyCacheEntry] = OrderedDict() + + def get(self, key: str) -> _SignKeyCacheEntry | None: + with self._lock: + return self._store.get(key) + + def put(self, key: str, entry: _SignKeyCacheEntry) -> None: + with self._lock: + if len(self._store) >= self._max_count and self._store: + self._store.popitem(last=False) + self._store[key] = entry + + +class _SignKeyCacheEntry(BaseModel): + """Cached signing key with its day-of-epoch stamp. 
+ """ + model_config = ConfigDict(frozen=True) + key: bytes + days_since_epoch: int + + +class _SignParams(BaseModel): + """Resolved signing parameters. + """ + + access_key: str + secret_key: SecretStr + region_name: str + service_name: str + sign_algorithm: str + enable_cache_sign_key: bool + signing_time: datetime + + @computed_field + @property + def formatted_date(self) -> str: + return _format_date(self.signing_time) + + @computed_field + @property + def formatted_datetime(self) -> str: + return _format_datetime(self.signing_time) + + @property + def scope(self) -> str: + return ( + f"{self.formatted_date}/" + f"{self.region_name}/" + f"{self.service_name}/" + f"sdk_request" + ) + + @property + def days_since_epoch(self) -> int: + """Number of days since Unix epoch for the signing time.""" + ts = int(self.signing_time.timestamp()) + return ts // 86400 + + +class SignOptions(BaseModel): + """Options for signing a request. + + Args: + access_key: AK/SK access key. + secret_key: AK/SK secret key. + region_name: Target region (e.g. ``eu-de``). + service_name: Service identifier (e.g. ``dns``, ``cce``). + sign_algorithm: Signing algorithm. Defaults to + ``SDK-HMAC-SHA256``. Must be a value from + ``_SUPPORTED_ALGORITHMS``. + enable_cache_sign_key: Cache the derived signing key for + one day. Disabled by default (matches Go SDK default). + time_offset_seconds: Offset in seconds to adjust the + signing timestamp. Useful when the client clock is + out of sync with the server. + """ + model_config = ConfigDict(frozen=True) + + access_key: str + secret_key: SecretStr + region_name: str = "" + service_name: str = "" + sign_algorithm: str = SIGN_ALGORITHM_HMAC_SHA256 + enable_cache_sign_key: bool = False + time_offset_seconds: int = 0 + + +_cache = _SignKeyCache() + + +# === 2. 
PUBLIC API === + + +def sign_request( + request: httpx.Request, + opts: SignOptions, + *, + timestamp: datetime | None = None, +) -> None: + """Sign a httpx request in place with AK/SK credentials. + + Adds ``X-Sdk-Date``, ``Host``, and ``Authorization`` headers. + + Args: + request: The httpx request to sign (modified in place). + opts: Signing credentials and scope. + timestamp: Override the signing time (for testing). + Defaults to ``datetime.now(UTC)``. + """ + params = _build_sign_params(opts, timestamp) + host = request.url.host or "" + if request.url.port and request.url.port not in (80, 443): + host = f"{host}:{request.url.port}" + request.headers["host"] = host + request.headers["x-sdk-date"] = params.formatted_datetime + + _sign_with_params(request, params) + + +def re_sign_request( + request: httpx.Request, + opts: SignOptions, + *, + timestamp: datetime | None = None, +) -> None: + """Re-sign a request for redirection. + + Corresponds to Go SDK's ``ReSign``. + Overwrites ``X-Sdk-Date`` and removes the old ``Authorization`` + header before re-signing. + + Args: + request: The httpx request to re-sign (modified in place). + opts: Signing credentials and scope. + timestamp: Override the signing time (for testing). + """ + params = _build_sign_params(opts, timestamp) + + # Overwrite date, remove stale auth (matches Go's setRequiredHeaders) + request.headers["x-sdk-date"] = params.formatted_datetime + request.headers.pop("authorization", None) + + _sign_with_params(request, params) + + +# === 3. CORE SIGNING FLOW === + + +def _build_sign_params( + opts: SignOptions, + timestamp: datetime | None, +) -> _SignParams: + """Build resolved signing parameters from options. + + Strips whitespace from keys (matches Go SDK behavior) + and applies time offset. 
+ """ + algorithm = opts.sign_algorithm or SIGN_ALGORITHM_HMAC_SHA256 + if algorithm not in _SUPPORTED_ALGORITHMS: + raise ValueError( + f"Unsupported signing algorithm '{algorithm}', " + f"supported: {sorted(_SUPPORTED_ALGORITHMS)}" + ) + + base_time = timestamp if timestamp is not None else datetime.now(UTC) + signing_time = base_time - timedelta(seconds=opts.time_offset_seconds) + clean_secret = opts.secret_key.get_secret_value().strip() + return _SignParams( + access_key=opts.access_key.strip(), + secret_key=SecretStr(clean_secret), + region_name=opts.region_name, + service_name=opts.service_name, + sign_algorithm=algorithm, + enable_cache_sign_key=opts.enable_cache_sign_key, + signing_time=signing_time, + ) + +def _sign_with_params( + request: httpx.Request, + params: _SignParams, +) -> None: + """Core signing logic shared by ``sign_request`` and ``re_sign_request``.""" + content_sha256 = request.headers.get( + _CONTENT_SHA256_HEADER, + _hash_sha256(_read_body(request)), + ) + canonical = _canonical_request(request, content_sha256) + logger.debug("Canonical Request:\n%s", canonical) + string_to_sign = "\n".join([ + params.sign_algorithm, + params.formatted_datetime, + params.scope, + _hash_sha256(canonical.encode()), + ]) + + signing_key = _derive_signing_key(params) + + signature = _compute_signature( + string_to_sign, signing_key, params.sign_algorithm, + ).hex() + + signed_headers = _signed_headers_string(request) + credential = f"{params.access_key}/{params.scope}" + request.headers["authorization"] = ( + f"{params.sign_algorithm} " + f"Credential={credential}, " + f"SignedHeaders={signed_headers}, " + f"Signature={signature}" + ) + +def _derive_signing_key(params: _SignParams) -> bytes: + """Derive the signing key, optionally using cache. + + Corresponds to Go SDK's ``deriveSigningKey``. + When caching is enabled, the key is cached per + (secret, region, service) and valid for one day. 
+ """ + secret = params.secret_key.get_secret_value() + if not params.enable_cache_sign_key: + return _build_sign_key(params) + + h_secret = _hash_sha256(secret.encode()) + cache_key = f"{h_secret}-{params.region_name}-{params.service_name}" + + cached = _cache.get(cache_key) + if cached is not None and cached.days_since_epoch == params.days_since_epoch: + return cached.key + + sign_key = _build_sign_key(params) + _cache.put(cache_key, _SignKeyCacheEntry( + key=sign_key, + days_since_epoch=params.days_since_epoch, + )) + return sign_key + +def _build_sign_key(params: _SignParams) -> bytes: + """Build signing key from secret + scope components. + + Corresponds to Go SDK's ``buildSignKey``:: + + kDate = HMAC("SDK" + secret, date) + kRegion = HMAC(kDate, region) + kService = HMAC(kRegion, service) + kSigning = HMAC(kService, "sdk_request") + """ + algorithm = params.sign_algorithm + k_secret = f"SDK{params.secret_key.get_secret_value()}".encode() + k_date = _compute_signature(params.formatted_date, k_secret, algorithm) + k_region = _compute_signature(params.region_name, k_date, algorithm) + k_service = _compute_signature(params.service_name, k_region, algorithm) + return _compute_signature("sdk_request", k_service, algorithm) + +# === 4. CANONICALIZATION === + +def _canonical_path(request: httpx.Request) -> str: + """Build the canonical URI path. + Uses the decoded path and re-encodes it to avoid double encoding. + """ + path = request.url.path + if not path.startswith("/"): + path = "/" + path + path = _url_encode(path, is_path=True) + return path or "/" + +def _canonical_query(request: httpx.Request) -> str: + """Build the canonical query string. + Parameters are sorted by encoded key (case-insensitive). + Duplicate keys are preserved. 
+ """ + if _use_payload_for_query(request): + return "" + + pairs = request.url.params.multi_items() + if not pairs: + return "" + + encoded = [(_url_encode(k), _url_encode(v)) for k, v in pairs] + encoded.sort(key=lambda p: (p[0].lower(), p[1])) + + return "&".join(f"{k}={v}" for k, v in encoded) + +def _get_signable_headers(request: httpx.Request) -> dict[str, str]: + result = {} + for key, value in request.headers.items(): + k = key.lower() + if k in _SIGNED_HEADERS_WHITELIST or k.startswith("x-sdk-"): + result[k] = value + return result + +def _canonical_headers(request: httpx.Request) -> str: + """Build canonical header string. + Headers are lowercased, sorted, and whitespace-collapsed. + """ + headers = _get_signable_headers(request) + + lines = [] + for key in sorted(headers.keys()): + name = _SPACE_RE.sub(" ", key.strip()) + value = _SPACE_RE.sub(" ", headers[key].strip()) + lines.append(f"{name}:{value}\n") + return "".join(lines) + +def _signed_headers_string(request: httpx.Request) -> str: + """Build the semicolon-separated signed headers list. + """ + headers = _get_signable_headers(request) + return ";".join(sorted(headers.keys())) + +def _canonical_request(request: httpx.Request, content_sha256: str) -> str: + """Assemble the full canonical request string. + + Format:: + + METHOD + CanonicalURI + CanonicalQueryString + CanonicalHeaders + SignedHeaders + ContentHash + """ + return "\n".join([ + request.method, + _canonical_path(request), + _canonical_query(request), + _canonical_headers(request), + _signed_headers_string(request), + content_sha256, + ]) + +# === 5. 
UTILS ===

+def _hash_sha256(data: bytes) -> str:
+    """Hex-encoded SHA-256 hash."""
+    return hashlib.sha256(data).hexdigest()
+
+def _hmac_sha256(data: str, key: bytes) -> bytes:
+    """HMAC-SHA256 of string data with byte key."""
+    return hmac.new(key, data.encode(), hashlib.sha256).digest()
+
+def _compute_signature(data: str, key: bytes, algorithm: str) -> bytes:
+    """Compute signature with the specified algorithm.
+
+    Corresponds to Go SDK's ``computeSignature``.
+
+    Raises:
+        ValueError: If the algorithm is not supported.
+    """
+    if algorithm == SIGN_ALGORITHM_HMAC_SHA256:
+        return _hmac_sha256(data, key)
+    raise ValueError(
+        f"Unsupported algorithm '{algorithm}', "
+        f"supported: {sorted(_SUPPORTED_ALGORITHMS)}"
+    )
+
+def _format_datetime(dt: datetime) -> str:
+    """Format timestamp as ``20060102T150405Z``."""
+    return dt.astimezone(UTC).strftime("%Y%m%dT%H%M%SZ")
+
+def _format_date(dt: datetime) -> str:
+    """Format date as ``20060102``."""
+    return dt.astimezone(UTC).strftime("%Y%m%d")
+
+def _read_body(request: httpx.Request) -> bytes:
+    """Read the request body as bytes.
+
+    For POST with no body, uses the query string as content
+    (matches Go SDK's ``calculateContentHash``).
+    """
+    if _use_payload_for_query(request):
+        return str(request.url.params).encode()
+    try:
+        return request.content or b""
+    except httpx.RequestNotRead as e:
+        raise RuntimeError(
+            "Streaming bodies are not supported for AK/SK signing. "
+            "The request content must be fully loaded in memory."
+        ) from e
+
+def _use_payload_for_query(request: httpx.Request) -> bool:
+    """Return True when the query string stands in for an empty POST body."""
+    if request.method.upper() != "POST":
+        return False
+    try:
+        # A streaming (unread) body is never substituted by the query
+        # string; _read_body then raises its explicit RuntimeError.
+        return not request.content
+    except httpx.RequestNotRead:
+        return False
+
+def _url_encode(value: str, *, is_path: bool = False) -> str:
+    """URL-encode a value, preserving ``/`` in paths.
+ """ + safe = "/-_.~" if is_path else "-_.~" + return quote(value, safe=safe) diff --git a/src/sdk/core/waiter.py b/src/sdk/core/waiter.py new file mode 100644 index 0000000..ba7c9ff --- /dev/null +++ b/src/sdk/core/waiter.py @@ -0,0 +1,66 @@ +"""Waiters for eventual consistency and long-running operations.""" + +from __future__ import annotations + +import logging +import time +from collections.abc import Callable +from typing import TypeVar, Any + +from sdk.core.exceptions import HttpError + +logger = logging.getLogger(__name__) + +T = TypeVar("T") + + +def wait_for( + func: Callable[[], T], + condition: Callable[[T], bool], + timeout: int = 60, + interval: float = 2.0, + label: str = "resource", +) -> T: + """Generic waiter that polls 'func' until 'condition' is True.""" + start_time = time.monotonic() + + while True: + try: + result = func() + if condition(result): + return result + except Exception as exc: + logger.debug("Waiter [%s] caught temporary error: %s", + label, exc) + + if time.monotonic() - start_time > timeout: + raise TimeoutError( + f"Timed out waiting for {label} after {timeout}s" + ) + + time.sleep(interval) + + +def wait_for_delete( + get_func: Callable[[], Any], + timeout: int = 60, + interval: float = 2.0, + label: str = "resource", +) -> None: + """Specialized waiter that polls until the resource returns 404.""" + start_time = time.monotonic() + + while True: + try: + get_func() + except HttpError as exc: + if exc.status_code == 404: + return + raise + + if time.monotonic() - start_time > timeout: + raise TimeoutError( + f"Timed out waiting for {label} deletion after {timeout}s" + ) + + time.sleep(interval) diff --git a/src/sdk/services/__init__.py b/src/sdk/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sdk/services/vpc/__init__.py b/src/sdk/services/vpc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/sdk/services/vpc/v1/__init__.py b/src/sdk/services/vpc/v1/__init__.py new file 
mode 100644 index 0000000..e69de29 diff --git a/src/sdk/services/vpc/v1/models.py b/src/sdk/services/vpc/v1/models.py new file mode 100644 index 0000000..296a5e5 --- /dev/null +++ b/src/sdk/services/vpc/v1/models.py @@ -0,0 +1,155 @@ +"""VPC v1 data models. + +Pydantic models for VPC API v1 request options and response objects. +Based on OTC VPC API Reference: +https://docs.otc.t-systems.com/virtual-private-cloud/api-ref/vpc_apis_v1_v2/virtual_private_cloud/ + +All response models use ``model_validate(resp.json()["vpc"])`` pattern, +replacing the Go SDK's ``Result.Extract()`` approach. +""" + +from __future__ import annotations + +from pydantic import BaseModel, Field + + +class Route(BaseModel): + """VPC route entry. + + Attributes: + destination: Destination CIDR block (IPv4 or IPv6). + nexthop: Next hop IP address within the VPC subnet. + """ + + destination: str = "" + nexthop: str = "" + + +class Vpc(BaseModel): + """VPC resource returned by the API. + + Attributes: + id: VPC UUID. + name: VPC name (max 64 chars). + description: Supplementary info (max 255 chars). + cidr: IP range for subnets in CIDR format. + status: ``OK`` or ``CREATING``. + enterprise_project_id: Enterprise project UUID or ``"0"``. + routes: List of route entries. + enable_shared_snat: Whether shared SNAT is enabled. + tenant_id: Project ID (same as project_id). + created_at: UTC creation time ``yyyy-MM-ddTHH:mm:ss``. + updated_at: UTC update time ``yyyy-MM-ddTHH:mm:ss``. + """ + + id: str + name: str = "" + description: str = "" + cidr: str = "" + status: str = "" + enterprise_project_id: str = Field(default="0", alias="enterprise_project_id") + routes: list[Route] = Field(default_factory=list) + enable_shared_snat: bool = False + tenant_id: str = "" + created_at: str = "" + updated_at: str = "" + + model_config = {"populate_by_name": True} + + +class CreateVpcOpts(BaseModel): + """Options for creating a VPC. + + All fields are optional per the API spec. 
+ + Attributes: + name: VPC name (max 64 chars). + description: Supplementary info (max 255 chars). + cidr: Available IP address range for subnets. + enterprise_project_id: Enterprise project UUID or ``"0"``. + """ + + name: str = "" + description: str = "" + cidr: str = "" + enterprise_project_id: str = "" + + def to_request_body(self) -> dict: + """Build the ``{"vpc": {...}}`` request body. + + Only includes non-empty fields, matching OTC API behavior + where absent fields keep their defaults. + """ + body: dict = {} + if self.name: + body["name"] = self.name + if self.description: + body["description"] = self.description + if self.cidr: + body["cidr"] = self.cidr + if self.enterprise_project_id: + body["enterprise_project_id"] = self.enterprise_project_id + return {"vpc": body} + + +class UpdateVpcOpts(BaseModel): + """Options for updating a VPC. + + All fields are optional. Only provided fields are sent. + + Attributes: + name: New VPC name. + description: New description. + cidr: New CIDR block (must contain all existing subnets). + routes: Replacement route list. + """ + + name: str = "" + description: str = "" + cidr: str = "" + routes: list[Route] | None = None + + def to_request_body(self) -> dict: + """Build the ``{"vpc": {...}}`` request body. + + Only includes non-empty/non-None fields. + """ + body: dict = {} + if self.name: + body["name"] = self.name + if self.description: + body["description"] = self.description + if self.cidr: + body["cidr"] = self.cidr + if self.routes is not None: + body["routes"] = [r.model_dump() for r in self.routes] + return {"vpc": body} + + +class ListVpcsOpts(BaseModel): + """Query parameters for listing VPCs. + + Attributes: + id: Filter by VPC ID. + limit: Page size (0 to 2^31-1, default 2000). + marker: Resource ID to start pagination from. + enterprise_project_id: Filter by enterprise project. 
+ """ + + id: str = "" + limit: int = 0 + marker: str = "" + enterprise_project_id: str = "" + + def to_query_params(self) -> dict[str, str]: + """Build query parameter dict, omitting empty values.""" + params: dict[str, str] = {} + if self.id: + params["id"] = self.id + if self.limit: + params["limit"] = str(self.limit) + if self.marker: + params["marker"] = self.marker + if self.enterprise_project_id: + params["enterprise_project_id"] = self.enterprise_project_id + return params diff --git a/src/sdk/services/vpc/v1/requests.py b/src/sdk/services/vpc/v1/requests.py new file mode 100644 index 0000000..522b417 --- /dev/null +++ b/src/sdk/services/vpc/v1/requests.py @@ -0,0 +1,135 @@ +"""VPC v1 API operations. + +Free functions implementing CRUD for Virtual Private Clouds. +Each function takes a ``ServiceClient`` as first argument — +the functional style from our architecture (no class methods, +easy to mock and generate). + +Usage:: + + from sdk.services.vpc.v1 import requests as vpc + from sdk.services.vpc.v1.models import CreateVpcOpts + + # client is a ServiceClient for service_type="vpc" + new_vpc = vpc.create(client, CreateVpcOpts( + name="my-vpc", + cidr="192.168.0.0/16", + )) + print(new_vpc.id, new_vpc.status) + + all_vpcs = vpc.list(client) + for v in all_vpcs: + print(v.name) +""" + +from __future__ import annotations + +from collections.abc import Generator, Iterator + +from sdk.core.pagination import marker_paginate +from sdk.core.service_client import ServiceClient + +from .models import CreateVpcOpts, ListVpcsOpts, UpdateVpcOpts, Vpc +from .urls import base_url, resource_url + + +def create(client: ServiceClient, opts: CreateVpcOpts) -> Vpc: + """Create a VPC. + + ``POST /v1/{project_id}/vpcs`` + + Args: + client: VPC service client. + opts: Creation options. + + Returns: + Created VPC resource. 
+ """ + url = base_url() + resp = client.post(url, json=opts.to_request_body()) + return Vpc.model_validate(resp.json()["vpc"]) + + +def get(client: ServiceClient, vpc_id: str) -> Vpc: + """Get VPC details. + + ``GET /v1/{project_id}/vpcs/{vpc_id}`` + + Args: + client: VPC service client. + vpc_id: VPC UUID. + + Returns: + VPC resource. + """ + url = resource_url(vpc_id) + resp = client.get(url) + return Vpc.model_validate(resp.json()["vpc"]) + + +def list( + client: ServiceClient, + opts: ListVpcsOpts | None = None, +) -> Iterator[Vpc]: + """List VPCs with auto-pagination. + + ``GET /v1/{project_id}/vpcs`` + + Uses marker-based pagination. Yields VPC objects one by one, + fetching next pages automatically. + + Args: + client: VPC service client. + opts: Optional query filters (limit, marker, etc.). + + Yields: + VPC resources. + """ + params = opts.to_query_params() if opts else None + limit = opts.limit if (opts and opts.limit is not None + and opts.limit > 0) else 0 + + return marker_paginate( + client=client, + path=base_url(), + items_key="vpcs", + model=Vpc, + marker_key="id", + limit=limit, + params=params, + ) + + +def update( + client: ServiceClient, + vpc_id: str, + opts: UpdateVpcOpts, +) -> Vpc: + """Update a VPC. + + ``PUT /v1/{project_id}/vpcs/{vpc_id}`` + + Args: + client: VPC service client. + vpc_id: VPC UUID. + opts: Fields to update. + + Returns: + Updated VPC resource. + """ + url = resource_url(vpc_id) + resp = client.put(url, json=opts.to_request_body()) + return Vpc.model_validate(resp.json()["vpc"]) + + +def delete(client: ServiceClient, vpc_id: str) -> None: + """Delete a VPC. + + ``DELETE /v1/{project_id}/vpcs/{vpc_id}`` + + Args: + client: VPC service client. + vpc_id: VPC UUID. 
+ """ + url = resource_url(vpc_id) + client.delete(url) diff --git a/src/sdk/services/vpc/v1/urls.py b/src/sdk/services/vpc/v1/urls.py new file mode 100644 index 0000000..bc34647 --- /dev/null +++ b/src/sdk/services/vpc/v1/urls.py @@ -0,0 +1,38 @@ +"""VPC v1 URL construction helpers. + +All VPC v1 endpoints follow the pattern:: + + /v1/{project_id}/vpcs[/{vpc_id}] + +The ``project_id`` is obtained from the ``ServiceClient`` via +its provider. +""" + +from __future__ import annotations + +from sdk.core.service_client import ServiceClient + + +def base_url() -> str: + """Return the VPC collection URL: ``vpcs``. + + Args: + client: VPC service client. + + Returns: + Relative URL string. + """ + return f"vpcs" + + +def resource_url(vpc_id: str) -> str: + """Return a single VPC resource URL: ``vpcs/{vpc_id}``. + + Args: + client: VPC service client. + vpc_id: VPC UUID. + + Returns: + Relative URL string. + """ + return f"vpcs/{vpc_id}" diff --git a/tests/acceptance/services/vpc/v1/test_vpc_lifecycle.py b/tests/acceptance/services/vpc/v1/test_vpc_lifecycle.py new file mode 100644 index 0000000..908c4a3 --- /dev/null +++ b/tests/acceptance/services/vpc/v1/test_vpc_lifecycle.py @@ -0,0 +1,138 @@ +"""VPC v1 acceptance test — full lifecycle against real OTC. 
+ +Runs: create → get → list → update → delete + +Requirements: + - A valid clouds.yaml file (in ./, ~/.config/openstack/, or /etc/openstack/) + - Network access to OTC API + +Usage:: + + # Default cloud name is 'otc', but you can override it: + export OS_CLOUD="my-dev-env" + + # Run the tests with output (-s): + uv run pytest tests/acceptance/services/vpc/ -v -s +""" + +from __future__ import annotations + +import os +import uuid +import logging +import pytest +from collections.abc import Iterator +from sdk.core.config import load_from_yaml +from sdk.core.exceptions import HttpError +from sdk.core.provider import ProviderClient +from sdk.core.service_client import ServiceClient +from sdk.core.waiter import wait_for, wait_for_delete +from sdk.services.vpc.v1 import requests as vpc +from sdk.services.vpc.v1.models import CreateVpcOpts, ListVpcsOpts, UpdateVpcOpts + + +logger = logging.getLogger(__name__) + + +@pytest.fixture(scope="module") +def vpc_client() -> Iterator[ServiceClient]: + cloud_name = os.environ.get("OS_CLOUD", "otc") + config = load_from_yaml(cloud_name) + + provider = ProviderClient(config) + provider.authenticate() + yield ServiceClient(provider, service_type="vpc") + provider.close() + + +def test_vpc_lifecycle(vpc_client: ServiceClient) -> None: + """Full CRUD lifecycle: create → get → list → update → delete. + + Creates a VPC with a unique name, verifies all operations + work against the real API, then cleans up. 
+ """ + unique = uuid.uuid4().hex[:8] + vpc_name = f"sdk-test-{unique}" + vpc_id: str | None = None + + try: + # ── Create ────────────────────────────────────────── + created = vpc.create( + vpc_client, + CreateVpcOpts( + name=vpc_name, + cidr="192.168.0.0/16", + description="acceptance test vpc", + ), + ) + vpc_id = created.id + + assert created.id, "VPC must have an ID" + assert created.name == vpc_name + assert created.cidr == "192.168.0.0/16" + assert created.status in ("OK", "CREATING", "PENDING") + logger.info("Created VPC: %s (%s)", created.id, created.name) + + # ── Get ───────────────────────────────────────────── + fetched = wait_for( + func=lambda: vpc.get(vpc_client, vpc_id), + condition=lambda new_vpc: new_vpc.status == "OK", + label=f"VPC {vpc_id} to be OK" + ) + assert fetched.status == "OK" + assert fetched.id == vpc_id + assert fetched.name == vpc_name + assert fetched.cidr == "192.168.0.0/16" + logger.info("Get VPC: %s, status=%s", fetched.id, fetched.status) + + # ── List ──────────────────────────────────────────── + found = False + for v in vpc.list(vpc_client, ListVpcsOpts(limit=100)): + if v.id == vpc_id: + found = True + break + + assert found, f"Created VPC {vpc_id} not found in list" + logger.info("List VPCs: found %s in results", vpc_id) + + # ── Update ────────────────────────────────────────── + new_name = f"sdk-test-updated-{unique}" + updated = vpc.update( + vpc_client, + vpc_id, + UpdateVpcOpts( + name=new_name, + description="updated by acceptance test", + ), + ) + + assert updated.id == vpc_id + assert updated.name == new_name + assert updated.description == "updated by acceptance test" + logger.info("Updated VPC: name -> %s", updated.name) + + # ── Verify update via get ─────────────────────────── + refetched = vpc.get(vpc_client, vpc_id) + assert refetched.name == new_name + logger.info("Verified update via get") + + # ── Delete ────────────────────────────────────────── + vpc.delete(vpc_client, vpc_id) + wait_for_delete( + 
get_func=lambda: vpc.get(vpc_client, vpc_id), + label=f"VPC {vpc_id}" + ) + vpc_id = None + with pytest.raises(HttpError) as exc_info: + vpc.get(vpc_client, created.id) + + assert exc_info.value.status_code == 404 + logger.info("Confirmed VPC is gone (404)") + + finally: + if vpc_id is not None: + try: + vpc.delete(vpc_client, vpc_id) + logger.warning("Cleanup: deleted left-over VPC %s", vpc_id) + except Exception as exc: + logger.error("Cleanup failed: %s", exc) \ No newline at end of file diff --git a/tests/functional/__init__.py b/tests/functional/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/functional/conftest.py b/tests/functional/conftest.py new file mode 100644 index 0000000..ef4d433 --- /dev/null +++ b/tests/functional/conftest.py @@ -0,0 +1,45 @@ +from __future__ import annotations + +from collections.abc import Callable + +import httpx + +from sdk.core.auth import AuthConfig +from sdk.core.endpoint import CatalogEntry, build_endpoint_locator +from sdk.core.provider import ProviderClient + + +FAKE_CATALOG = [ + CatalogEntry( + type="vpc", + endpoints=[ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://vpc.eu-de.otc.t-systems.com/v1/test-project-id", + }, + ], + ), +] + + +def make_provider( + handler: Callable[[httpx.Request], httpx.Response], +) -> ProviderClient: + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK_TEST", + secret_key="SK_TEST", + region="eu-de", + ) + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + provider = ProviderClient(cfg, http_client=http_client) + + provider.project_id = "test-project-id" + provider.token_id = "test-token" + provider.endpoint_locator = build_endpoint_locator( + FAKE_CATALOG, "eu-de", + ) + + return provider \ No newline at end of file diff --git a/tests/functional/test_headers.py b/tests/functional/test_headers.py new file mode 100644 index 0000000..ce62e1d --- /dev/null 
+++ b/tests/functional/test_headers.py @@ -0,0 +1,143 @@ +"""Functional tests for HTTP headers merging. + +Verifies that the full pipeline (ProviderClient → ServiceClient → +HTTP request) correctly merges headers from all layers: +default, service-level, per-request, and auth. + +Uses httpx.MockTransport to capture actual outgoing requests. +""" + +from __future__ import annotations + +import httpx + +from sdk.core.service_client import ServiceClient + +from .conftest import make_provider + + +class TestDefaultHeaders: + """Default headers (Accept, User-Agent) are always present.""" + + def test_accept_header(self): + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + client.get("test-path") + + assert "application/json" in captured[0].headers["accept"] + + def test_user_agent_header(self): + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + client.get("test-path") + + assert "user-agent" in captured[0].headers + + def test_content_type_on_json_body(self): + """POST with json body sets Content-Type: application/json.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + client.post("vpcs", json={"vpc": {"name": "test"}}) + + assert "application/json" in captured[0].headers["content-type"] + + +class TestServiceHeaders: + """ServiceClient.extra_headers arrive in the final request.""" + + def test_extra_headers_sent(self): + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + 
captured.append(req) + return httpx.Response(200, json={}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + client.extra_headers["X-Language"] = "en-us" + client.extra_headers["X-Custom-Service"] = "my-value" + + client.get("test-path") + + assert captured[0].headers["x-language"] == "en-us" + assert captured[0].headers["x-custom-service"] == "my-value" + + +class TestPerRequestHeaders: + """Per-request headers override service-level headers.""" + + def test_override_service_header(self): + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + client.extra_headers["X-Language"] = "en-us" + + client.get("test-path", headers={"X-Language": "de-de"}) + + assert captured[0].headers["x-language"] == "de-de" + + +class TestAuthHeaders: + """Auth headers are injected into the request.""" + + def test_aksk_authorization_present(self): + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + client.get("test-path") + + assert "authorization" in captured[0].headers + + +class TestAllHeadersCombined: + """All header layers work together in a single request.""" + + def test_default_service_request_and_auth(self): + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + client.extra_headers["X-Custom"] = "service-level" + + client.get("test-path", headers={"X-Request-Id": "req-123"}) + + req = captured[0] + assert "application/json" in captured[0].headers["accept"] + assert "user-agent" in req.headers + assert 
req.headers["x-custom"] == "service-level" + assert req.headers["x-request-id"] == "req-123" + assert "authorization" in req.headers diff --git a/tests/functional/test_ok_codes.py b/tests/functional/test_ok_codes.py new file mode 100644 index 0000000..315accc --- /dev/null +++ b/tests/functional/test_ok_codes.py @@ -0,0 +1,136 @@ +"""Functional tests for ok_codes error handling. + +Verifies that the full pipeline (ProviderClient → ServiceClient → +HTTP request) correctly raises HttpError when the response status +code is not in the expected ok_codes list. + +Uses httpx.MockTransport to simulate server responses. +""" + +from __future__ import annotations + +import httpx +import pytest + +from sdk.core.exceptions import HttpError +from sdk.core.service_client import ServiceClient + +from .conftest import make_provider + + +class TestUnexpectedStatusRaises: + """Non-ok status codes must raise HttpError.""" + + def test_500_raises(self): + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(500, json={"error": "internal"}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + with pytest.raises(HttpError) as exc_info: + client.get("test-path") + + assert exc_info.value.status_code == 500 + + def test_404_raises(self): + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(404, json={"error": {"message": "not found"}}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + with pytest.raises(HttpError) as exc_info: + client.get("vpcs/nonexistent-id") + + assert exc_info.value.status_code == 404 + + def test_403_raises(self): + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(403, json={"error": "forbidden"}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + with pytest.raises(HttpError) as exc_info: + client.get("test-path") + + assert exc_info.value.status_code == 403 + + +class TestCustomOkCodes: + 
"""Custom ok_codes override default behavior.""" + + def test_custom_ok_codes_accepted(self): + """ok_codes=[204] allows 204 on GET (normally only 200).""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(204) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + resp = client.get("test-path", ok_codes=[204]) + assert resp.status_code == 204 + + def test_custom_ok_codes_rejects_unexpected(self): + """ok_codes=[200] rejects 201.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(201, json={}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + with pytest.raises(HttpError) as exc_info: + client.get("test-path", ok_codes=[200]) + + assert exc_info.value.status_code == 201 + + +class TestDefaultOkCodesPerMethod: + """Each HTTP method has its own default ok_codes.""" + + def test_post_accepts_201(self): + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(201, json={"vpc": {}}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + resp = client.post("vpcs", json={"vpc": {}}) + assert resp.status_code == 201 + + def test_delete_accepts_204(self): + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(204) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + resp = client.delete("vpcs/some-id") + assert resp.status_code == 204 + + def test_put_accepts_200(self): + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={"vpc": {}}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + resp = client.put("vpcs/some-id", json={"vpc": {}}) + assert resp.status_code == 200 + + +class TestErrorContainsDebugInfo: + """HttpError must contain enough info for debugging.""" + + def test_status_code_in_error(self): + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(409, 
json={"error": "conflict"}) + + provider = make_provider(handler) + client = ServiceClient(provider, "vpc") + + with pytest.raises(HttpError) as exc_info: + client.post("vpcs", json={}) + + assert exc_info.value.status_code == 409 diff --git a/tests/unit/core/test_auth.py b/tests/unit/core/test_auth.py new file mode 100644 index 0000000..10a8fe9 --- /dev/null +++ b/tests/unit/core/test_auth.py @@ -0,0 +1,126 @@ +"""Tests for sdk.core.auth.""" + +import pytest + +from sdk.core.auth import AuthConfig, AuthMode +from sdk.core.exceptions import MissingCredentialsError + + +class TestAuthModeDetection: + """AuthConfig should auto-detect strategy from provided fields.""" + + def test_password_mode(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="my_domain", + ) + assert cfg.auth_mode == AuthMode.PASSWORD + + def test_aksk_mode(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK_TEST", + secret_key="SK_TEST", + project_id="project123", + ) + assert cfg.auth_mode == AuthMode.AKSK + + def test_token_mode(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + token_id="gAAAA_test_token", + ) + assert cfg.auth_mode == AuthMode.TOKEN + + def test_aksk_takes_priority_over_password(self): + """If both AK/SK and password are provided, AK/SK wins.""" + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK", + secret_key="SK", + password="pass", + username="user", + domain_name="domain", + ) + assert cfg.auth_mode == AuthMode.AKSK + + +class TestAuthConfigValidation: + """AuthConfig should reject invalid credential combinations.""" + + def test_no_credentials_raises(self): + with pytest.raises(MissingCredentialsError): + AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + ) + + def test_password_without_username_raises(self): + with 
pytest.raises(MissingCredentialsError, match="username or user_id"): + AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + password="pass", + ) + + def test_username_without_domain_raises(self): + with pytest.raises(MissingCredentialsError, match="domain_id or domain_name"): + AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + ) + + def test_password_with_user_id_is_valid(self): + """user_id doesn't require domain.""" + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + user_id="user123", + password="pass", + ) + assert cfg.auth_mode == AuthMode.PASSWORD + + def test_password_with_domain_id_is_valid(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_id="domain123", + ) + assert cfg.auth_mode == AuthMode.PASSWORD + + +class TestAuthConfigOptionalFields: + """Optional fields should be preserved.""" + + def test_agency_fields(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK", + secret_key="SK", + agency_name="my_agency", + agency_domain_name="agency_domain", + delegated_project="delegated", + ) + assert cfg.agency_name == "my_agency" + assert cfg.agency_domain_name == "agency_domain" + assert cfg.delegated_project == "delegated" + + def test_temporary_aksk(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + access_key="AK", + secret_key="SK", + security_token="temp_token", + ) + assert cfg.security_token == "temp_token" + + def test_mfa_passcode(self): + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="domain", + passcode="123456", + ) + assert cfg.passcode == "123456" diff --git a/tests/unit/core/test_endpoint.py b/tests/unit/core/test_endpoint.py new file mode 100644 index 0000000..df2761e 
--- /dev/null +++ b/tests/unit/core/test_endpoint.py @@ -0,0 +1,287 @@ +"""Tests for ``sdk.core.endpoint``.""" + +from __future__ import annotations +from pydantic import ValidationError + +from typing import Any + +import pytest + +from sdk.core.endpoint import ( + Availability, + EndpointOpts, + build_endpoint_locator, + find_endpoint, + _normalize_url, + CatalogEntry +) +from sdk.core.exceptions import EndpointNotFoundError, ServiceNotFoundError + + +# ====================================================================== +# Test data +# ====================================================================== + + +def _sample_catalog() -> list[CatalogEntry]: + raw_catalog = [ + { + "type": "compute", + "name": "nova", + "endpoints": [ + {"interface": "public", "region_id": "eu-de", "url": "https://ecs.eu-de.otc.t-systems.com/v2.1"}, + {"interface": "internal", "region_id": "eu-de", + "url": "https://ecs-internal.eu-de.otc.t-systems.com/v2.1"}, + {"interface": "public", "region_id": "eu-nl", "url": "https://ecs.eu-nl.otc.t-systems.com/v2.1"} + ] + }, + { + "type": "dns", + "name": "dns", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://dns.eu-de.otc.t-systems.com/v2", + }, + ], + }, + { + "type": "identity", + "name": "keystone", + "endpoints": [ + {"interface": "public", "region_id": "eu-de", "url": "https://iam.eu-de.otc.t-systems.com/v3"}, + {"interface": "admin", "region_id": "*", "url": "https://iam-admin.otc.t-systems.com/v3"} + ] + }, + ] + return [CatalogEntry.model_validate(entry) for entry in raw_catalog] + + +# ====================================================================== +# _normalize_url +# ====================================================================== + + +class TestNormalizeUrl: + def test_adds_slash(self) -> None: + assert _normalize_url("https://example.com") == "https://example.com/" + + def test_keeps_slash(self) -> None: + assert _normalize_url("https://example.com/") == 
"https://example.com/" + + def test_with_path(self) -> None: + assert _normalize_url("https://example.com/v2.1") == "https://example.com/v2.1/" + + +# ====================================================================== +# EndpointOpts +# ====================================================================== + + +class TestEndpointOpts: + def test_defaults(self) -> None: + opts = EndpointOpts(service_type="compute") + assert opts.service_type == "compute" + assert opts.name == "" + assert opts.region == "" + assert opts.availability == Availability.PUBLIC + + def test_frozen(self) -> None: + opts = EndpointOpts(service_type="compute") + with pytest.raises(ValidationError): + opts.region = "eu-de" # type: ignore[misc] + + def test_all_fields(self) -> None: + opts = EndpointOpts( + service_type="compute", + name="nova", + region="eu-de", + availability=Availability.INTERNAL, + ) + assert opts.name == "nova" + assert opts.region == "eu-de" + assert opts.availability == Availability.INTERNAL + + +# ====================================================================== +# find_endpoint +# ====================================================================== + + +class TestFindEndpoint: + """Test direct catalog search.""" + + def test_finds_public_compute_eu_de(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="compute", region="eu-de") + url = find_endpoint(catalog, opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_finds_public_compute_eu_nl(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="compute", region="eu-nl") + url = find_endpoint(catalog, opts) + assert url == "https://ecs.eu-nl.otc.t-systems.com/v2.1/" + + def test_finds_internal_endpoint(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="compute", + region="eu-de", + availability=Availability.INTERNAL, + ) + url = find_endpoint(catalog, opts) + assert url == 
"https://ecs-internal.eu-de.otc.t-systems.com/v2.1/" + + def test_finds_dns(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="dns", region="eu-de") + url = find_endpoint(catalog, opts) + assert url == "https://dns.eu-de.otc.t-systems.com/v2/" + + def test_wildcard_region_fallback(self) -> None: + """Wildcard ``*`` region used when no exact match.""" + catalog = _sample_catalog() + opts = EndpointOpts(service_type="identity", region="eu-de") + url = find_endpoint(catalog, opts) + assert url == "https://iam.eu-de.otc.t-systems.com/v3/" + + def test_wildcard_admin(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="identity", + availability=Availability.ADMIN, + ) + url = find_endpoint(catalog, opts) + assert url == "https://iam-admin.otc.t-systems.com/v3/" + + def test_no_region_matches_first(self) -> None: + """Empty region returns the first matching endpoint.""" + catalog = _sample_catalog() + opts = EndpointOpts(service_type="compute") + url = find_endpoint(catalog, opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_name_filter(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="compute", name="nova", region="eu-de", + ) + url = find_endpoint(catalog, opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_name_filter_no_match(self) -> None: + """Wrong name on correct type → ServiceNotFoundError.""" + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="compute", name="wrong_name", region="eu-de", + ) + with pytest.raises(ServiceNotFoundError): + find_endpoint(catalog, opts) + + def test_service_not_found(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="nonexistent") + with pytest.raises(ServiceNotFoundError): + find_endpoint(catalog, opts) + + def test_endpoint_not_found_wrong_region(self) -> None: + catalog = _sample_catalog() + opts = EndpointOpts(service_type="dns", 
region="us-west-1") + with pytest.raises(EndpointNotFoundError): + find_endpoint(catalog, opts) + + def test_endpoint_not_found_wrong_availability(self) -> None: + """DNS has only public, asking for admin → EndpointNotFoundError.""" + catalog = _sample_catalog() + opts = EndpointOpts( + service_type="dns", + region="eu-de", + availability=Availability.ADMIN, + ) + with pytest.raises(EndpointNotFoundError): + find_endpoint(catalog, opts) + + def test_empty_catalog(self) -> None: + opts = EndpointOpts(service_type="compute") + with pytest.raises(ServiceNotFoundError): + find_endpoint([], opts) + + def test_uses_region_field_fallback(self) -> None: + """Some catalogs use ``region`` instead of ``region_id``.""" + catalog = [ + { + "type": "object-store", + "endpoints": [ + { + "interface": "public", + "region": "eu-de", + "url": "https://obs.eu-de.example.com", + }, + ], + }, + ] + cat = [CatalogEntry.model_validate(catalog) for catalog in catalog] + opts = EndpointOpts(service_type="object-store", region="eu-de") + url = find_endpoint(cat, opts) + assert url == "https://obs.eu-de.example.com/" + + +# ====================================================================== +# build_endpoint_locator +# ====================================================================== + + +class TestBuildEndpointLocator: + """Test the locator closure factory.""" + + def test_locator_uses_default_region(self) -> None: + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, default_region="eu-de") + + opts = EndpointOpts(service_type="compute") + url = locator(opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_locator_region_in_opts_overrides_default(self) -> None: + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, default_region="eu-de") + + opts = EndpointOpts(service_type="compute", region="eu-nl") + url = locator(opts) + assert url == "https://ecs.eu-nl.otc.t-systems.com/v2.1/" + + def 
test_locator_propagates_service_not_found(self) -> None: + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, default_region="eu-de") + + opts = EndpointOpts(service_type="nope") + with pytest.raises(ServiceNotFoundError): + locator(opts) + + def test_locator_propagates_endpoint_not_found(self) -> None: + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, default_region="eu-de") + + opts = EndpointOpts(service_type="dns", region="us-east-1") + with pytest.raises(EndpointNotFoundError): + locator(opts) + + def test_locator_no_default_region(self) -> None: + """Without default region, opts.region is used as-is.""" + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog) + + opts = EndpointOpts(service_type="compute", region="eu-nl") + url = locator(opts) + assert url == "https://ecs.eu-nl.otc.t-systems.com/v2.1/" + + def test_locator_no_region_at_all_returns_first(self) -> None: + """No default, no opts.region → returns first public endpoint.""" + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog) + + opts = EndpointOpts(service_type="compute") + url = locator(opts) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" diff --git a/tests/unit/core/test_exceptions.py b/tests/unit/core/test_exceptions.py new file mode 100644 index 0000000..d0b603d --- /dev/null +++ b/tests/unit/core/test_exceptions.py @@ -0,0 +1,143 @@ +"""Tests for sdk.core.exceptions.""" + +import pytest + +from sdk.core.exceptions import ( + UnauthorizedError, + BadRequestError, + HttpError, + NotFoundError, + SDKError, + SDKTimeoutError, + ServiceUnavailableError, + TooManyRequestsError, + raise_for_status, +) + + +class TestHttpErrorHierarchy: + def test_all_http_errors_inherit_from_sdk_error(self): + err = BadRequestError(method="POST", url="/test", body="bad") + assert isinstance(err, SDKError) + assert isinstance(err, HttpError) + + def test_status_code_on_class(self): + assert BadRequestError.status_code == 
400 + assert UnauthorizedError.status_code == 401 + assert NotFoundError.status_code == 404 + + def test_default_format(self): + err = BadRequestError(method="POST", url="/v1/zones", body="invalid json") + assert "Bad request" in str(err) + assert "POST" in str(err) + assert "/v1/zones" in str(err) + assert "invalid json" in str(err) + + def test_custom_format_not_found(self): + err = NotFoundError(method="GET", url="/v1/zones/123", body="zone not found") + assert "Resource not found" in str(err) + assert "zone not found" in str(err) + + def test_custom_format_too_many_requests_without_header(self): + err = TooManyRequestsError(method="GET", url="/test", body="") + assert "too many requests" in str(err).lower() + assert "wait up to one minute" in str(err).lower() + + def test_custom_format_too_many_requests_with_retry_after(self): + err = TooManyRequestsError( + method="GET", url="/test", body="", + headers={"Retry-After": "30"}, + ) + assert "30" in str(err) + assert "Retry after" in str(err) + + def test_custom_format_service_unavailable(self): + err = ServiceUnavailableError(method="GET", url="/test", body="") + assert "try again later" in str(err).lower() + + +class TestHttpErrorHeaders: + def test_headers_stored(self): + err = BadRequestError( + method="POST", url="/test", body="bad", + headers={"X-Request-Id": "abc123"}, + ) + assert err.headers["X-Request-Id"] == "abc123" + + def test_headers_default_to_empty_dict(self): + err = BadRequestError(method="POST", url="/test", body="bad") + assert err.headers == {} + + def test_request_id_extracted(self): + err = BadRequestError( + method="POST", url="/test", body="bad", + headers={"x-request-id": "req-abc123"}, + ) + assert err.request_id == "req-abc123" + + def test_request_id_empty_when_missing(self): + err = BadRequestError(method="POST", url="/test", body="bad") + assert err.request_id == "" + + +class TestHttpErrorExpectedCodes: + def test_expected_codes_stored(self): + err = HttpError( + method="GET", 
url="/test", body="error", + expected=[200, 201], status_code=409, + ) + assert err.expected == [200, 201] + assert "[200, 201]" in str(err) + + def test_expected_codes_default_to_empty(self): + err = HttpError( + method="GET", url="/test", body="error", + status_code=500, + ) + assert err.expected == [] + + +class TestRaiseForStatus: + def test_raises_known_status(self): + with pytest.raises(NotFoundError) as exc_info: + raise_for_status(404, method="GET", url="/test", body="gone") + assert exc_info.value.status_code == 404 + + def test_raises_generic_for_unknown_status(self): + with pytest.raises(HttpError) as exc_info: + raise_for_status(418, method="GET", url="/teapot", body="short and stout") + assert exc_info.value.status_code == 418 + + def test_authentication_error_message(self): + with pytest.raises(UnauthorizedError) as exc_info: + raise_for_status(401, method="POST", url="/v1/auth/tokens", body="invalid token") + assert "Authentication failed" in str(exc_info.value) + + def test_passes_headers_through(self): + with pytest.raises(TooManyRequestsError) as exc_info: + raise_for_status( + 429, method="GET", url="/test", body="", + headers={"Retry-After": "60"}, + ) + assert exc_info.value.headers["Retry-After"] == "60" + + def test_passes_expected_codes_through(self): + with pytest.raises(NotFoundError) as exc_info: + raise_for_status( + 404, method="GET", url="/test", body="gone", + expected=[200], + ) + assert exc_info.value.expected == [200] + + +class TestSDKTimeoutError: + def test_not_builtin_timeout(self): + """SDKTimeoutError should not be confused with builtin TimeoutError.""" + err = SDKTimeoutError("operation timed out") + assert isinstance(err, SDKError) + assert not isinstance(err, builtins_timeout_error()) + + +def builtins_timeout_error(): + """Return the builtin TimeoutError for isinstance check.""" + return TimeoutError diff --git a/tests/unit/core/test_pagination.py b/tests/unit/core/test_pagination.py new file mode 100644 index 
0000000..b9839f1 --- /dev/null +++ b/tests/unit/core/test_pagination.py @@ -0,0 +1,452 @@ +"""Tests for ``sdk.core.pagination``.""" + +from __future__ import annotations + +import json +from typing import Any + +import httpx +import pytest + +from sdk.core.auth import AuthConfig +from sdk.core.pagination import ( + _build_url, + _extract_link, + linked_paginate, + marker_paginate, + offset_paginate, + single_page, +) +from sdk.core.provider import ProviderClient +from sdk.core.service_client import ServiceClient + + +# ====================================================================== +# Fixtures +# ====================================================================== + + +def _make_service_client(handler: Any) -> ServiceClient: + """Build a ServiceClient with mocked transport.""" + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="dom", + ) + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + provider = ProviderClient(cfg, http_client=http_client) + provider.token_id = "test-token" + return ServiceClient( + provider, endpoint_override="https://api.example.com/v1", + ) + + +# ====================================================================== +# _build_url +# ====================================================================== + + +class TestBuildUrl: + def test_no_params(self) -> None: + assert _build_url("servers", None) == "servers" + + def test_adds_params(self) -> None: + url = _build_url("servers", {"limit": "10"}) + assert "limit=10" in url + assert url.startswith("servers?") + + def test_merges_existing_params(self) -> None: + url = _build_url("servers?status=ACTIVE", {"limit": "5"}) + assert "status=ACTIVE" in url + assert "limit=5" in url + + def test_overrides_existing(self) -> None: + url = _build_url("servers?limit=20", {"limit": "5"}) + assert "limit=5" in url + assert "limit=20" not in url + + +# 
====================================================================== +# _extract_link +# ====================================================================== + + +class TestExtractLink: + def test_simple_path(self) -> None: + data = {"links": {"next": "https://example.com/page2"}} + assert _extract_link(data, ["links", "next"]) == "https://example.com/page2" + + def test_missing_key(self) -> None: + data = {"links": {"prev": "..."}} + assert _extract_link(data, ["links", "next"]) == "" + + def test_null_value(self) -> None: + data = {"links": {"next": None}} + assert _extract_link(data, ["links", "next"]) == "" + + def test_deep_path(self) -> None: + data = {"a": {"b": {"c": "url"}}} + assert _extract_link(data, ["a", "b", "c"]) == "url" + + def test_not_a_dict(self) -> None: + data = {"links": "not-a-dict"} + assert _extract_link(data, ["links", "next"]) == "" + + def test_empty_path(self) -> None: + data = {"links": {"next": "url"}} + assert _extract_link(data, []) == "" + + +# ====================================================================== +# marker_paginate +# ====================================================================== + + +class TestMarkerPaginate: + def test_single_page(self) -> None: + """Single page with fewer items than limit.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "servers": [ + {"id": "s1", "name": "a"}, + {"id": "s2", "name": "b"}, + ], + }) + + sc = _make_service_client(handler) + items = list(marker_paginate(sc, "servers", items_key="servers", limit=10)) + + assert len(items) == 2 + assert items[0]["id"] == "s1" + + def test_multi_page(self) -> None: + """Two pages, second page is shorter → stops.""" + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + url = str(req.url) + if "marker=" not in url: + return httpx.Response(200, json={ + "items": [{"id": "1"}, {"id": "2"}], + }) + else: + return httpx.Response(200, 
json={ + "items": [{"id": "3"}], + }) + + sc = _make_service_client(handler) + items = list(marker_paginate(sc, "items", items_key="items", limit=2)) + + assert len(items) == 3 + assert call_count == 2 + assert items[-1]["id"] == "3" + + def test_empty_first_page(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(marker_paginate(sc, "items", items_key="items")) + + assert items == [] + + def test_marker_param_passed(self) -> None: + """Second request should include marker from last item.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + url = str(req.url) + if "marker=" not in url: + return httpx.Response(200, json={ + "items": [{"id": "abc"}, {"id": "def"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + list(marker_paginate(sc, "items", items_key="items", limit=2)) + + assert len(captured) == 2 + assert "marker=def" in str(captured[1].url) + + def test_custom_marker_key(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + url = str(req.url) + if "marker=" not in url: + return httpx.Response(200, json={ + "items": [{"uid": "x1"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + list(marker_paginate( + sc, "items", items_key="items", marker_key="uid", limit=1, + )) + + assert "marker=x1" in str(captured[1].url) + + def test_no_limit_stops_on_empty(self) -> None: + """Without limit, pagination stops when page is empty.""" + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(200, json={ + "items": [{"id": "a"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + 
items = list(marker_paginate(sc, "items", items_key="items")) + + assert len(items) == 1 + assert call_count == 2 + + def test_extra_params_forwarded(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + list(marker_paginate( + sc, "items", items_key="items", + params={"status": "ACTIVE"}, + )) + + assert "status=ACTIVE" in str(captured[0].url) + + +# ====================================================================== +# offset_paginate +# ====================================================================== + + +class TestOffsetPaginate: + def test_single_page(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "topics": [{"id": "t1"}, {"id": "t2"}], + }) + + sc = _make_service_client(handler) + items = list(offset_paginate( + sc, "topics", items_key="topics", limit=10, + )) + + assert len(items) == 2 + + def test_multi_page(self) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + url = str(req.url) + if "offset=0" in url or "offset" not in url: + return httpx.Response(200, json={ + "items": [{"id": "1"}, {"id": "2"}], + }) + elif "offset=2" in url: + return httpx.Response(200, json={ + "items": [{"id": "3"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(offset_paginate( + sc, "items", items_key="items", limit=2, + )) + + assert len(items) == 3 + assert call_count == 2 + + def test_offset_increments(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + url = str(req.url) + if "offset=0" in url: + return httpx.Response(200, json={ + "items": [{"id": "a"}, {"id": "b"}, {"id": "c"}], + }) + if "offset=3" in url: + return 
httpx.Response(200, json={ + "items": [{"id": "d"}], + }) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(offset_paginate( + sc, "items", items_key="items", limit=3, + )) + + assert len(items) == 4 + assert "offset=0" in str(captured[0].url) + assert "offset=3" in str(captured[1].url) + + def test_start_offset(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + list(offset_paginate( + sc, "items", items_key="items", limit=5, start_offset=10, + )) + + assert "offset=10" in str(captured[0].url) + + def test_empty_first_page(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + items = list(offset_paginate( + sc, "items", items_key="items", limit=10, + )) + + assert items == [] + + +# ====================================================================== +# linked_paginate +# ====================================================================== + + +class TestLinkedPaginate: + def test_single_page_no_next(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "items": [{"id": "1"}], + "links": {"next": None}, + }) + + sc = _make_service_client(handler) + items = list(linked_paginate(sc, "items", items_key="items")) + + assert len(items) == 1 + + def test_follows_next_link(self) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + url = str(req.url) + if "page2" not in url: + return httpx.Response(200, json={ + "items": [{"id": "1"}], + "links": {"next": "items?page=page2"}, + }) + return httpx.Response(200, json={ + "items": [{"id": "2"}], + "links": {"next": None}, + }) + + sc = _make_service_client(handler) + items = 
list(linked_paginate(sc, "items", items_key="items")) + + assert len(items) == 2 + assert call_count == 2 + + def test_custom_link_path(self) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(200, json={ + "data": [{"id": "1"}], + "pagination": {"next_url": "data?cursor=abc"}, + }) + return httpx.Response(200, json={ + "data": [{"id": "2"}], + "pagination": {"next_url": None}, + }) + + sc = _make_service_client(handler) + items = list(linked_paginate( + sc, "data", items_key="data", + link_path=["pagination", "next_url"], + )) + + assert len(items) == 2 + + def test_empty_first_page(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "items": [], + "links": {"next": "irrelevant"}, + }) + + sc = _make_service_client(handler) + items = list(linked_paginate(sc, "items", items_key="items")) + + assert items == [] + + def test_missing_links_key(self) -> None: + """No 'links' in response → stops after first page.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "items": [{"id": "1"}], + }) + + sc = _make_service_client(handler) + items = list(linked_paginate(sc, "items", items_key="items")) + + assert len(items) == 1 + + +# ====================================================================== +# single_page +# ====================================================================== + + +class TestSinglePage: + def test_returns_list(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={ + "servers": [{"id": "s1"}, {"id": "s2"}], + }) + + sc = _make_service_client(handler) + items = single_page(sc, "servers", items_key="servers") + + assert isinstance(items, list) + assert len(items) == 2 + + def test_empty_list(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, 
json={"servers": []}) + + sc = _make_service_client(handler) + items = single_page(sc, "servers", items_key="servers") + + assert items == [] + + def test_with_params(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"items": []}) + + sc = _make_service_client(handler) + single_page(sc, "items", items_key="items", params={"foo": "bar"}) + + assert "foo=bar" in str(captured[0].url) diff --git a/tests/unit/core/test_provider.py b/tests/unit/core/test_provider.py new file mode 100644 index 0000000..378e3ae --- /dev/null +++ b/tests/unit/core/test_provider.py @@ -0,0 +1,933 @@ +"""Tests for ``sdk.core.provider``.""" + +from __future__ import annotations + +import json +from typing import Any +from unittest.mock import patch +from pydantic import BaseModel +import httpx +import pytest + +from sdk.core.auth import AuthConfig, AuthMode +from sdk.core.exceptions import ( + UnauthorizedError, + BadRequestError, + EndpointNotFoundError, + InternalServerError, + NotFoundError, + ReauthError, + ServiceNotFoundError, + TooManyRequestsError, +) +from sdk.core.provider import ( + ProviderClient, + _build_agency_auth_body, + _build_scope, + _build_v3_auth_body, +) +from sdk.core.endpoint import build_endpoint_locator, CatalogEntry + + +# ====================================================================== +# Fixtures +# ====================================================================== + + +def _password_config(**overrides: Any) -> AuthConfig: + """Create a password AuthConfig with defaults.""" + defaults = { + "identity_endpoint": "https://iam.eu-de.otc.t-systems.com/v3", + "username": "testuser", + "password": "secret", + "domain_name": "testdomain", + "tenant_name": "eu-de", + "allow_reauth": True, + } + defaults.update(overrides) + return AuthConfig(**defaults) + + +def _aksk_config(**overrides: Any) -> AuthConfig: + """Create an AK/SK AuthConfig with 
defaults.""" + defaults = { + "identity_endpoint": "https://iam.eu-de.otc.t-systems.com/v3", + "access_key": "MYACCESSKEY", + "secret_key": "MYSECRETKEY", + "region": "eu-de", + "project_id": "project-123", + } + defaults.update(overrides) + return AuthConfig(**defaults) + + +def _token_response( + *, + token_id: str = "tok-abc-123", + project_id: str = "proj-123", + project_name: str = "eu-de", + domain_id: str = "dom-456", + user_id: str = "user-789", + catalog: list[dict[str, Any]] | None = None, +) -> httpx.Response: + """Build a mock IAM token response.""" + body: dict[str, Any] = { + "token": { + "project": { + "id": project_id, + "name": project_name, + "domain": {"id": domain_id}, + }, + "user": { + "id": user_id, + "domain": {"id": domain_id}, + }, + }, + } + raw_catalog = catalog if catalog is not None else _sample_catalog() + serialized_catalog = [] + for entry in raw_catalog: + if isinstance(entry, BaseModel): + serialized_catalog.append(entry.model_dump(by_alias=True)) + else: + serialized_catalog.append(entry) + + body["token"]["catalog"] = serialized_catalog + + resp = httpx.Response( + 201, + json=body, + headers={"x-subject-token": token_id}, + ) + return resp + + +def _sample_catalog() -> list[CatalogEntry]: + raw = [ + { + "type": "compute", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://ecs.eu-de.otc.t-systems.com/v2.1", + }, + { + "interface": "internal", + "region_id": "eu-de", + "url": "https://ecs-internal.eu-de.otc.t-systems.com/v2.1", + }, + ], + }, + { + "type": "dns", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://dns.eu-de.otc.t-systems.com/v2", + }, + ], + }, + ] + return [CatalogEntry.model_validate(raw) for raw in raw] + + +def _catalog_response() -> httpx.Response: + serialized_catalog = [ + entry.model_dump(by_alias=True) + for entry in _sample_catalog() + ] + return httpx.Response(200, json={"catalog": serialized_catalog}) + + +# 
====================================================================== +# ProviderClient: basic properties +# ====================================================================== + + +class TestProviderClientProperties: + """Test basic ProviderClient attributes and properties.""" + + def test_identity_base_strips_v3(self) -> None: + cfg = _password_config() + client = ProviderClient(cfg) + assert client.identity_base == "https://iam.eu-de.otc.t-systems.com/" + + def test_identity_base_strips_v3_slash(self) -> None: + cfg = _password_config( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3/" + ) + client = ProviderClient(cfg) + assert client.identity_base == "https://iam.eu-de.otc.t-systems.com/" + + def test_identity_base_no_version(self) -> None: + cfg = _password_config( + identity_endpoint="https://iam.eu-de.otc.t-systems.com" + ) + client = ProviderClient(cfg) + assert client.identity_base == "https://iam.eu-de.otc.t-systems.com/" + + def test_identity_v3_endpoint(self) -> None: + cfg = _password_config() + client = ProviderClient(cfg) + assert client.identity_v3_endpoint == "https://iam.eu-de.otc.t-systems.com/v3/" + + def test_initial_state_empty(self) -> None: + cfg = _password_config() + client = ProviderClient(cfg) + assert client.token_id == "" + assert client.project_id == "" + assert client.user_id == "" + assert client.domain_id == "" + # region_id is "" because _password_config has no region field + assert client.region_id == "" + + def test_region_from_config(self) -> None: + cfg = _password_config(region="eu-nl") + client = ProviderClient(cfg) + assert client.region_id == "eu-nl" + + def test_context_manager(self) -> None: + cfg = _password_config() + with ProviderClient(cfg) as client: + assert isinstance(client, ProviderClient) + + +# ====================================================================== +# Auth body builders +# ====================================================================== + + +class TestBuildV3AuthBody: + 
"""Test ``_build_v3_auth_body`` for password and token modes.""" + + def test_password_with_username_domain_name(self) -> None: + cfg = _password_config() + body = _build_v3_auth_body(cfg) + + auth = body["auth"] + assert auth["identity"]["methods"] == ["password"] + user = auth["identity"]["password"]["user"] + assert user["name"] == "testuser" + assert user["password"] == "secret" + assert user["domain"] == {"name": "testdomain"} + + def test_password_with_user_id(self) -> None: + cfg = _password_config(username=None, user_id="uid-123") + body = _build_v3_auth_body(cfg) + + user = body["auth"]["identity"]["password"]["user"] + assert user["id"] == "uid-123" + assert "name" not in user + + def test_password_with_domain_id(self) -> None: + cfg = _password_config(domain_name=None, domain_id="did-456") + body = _build_v3_auth_body(cfg) + + user = body["auth"]["identity"]["password"]["user"] + assert user["domain"] == {"id": "did-456"} + + def test_password_with_totp(self) -> None: + cfg = _password_config(passcode="123456") + body = _build_v3_auth_body(cfg) + + identity = body["auth"]["identity"] + assert "totp" in identity["methods"] + assert identity["totp"]["user"]["passcode"] == "123456" + + def test_token_auth(self) -> None: + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + token_id="existing-token", + ) + body = _build_v3_auth_body(cfg) + + identity = body["auth"]["identity"] + assert identity["methods"] == ["token"] + assert identity["token"]["id"] == "existing-token" + + def test_scope_project_by_name(self) -> None: + cfg = _password_config(tenant_name="eu-de") + body = _build_v3_auth_body(cfg) + + scope = body["auth"]["scope"] + assert scope["project"]["name"] == "eu-de" + assert scope["project"]["domain"]["name"] == "testdomain" + + def test_scope_project_by_id(self) -> None: + cfg = _password_config(tenant_id="proj-xyz", tenant_name=None) + body = _build_v3_auth_body(cfg) + + scope = body["auth"]["scope"] + assert 
scope["project"]["id"] == "proj-xyz" + + def test_no_scope(self) -> None: + cfg = _password_config( + username=None, user_id="uid-1", + tenant_name=None, domain_name=None, domain_id=None, + ) + body = _build_v3_auth_body(cfg) + assert "scope" not in body["auth"] + + +class TestBuildAgencyAuthBody: + """Test ``_build_agency_auth_body``.""" + + def test_agency_body(self) -> None: + cfg = _password_config( + agency_name="my_agency", + agency_domain_name="agency_domain", + delegated_project="delegated_proj", + ) + body = _build_agency_auth_body(cfg) + + identity = body["auth"]["identity"] + assert identity["methods"] == ["assume_role"] + assert identity["assume_role"]["xrole_name"] == "my_agency" + assert identity["assume_role"]["domain_name"] == "agency_domain" + + scope = body["auth"]["scope"] + assert scope["project"]["name"] == "delegated_proj" + + def test_agency_no_delegated_project(self) -> None: + cfg = _password_config( + agency_name="my_agency", + agency_domain_name="agency_domain", + ) + body = _build_agency_auth_body(cfg) + assert "scope" not in body["auth"] + + +class TestBuildScope: + """Test ``_build_scope``.""" + + def test_project_id(self) -> None: + cfg = _password_config(tenant_id="proj-1", tenant_name=None) + scope = _build_scope(cfg) + assert scope == {"project": {"id": "proj-1"}} + + def test_project_name_with_domain(self) -> None: + cfg = _password_config(tenant_name="eu-de") + scope = _build_scope(cfg) + assert scope == { + "project": { + "name": "eu-de", + "domain": {"name": "testdomain"}, + }, + } + + def test_domain_only(self) -> None: + cfg = _password_config(tenant_name=None, domain_name=None, domain_id="did-1") + scope = _build_scope(cfg) + assert scope == {"domain": {"id": "did-1"}} + + def test_none_when_empty(self) -> None: + cfg = _password_config( + username=None, user_id="uid-1", + tenant_name=None, domain_name=None, domain_id=None, + ) + assert _build_scope(cfg) is None + + +# 
====================================================================== +# Endpoint locator +# ====================================================================== + + +class TestEndpointLocator: + """Test ``build_endpoint_locator`` (acceptance with provider).""" + + def test_finds_public_endpoint(self) -> None: + from sdk.core.endpoint import EndpointOpts + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, "eu-de") + + url = locator(EndpointOpts(service_type="compute")) + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_finds_dns_endpoint(self) -> None: + from sdk.core.endpoint import EndpointOpts + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, "eu-de") + + url = locator(EndpointOpts(service_type="dns")) + assert url == "https://dns.eu-de.otc.t-systems.com/v2/" + + def test_region_override(self) -> None: + from sdk.core.endpoint import EndpointOpts + raw = [ + { + "type": "compute", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-nl", + "url": "https://ecs.eu-nl.example.com/v2.1", + }, + ], + }, + ] + catalog = [CatalogEntry.model_validate(entry) for entry in raw] + locator = build_endpoint_locator(catalog, "eu-de") + + url = locator(EndpointOpts(service_type="compute", region="eu-nl")) + assert url == "https://ecs.eu-nl.example.com/v2.1/" + + def test_service_not_found(self) -> None: + from sdk.core.endpoint import EndpointOpts + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, "eu-de") + + with pytest.raises(ServiceNotFoundError): + locator(EndpointOpts(service_type="nonexistent")) + + def test_endpoint_not_found_wrong_region(self) -> None: + from sdk.core.endpoint import EndpointOpts + catalog = _sample_catalog() + locator = build_endpoint_locator(catalog, "eu-de") + + with pytest.raises(EndpointNotFoundError): + locator(EndpointOpts(service_type="compute", region="us-west-1")) + + +# 
====================================================================== +# ProviderClient: authenticate (v3_auth) +# ====================================================================== + + +class TestV3Auth: + """Test password/token auth flows using mocked HTTP.""" + + def test_password_auth_sets_state(self) -> None: + """Password auth should set token, project, user, domain.""" + cfg = _password_config() + token_resp = _token_response() + + transport = httpx.MockTransport(lambda req: token_resp) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert client.token_id == "tok-abc-123" + assert client.project_id == "proj-123" + assert client.user_id == "user-789" + assert client.domain_id == "dom-456" + assert client.endpoint_locator is not None + + def test_password_auth_sends_correct_body(self) -> None: + """Verify the auth request body is correct.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return _token_response() + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert len(captured) == 1 + body = json.loads(captured[0].content) + assert body["auth"]["identity"]["methods"] == ["password"] + assert body["auth"]["identity"]["password"]["user"]["name"] == "testuser" + + def test_token_reuse_auth(self) -> None: + """Token auth should GET and set token from config.""" + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + token_id="existing-tok", + ) + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + serialized_catalog = [ + entry.model_dump(by_alias=True) + for entry in _sample_catalog() + ] + return httpx.Response( + 200, + json={ + "token": { + "user": 
{"id": "u-1", "domain": {"id": "d-1"}}, + "catalog": serialized_catalog, + }, + }, + headers={"x-subject-token": "existing-tok"}, + ) + + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert len(captured) == 1 + assert captured[0].method == "GET" + assert client.token_id == "existing-tok" + + + def test_auth_failure_raises(self) -> None: + cfg = _password_config() + resp_401 = httpx.Response(401, text="Unauthorized") + transport = httpx.MockTransport(lambda req: resp_401) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + with pytest.raises(UnauthorizedError): + client.authenticate() + + +# ====================================================================== +# ProviderClient: authenticate (v3_auth_with_agency) +# ====================================================================== + + +class TestV3AuthWithAgency: + """Test password + agency auth flow.""" + + def test_agency_auth_two_requests(self) -> None: + """Agency auth should issue two requests: normal + assume_role.""" + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + return _token_response( + token_id=f"tok-{call_count}", + ) + + cfg = _password_config( + agency_name="ag1", + agency_domain_name="ag_domain", + delegated_project="proj_deleg", + ) + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert call_count == 2 # first: v3auth, second: assume_role + assert client.token_id == "tok-2" + + +# ====================================================================== +# ProviderClient: authenticate (aksk) +# ====================================================================== + + +class TestAKSKAuth: + """Test AK/SK auth flow.""" 
+ + def test_aksk_auth_fetches_catalog(self) -> None: + """AK/SK auth should fetch catalog and set endpoint_locator.""" + cfg = _aksk_config() + + def handler(req: httpx.Request) -> httpx.Response: + # Should be a signed catalog request + assert "authorization" in req.headers + return _catalog_response() + + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert client.token_id == "" # No token in AK/SK mode + assert client.project_id == "project-123" + assert client.endpoint_locator is not None + + def test_aksk_auth_resolves_project_name(self) -> None: + """When project_name given but no project_id, resolve via API.""" + cfg = _aksk_config(project_id=None, project_name="eu-de") + call_idx = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_idx + call_idx += 1 + url = str(req.url) + if "projects" in url: + return httpx.Response( + 200, + json={"projects": [{"id": "resolved-proj-id"}]}, + ) + if "catalog" in url: + return _catalog_response() + return httpx.Response(404, text="Not found") + + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + + client = ProviderClient(cfg, http_client=http_client) + client.authenticate() + + assert client.project_id == "resolved-proj-id" + + +# ====================================================================== +# ProviderClient: request() with retry logic +# ====================================================================== + + +class TestRequest: + """Test the ``request()`` method with mocked transport.""" + + def _authenticated_client( + self, handler: Any, + ) -> ProviderClient: + """Create an already-authenticated client with a mock transport.""" + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + 
client.token_id = "test-token" + return client + + def test_successful_get(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(200, json={"result": "ok"}) + + client = self._authenticated_client(handler) + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert resp.json() == {"result": "ok"} + + def test_auth_header_set(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + client = self._authenticated_client(handler) + client.request("GET", "https://api.example.com/resource") + + assert captured[0].headers["x-auth-token"] == "test-token" + + def test_post_with_json(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(201, json={"id": "new-1"}) + + client = self._authenticated_client(handler) + resp = client.request( + "POST", + "https://api.example.com/resource", + json={"name": "test"}, + ) + + assert resp.status_code == 201 + body = json.loads(captured[0].content) + assert body["name"] == "test" + + def test_400_raises_bad_request(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(400, text="Bad request body") + + client = self._authenticated_client(handler) + with pytest.raises(BadRequestError) as exc_info: + client.request("GET", "https://api.example.com/resource") + assert "Bad request body" in str(exc_info.value) + + def test_404_raises_not_found(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(404, text="Not found") + + client = self._authenticated_client(handler) + with pytest.raises(NotFoundError): + client.request("GET", "https://api.example.com/missing") + + def test_500_raises_internal_server_error(self) -> None: + def handler(req: httpx.Request) -> 
httpx.Response: + return httpx.Response(500, text="Server error") + + client = self._authenticated_client(handler) + with pytest.raises(InternalServerError): + client.request("GET", "https://api.example.com/broken") + + +class TestRequestRetry: + """Test retry logic on 401, 429, 502, 504.""" + + def test_401_triggers_reauth_and_retry(self) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(401, text="Unauthorized") + return httpx.Response(200, json={"ok": True}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "old-token" + + def fake_authenticate() -> None: + client.token_id = "new-token" + + client.authenticate = fake_authenticate + + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert call_count == 2 + assert client.token_id == "new-token" + + def test_401_without_reauth_raises(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(401, text="Unauthorized") + + cfg = _password_config() + cfg = cfg.model_copy(update={"allow_reauth": False}) + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + with pytest.raises(UnauthorizedError): + client.request("GET", "https://api.example.com/resource") + + def test_401_reauth_failure_raises_reauth_error(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(401, text="Unauthorized") + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + def bad_reauth() -> None: + 
raise RuntimeError("reauth failed") + + client.authenticate = bad_reauth + + with pytest.raises(ReauthError): + client.request("GET", "https://api.example.com/resource") + + @patch("sdk.core.provider.time.sleep", return_value=None) + def test_429_backoff_retry(self, mock_sleep: Any) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count <= 2: + return httpx.Response(429, text="Rate limited") + return httpx.Response(200, json={"ok": True}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert call_count == 3 + assert mock_sleep.call_count == 2 + + @patch("sdk.core.provider.time.sleep", return_value=None) + def test_429_exhausts_retries(self, mock_sleep: Any) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(429, text="Rate limited") + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient( + cfg, http_client=http_client, max_backoff_retries=2, + ) + client.token_id = "tok" + + with pytest.raises(TooManyRequestsError): + client.request("GET", "https://api.example.com/resource") + + @patch("sdk.core.provider.time.sleep", return_value=None) + def test_502_gateway_retry(self, mock_sleep: Any) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(502, text="Bad Gateway") + return httpx.Response(200, json={"ok": True}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + 
client.token_id = "tok" + + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert call_count == 2 + + @patch("sdk.core.provider.time.sleep", return_value=None) + def test_504_gateway_retry(self, mock_sleep: Any) -> None: + call_count = 0 + + def handler(req: httpx.Request) -> httpx.Response: + nonlocal call_count + call_count += 1 + if call_count == 1: + return httpx.Response(504, text="Gateway Timeout") + return httpx.Response(200, json={"ok": True}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = client.request("GET", "https://api.example.com/resource") + + assert resp.status_code == 200 + assert call_count == 2 + + +class TestRequestAKSKSigning: + """Test that AK/SK requests are signed.""" + + def test_aksk_request_has_authorization(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + cfg = _aksk_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.project_id = "proj-123" + + client.request("GET", "https://ecs.eu-de.otc.t-systems.com/v2.1/servers") + + req = captured[0] + assert "authorization" in req.headers + assert "SDK-HMAC-SHA256" in req.headers["authorization"] + assert "x-project-id" in req.headers + + def test_aksk_request_with_domain_id(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + cfg = _aksk_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.domain_id = "dom-id" + + 
client.request("GET", "https://ecs.eu-de.otc.t-systems.com/v2.1/servers") + + req = captured[0] + assert req.headers.get("x-domain-id") == "dom-id" + assert "x-project-id" not in req.headers # domain_id present → no project_id + + def test_aksk_request_with_security_token(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + cfg = _aksk_config(security_token="temp-sec-tok") + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.project_id = "proj-123" + + client.request("GET", "https://ecs.eu-de.otc.t-systems.com/v2.1/servers") + + req = captured[0] + assert req.headers.get("x-security-token") == "temp-sec-tok" + + +# ====================================================================== +# ProviderClient: custom ok_codes +# ====================================================================== + + +class TestCustomOkCodes: + """Test custom ok_codes parameter.""" + + def test_custom_ok_codes_accepted(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(204, text="") + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = client.request( + "POST", "https://api.example.com/action", + ok_codes=[204], + ) + assert resp.status_code == 204 + + def test_default_post_ok_codes(self) -> None: + """POST default ok_codes include 201.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(201, json={"id": "1"}) + + cfg = _password_config() + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + client = ProviderClient(cfg, http_client=http_client) + client.token_id = "tok" + + resp = 
client.request("POST", "https://api.example.com/resource") + assert resp.status_code == 201 + + +# ====================================================================== +# SecretStr compatibility +# ====================================================================== + + +class TestSecretStrCompatibility: + """Verify provider works when AuthConfig uses SecretStr fields.""" + + def test_secret_value_with_str(self) -> None: + from sdk.core.provider import _secret_value + assert _secret_value("plain") == "plain" + + def test_secret_value_with_secret_str(self) -> None: + from pydantic import SecretStr + from sdk.core.provider import _secret_value + assert _secret_value(SecretStr("hidden")) == "hidden" + + def test_build_body_with_secret_password(self) -> None: + """If password is SecretStr, body should contain plain string.""" + from pydantic import SecretStr + + cfg = _password_config() + # Simulate SecretStr by monkey-patching + object.__setattr__(cfg, "password", SecretStr("secret")) + + body = _build_v3_auth_body(cfg) + user = body["auth"]["identity"]["password"]["user"] + assert user["password"] == "secret" + assert isinstance(user["password"], str) diff --git a/tests/unit/core/test_service_client.py b/tests/unit/core/test_service_client.py new file mode 100644 index 0000000..410bec0 --- /dev/null +++ b/tests/unit/core/test_service_client.py @@ -0,0 +1,429 @@ +"""Tests for ``sdk.core.service_client``.""" + +from __future__ import annotations + +import json +from typing import Any +import httpx +import pytest + +from sdk.core.auth import AuthConfig +from sdk.core.exceptions import ( + EndpointNotFoundError, + NotFoundError, + ServiceNotFoundError, +) +from sdk.core.endpoint import build_endpoint_locator, CatalogEntry +from sdk.core.provider import ProviderClient +from sdk.core.service_client import ServiceClient, _ensure_trailing_slash + + +# ====================================================================== +# Fixtures +# 
====================================================================== + + +def _sample_catalog() -> list[CatalogEntry]: + raw = [ + { + "type": "compute", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://ecs.eu-de.otc.t-systems.com/v2.1", + }, + ], + }, + { + "type": "dns", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://dns.eu-de.otc.t-systems.com/v2", + }, + ], + }, + { + "type": "network", + "endpoints": [ + { + "interface": "public", + "region_id": "eu-de", + "url": "https://vpc.eu-de.otc.t-systems.com", + }, + { + "interface": "public", + "region_id": "eu-nl", + "url": "https://vpc.eu-nl.otc.t-systems.com", + }, + ], + }, + ] + return [CatalogEntry.model_validate(entry) for entry in raw] + + +def _make_provider( + handler: Any = None, + *, + catalog: list[CatalogEntry] | None = None, +) -> ProviderClient: + """Create an authenticated ProviderClient with mock transport.""" + if handler is None: + handler = lambda req: httpx.Response(200, json={}) + + cfg = AuthConfig( + identity_endpoint="https://iam.eu-de.otc.t-systems.com/v3", + username="user", + password="pass", + domain_name="dom", + ) + transport = httpx.MockTransport(handler) + http_client = httpx.Client(transport=transport) + provider = ProviderClient(cfg, http_client=http_client) + provider.token_id = "test-token" + provider.region_id = "eu-de" + + cat = catalog if catalog is not None else _sample_catalog() + provider.endpoint_locator = build_endpoint_locator(cat, "eu-de") + + return provider + + +# ====================================================================== +# _ensure_trailing_slash +# ====================================================================== + + +class TestEnsureTrailingSlash: + def test_adds_slash(self) -> None: + assert _ensure_trailing_slash("https://example.com") == "https://example.com/" + + def test_keeps_existing_slash(self) -> None: + assert _ensure_trailing_slash("https://example.com/") == 
"https://example.com/" + + def test_with_path(self) -> None: + assert _ensure_trailing_slash("https://example.com/v2.1") == "https://example.com/v2.1/" + + +# ====================================================================== +# Construction & endpoint resolution +# ====================================================================== + + +class TestServiceClientConstruction: + """Test endpoint resolution and attribute setup.""" + + def test_resolves_compute_from_catalog(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + assert sc.endpoint == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_resolves_dns_from_catalog(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "dns") + assert sc.endpoint == "https://dns.eu-de.otc.t-systems.com/v2/" + + def test_endpoint_override_bypasses_catalog(self) -> None: + provider = _make_provider() + sc = ServiceClient( + provider, "compute", + endpoint_override="https://custom.example.com/v2", + ) + assert sc.endpoint == "https://custom.example.com/v2/" + assert sc.resource_base == "https://custom.example.com/v2/" + + def test_resource_base_override(self) -> None: + provider = _make_provider() + sc = ServiceClient( + provider, "compute", + resource_base="https://ecs.eu-de.otc.t-systems.com/v2.1/proj-123", + ) + assert sc.resource_base == "https://ecs.eu-de.otc.t-systems.com/v2.1/proj-123/" + assert sc.endpoint == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_resource_base_defaults_to_endpoint(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + assert sc.resource_base == sc.endpoint + + def test_service_not_found_raises(self) -> None: + provider = _make_provider() + with pytest.raises(ServiceNotFoundError): + ServiceClient(provider, "nonexistent_service") + + def test_endpoint_not_found_wrong_region(self) -> None: + raw = [ + { + "type": "compute", + "endpoints": [ + { + "interface": "public", + "region_id": 
"eu-de", + "url": "https://ecs.eu-de.example.com/v2.1", + }, + ], + }, + ] + catalog = [CatalogEntry.model_validate(entry) for entry in raw] + provider = _make_provider(catalog=catalog) + with pytest.raises(EndpointNotFoundError): + ServiceClient(provider, "compute", region="us-east-1") + + def test_region_override(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "network", region="eu-nl") + assert "eu-nl" in sc.endpoint + + def test_extra_headers_stored(self) -> None: + provider = _make_provider() + sc = ServiceClient( + provider, "compute", + extra_headers={"X-Custom": "value"}, + ) + assert sc.extra_headers == {"X-Custom": "value"} + + def test_no_endpoint_locator_gives_empty(self) -> None: + """If provider has no locator, endpoint is empty string.""" + provider = _make_provider() + provider.endpoint_locator = None + sc = ServiceClient(provider, "compute") + assert sc.endpoint == "" + + def test_no_service_type_gives_empty(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider) + assert sc.endpoint == "" + + +# ====================================================================== +# service_url +# ====================================================================== + + +class TestServiceUrl: + """Test URL construction.""" + + def test_single_part(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + url = sc.service_url("servers") + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/servers" + + def test_multiple_parts(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + url = sc.service_url("servers", "abc-123", "action") + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/servers/abc-123/action" + + def test_no_parts(self) -> None: + provider = _make_provider() + sc = ServiceClient(provider, "compute") + url = sc.service_url() + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/" + + def test_with_custom_resource_base(self) 
-> None: + provider = _make_provider() + sc = ServiceClient( + provider, "compute", + resource_base="https://ecs.eu-de.otc.t-systems.com/v2.1/proj-123", + ) + url = sc.service_url("servers") + assert url == "https://ecs.eu-de.otc.t-systems.com/v2.1/proj-123/servers" + + +# ====================================================================== +# HTTP methods +# ====================================================================== + + +class TestHttpMethods: + """Test that convenience methods delegate correctly to provider.""" + + def _service_client(self, handler: Any) -> ServiceClient: + """Build a ServiceClient with mocked transport.""" + provider = _make_provider(handler) + return ServiceClient( + provider, endpoint_override="https://api.example.com/v1", + ) + + def test_get(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"items": []}) + + sc = self._service_client(handler) + resp = sc.get("resources") + + assert resp.status_code == 200 + assert len(captured) == 1 + assert captured[0].method == "GET" + assert str(captured[0].url) == "https://api.example.com/v1/resources" + + def test_post_with_json(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(201, json={"id": "new-1"}) + + sc = self._service_client(handler) + resp = sc.post("resources", json={"name": "test"}) + + assert resp.status_code == 201 + body = json.loads(captured[0].content) + assert body["name"] == "test" + + def test_put(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={"updated": True}) + + sc = self._service_client(handler) + resp = sc.put("resources/123", json={"name": "updated"}) + + assert captured[0].method == "PUT" + assert "resources/123" in 
str(captured[0].url) + + def test_patch(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + sc = self._service_client(handler) + sc.patch("resources/123", json={"field": "val"}) + + assert captured[0].method == "PATCH" + + def test_delete(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(204, text="") + + sc = self._service_client(handler) + resp = sc.delete("resources/123") + + assert resp.status_code == 204 + assert captured[0].method == "DELETE" + + def test_delete_with_body(self) -> None: + """Go SDK has DeleteWithBody — our delete() supports json param.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + sc = self._service_client(handler) + sc.delete("resources/batch", json={"ids": ["a", "b"]}) + + body = json.loads(captured[0].content) + assert body["ids"] == ["a", "b"] + + def test_head(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(204, text="") + + sc = self._service_client(handler) + sc.head("resources/123", ok_codes=[204]) + + assert captured[0].method == "HEAD" + + def test_error_propagates(self) -> None: + """Non-ok status from provider should raise HttpError.""" + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(404, text="Not found") + + sc = self._service_client(handler) + with pytest.raises(NotFoundError): + sc.get("missing") + + +# ====================================================================== +# Headers +# ====================================================================== + + +class TestHeaders: + """Test header merging between service and request levels.""" + + def 
test_extra_headers_sent(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = _make_provider(handler) + sc = ServiceClient( + provider, + endpoint_override="https://api.example.com/v1", + extra_headers={"X-Service-Level": "important"}, + ) + sc.get("stuff") + + assert captured[0].headers["x-service-level"] == "important" + + def test_per_request_headers_override(self) -> None: + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = _make_provider(handler) + sc = ServiceClient( + provider, + endpoint_override="https://api.example.com/v1", + extra_headers={"X-Level": "service"}, + ) + sc.get("stuff", headers={"X-Level": "request"}) + + # Per-request header wins + assert captured[0].headers["x-level"] == "request" + + def test_auth_header_present(self) -> None: + """Token auth header should be set by provider.""" + captured: list[httpx.Request] = [] + + def handler(req: httpx.Request) -> httpx.Response: + captured.append(req) + return httpx.Response(200, json={}) + + provider = _make_provider(handler) + sc = ServiceClient( + provider, endpoint_override="https://api.example.com/v1", + ) + sc.get("stuff") + + assert captured[0].headers["x-auth-token"] == "test-token" + + +# ====================================================================== +# Custom ok_codes +# ====================================================================== + + +class TestCustomOkCodes: + def test_custom_ok_codes(self) -> None: + def handler(req: httpx.Request) -> httpx.Response: + return httpx.Response(204, text="") + + provider = _make_provider(handler) + sc = ServiceClient( + provider, endpoint_override="https://api.example.com/v1", + ) + resp = sc.post("action", ok_codes=[204]) + assert resp.status_code == 204 diff --git 
a/tests/unit/core/test_signer.py b/tests/unit/core/test_signer.py new file mode 100644 index 0000000..fea6d33 --- /dev/null +++ b/tests/unit/core/test_signer.py @@ -0,0 +1,409 @@ +"""Tests for sdk.core.signer.""" + +from datetime import datetime, timezone + +import httpx +import pytest + +from sdk.core.signer import ( + SIGN_ALGORITHM_HMAC_SHA256, + SignOptions, + _build_sign_key, + _build_sign_params, + _canonical_path, + _canonical_query, + _derive_signing_key, + _format_date, + _format_datetime, + _hash_sha256, + _hmac_sha256, + _signed_headers_string, + _use_payload_for_query, + re_sign_request, + sign_request, +) + +# Fixed timestamp for reproducible tests +FIXED_TIME = datetime(2024, 4, 15, 10, 30, 0, tzinfo=timezone.utc) +FIXED_DT_STR = "20240415T103000Z" +FIXED_DATE_STR = "20240415" + +OPTS = SignOptions( + access_key="TESTAKXXXXXXXX", + secret_key="TESTSKXXXXXXXXXXXXXXXXXXXXXXXX", + region_name="eu-de", + service_name="dns", +) + + +class TestFormatting: + def test_format_datetime(self): + assert _format_datetime(FIXED_TIME) == FIXED_DT_STR + + def test_format_date(self): + assert _format_date(FIXED_TIME) == FIXED_DATE_STR + + +class TestCryptoPrimitives: + def test_hash_sha256_empty(self): + result = _hash_sha256(b"") + assert result == ( + "e3b0c44298fc1c149afbf4c8996fb924" + "27ae41e4649b934ca495991b7852b855" + ) + + def test_hash_sha256_data(self): + result = _hash_sha256(b"hello") + assert result == ( + "2cf24dba5fb0a30e26e83b2ac5b9e29e" + "1b161e5c1fa7425e73043362938b9824" + ) + + def test_hmac_sha256(self): + result = _hmac_sha256("data", b"secret") + assert len(result) == 32 # SHA-256 always 32 bytes + + def test_build_sign_key_deterministic(self): + params1 = _build_sign_params(OPTS, FIXED_TIME) + params2 = _build_sign_params(OPTS, FIXED_TIME) + key1 = _build_sign_key(params1) + key2 = _build_sign_key(params2) + assert key1 == key2 + assert len(key1) == 32 + + def test_build_sign_key_different_date(self): + ts1 = datetime(2024, 4, 15, 10, 0, 0, 
tzinfo=timezone.utc) + ts2 = datetime(2024, 4, 16, 10, 0, 0, tzinfo=timezone.utc) + params1 = _build_sign_params(OPTS, ts1) + params2 = _build_sign_params(OPTS, ts2) + key1 = _build_sign_key(params1) + key2 = _build_sign_key(params2) + assert key1 != key2 + + +class TestCanonicalPath: + def test_simple_path(self): + req = httpx.Request("GET", "https://example.com/v2/zones") + path = _canonical_path(req) + assert path == "/v2/zones" + + def test_root_path(self): + req = httpx.Request("GET", "https://example.com/") + path = _canonical_path(req) + assert path == "/" + + def test_trailing_slash_preserved(self): + req = httpx.Request("GET", "https://example.com/v2/zones/") + path = _canonical_path(req) + assert path == "/v2/zones/" + + def test_no_double_encoding(self): + req = httpx.Request("GET", "https://example.com/v1/vpcs/some%20path") + path = _canonical_path(req) + assert "%2520" not in path + assert "%20" in path + + +class TestCanonicalQuery: + def test_no_params(self): + req = httpx.Request("GET", "https://example.com/v2/zones") + assert _canonical_query(req) == "" + + def test_sorted_params(self): + req = httpx.Request( + "GET", "https://example.com/v2/zones?name=test&limit=10", + ) + qs = _canonical_query(req) + assert "limit" in qs + assert "name" in qs + # 'limit' should come before 'name' alphabetically + assert qs.index("limit") < qs.index("name") + + def test_special_chars_encoded(self): + req = httpx.Request( + "GET", "https://example.com/test?key=hello world", + ) + qs = _canonical_query(req) + assert "hello%20world" in qs + + def test_duplicate_keys_preserved(self): + req = httpx.Request( + "GET", "https://example.com/v1?tag=b&tag=a&name=test", + ) + qs = _canonical_query(req) + assert "tag=b" in qs + assert "tag=a" in qs + assert "name=test" in qs + + def test_post_no_body_returns_empty(self): + req = httpx.Request( + "POST", "https://example.com/v1?action=start", + ) + assert _canonical_query(req) == "" + + +class TestUsePayloadForQuery: + def 
test_post_no_body(self): + req = httpx.Request( + "POST", "https://example.com/v1?action=start", + ) + assert _use_payload_for_query(req) is True + + def test_post_with_body(self): + req = httpx.Request( + "POST", "https://example.com/v1", + content=b'{"name": "test"}', + ) + assert _use_payload_for_query(req) is False + + def test_get_never_uses_payload(self): + req = httpx.Request("GET", "https://example.com/v1?x=1") + assert _use_payload_for_query(req) is False + + +class TestSignRequest: + def test_adds_authorization_header(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + assert "authorization" in req.headers + assert req.headers["authorization"].startswith( + SIGN_ALGORITHM_HMAC_SHA256, + ) + + def test_adds_sdk_date_header(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + assert req.headers["x-sdk-date"] == FIXED_DT_STR + + def test_adds_host_header(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + assert req.headers["host"] == "dns.eu-de.otc.t-systems.com" + + def test_authorization_contains_credential(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert f"Credential={OPTS.access_key}/" in auth + + def test_authorization_contains_signed_headers(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert "SignedHeaders=" in auth + assert "host" in auth + assert "x-sdk-date" in auth + + def test_authorization_contains_signature(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, 
OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert "Signature=" in auth + # Signature is hex, 64 chars + sig = auth.split("Signature=")[1] + assert len(sig) == 64 + + def test_deterministic_signature(self): + """Same request + same timestamp = same signature.""" + req1 = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + req2 = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req1, OPTS, timestamp=FIXED_TIME) + sign_request(req2, OPTS, timestamp=FIXED_TIME) + assert req1.headers["authorization"] == req2.headers["authorization"] + + def test_different_timestamp_different_signature(self): + ts1 = datetime(2024, 4, 15, 10, 0, 0, tzinfo=timezone.utc) + ts2 = datetime(2024, 4, 15, 11, 0, 0, tzinfo=timezone.utc) + req1 = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + req2 = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req1, OPTS, timestamp=ts1) + sign_request(req2, OPTS, timestamp=ts2) + assert req1.headers["authorization"] != req2.headers["authorization"] + + def test_post_with_body(self): + req = httpx.Request( + "POST", + "https://dns.eu-de.otc.t-systems.com/v2/zones", + json={"name": "example.com.", "zone_type": "public"}, + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + assert req.headers["authorization"].startswith( + SIGN_ALGORITHM_HMAC_SHA256, + ) + + def test_scope_format(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + expected_scope = f"{FIXED_DATE_STR}/eu-de/dns/sdk_request" + assert expected_scope in auth + + def test_whitespace_trimmed_from_keys(self): + opts = SignOptions( + access_key=" AK_PADDED ", + secret_key=" SK_PADDED ", + region_name="eu-de", + service_name="dns", + ) + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + 
) + sign_request(req, opts, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert "Credential=AK_PADDED/" in auth + + +class TestReSignRequest: + def test_re_sign_overwrites_date(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + old_date = req.headers["x-sdk-date"] + + ts2 = datetime(2024, 4, 15, 10, 31, 0, tzinfo=timezone.utc) + re_sign_request(req, OPTS, timestamp=ts2) + assert req.headers["x-sdk-date"] != old_date + assert req.headers["x-sdk-date"] == "20240415T103100Z" + + def test_re_sign_produces_new_authorization(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + old_auth = req.headers["authorization"] + + ts2 = datetime(2024, 4, 15, 10, 31, 0, tzinfo=timezone.utc) + re_sign_request(req, OPTS, timestamp=ts2) + assert req.headers["authorization"] != old_auth + + +class TestAlgorithmValidation: + def test_unsupported_algorithm_raises(self): + opts = SignOptions( + access_key="AK", + secret_key="SK", + sign_algorithm="UNSUPPORTED-ALG", + ) + req = httpx.Request("GET", "https://example.com/test") + with pytest.raises(ValueError, match="Unsupported"): + sign_request(req, opts) + + def test_default_algorithm(self): + req = httpx.Request( + "GET", "https://dns.eu-de.otc.t-systems.com/v2/zones", + ) + sign_request(req, OPTS, timestamp=FIXED_TIME) + auth = req.headers["authorization"] + assert auth.startswith(SIGN_ALGORITHM_HMAC_SHA256) + + +class TestSignKeyCache: + def test_cached_key_matches_uncached(self): + opts_cached = SignOptions( + access_key="AK", + secret_key="SK_CACHE_TEST", + region_name="eu-de", + service_name="vpc", + enable_cache_sign_key=True, + ) + opts_uncached = SignOptions( + access_key="AK", + secret_key="SK_CACHE_TEST", + region_name="eu-de", + service_name="vpc", + enable_cache_sign_key=False, + ) + params_cached = _build_sign_params(opts_cached, 
FIXED_TIME) + params_uncached = _build_sign_params(opts_uncached, FIXED_TIME) + + key_cached = _derive_signing_key(params_cached) + key_uncached = _derive_signing_key(params_uncached) + assert key_cached == key_uncached + + def test_cache_returns_same_key_same_day(self): + opts = SignOptions( + access_key="AK", + secret_key="SK_SAME_DAY", + region_name="eu-de", + service_name="vpc", + enable_cache_sign_key=True, + ) + ts1 = datetime(2024, 6, 15, 8, 0, 0, tzinfo=timezone.utc) + ts2 = datetime(2024, 6, 15, 20, 0, 0, tzinfo=timezone.utc) + + params1 = _build_sign_params(opts, ts1) + params2 = _build_sign_params(opts, ts2) + + key1 = _derive_signing_key(params1) + key2 = _derive_signing_key(params2) + assert key1 == key2 + + def test_cache_invalidates_on_new_day(self): + opts = SignOptions( + access_key="AK", + secret_key="SK_NEW_DAY", + region_name="eu-de", + service_name="vpc", + enable_cache_sign_key=True, + ) + ts1 = datetime(2024, 6, 15, 23, 0, 0, tzinfo=timezone.utc) + ts2 = datetime(2024, 6, 16, 1, 0, 0, tzinfo=timezone.utc) + + params1 = _build_sign_params(opts, ts1) + params2 = _build_sign_params(opts, ts2) + + key1 = _derive_signing_key(params1) + key2 = _derive_signing_key(params2) + assert key1 != key2 + + +class TestTimeOffset: + def test_offset_shifts_signing_time(self): + opts_no_offset = SignOptions( + access_key="AK", + secret_key="SK", + region_name="eu-de", + service_name="vpc", + time_offset_seconds=0, + ) + opts_with_offset = SignOptions( + access_key="AK", + secret_key="SK", + region_name="eu-de", + service_name="vpc", + time_offset_seconds=3600, + ) + req1 = httpx.Request( + "GET", "https://vpc.eu-de.otc.t-systems.com/v1/vpcs", + ) + req2 = httpx.Request( + "GET", "https://vpc.eu-de.otc.t-systems.com/v1/vpcs", + ) + sign_request(req1, opts_no_offset, timestamp=FIXED_TIME) + sign_request(req2, opts_with_offset, timestamp=FIXED_TIME) + # Same timestamp but offset makes them different + assert req1.headers["authorization"] != 
req2.headers["authorization"] + assert req1.headers["x-sdk-date"] != req2.headers["x-sdk-date"] diff --git a/tests/unit/core/test_smoke.py b/tests/unit/core/test_smoke.py new file mode 100644 index 0000000..f4d2307 --- /dev/null +++ b/tests/unit/core/test_smoke.py @@ -0,0 +1,6 @@ +"""Smoke test — verify the package is importable.""" +import sdk + +def test_import(): + + assert sdk.__version__ == "0.1.0" \ No newline at end of file diff --git a/tests/unit/services/vpc/v1/test_models.py b/tests/unit/services/vpc/v1/test_models.py new file mode 100644 index 0000000..8a4c508 --- /dev/null +++ b/tests/unit/services/vpc/v1/test_models.py @@ -0,0 +1,203 @@ +"""Tests for ``sdk.services.vpc.v1.models``.""" + +from sdk.services.vpc.v1.models import ( + CreateVpcOpts, + ListVpcsOpts, + Route, + UpdateVpcOpts, + Vpc, +) + + +# ====================================================================== +# Vpc response model +# ====================================================================== + + +class TestVpc: + """Tests for Vpc model parsing.""" + + SAMPLE_RESPONSE = { + "id": "99d9d709-8478-4b46-9f3f-2206b1023fd3", + "name": "vpc", + "description": "test", + "cidr": "192.168.0.0/16", + "status": "OK", + "enterprise_project_id": "0", + "routes": [], + "enable_shared_snat": False, + "tenant_id": "087679f0aa80d32a2f4ec0172f5e902b", + "created_at": "2022-12-15T02:25:11", + "updated_at": "2022-12-15T02:25:11", + } + + def test_parse_full_response(self): + vpc = Vpc.model_validate(self.SAMPLE_RESPONSE) + assert vpc.id == "99d9d709-8478-4b46-9f3f-2206b1023fd3" + assert vpc.name == "vpc" + assert vpc.description == "test" + assert vpc.cidr == "192.168.0.0/16" + assert vpc.status == "OK" + assert vpc.enterprise_project_id == "0" + assert vpc.routes == [] + assert vpc.enable_shared_snat is False + assert vpc.tenant_id == "087679f0aa80d32a2f4ec0172f5e902b" + assert vpc.created_at == "2022-12-15T02:25:11" + assert vpc.updated_at == "2022-12-15T02:25:11" + + def 
test_parse_with_routes(self): + data = { + **self.SAMPLE_RESPONSE, + "routes": [ + {"destination": "10.0.0.0/8", "nexthop": "192.168.0.1"}, + {"destination": "172.16.0.0/12", "nexthop": "192.168.0.2"}, + ], + } + vpc = Vpc.model_validate(data) + assert len(vpc.routes) == 2 + assert vpc.routes[0].destination == "10.0.0.0/8" + assert vpc.routes[0].nexthop == "192.168.0.1" + assert vpc.routes[1].destination == "172.16.0.0/12" + + def test_parse_creating_status(self): + data = {**self.SAMPLE_RESPONSE, "status": "CREATING"} + vpc = Vpc.model_validate(data) + assert vpc.status == "CREATING" + + def test_minimal_response(self): + """API always returns id; other fields have defaults.""" + vpc = Vpc.model_validate({"id": "abc-123"}) + assert vpc.id == "abc-123" + assert vpc.name == "" + assert vpc.routes == [] + assert vpc.enable_shared_snat is False + + +# ====================================================================== +# Route model +# ====================================================================== + + +class TestRoute: + def test_route_defaults(self): + r = Route() + assert r.destination == "" + assert r.nexthop == "" + + def test_route_from_dict(self): + r = Route.model_validate( + {"destination": "10.0.0.0/8", "nexthop": "192.168.0.1"} + ) + assert r.destination == "10.0.0.0/8" + assert r.nexthop == "192.168.0.1" + + +# ====================================================================== +# CreateVpcOpts +# ====================================================================== + + +class TestCreateVpcOpts: + def test_full_body(self): + opts = CreateVpcOpts( + name="my-vpc", + description="test vpc", + cidr="192.168.0.0/16", + enterprise_project_id="0aad99bc-f5f6-4f78-8404-c598d76b0ed2", + ) + body = opts.to_request_body() + assert body == { + "vpc": { + "name": "my-vpc", + "description": "test vpc", + "cidr": "192.168.0.0/16", + "enterprise_project_id": "0aad99bc-f5f6-4f78-8404-c598d76b0ed2", + } + } + + def test_minimal_body(self): + """Empty opts => 
empty vpc dict (all fields optional).""" + opts = CreateVpcOpts() + body = opts.to_request_body() + assert body == {"vpc": {}} + + def test_partial_body(self): + opts = CreateVpcOpts(name="test", cidr="10.0.0.0/8") + body = opts.to_request_body() + assert body == {"vpc": {"name": "test", "cidr": "10.0.0.0/8"}} + + +# ====================================================================== +# UpdateVpcOpts +# ====================================================================== + + +class TestUpdateVpcOpts: + def test_full_body(self): + opts = UpdateVpcOpts( + name="vpc1", + description="updated", + cidr="192.168.0.0/16", + routes=[Route(destination="10.0.0.0/8", nexthop="192.168.0.1")], + ) + body = opts.to_request_body() + assert body == { + "vpc": { + "name": "vpc1", + "description": "updated", + "cidr": "192.168.0.0/16", + "routes": [ + {"destination": "10.0.0.0/8", "nexthop": "192.168.0.1"} + ], + } + } + + def test_empty_routes_list(self): + """Explicitly passing empty list clears routes.""" + opts = UpdateVpcOpts(routes=[]) + body = opts.to_request_body() + assert body == {"vpc": {"routes": []}} + + def test_none_routes_omitted(self): + """routes=None means don't update routes.""" + opts = UpdateVpcOpts(name="new-name") + body = opts.to_request_body() + assert body == {"vpc": {"name": "new-name"}} + assert "routes" not in body["vpc"] + + def test_empty_body(self): + opts = UpdateVpcOpts() + body = opts.to_request_body() + assert body == {"vpc": {}} + + +# ====================================================================== +# ListVpcsOpts +# ====================================================================== + + +class TestListVpcsOpts: + def test_full_params(self): + opts = ListVpcsOpts( + id="abc", + limit=10, + marker="xyz", + enterprise_project_id="0", + ) + params = opts.to_query_params() + assert params == { + "id": "abc", + "limit": "10", + "marker": "xyz", + "enterprise_project_id": "0", + } + + def test_empty_params(self): + opts = ListVpcsOpts() 
+ params = opts.to_query_params() + assert params == {} + + def test_partial_params(self): + opts = ListVpcsOpts(limit=50) + params = opts.to_query_params() + assert params == {"limit": "50"} diff --git a/tests/unit/services/vpc/v1/test_requests.py b/tests/unit/services/vpc/v1/test_requests.py new file mode 100644 index 0000000..f15b132 --- /dev/null +++ b/tests/unit/services/vpc/v1/test_requests.py @@ -0,0 +1,244 @@ +"""Tests for ``sdk.services.vpc.v1.requests``.""" + +from __future__ import annotations +from unittest.mock import MagicMock, PropertyMock + +import httpx + +from sdk.services.vpc.v1 import requests as vpc +from sdk.services.vpc.v1.models import ( + CreateVpcOpts, + ListVpcsOpts, + Route, + UpdateVpcOpts, +) + + +# ====================================================================== +# Helpers +# ====================================================================== + +VPC_RESPONSE = { + "id": "99d9d709-8478-4b46-9f3f-2206b1023fd3", + "name": "vpc", + "description": "test", + "cidr": "192.168.0.0/16", + "status": "OK", + "enterprise_project_id": "0", + "routes": [], + "enable_shared_snat": False, + "tenant_id": "087679f0aa80d32a2f4ec0172f5e902b", + "created_at": "2022-12-15T02:25:11", + "updated_at": "2022-12-15T02:25:11", +} + +PROJECT_ID = "087679f0aa80d32a2f4ec0172f5e902b" + + +def _make_response(body: dict) -> httpx.Response: + """Create a fake httpx.Response with JSON body.""" + resp = httpx.Response( + status_code=200, + json=body, + request=httpx.Request("GET", "https://example.com"), + ) + return resp + + +def _make_client() -> MagicMock: + """Create a mock ServiceClient with project_id.""" + client = MagicMock() + # provider.project_id accessed via urls.py + type(client.provider).project_id = PropertyMock(return_value=PROJECT_ID) + return client + + +# ====================================================================== +# Create +# ====================================================================== + + +class TestCreate: + def 
test_create_sends_post(self): + client = _make_client() + client.post.return_value = _make_response({"vpc": VPC_RESPONSE}) + + opts = CreateVpcOpts(name="vpc", cidr="192.168.0.0/16") + result = vpc.create(client, opts) + + client.post.assert_called_once_with( + f"vpcs", + json={"vpc": {"name": "vpc", "cidr": "192.168.0.0/16"}}, + ) + assert result.id == "99d9d709-8478-4b46-9f3f-2206b1023fd3" + assert result.name == "vpc" + assert result.status == "OK" + + def test_create_with_enterprise_project(self): + client = _make_client() + client.post.return_value = _make_response({"vpc": VPC_RESPONSE}) + + opts = CreateVpcOpts( + name="vpc", + enterprise_project_id="0aad99bc", + ) + vpc.create(client, opts) + + call_body = client.post.call_args[1]["json"] + assert call_body["vpc"]["enterprise_project_id"] == "0aad99bc" + + +# ====================================================================== +# Get +# ====================================================================== + + +class TestGet: + def test_get_sends_get(self): + client = _make_client() + client.get.return_value = _make_response({"vpc": VPC_RESPONSE}) + + vpc_id = "99d9d709-8478-4b46-9f3f-2206b1023fd3" + result = vpc.get(client, vpc_id) + + client.get.assert_called_once_with( + f"vpcs/{vpc_id}", + ) + assert result.id == vpc_id + assert result.cidr == "192.168.0.0/16" + + +# ====================================================================== +# List +# ====================================================================== + + +class TestList: + def test_list_single_page(self): + """List with limit — returns fewer items than limit, stops.""" + client = _make_client() + vpcs_data = [ + {**VPC_RESPONSE, "id": "aaa", "name": "vpc1"}, + {**VPC_RESPONSE, "id": "bbb", "name": "vpc2"}, + ] + + client.get.return_value = _make_response({"vpcs": vpcs_data}) + + # limit=5, but only 2 returned → single page + opts = ListVpcsOpts(limit=5) + results = list(vpc.list(client, opts)) + + assert len(results) == 2 + assert 
results[0].id == "aaa" + assert results[1].id == "bbb" + assert client.get.call_count == 1 + + def test_list_with_opts(self): + client = _make_client() + client.get.return_value = _make_response({"vpcs": []}) + + opts = ListVpcsOpts(limit=10, enterprise_project_id="0") + list(vpc.list(client, opts)) + + call_url = client.get.call_args[0][0] + assert f"vpcs" in call_url + + def test_list_pagination(self): + """List follows marker pagination across two pages.""" + client = _make_client() + + page1 = [ + {**VPC_RESPONSE, "id": "id-1", "name": "vpc1"}, + {**VPC_RESPONSE, "id": "id-2", "name": "vpc2"}, + ] + page2 = [ + {**VPC_RESPONSE, "id": "id-3", "name": "vpc3"}, + ] + + client.get.side_effect = [ + _make_response({"vpcs": page1}), + _make_response({"vpcs": page2}), + ] + + # limit=2: page1 has 2 items (==limit) → fetch next; + # page2 has 1 item (