diff --git a/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml index eb7652b..3fcb11b 100644 --- a/.github/workflows/documentation.yaml +++ b/.github/workflows/documentation.yaml @@ -5,6 +5,7 @@ on: branches: - main pull_request: + types: [opened, reopened, synchronize] jobs: doc-build: diff --git a/.github/workflows/lint-actions.yaml b/.github/workflows/lint-actions.yaml index 21cdbb3..4563a11 100644 --- a/.github/workflows/lint-actions.yaml +++ b/.github/workflows/lint-actions.yaml @@ -3,6 +3,7 @@ name: Python-Lint on: push: pull_request: + types: [reopened, opened] jobs: Black: @@ -15,14 +16,8 @@ jobs: python-version: '3.11' - name: Check out Git repository uses: actions/checkout@v3 - - name: Install and configure Poetry - uses: snok/install-poetry@v1 - with: - version: 1.4.2 - virtualenvs-create: false - virtualenvs-path: ~/local/share/virtualenvs - - run: poetry install --without develop,doc,test - - name: Run linters + - run: pip install black + - name: Run black run: | black src tests @@ -43,7 +38,7 @@ jobs: virtualenvs-create: false virtualenvs-path: ~/local/share/virtualenvs - run: poetry install --without develop,doc - - name: Run linters + - name: Run mypy run: | mypy src tests @@ -53,7 +48,7 @@ jobs: - uses: actions/checkout@v3 - uses: chartboost/ruff-action@v1 with: - version: 0.0.287 + version: 0.0.290 python-requirements: name: Check Python Requirements diff --git a/.github/workflows/test-and-build-action.yaml b/.github/workflows/test-and-build-action.yaml index 758f932..b69a149 100644 --- a/.github/workflows/test-and-build-action.yaml +++ b/.github/workflows/test-and-build-action.yaml @@ -2,6 +2,7 @@ name: Test & Build on: pull_request: + types: [reopened, opened, synchronize] push: jobs: @@ -58,11 +59,12 @@ jobs: with: coverageFile: coverage.xml token: ${{ secrets.GITHUB_TOKEN }} - thresholdAll: 0.8 - thresholdNew: 0.8 - thresholdModified: 0.8 + thresholdAll: 0.7 + thresholdNew: 0.7 + thresholdModified: 0.7 coverage_report: + if: ${{ github.event_name == 'push' }} runs-on: ubuntu-latest needs: test steps: @@ -99,6 +101,7 @@ jobs: build: runs-on: ubuntu-latest needs: test + if: ${{ github.event_name == 'push' }} steps: - name: Setup Python uses: actions/setup-python@v4 diff --git a/.gitignore b/.gitignore index 5371c9d..f536711 100644 --- a/.gitignore +++ b/.gitignore @@ -3,6 +3,9 @@ *secrets.json *secrets_prod.json +# Settings +.vscode/ + # Snyk .dccache @@ -217,3 +220,5 @@ replay_pid* /unit-test-results.xml /lbr-audit.md /.ruff_cache/ +/Jupyter/test.ipynb +/secrets*.json diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index c3c7048..922d607 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -26,13 +26,13 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit # Ruff version. 
- rev: v0.0.287 + rev: v0.0.290 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/psf/black - rev: 23.9.0 + rev: 23.9.1 hooks: - id: black args: [--config=pyproject.toml] @@ -65,6 +65,8 @@ repos: - types-setuptools - types-requests - types-pyOpenSSL + - types-cachetools + - loguru-mypy - repo: https://github.com/frnmst/md-toc rev: 8.2.0 diff --git a/.vscode/settings.json b/.vscode/settings.json deleted file mode 100644 index 7e6882b..0000000 --- a/.vscode/settings.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "files.eol": "\n" -} diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index ac2c1ed..e575007 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -2,7 +2,7 @@ ## Dev Setup -- [Install Python 3.11](https://www.python.org/downloads/release/python-3111/) +- [Install Python 3.11](https://www.python.org/downloads/release/python-3115/) - [Install Poetry](https://python-poetry.org/docs/#installation) - [Install GiT](https://git-scm.com/book/en/v2/Getting-Started-Installing-Git) - [Configure GiT](https://support.atlassian.com/bitbucket-cloud/docs/configure-your-dvcs-username-for-commits/) diff --git a/README.md b/README.md index 1e5e308..1f11c25 100644 --- a/README.md +++ b/README.md @@ -12,15 +12,22 @@ See the [CONTRIBUTING.md](CONTRIBUTING.md) about how code should be formatted and what kind of rules we set ourselves. +## Available entrypoints + +The project currently has the following entrypoints available: + +- data-transfer > Transfers all data from the MongoDB into the SQL DB to make it available as production data. +- reset-sql > Resets all SQL tables in the connected DB. + ## DB Connection settings -To connect to the SQL db see [sql/connector.py](./src/aki_prj23_transparenzregister/utils/postgres/connector.py) +To connect to the SQL db see [sql/connector.py](./src/aki_prj23_transparenzregister/utils/sql/connector.py) To connect to the Mongo db see [connect] Create a `secrets.json` in the root of this repo with the following structure (values to be replaces by desired config): ```json - { +{ "postgres": { "username": "postgres", "password": "postgres", @@ -38,8 +45,10 @@ Create a `secrets.json` in the root of this repo with the following structure (v } ``` -Alternatively, the secrets can be provided as environment variables.
One option to do so is to add a `.env` file with +the following layout: + +``` PYTHON_POSTGRES_USERNAME=postgres PYTHON_POSTGRES_PASSWORD=postgres PYTHON_POSTGRES_HOST=localhost diff --git a/documentations/conf.py b/documentations/conf.py index 7f6dfc7..e23b762 100644 --- a/documentations/conf.py +++ b/documentations/conf.py @@ -43,6 +43,7 @@ extensions: Final[list[str]] = [ "sphinx.ext.viewcode", "IPython.sphinxext.ipython_console_highlighting", "sphinxcontrib.mermaid", + "notfound.extension", ] # templates_path : Final[list[str]] = ["_templates"] @@ -86,3 +87,5 @@ intersphinx_mapping: Final[dict[str, tuple[str, None]]] = { "scikit-learn": ("https://scikit-learn.org/stable/", None), "sphinx": ("https://docs.sympy.org/latest/", None), } + +notfound_urls_prefix = "/" diff --git a/poetry.lock b/poetry.lock index 06d4b01..66d74c1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -322,6 +322,17 @@ dev = ["CacheControl[filecache,redis]", "black", "build", "cherrypy", "mypy", "p filecache = ["filelock (>=3.8.0)"] redis = ["redis (>=2.10.5)"] +[[package]] +name = "cachetools" +version = "5.3.1" +description = "Extensible memoizing collections and decorators" +optional = false +python-versions = ">=3.7" +files = [ + {file = "cachetools-5.3.1-py3-none-any.whl", hash = "sha256:95ef631eeaea14ba2e36f06437f36463aac3a096799e876ee55e5cdccb102590"}, + {file = "cachetools-5.3.1.tar.gz", hash = "sha256:dce83f2d9b4e1f732a8cd44af8e8fab2dbe46201467fc98b3ef8f269092bf62b"}, +] + [[package]] name = "certifi" version = "2023.7.22" @@ -623,6 +634,77 @@ mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.2.0)", "types-Pill test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] test-no-images = ["pytest", "pytest-cov", "wurlitzer"] +[[package]] +name = "contourpy" +version = "1.1.1" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.8" +files = [ + {file = "contourpy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:46e24f5412c948d81736509377e255f6040e94216bf1a9b5ea1eaa9d29f6ec1b"}, + {file = "contourpy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0e48694d6a9c5a26ee85b10130c77a011a4fedf50a7279fa0bdaf44bafb4299d"}, + {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a66045af6cf00e19d02191ab578a50cb93b2028c3eefed999793698e9ea768ae"}, + {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ebf42695f75ee1a952f98ce9775c873e4971732a87334b099dde90b6af6a916"}, + {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6aec19457617ef468ff091669cca01fa7ea557b12b59a7908b9474bb9674cf0"}, + {file = "contourpy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:462c59914dc6d81e0b11f37e560b8a7c2dbab6aca4f38be31519d442d6cde1a1"}, + {file = "contourpy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6d0a8efc258659edc5299f9ef32d8d81de8b53b45d67bf4bfa3067f31366764d"}, + {file = "contourpy-1.1.1-cp310-cp310-win32.whl", hash = "sha256:d6ab42f223e58b7dac1bb0af32194a7b9311065583cc75ff59dcf301afd8a431"}, + {file = "contourpy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:549174b0713d49871c6dee90a4b499d3f12f5e5f69641cd23c50a4542e2ca1eb"}, + {file = "contourpy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:407d864db716a067cc696d61fa1ef6637fedf03606e8417fe2aeed20a061e6b2"}, + {file = "contourpy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:dfe80c017973e6a4c367e037cb31601044dd55e6bfacd57370674867d15a899b"}, + {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e30aaf2b8a2bac57eb7e1650df1b3a4130e8d0c66fc2f861039d507a11760e1b"}, + {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3de23ca4f381c3770dee6d10ead6fff524d540c0f662e763ad1530bde5112532"}, + {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:566f0e41df06dfef2431defcfaa155f0acfa1ca4acbf8fd80895b1e7e2ada40e"}, + {file = "contourpy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04c2f0adaf255bf756cf08ebef1be132d3c7a06fe6f9877d55640c5e60c72c5"}, + {file = "contourpy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0c188ae66b772d9d61d43c6030500344c13e3f73a00d1dc241da896f379bb62"}, + {file = "contourpy-1.1.1-cp311-cp311-win32.whl", hash = "sha256:0683e1ae20dc038075d92e0e0148f09ffcefab120e57f6b4c9c0f477ec171f33"}, + {file = "contourpy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:8636cd2fc5da0fb102a2504fa2c4bea3cbc149533b345d72cdf0e7a924decc45"}, + {file = "contourpy-1.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:560f1d68a33e89c62da5da4077ba98137a5e4d3a271b29f2f195d0fba2adcb6a"}, + {file = "contourpy-1.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24216552104ae8f3b34120ef84825400b16eb6133af2e27a190fdc13529f023e"}, + {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:56de98a2fb23025882a18b60c7f0ea2d2d70bbbcfcf878f9067234b1c4818442"}, + {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:07d6f11dfaf80a84c97f1a5ba50d129d9303c5b4206f776e94037332e298dda8"}, + {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1eaac5257a8f8a047248d60e8f9315c6cff58f7803971170d952555ef6344a7"}, + {file = "contourpy-1.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:19557fa407e70f20bfaba7d55b4d97b14f9480856c4fb65812e8a05fe1c6f9bf"}, + {file = "contourpy-1.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:081f3c0880712e40effc5f4c3b08feca6d064cb8cfbb372ca548105b86fd6c3d"}, + {file = "contourpy-1.1.1-cp312-cp312-win32.whl", hash = "sha256:059c3d2a94b930f4dafe8105bcdc1b21de99b30b51b5bce74c753686de858cb6"}, + {file = "contourpy-1.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:f44d78b61740e4e8c71db1cf1fd56d9050a4747681c59ec1094750a658ceb970"}, + {file = "contourpy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:70e5a10f8093d228bb2b552beeb318b8928b8a94763ef03b858ef3612b29395d"}, + {file = "contourpy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8394e652925a18ef0091115e3cc191fef350ab6dc3cc417f06da66bf98071ae9"}, + {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5bd5680f844c3ff0008523a71949a3ff5e4953eb7701b28760805bc9bcff217"}, + {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:66544f853bfa85c0d07a68f6c648b2ec81dafd30f272565c37ab47a33b220684"}, + {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e0c02b75acfea5cab07585d25069207e478d12309557f90a61b5a3b4f77f46ce"}, + {file = "contourpy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41339b24471c58dc1499e56783fedc1afa4bb018bcd035cfb0ee2ad2a7501ef8"}, + 
{file = "contourpy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f29fb0b3f1217dfe9362ec55440d0743fe868497359f2cf93293f4b2701b8251"}, + {file = "contourpy-1.1.1-cp38-cp38-win32.whl", hash = "sha256:f9dc7f933975367251c1b34da882c4f0e0b2e24bb35dc906d2f598a40b72bfc7"}, + {file = "contourpy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:498e53573e8b94b1caeb9e62d7c2d053c263ebb6aa259c81050766beb50ff8d9"}, + {file = "contourpy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ba42e3810999a0ddd0439e6e5dbf6d034055cdc72b7c5c839f37a7c274cb4eba"}, + {file = "contourpy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6c06e4c6e234fcc65435223c7b2a90f286b7f1b2733058bdf1345d218cc59e34"}, + {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca6fab080484e419528e98624fb5c4282148b847e3602dc8dbe0cb0669469887"}, + {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:93df44ab351119d14cd1e6b52a5063d3336f0754b72736cc63db59307dabb718"}, + {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eafbef886566dc1047d7b3d4b14db0d5b7deb99638d8e1be4e23a7c7ac59ff0f"}, + {file = "contourpy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efe0fab26d598e1ec07d72cf03eaeeba8e42b4ecf6b9ccb5a356fde60ff08b85"}, + {file = "contourpy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:f08e469821a5e4751c97fcd34bcb586bc243c39c2e39321822060ba902eac49e"}, + {file = "contourpy-1.1.1-cp39-cp39-win32.whl", hash = "sha256:bfc8a5e9238232a45ebc5cb3bfee71f1167064c8d382cadd6076f0d51cff1da0"}, + {file = "contourpy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:c84fdf3da00c2827d634de4fcf17e3e067490c4aea82833625c4c8e6cdea0887"}, + {file = "contourpy-1.1.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:229a25f68046c5cf8067d6d6351c8b99e40da11b04d8416bf8d2b1d75922521e"}, + {file = "contourpy-1.1.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10dab5ea1bd4401c9483450b5b0ba5416be799bbd50fc7a6cc5e2a15e03e8a3"}, + {file = "contourpy-1.1.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4f9147051cb8fdb29a51dc2482d792b3b23e50f8f57e3720ca2e3d438b7adf23"}, + {file = "contourpy-1.1.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a75cc163a5f4531a256f2c523bd80db509a49fc23721b36dd1ef2f60ff41c3cb"}, + {file = "contourpy-1.1.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b53d5769aa1f2d4ea407c65f2d1d08002952fac1d9e9d307aa2e1023554a163"}, + {file = "contourpy-1.1.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:11b836b7dbfb74e049c302bbf74b4b8f6cb9d0b6ca1bf86cfa8ba144aedadd9c"}, + {file = "contourpy-1.1.1.tar.gz", hash = "sha256:96ba37c2e24b7212a77da85004c38e7c4d155d3e72a45eeaf22c1f03f607e8ab"}, +] + +[package.dependencies] +numpy = {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""} + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.4.1)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "wurlitzer"] + [[package]] name = "coverage" version = "7.3.1" @@ -689,34 +771,34 @@ toml = ["tomli"] [[package]] name = "cryptography" -version = "41.0.3" +version = "41.0.4" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false python-versions = ">=3.7" files = [ - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, - {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, - {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, - {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, - {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, - {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, - {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, - {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, - {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, - {file = "cryptography-41.0.3.tar.gz", hash = 
"sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:80907d3faa55dc5434a16579952ac6da800935cd98d14dbd62f6f042c7f5e839"}, + {file = "cryptography-41.0.4-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:35c00f637cd0b9d5b6c6bd11b6c3359194a8eba9c46d4e875a3660e3b400005f"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cecfefa17042941f94ab54f769c8ce0fe14beff2694e9ac684176a2535bf9714"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e40211b4923ba5a6dc9769eab704bdb3fbb58d56c5b336d30996c24fcf12aadb"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:23a25c09dfd0d9f28da2352503b23e086f8e78096b9fd585d1d14eca01613e13"}, + {file = "cryptography-41.0.4-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2ed09183922d66c4ec5fdaa59b4d14e105c084dd0febd27452de8f6f74704143"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:5a0f09cefded00e648a127048119f77bc2b2ec61e736660b5789e638f43cc397"}, + {file = "cryptography-41.0.4-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:9eeb77214afae972a00dee47382d2591abe77bdae166bda672fb1e24702a3860"}, + {file = "cryptography-41.0.4-cp37-abi3-win32.whl", hash = "sha256:3b224890962a2d7b57cf5eeb16ccaafba6083f7b811829f00476309bce2fe0fd"}, + {file = "cryptography-41.0.4-cp37-abi3-win_amd64.whl", hash = "sha256:c880eba5175f4307129784eca96f4e70b88e57aa3f680aeba3bab0e980b0f37d"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:004b6ccc95943f6a9ad3142cfabcc769d7ee38a3f60fb0dddbfb431f818c3a67"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:86defa8d248c3fa029da68ce61fe735432b047e32179883bdb1e79ed9bb8195e"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:37480760ae08065437e6573d14be973112c9e6dcaf5f11d00147ee74f37a3829"}, + {file = "cryptography-41.0.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:b5f4dfe950ff0479f1f00eda09c18798d4f49b98f4e2006d644b3301682ebdca"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7e53db173370dea832190870e975a1e09c86a879b613948f09eb49324218c14d"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:5b72205a360f3b6176485a333256b9bcd48700fc755fef51c8e7e67c4b63e3ac"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:93530900d14c37a46ce3d6c9e6fd35dbe5f5601bf6b3a5c325c7bffc030344d9"}, + {file = "cryptography-41.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:efc8ad4e6fc4f1752ebfb58aefece8b4e3c4cae940b0994d43649bdfce8d0d4f"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c3391bd8e6de35f6f1140e50aaeb3e2b3d6a9012536ca23ab0d9c35ec18c8a91"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0d9409894f495d465fe6fda92cb70e8323e9648af912d5b9141d616df40a87b8"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:8ac4f9ead4bbd0bc8ab2d318f97d85147167a488be0e08814a37eb2f439d5cf6"}, + {file = "cryptography-41.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:047c4603aeb4bbd8db2756e38f5b8bd7e94318c047cfe4efeb5d715e08b49311"}, + {file = "cryptography-41.0.4.tar.gz", hash = 
"sha256:7febc3094125fc126a7f6fb1f420d0da639f3f32cb15c8ff0dc3997c4549f51a"}, ] [package.dependencies] @@ -1171,7 +1253,7 @@ lxml = "^4.6.3" mapbox-vector-tile = "^1.2.1" more-itertools = "^8.10.0" numpy = "^1.19.0" -onnxruntime = "^1.10.0" +onnxruntime = "1.15.1" pandas = "^2.0.3" Pillow = "^10.0.0" protobuf = ">=3.0,<4.0" @@ -1205,7 +1287,7 @@ weiterbildungssuche = ["de-weiterbildungssuche (>=0.1.0,<0.2.0)"] type = "git" url = "https://github.com/TrisNol/deutschland.git" reference = "hotfix/python-3.11-support" -resolved_reference = "507901b95cffa8dede08b69adab23dd8eda92d87" +resolved_reference = "9e47ccc9736b9fb49c1dbe21cb9cd96f4845887b" [[package]] name = "distlib" @@ -1248,6 +1330,17 @@ files = [ {file = "docutils-0.18.1.tar.gz", hash = "sha256:679987caf361a7539d76e584cbeddc311e3aee937877c87346f31debc63e9d06"}, ] +[[package]] +name = "et-xmlfile" +version = "1.1.0" +description = "An implementation of lxml.xmlfile for the standard library" +optional = false +python-versions = ">=3.6" +files = [ + {file = "et_xmlfile-1.1.0-py3-none-any.whl", hash = "sha256:a2ba85d1d6a74ef63837eed693bcb89c3f752169b0e3e7ae5b16ca5e1b3deada"}, + {file = "et_xmlfile-1.1.0.tar.gz", hash = "sha256:8eb9e2bc2f8c97e37a2dc85a09ecdcdec9d8a396530a6d5a33b30b9a92da0c5c"}, +] + [[package]] name = "exceptiongroup" version = "1.1.3" @@ -1578,13 +1671,13 @@ pyreadline3 = {version = "*", markers = "sys_platform == \"win32\" and python_ve [[package]] name = "identify" -version = "2.5.28" +version = "2.5.29" description = "File identification library for Python" optional = false python-versions = ">=3.8" files = [ - {file = "identify-2.5.28-py2.py3-none-any.whl", hash = "sha256:87816de144bf46d161bd5b3e8f5596b16cade3b80be537087334b26bc5c177f3"}, - {file = "identify-2.5.28.tar.gz", hash = "sha256:94bb59643083ebd60dc996d043497479ee554381fbc5307763915cda49b0e78f"}, + {file = "identify-2.5.29-py2.py3-none-any.whl", hash = "sha256:24437fbf6f4d3fe6efd0eb9d67e24dd9106db99af5ceb27996a5f7895f24bf1b"}, + {file = "identify-2.5.29.tar.gz", hash = "sha256:d43d52b86b15918c137e3a74fff5224f60385cd0e9c38e99d07c257f02f151a5"}, ] [package.extras] @@ -1814,13 +1907,13 @@ files = [ [[package]] name = "jsonschema" -version = "4.19.0" +version = "4.19.1" description = "An implementation of JSON Schema validation for Python" optional = false python-versions = ">=3.8" files = [ - {file = "jsonschema-4.19.0-py3-none-any.whl", hash = "sha256:043dc26a3845ff09d20e4420d6012a9c91c9aa8999fa184e7efcfeccb41e32cb"}, - {file = "jsonschema-4.19.0.tar.gz", hash = "sha256:6e1e7569ac13be8139b2dd2c21a55d350066ee3f80df06c608b398cdc6f30e8f"}, + {file = "jsonschema-4.19.1-py3-none-any.whl", hash = "sha256:cd5f1f9ed9444e554b38ba003af06c0a8c2868131e56bfbef0550fb450c0330e"}, + {file = "jsonschema-4.19.1.tar.gz", hash = "sha256:ec84cc37cfa703ef7cd4928db24f9cb31428a5d0fa77747b8b51a847458e0bbf"}, ] [package.dependencies] @@ -2037,13 +2130,13 @@ test = ["coverage", "jupyter-server (>=2.0.0)", "pytest (>=7.0)", "pytest-cov", [[package]] name = "jupyterlab" -version = "4.0.5" +version = "4.0.6" description = "JupyterLab computational environment" optional = false python-versions = ">=3.8" files = [ - {file = "jupyterlab-4.0.5-py3-none-any.whl", hash = "sha256:13b3a326e7b95d72746fe20dbe80ee1e71165d6905e01ceaf1320eb809cb1b47"}, - {file = "jupyterlab-4.0.5.tar.gz", hash = "sha256:de49deb75f9b9aec478ed04754cbefe9c5d22fd796a5783cdc65e212983d3611"}, + {file = "jupyterlab-4.0.6-py3-none-any.whl", hash = "sha256:7d9dacad1e3f30fe4d6d4efc97fda25fbb5012012b8f27cc03a2283abcdee708"}, + {file 
= "jupyterlab-4.0.6.tar.gz", hash = "sha256:6c43ae5a6a1fd2fdfafcb3454004958bde6da76331abb44cffc6f9e436b19ba1"}, ] [package.dependencies] @@ -2060,8 +2153,8 @@ tornado = ">=6.2.0" traitlets = "*" [package.extras] -dev = ["black[jupyter] (==23.3.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.271)"] -docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8)", "sphinx-copybutton"] +dev = ["black[jupyter] (==23.7.0)", "build", "bump2version", "coverage", "hatch", "pre-commit", "pytest-cov", "ruff (==0.0.286)"] +docs = ["jsx-lexer", "myst-parser", "pydata-sphinx-theme (>=0.13.0)", "pytest", "pytest-check-links", "pytest-tornasync", "sphinx (>=1.8,<7.2.0)", "sphinx-copybutton"] docs-screenshots = ["altair (==5.0.1)", "ipython (==8.14.0)", "ipywidgets (==8.0.6)", "jupyterlab-geojson (==3.4.0)", "jupyterlab-language-pack-zh-cn (==4.0.post0)", "matplotlib (==3.7.1)", "nbconvert (>=7.0.0)", "pandas (==2.0.2)", "scipy (==1.10.1)", "vega-datasets (==0.9.0)"] test = ["coverage", "pytest (>=7.0)", "pytest-check-links (>=0.7)", "pytest-console-scripts", "pytest-cov", "pytest-jupyter (>=0.5.3)", "pytest-timeout", "pytest-tornasync", "requests", "requests-cache", "virtualenv"] @@ -2261,6 +2354,20 @@ win32-setctime = {version = ">=1.0.0", markers = "sys_platform == \"win32\""} [package.extras] dev = ["Sphinx (==7.2.5)", "colorama (==0.4.5)", "colorama (==0.4.6)", "exceptiongroup (==1.1.3)", "freezegun (==1.1.0)", "freezegun (==1.2.2)", "mypy (==v0.910)", "mypy (==v0.971)", "mypy (==v1.4.1)", "mypy (==v1.5.1)", "pre-commit (==3.4.0)", "pytest (==6.1.2)", "pytest (==7.4.0)", "pytest-cov (==2.12.1)", "pytest-cov (==4.1.0)", "pytest-mypy-plugins (==1.9.3)", "pytest-mypy-plugins (==3.0.0)", "sphinx-autobuild (==2021.3.14)", "sphinx-rtd-theme (==1.3.0)", "tox (==3.27.1)", "tox (==4.11.0)"] +[[package]] +name = "loguru-mypy" +version = "0.0.4" +description = "" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "loguru-mypy-0.0.4.tar.gz", hash = "sha256:1f1767d7737f1825295ce147f7e751f91837f5759b3c2f41801adc65691aeed4"}, + {file = "loguru_mypy-0.0.4-py3-none-any.whl", hash = "sha256:98e044be509887a314e683a1e851813310b396be48388c1fe4de97a2eac99d4d"}, +] + +[package.dependencies] +typing-extensions = "*" + [[package]] name = "lxml" version = "4.9.3" @@ -2470,58 +2577,39 @@ files = [ [[package]] name = "matplotlib" -version = "3.7.3" +version = "3.8.0" description = "Python plotting package" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" files = [ - {file = "matplotlib-3.7.3-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:085c33b27561d9c04386789d5aa5eb4a932ddef43cfcdd0e01735f9a6e85ce0c"}, - {file = "matplotlib-3.7.3-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c568e80e1c17f68a727f30f591926751b97b98314d8e59804f54f86ae6fa6a22"}, - {file = "matplotlib-3.7.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7baf98c5ad59c5c4743ea884bb025cbffa52dacdfdac0da3e6021a285a90377e"}, - {file = "matplotlib-3.7.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:236024f582e40dac39bca592258888b38ae47a9fed7b8de652d68d3d02d47d2b"}, - {file = "matplotlib-3.7.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12b4f6795efea037ce2d41e7c417ad8bd02d5719c6ad4a8450a0708f4a1cfb89"}, - {file = "matplotlib-3.7.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:78b2136cc6c5415b78977e0e8c608647d597204b05b1d9089ccf513c7d913733"}, - {file = "matplotlib-3.7.3-cp310-cp310-win32.whl", hash = "sha256:122dcbf9be0086e2a95d9e5e0632dbf3bd5b65eaa68c369363310a6c87753059"}, - {file = "matplotlib-3.7.3-cp310-cp310-win_amd64.whl", hash = "sha256:4aab27d9e33293389e3c1d7c881d414a72bdfda0fedc3a6bf46c6fa88d9b8015"}, - {file = "matplotlib-3.7.3-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:d5adc743de91e8e0b13df60deb1b1c285b8effea3d66223afceb14b63c9b05de"}, - {file = "matplotlib-3.7.3-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:55de4cf7cd0071b8ebf203981b53ab64f988a0a1f897a2dff300a1124e8bcd8b"}, - {file = "matplotlib-3.7.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac03377fd908aaee2312d0b11735753e907adb6f4d1d102de5e2425249693f6c"}, - {file = "matplotlib-3.7.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:755bafc10a46918ce9a39980009b54b02dd249594e5adf52f9c56acfddb5d0b7"}, - {file = "matplotlib-3.7.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1a6094c6f8e8d18db631754df4fe9a34dec3caf074f6869a7db09f18f9b1d6b2"}, - {file = "matplotlib-3.7.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:272dba2f1b107790ed78ebf5385b8d14b27ad9e90419de340364b49fe549a993"}, - {file = "matplotlib-3.7.3-cp311-cp311-win32.whl", hash = "sha256:591c123bed1cb4b9996fb60b41a6d89c2ec4943244540776c5f1283fb6960a53"}, - {file = "matplotlib-3.7.3-cp311-cp311-win_amd64.whl", hash = "sha256:3bf3a178c6504694cee8b88b353df0051583f2f6f8faa146f67115c27c856881"}, - {file = "matplotlib-3.7.3-cp312-cp312-macosx_10_12_universal2.whl", hash = "sha256:edf54cac8ee3603f3093616b40a931e8c063969756a4d78a86e82c2fea9659f7"}, - {file = "matplotlib-3.7.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:91e36a85ea639a1ba9f91427041eac064b04829945fe331a92617b6cb21d27e5"}, - {file = "matplotlib-3.7.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:caf5eaaf7c68f8d7df269dfbcaf46f48a70ff482bfcebdcc97519671023f2a7d"}, - {file = "matplotlib-3.7.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74bf57f505efea376097e948b7cdd87191a7ce8180616390aef496639edf601f"}, - {file = "matplotlib-3.7.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee152a88a0da527840a426535514b6ed8ac4240eb856b1da92cf48124320e346"}, - {file = "matplotlib-3.7.3-cp312-cp312-win_amd64.whl", hash = "sha256:67a410a9c9e07cbc83581eeea144bbe298870bf0ac0ee2f2e10a015ab7efee19"}, - {file = "matplotlib-3.7.3-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:259999c05285cb993d7f2a419cea547863fa215379eda81f7254c9e932963729"}, - {file = "matplotlib-3.7.3-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:3f4e7fd5a6157e1d018ce2166ec8e531a481dd4a36f035b5c23edfe05a25419a"}, - {file = "matplotlib-3.7.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:faa3d12d8811d08d14080a8b7b9caea9a457dc495350166b56df0db4b9909ef5"}, - {file = "matplotlib-3.7.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:336e88900c11441e458da01c8414fc57e04e17f9d3bb94958a76faa2652bcf6b"}, - {file = "matplotlib-3.7.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:12f4c0dd8aa280d796c8772ea8265a14f11a04319baa3a16daa5556065e8baea"}, - {file = "matplotlib-3.7.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1990955b11e7918d256cf3b956b10997f405b7917a3f1c7d8e69c1d15c7b1930"}, - {file = "matplotlib-3.7.3-cp38-cp38-win32.whl", hash = 
"sha256:e78707b751260b42b721507ad7aa60fe4026d7f51c74cca6b9cd8b123ebb633a"}, - {file = "matplotlib-3.7.3-cp38-cp38-win_amd64.whl", hash = "sha256:e594ee43c59ea39ca5c6244667cac9d017a3527febc31f5532ad9135cf7469ec"}, - {file = "matplotlib-3.7.3-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:6eaa1cf0e94c936a26b78f6d756c5fbc12e0a58c8a68b7248a2a31456ce4e234"}, - {file = "matplotlib-3.7.3-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:0a97af9d22e8ebedc9f00b043d9bbd29a375e9e10b656982012dded44c10fd77"}, - {file = "matplotlib-3.7.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1f9c6c16597af660433ab330b59ee2934b832ee1fabcaf5cbde7b2add840f31e"}, - {file = "matplotlib-3.7.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7240259b4b9cbc62381f6378cff4d57af539162a18e832c1e48042fabc40b6b"}, - {file = "matplotlib-3.7.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:747c6191d2e88ae854809e69aa358dbf852ff1a5738401b85c1cc9012309897a"}, - {file = "matplotlib-3.7.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec726b08a5275d827aa91bb951e68234a4423adb91cf65bc0fcdc0f2777663f7"}, - {file = "matplotlib-3.7.3-cp39-cp39-win32.whl", hash = "sha256:40e3b9b450c6534f07278310c4e34caff41c2a42377e4b9d47b0f8d3ac1083a2"}, - {file = "matplotlib-3.7.3-cp39-cp39-win_amd64.whl", hash = "sha256:dfc118642903a23e309b1da32886bb39a4314147d013e820c86b5fb4cb2e36d0"}, - {file = "matplotlib-3.7.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:165c8082bf8fc0360c24aa4724a22eaadbfd8c28bf1ccf7e94d685cad48261e4"}, - {file = "matplotlib-3.7.3-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ebd8470cc2a3594746ff0513aecbfa2c55ff6f58e6cef2efb1a54eb87c88ffa2"}, - {file = "matplotlib-3.7.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7153453669c9672b52095119fd21dd032d19225d48413a2871519b17db4b0fde"}, - {file = "matplotlib-3.7.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:498a08267dc69dd8f24c4b5d7423fa584d7ce0027ba71f7881df05fc09b89bb7"}, - {file = "matplotlib-3.7.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:d48999c4b19b5a0c058c9cd828ff6fc7748390679f6cf9a2ad653a3e802c87d3"}, - {file = "matplotlib-3.7.3-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22d65d18b4ee8070a5fea5761d59293f1f9e2fac37ec9ce090463b0e629432fd"}, - {file = "matplotlib-3.7.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c40cde976c36693cc0767e27cf5f443f91c23520060bd9496678364adfafe9c"}, - {file = "matplotlib-3.7.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:39018a2b17592448fbfdf4b8352955e6c3905359939791d4ff429296494d1a0c"}, - {file = "matplotlib-3.7.3.tar.gz", hash = "sha256:f09b3dd6bdeb588de91f853bbb2d6f0ff8ab693485b0c49035eaa510cb4f142e"}, + {file = "matplotlib-3.8.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:c4940bad88a932ddc69734274f6fb047207e008389489f2b6f77d9ca485f0e7a"}, + {file = "matplotlib-3.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a33bd3045c7452ca1fa65676d88ba940867880e13e2546abb143035fa9072a9d"}, + {file = "matplotlib-3.8.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ea6886e93401c22e534bbfd39201ce8931b75502895cfb115cbdbbe2d31f287"}, + {file = "matplotlib-3.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d670b9348e712ec176de225d425f150dc8e37b13010d85233c539b547da0be39"}, + {file = 
"matplotlib-3.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b37b74f00c4cb6af908cb9a00779d97d294e89fd2145ad43f0cdc23f635760c"}, + {file = "matplotlib-3.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:0e723f5b96f3cd4aad99103dc93e9e3cdc4f18afdcc76951f4857b46f8e39d2d"}, + {file = "matplotlib-3.8.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:5dc945a9cb2deb7d197ba23eb4c210e591d52d77bf0ba27c35fc82dec9fa78d4"}, + {file = "matplotlib-3.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f8b5a1bf27d078453aa7b5b27f52580e16360d02df6d3dc9504f3d2ce11f6309"}, + {file = "matplotlib-3.8.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f25ffb6ad972cdffa7df8e5be4b1e3cadd2f8d43fc72085feb1518006178394"}, + {file = "matplotlib-3.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eee482731c8c17d86d9ddb5194d38621f9b0f0d53c99006275a12523ab021732"}, + {file = "matplotlib-3.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:36eafe2128772195b373e1242df28d1b7ec6c04c15b090b8d9e335d55a323900"}, + {file = "matplotlib-3.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:061ee58facb3580cd2d046a6d227fb77e9295599c5ec6ad069f06b5821ad1cfc"}, + {file = "matplotlib-3.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:3cc3776836d0f4f22654a7f2d2ec2004618d5cf86b7185318381f73b80fd8a2d"}, + {file = "matplotlib-3.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6c49a2bd6981264bddcb8c317b6bd25febcece9e2ebfcbc34e7f4c0c867c09dc"}, + {file = "matplotlib-3.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23ed11654fc83cd6cfdf6170b453e437674a050a452133a064d47f2f1371f8d3"}, + {file = "matplotlib-3.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dae97fdd6996b3a25da8ee43e3fc734fff502f396801063c6b76c20b56683196"}, + {file = "matplotlib-3.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:87df75f528020a6299f76a1d986c0ed4406e3b2bd44bc5e306e46bca7d45e53e"}, + {file = "matplotlib-3.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:90d74a95fe055f73a6cd737beecc1b81c26f2893b7a3751d52b53ff06ca53f36"}, + {file = "matplotlib-3.8.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c3499c312f5def8f362a2bf761d04fa2d452b333f3a9a3f58805273719bf20d9"}, + {file = "matplotlib-3.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31e793c8bd4ea268cc5d3a695c27b30650ec35238626961d73085d5e94b6ab68"}, + {file = "matplotlib-3.8.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d5ee602ef517a89d1f2c508ca189cfc395dd0b4a08284fb1b97a78eec354644"}, + {file = "matplotlib-3.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5de39dc61ca35342cf409e031f70f18219f2c48380d3886c1cf5ad9f17898e06"}, + {file = "matplotlib-3.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dd386c80a98b5f51571b9484bf6c6976de383cd2a8cd972b6a9562d85c6d2087"}, + {file = "matplotlib-3.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:f691b4ef47c7384d0936b2e8ebdeb5d526c81d004ad9403dfb9d4c76b9979a93"}, + {file = "matplotlib-3.8.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:0b11f354aae62a2aa53ec5bb09946f5f06fc41793e351a04ff60223ea9162955"}, + {file = "matplotlib-3.8.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f54b9fb87ca5acbcdd0f286021bedc162e1425fa5555ebf3b3dfc167b955ad9"}, + {file = "matplotlib-3.8.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:60a6e04dfd77c0d3bcfee61c3cd335fff1b917c2f303b32524cd1235e194ef99"}, + {file = 
"matplotlib-3.8.0.tar.gz", hash = "sha256:df8505e1c19d5c2c26aff3497a7cbd3ccfc2e97043d1e4db3e76afa399164b69"}, ] [package.dependencies] @@ -2529,7 +2617,7 @@ contourpy = ">=1.0.1" cycler = ">=0.10" fonttools = ">=4.22.0" kiwisolver = ">=1.0.1" -numpy = ">=1.20,<2" +numpy = ">=1.21,<2" packaging = ">=20.0" pillow = ">=6.2.0" pyparsing = ">=2.3.1" @@ -2621,74 +2709,67 @@ tests = ["pytest (>=4.6)"] [[package]] name = "msgpack" -version = "1.0.5" +version = "1.0.6" description = "MessagePack serializer" optional = false -python-versions = "*" +python-versions = ">=3.8" files = [ - {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:525228efd79bb831cf6830a732e2e80bc1b05436b086d4264814b4b2955b2fa9"}, - {file = "msgpack-1.0.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4f8d8b3bf1ff2672567d6b5c725a1b347fe838b912772aa8ae2bf70338d5a198"}, - {file = "msgpack-1.0.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cdc793c50be3f01106245a61b739328f7dccc2c648b501e237f0699fe1395b81"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5cb47c21a8a65b165ce29f2bec852790cbc04936f502966768e4aae9fa763cb7"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e42b9594cc3bf4d838d67d6ed62b9e59e201862a25e9a157019e171fbe672dd3"}, - {file = "msgpack-1.0.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:55b56a24893105dc52c1253649b60f475f36b3aa0fc66115bffafb624d7cb30b"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:1967f6129fc50a43bfe0951c35acbb729be89a55d849fab7686004da85103f1c"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20a97bf595a232c3ee6d57ddaadd5453d174a52594bf9c21d10407e2a2d9b3bd"}, - {file = "msgpack-1.0.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d25dd59bbbbb996eacf7be6b4ad082ed7eacc4e8f3d2df1ba43822da9bfa122a"}, - {file = "msgpack-1.0.5-cp310-cp310-win32.whl", hash = "sha256:382b2c77589331f2cb80b67cc058c00f225e19827dbc818d700f61513ab47bea"}, - {file = "msgpack-1.0.5-cp310-cp310-win_amd64.whl", hash = "sha256:4867aa2df9e2a5fa5f76d7d5565d25ec76e84c106b55509e78c1ede0f152659a"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9f5ae84c5c8a857ec44dc180a8b0cc08238e021f57abdf51a8182e915e6299f0"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e6ca5d5699bcd89ae605c150aee83b5321f2115695e741b99618f4856c50898"}, - {file = "msgpack-1.0.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5494ea30d517a3576749cad32fa27f7585c65f5f38309c88c6d137877fa28a5a"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ab2f3331cb1b54165976a9d976cb251a83183631c88076613c6c780f0d6e45a"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28592e20bbb1620848256ebc105fc420436af59515793ed27d5c77a217477705"}, - {file = "msgpack-1.0.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe5c63197c55bce6385d9aee16c4d0641684628f63ace85f73571e65ad1c1e8d"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed40e926fa2f297e8a653c954b732f125ef97bdd4c889f243182299de27e2aa9"}, - {file = "msgpack-1.0.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b2de4c1c0538dcb7010902a2b97f4e00fc4ddf2c8cda9749af0e594d3b7fa3d7"}, - {file = 
"msgpack-1.0.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bf22a83f973b50f9d38e55c6aade04c41ddda19b00c4ebc558930d78eecc64ed"}, - {file = "msgpack-1.0.5-cp311-cp311-win32.whl", hash = "sha256:c396e2cc213d12ce017b686e0f53497f94f8ba2b24799c25d913d46c08ec422c"}, - {file = "msgpack-1.0.5-cp311-cp311-win_amd64.whl", hash = "sha256:6c4c68d87497f66f96d50142a2b73b97972130d93677ce930718f68828b382e2"}, - {file = "msgpack-1.0.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:a2b031c2e9b9af485d5e3c4520f4220d74f4d222a5b8dc8c1a3ab9448ca79c57"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f837b93669ce4336e24d08286c38761132bc7ab29782727f8557e1eb21b2080"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1d46dfe3832660f53b13b925d4e0fa1432b00f5f7210eb3ad3bb9a13c6204a6"}, - {file = "msgpack-1.0.5-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:366c9a7b9057e1547f4ad51d8facad8b406bab69c7d72c0eb6f529cf76d4b85f"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:4c075728a1095efd0634a7dccb06204919a2f67d1893b6aa8e00497258bf926c"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:f933bbda5a3ee63b8834179096923b094b76f0c7a73c1cfe8f07ad608c58844b"}, - {file = "msgpack-1.0.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:36961b0568c36027c76e2ae3ca1132e35123dcec0706c4b7992683cc26c1320c"}, - {file = "msgpack-1.0.5-cp36-cp36m-win32.whl", hash = "sha256:b5ef2f015b95f912c2fcab19c36814963b5463f1fb9049846994b007962743e9"}, - {file = "msgpack-1.0.5-cp36-cp36m-win_amd64.whl", hash = "sha256:288e32b47e67f7b171f86b030e527e302c91bd3f40fd9033483f2cacc37f327a"}, - {file = "msgpack-1.0.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:137850656634abddfb88236008339fdaba3178f4751b28f270d2ebe77a563b6c"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c05a4a96585525916b109bb85f8cb6511db1c6f5b9d9cbcbc940dc6b4be944b"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56a62ec00b636583e5cb6ad313bbed36bb7ead5fa3a3e38938503142c72cba4f"}, - {file = "msgpack-1.0.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef8108f8dedf204bb7b42994abf93882da1159728a2d4c5e82012edd92c9da9f"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1835c84d65f46900920b3708f5ba829fb19b1096c1800ad60bae8418652a951d"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:e57916ef1bd0fee4f21c4600e9d1da352d8816b52a599c46460e93a6e9f17086"}, - {file = "msgpack-1.0.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:17358523b85973e5f242ad74aa4712b7ee560715562554aa2134d96e7aa4cbbf"}, - {file = "msgpack-1.0.5-cp37-cp37m-win32.whl", hash = "sha256:cb5aaa8c17760909ec6cb15e744c3ebc2ca8918e727216e79607b7bbce9c8f77"}, - {file = "msgpack-1.0.5-cp37-cp37m-win_amd64.whl", hash = "sha256:ab31e908d8424d55601ad7075e471b7d0140d4d3dd3272daf39c5c19d936bd82"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b72d0698f86e8d9ddf9442bdedec15b71df3598199ba33322d9711a19f08145c"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:379026812e49258016dd84ad79ac8446922234d498058ae1d415f04b522d5b2d"}, - {file = "msgpack-1.0.5-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:332360ff25469c346a1c5e47cbe2a725517919892eda5cfaffe6046656f0b7bb"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:476a8fe8fae289fdf273d6d2a6cb6e35b5a58541693e8f9f019bfe990a51e4ba"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a9985b214f33311df47e274eb788a5893a761d025e2b92c723ba4c63936b69b1"}, - {file = "msgpack-1.0.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48296af57cdb1d885843afd73c4656be5c76c0c6328db3440c9601a98f303d87"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:addab7e2e1fcc04bd08e4eb631c2a90960c340e40dfc4a5e24d2ff0d5a3b3edb"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:916723458c25dfb77ff07f4c66aed34e47503b2eb3188b3adbec8d8aa6e00f48"}, - {file = "msgpack-1.0.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:821c7e677cc6acf0fd3f7ac664c98803827ae6de594a9f99563e48c5a2f27eb0"}, - {file = "msgpack-1.0.5-cp38-cp38-win32.whl", hash = "sha256:1c0f7c47f0087ffda62961d425e4407961a7ffd2aa004c81b9c07d9269512f6e"}, - {file = "msgpack-1.0.5-cp38-cp38-win_amd64.whl", hash = "sha256:bae7de2026cbfe3782c8b78b0db9cbfc5455e079f1937cb0ab8d133496ac55e1"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:20c784e66b613c7f16f632e7b5e8a1651aa5702463d61394671ba07b2fc9e025"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:266fa4202c0eb94d26822d9bfd7af25d1e2c088927fe8de9033d929dd5ba24c5"}, - {file = "msgpack-1.0.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18334484eafc2b1aa47a6d42427da7fa8f2ab3d60b674120bce7a895a0a85bdd"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57e1f3528bd95cc44684beda696f74d3aaa8a5e58c816214b9046512240ef437"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:586d0d636f9a628ddc6a17bfd45aa5b5efaf1606d2b60fa5d87b8986326e933f"}, - {file = "msgpack-1.0.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a740fa0e4087a734455f0fc3abf5e746004c9da72fbd541e9b113013c8dc3282"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3055b0455e45810820db1f29d900bf39466df96ddca11dfa6d074fa47054376d"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:a61215eac016f391129a013c9e46f3ab308db5f5ec9f25811e811f96962599a8"}, - {file = "msgpack-1.0.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:362d9655cd369b08fda06b6657a303eb7172d5279997abe094512e919cf74b11"}, - {file = "msgpack-1.0.5-cp39-cp39-win32.whl", hash = "sha256:ac9dd47af78cae935901a9a500104e2dea2e253207c924cc95de149606dc43cc"}, - {file = "msgpack-1.0.5-cp39-cp39-win_amd64.whl", hash = "sha256:06f5174b5f8ed0ed919da0e62cbd4ffde676a374aba4020034da05fab67b9164"}, - {file = "msgpack-1.0.5.tar.gz", hash = "sha256:c075544284eadc5cddc70f4757331d99dcbc16b2bbd4849d15f8aae4cf36d31c"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f4321692e7f299277e55f322329b2c972d93bb612d85f3fda8741bec5c6285ce"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f0e36a5fa7a182cde391a128a64f437657d2b9371dfa42eda3436245adccbf5"}, + {file = "msgpack-1.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b5c8dd9a386a66e50bd7fa22b7a49fb8ead2b3574d6bd69eb1caced6caea0803"}, + {file = 
"msgpack-1.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f85200ea102276afdd3749ca94747f057bbb868d1c52921ee2446730b508d0f"}, + {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a006c300e82402c0c8f1ded11352a3ba2a61b87e7abb3054c845af2ca8d553c"}, + {file = "msgpack-1.0.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33bbf47ea5a6ff20c23426106e81863cdbb5402de1825493026ce615039cc99d"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:04450e4b5e1e662e7c86b6aafb7c230af9334fd0becf5e6b80459a507884241c"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b06a5095a79384760625b5de3f83f40b3053a385fb893be8a106fbbd84c14980"}, + {file = "msgpack-1.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3910211b0ab20be3a38e0bb944ed45bd4265d8d9f11a3d1674b95b298e08dd5c"}, + {file = "msgpack-1.0.6-cp310-cp310-win32.whl", hash = "sha256:1dc67b40fe81217b308ab12651adba05e7300b3a2ccf84d6b35a878e308dd8d4"}, + {file = "msgpack-1.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:885de1ed5ea01c1bfe0a34c901152a264c3c1f8f1d382042b92ea354bd14bb0e"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:099c3d8a027367e1a6fc55d15336f04ff65c60c4f737b5739f7db4525c65fe9e"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9b88dc97ba86c96b964c3745a445d9a65f76fe21955a953064fe04adb63e9367"}, + {file = "msgpack-1.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:00ce5f827d4f26fc094043e6f08b6069c1b148efa2631c47615ae14fb6cafc89"}, + {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd6af61388be65a8701f5787362cb54adae20007e0cc67ca9221a4b95115583b"}, + {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:652e4b7497825b0af6259e2c54700e6dc33d2fc4ed92b8839435090d4c9cc911"}, + {file = "msgpack-1.0.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b08676a17e3f791daad34d5fcb18479e9c85e7200d5a17cbe8de798643a7e37"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:229ccb6713c8b941eaa5cf13dc7478eba117f21513b5893c35e44483e2f0c9c8"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:95ade0bd4cf69e04e8b8f8ec2d197d9c9c4a9b6902e048dc7456bf6d82e12a80"}, + {file = "msgpack-1.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b16344032a27b2ccfd341f89dadf3e4ef6407d91e4b93563c14644a8abb3ad7"}, + {file = "msgpack-1.0.6-cp311-cp311-win32.whl", hash = "sha256:55bb4a1bf94e39447bc08238a2fb8a767460388a8192f67c103442eb36920887"}, + {file = "msgpack-1.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:ae97504958d0bc58c1152045c170815d5c4f8af906561ce044b6358b43d0c97e"}, + {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:7ecf431786019a7bfedc28281531d706627f603e3691d64eccdbce3ecd353823"}, + {file = "msgpack-1.0.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a635aecf1047255576dbb0927cbf9a7aa4a68e9d54110cc3c926652d18f144e0"}, + {file = "msgpack-1.0.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:102cfb54eaefa73e8ca1e784b9352c623524185c98e057e519545131a56fb0af"}, + {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c5e05e4f5756758c58a8088aa10dc70d851c89f842b611fdccfc0581c1846bc"}, + {file = 
"msgpack-1.0.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68569509dd015fcdd1e6b2b3ccc8c51fd27d9a97f461ccc909270e220ee09685"}, + {file = "msgpack-1.0.6-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bf652839d16de91fe1cfb253e0a88db9a548796939533894e07f45d4bdf90a5f"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:14db7e1b7a7ed362b2f94897bf2486c899c8bb50f6e34b2db92fe534cdab306f"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:159cfec18a6e125dd4723e2b1de6f202b34b87c850fb9d509acfd054c01135e9"}, + {file = "msgpack-1.0.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6a01a072b2219b65a6ff74df208f20b2cac9401c60adb676ee34e53b4c651077"}, + {file = "msgpack-1.0.6-cp312-cp312-win32.whl", hash = "sha256:e36560d001d4ba469d469b02037f2dd404421fd72277d9474efe9f03f83fced5"}, + {file = "msgpack-1.0.6-cp312-cp312-win_amd64.whl", hash = "sha256:5e7fae9ca93258a956551708cf60dc6c8145574e32ce8c8c4d894e63bcb04341"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:40b801b768f5a765e33c68f30665d3c6ee1c8623a2d2bb78e6e59f2db4e4ceb7"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:da057d3652e698b00746e47f06dbb513314f847421e857e32e1dc61c46f6c052"}, + {file = "msgpack-1.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f75114c05ec56566da6b55122791cf5bb53d5aada96a98c016d6231e03132f76"}, + {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:61213482b5a387ead9e250e9e3cb290292feca39dc83b41c3b1b7b8ffc8d8ecb"}, + {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae6c561f11b444b258b1b4be2bdd1e1cf93cd1d80766b7e869a79db4543a8a8"}, + {file = "msgpack-1.0.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:619a63753ba9e792fe3c6c0fc2b9ee2cfbd92153dd91bee029a89a71eb2942cd"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:70843788c85ca385846a2d2f836efebe7bb2687ca0734648bf5c9dc6c55602d2"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:fb4571efe86545b772a4630fee578c213c91cbcfd20347806e47fd4e782a18fe"}, + {file = "msgpack-1.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bbb4448a05d261fae423d5c0b0974ad899f60825bc77eabad5a0c518e78448c2"}, + {file = "msgpack-1.0.6-cp38-cp38-win32.whl", hash = "sha256:5cd67674db3c73026e0a2c729b909780e88bd9cbc8184256f9567640a5d299a8"}, + {file = "msgpack-1.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:a1cf98afa7ad5e7012454ca3fde254499a13f9d92fd50cb46118118a249a1355"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d6d25b8a5c70e2334ed61a8da4c11cd9b97c6fbd980c406033f06e4463fda006"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:88cdb1da7fdb121dbb3116910722f5acab4d6e8bfcacab8fafe27e2e7744dc6a"}, + {file = "msgpack-1.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b5658b1f9e486a2eec4c0c688f213a90085b9cf2fec76ef08f98fdf6c62f4b9"}, + {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76820f2ece3b0a7c948bbb6a599020e29574626d23a649476def023cbb026787"}, + {file = "msgpack-1.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c780d992f5d734432726b92a0c87bf1857c3d85082a8dea29cbf56e44a132b3"}, + {file = 
"msgpack-1.0.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0ed35d6d6122d0baa9a1b59ebca4ee302139f4cfb57dab85e4c73ab793ae7ed"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:32c0aff31f33033f4961abc01f78497e5e07bac02a508632aef394b384d27428"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:35ad5aed9b52217d4cea739d0ea3a492a18dd86fecb4b132668a69f27fb0363b"}, + {file = "msgpack-1.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47275ff73005a3e5e146e50baa2378e1730cba6e292f0222bc496a8e4c4adfc8"}, + {file = "msgpack-1.0.6-cp39-cp39-win32.whl", hash = "sha256:7baf16fd8908a025c4a8d7b699103e72d41f967e2aee5a2065432bcdbd9fd06e"}, + {file = "msgpack-1.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:fc97aa4b4fb928ff4d3b74da7c30b360d0cb3ede49a5a6e1fd9705f49aea1deb"}, + {file = "msgpack-1.0.6.tar.gz", hash = "sha256:25d3746da40f3c8c59c3b1d001e49fd2aa17904438f980d9a391370366df001e"}, ] [[package]] @@ -2874,13 +2955,13 @@ traitlets = ">=5" [[package]] name = "nest-asyncio" -version = "1.5.7" +version = "1.5.8" description = "Patch asyncio to allow nested event loops" optional = false python-versions = ">=3.5" files = [ - {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, - {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, + {file = "nest_asyncio-1.5.8-py3-none-any.whl", hash = "sha256:accda7a339a70599cb08f9dd09a67e0c2ef8d8d6f4c07f96ab203f2ae254e48d"}, + {file = "nest_asyncio-1.5.8.tar.gz", hash = "sha256:25aa2ca0d2a5b5531956b9e273b45cf664cae2b145101d73b86b199978d48fdb"}, ] [[package]] @@ -2899,13 +2980,13 @@ setuptools = "*" [[package]] name = "notebook" -version = "7.0.3" +version = "7.0.4" description = "Jupyter Notebook - A web-based notebook environment for interactive computing" optional = false python-versions = ">=3.8" files = [ - {file = "notebook-7.0.3-py3-none-any.whl", hash = "sha256:786ab2e3287c068667adce3029b540dd18fc5d23f49181b4b4ee4f6b48a7ca81"}, - {file = "notebook-7.0.3.tar.gz", hash = "sha256:07f3c5062fd0e6e69864437a0347abc485d991aae87a92c47d659699f571b729"}, + {file = "notebook-7.0.4-py3-none-any.whl", hash = "sha256:ee738414ac01773c1ad6834cf76cc6f1ce140ac8197fd13b3e2d44d89e257f72"}, + {file = "notebook-7.0.4.tar.gz", hash = "sha256:0c1b458f72ce8774445c8ef9ed2492bd0b9ce9605ac996e2b066114f69795e71"}, ] [package.dependencies] @@ -3012,6 +3093,20 @@ packaging = "*" protobuf = "*" sympy = "*" +[[package]] +name = "openpyxl" +version = "3.1.2" +description = "A Python library to read/write Excel 2010 xlsx/xlsm files" +optional = false +python-versions = ">=3.6" +files = [ + {file = "openpyxl-3.1.2-py2.py3-none-any.whl", hash = "sha256:f91456ead12ab3c6c2e9491cf33ba6d08357d802192379bb482f1033ade496f5"}, + {file = "openpyxl-3.1.2.tar.gz", hash = "sha256:a6f5977418eff3b2d5500d54d9db50c8277a368436f4e4f8ddb1be3422870184"}, +] + +[package.dependencies] +et-xmlfile = "*" + [[package]] name = "outcome" version = "1.2.0" @@ -3123,6 +3218,70 @@ sql-other = ["SQLAlchemy (>=1.4.36)"] test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] xml = ["lxml (>=4.8.0)"] +[[package]] +name = "pandas" +version = "2.1.1" +description = "Powerful data structures for data analysis, time series, and statistics" +optional = false +python-versions = ">=3.9" +files = [ + {file = 
"pandas-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:58d997dbee0d4b64f3cb881a24f918b5f25dd64ddf31f467bb9b67ae4c63a1e4"}, + {file = "pandas-2.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02304e11582c5d090e5a52aec726f31fe3f42895d6bfc1f28738f9b64b6f0614"}, + {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffa8f0966de2c22de408d0e322db2faed6f6e74265aa0856f3824813cf124363"}, + {file = "pandas-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f84c144dee086fe4f04a472b5cd51e680f061adf75c1ae4fc3a9275560f8f4"}, + {file = "pandas-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:75ce97667d06d69396d72be074f0556698c7f662029322027c226fd7a26965cb"}, + {file = "pandas-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:4c3f32fd7c4dccd035f71734df39231ac1a6ff95e8bdab8d891167197b7018d2"}, + {file = "pandas-2.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9e2959720b70e106bb1d8b6eadd8ecd7c8e99ccdbe03ee03260877184bb2877d"}, + {file = "pandas-2.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:25e8474a8eb258e391e30c288eecec565bfed3e026f312b0cbd709a63906b6f8"}, + {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8bd1685556f3374520466998929bade3076aeae77c3e67ada5ed2b90b4de7f0"}, + {file = "pandas-2.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc3657869c7902810f32bd072f0740487f9e030c1a3ab03e0af093db35a9d14e"}, + {file = "pandas-2.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:05674536bd477af36aa2effd4ec8f71b92234ce0cc174de34fd21e2ee99adbc2"}, + {file = "pandas-2.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:b407381258a667df49d58a1b637be33e514b07f9285feb27769cedb3ab3d0b3a"}, + {file = "pandas-2.1.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c747793c4e9dcece7bb20156179529898abf505fe32cb40c4052107a3c620b49"}, + {file = "pandas-2.1.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3bcad1e6fb34b727b016775bea407311f7721db87e5b409e6542f4546a4951ea"}, + {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5ec7740f9ccb90aec64edd71434711f58ee0ea7f5ed4ac48be11cfa9abf7317"}, + {file = "pandas-2.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:29deb61de5a8a93bdd033df328441a79fcf8dd3c12d5ed0b41a395eef9cd76f0"}, + {file = "pandas-2.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4f99bebf19b7e03cf80a4e770a3e65eee9dd4e2679039f542d7c1ace7b7b1daa"}, + {file = "pandas-2.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:84e7e910096416adec68075dc87b986ff202920fb8704e6d9c8c9897fe7332d6"}, + {file = "pandas-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:366da7b0e540d1b908886d4feb3d951f2f1e572e655c1160f5fde28ad4abb750"}, + {file = "pandas-2.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:9e50e72b667415a816ac27dfcfe686dc5a0b02202e06196b943d54c4f9c7693e"}, + {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc1ab6a25da197f03ebe6d8fa17273126120874386b4ac11c1d687df288542dd"}, + {file = "pandas-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0dbfea0dd3901ad4ce2306575c54348d98499c95be01b8d885a2737fe4d7a98"}, + {file = "pandas-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0489b0e6aa3d907e909aef92975edae89b1ee1654db5eafb9be633b0124abe97"}, + {file = "pandas-2.1.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:4cdb0fab0400c2cb46dafcf1a0fe084c8bb2480a1fa8d81e19d15e12e6d4ded2"}, + {file = "pandas-2.1.1.tar.gz", hash = "sha256:fecb198dc389429be557cde50a2d46da8434a17fe37d7d41ff102e3987fd947b"}, +] + +[package.dependencies] +numpy = {version = ">=1.23.2", markers = "python_version == \"3.11\""} +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.1" + +[package.extras] +all = ["PyQt5 (>=5.15.6)", "SQLAlchemy (>=1.4.36)", "beautifulsoup4 (>=4.11.1)", "bottleneck (>=1.3.4)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=0.8.1)", "fsspec (>=2022.05.0)", "gcsfs (>=2022.05.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.8.0)", "matplotlib (>=3.6.1)", "numba (>=0.55.2)", "numexpr (>=2.8.0)", "odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pandas-gbq (>=0.17.5)", "psycopg2 (>=2.9.3)", "pyarrow (>=7.0.0)", "pymysql (>=1.0.2)", "pyreadstat (>=1.1.5)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)", "pyxlsb (>=1.0.9)", "qtpy (>=2.2.0)", "s3fs (>=2022.05.0)", "scipy (>=1.8.1)", "tables (>=3.7.0)", "tabulate (>=0.8.10)", "xarray (>=2022.03.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)", "zstandard (>=0.17.0)"] +aws = ["s3fs (>=2022.05.0)"] +clipboard = ["PyQt5 (>=5.15.6)", "qtpy (>=2.2.0)"] +compression = ["zstandard (>=0.17.0)"] +computation = ["scipy (>=1.8.1)", "xarray (>=2022.03.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.0.10)", "pyxlsb (>=1.0.9)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.3)"] +feather = ["pyarrow (>=7.0.0)"] +fss = ["fsspec (>=2022.05.0)"] +gcp = ["gcsfs (>=2022.05.0)", "pandas-gbq (>=0.17.5)"] +hdf5 = ["tables (>=3.7.0)"] +html = ["beautifulsoup4 (>=4.11.1)", "html5lib (>=1.1)", "lxml (>=4.8.0)"] +mysql = ["SQLAlchemy (>=1.4.36)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.8.10)"] +parquet = ["pyarrow (>=7.0.0)"] +performance = ["bottleneck (>=1.3.4)", "numba (>=0.55.2)", "numexpr (>=2.8.0)"] +plot = ["matplotlib (>=3.6.1)"] +postgresql = ["SQLAlchemy (>=1.4.36)", "psycopg2 (>=2.9.3)"] +spss = ["pyreadstat (>=1.1.5)"] +sql-other = ["SQLAlchemy (>=1.4.36)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-asyncio (>=0.17.0)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.8.0)"] + [[package]] name = "pandas-stubs" version = "2.0.3.230814" @@ -3202,67 +3361,65 @@ files = [ [[package]] name = "pillow" -version = "10.0.0" +version = "10.0.1" description = "Python Imaging Library (Fork)" optional = false python-versions = ">=3.8" files = [ - {file = "Pillow-10.0.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:1f62406a884ae75fb2f818694469519fb685cc7eaff05d3451a9ebe55c646891"}, - {file = "Pillow-10.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d5db32e2a6ccbb3d34d87c87b432959e0db29755727afb37290e10f6e8e62614"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf4392b77bdc81f36e92d3a07a5cd072f90253197f4a52a55a8cec48a12483b"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:520f2a520dc040512699f20fa1c363eed506e94248d71f85412b625026f6142c"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:8c11160913e3dd06c8ffdb5f233a4f254cb449f4dfc0f8f4549eda9e542c93d1"}, - {file = "Pillow-10.0.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a74ba0c356aaa3bb8e3eb79606a87669e7ec6444be352870623025d75a14a2bf"}, - {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_aarch64.whl", 
hash = "sha256:d5d0dae4cfd56969d23d94dc8e89fb6a217be461c69090768227beb8ed28c0a3"}, - {file = "Pillow-10.0.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:22c10cc517668d44b211717fd9775799ccec4124b9a7f7b3635fc5386e584992"}, - {file = "Pillow-10.0.0-cp310-cp310-win_amd64.whl", hash = "sha256:dffe31a7f47b603318c609f378ebcd57f1554a3a6a8effbc59c3c69f804296de"}, - {file = "Pillow-10.0.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:9fb218c8a12e51d7ead2a7c9e101a04982237d4855716af2e9499306728fb485"}, - {file = "Pillow-10.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d35e3c8d9b1268cbf5d3670285feb3528f6680420eafe35cccc686b73c1e330f"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3ed64f9ca2f0a95411e88a4efbd7a29e5ce2cea36072c53dd9d26d9c76f753b3"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b6eb5502f45a60a3f411c63187db83a3d3107887ad0d036c13ce836f8a36f1d"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:c1fbe7621c167ecaa38ad29643d77a9ce7311583761abf7836e1510c580bf3dd"}, - {file = "Pillow-10.0.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cd25d2a9d2b36fcb318882481367956d2cf91329f6892fe5d385c346c0649629"}, - {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:3b08d4cc24f471b2c8ca24ec060abf4bebc6b144cb89cba638c720546b1cf538"}, - {file = "Pillow-10.0.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d737a602fbd82afd892ca746392401b634e278cb65d55c4b7a8f48e9ef8d008d"}, - {file = "Pillow-10.0.0-cp311-cp311-win_amd64.whl", hash = "sha256:3a82c40d706d9aa9734289740ce26460a11aeec2d9c79b7af87bb35f0073c12f"}, - {file = "Pillow-10.0.0-cp311-cp311-win_arm64.whl", hash = "sha256:bc2ec7c7b5d66b8ec9ce9f720dbb5fa4bace0f545acd34870eff4a369b44bf37"}, - {file = "Pillow-10.0.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:d80cf684b541685fccdd84c485b31ce73fc5c9b5d7523bf1394ce134a60c6883"}, - {file = "Pillow-10.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76de421f9c326da8f43d690110f0e79fe3ad1e54be811545d7d91898b4c8493e"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81ff539a12457809666fef6624684c008e00ff6bf455b4b89fd00a140eecd640"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce543ed15570eedbb85df19b0a1a7314a9c8141a36ce089c0a894adbfccb4568"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:685ac03cc4ed5ebc15ad5c23bc555d68a87777586d970c2c3e216619a5476223"}, - {file = "Pillow-10.0.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:d72e2ecc68a942e8cf9739619b7f408cc7b272b279b56b2c83c6123fcfa5cdff"}, - {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d50b6aec14bc737742ca96e85d6d0a5f9bfbded018264b3b70ff9d8c33485551"}, - {file = "Pillow-10.0.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:00e65f5e822decd501e374b0650146063fbb30a7264b4d2744bdd7b913e0cab5"}, - {file = "Pillow-10.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:f31f9fdbfecb042d046f9d91270a0ba28368a723302786c0009ee9b9f1f60199"}, - {file = "Pillow-10.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:1ce91b6ec08d866b14413d3f0bbdea7e24dfdc8e59f562bb77bc3fe60b6144ca"}, - {file = "Pillow-10.0.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:349930d6e9c685c089284b013478d6f76e3a534e36ddfa912cde493f235372f3"}, - {file = 
"Pillow-10.0.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3a684105f7c32488f7153905a4e3015a3b6c7182e106fe3c37fbb5ef3e6994c3"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4f69b3700201b80bb82c3a97d5e9254084f6dd5fb5b16fc1a7b974260f89f43"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f07ea8d2f827d7d2a49ecf1639ec02d75ffd1b88dcc5b3a61bbb37a8759ad8d"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:040586f7d37b34547153fa383f7f9aed68b738992380ac911447bb78f2abe530"}, - {file = "Pillow-10.0.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f88a0b92277de8e3ca715a0d79d68dc82807457dae3ab8699c758f07c20b3c51"}, - {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:c7cf14a27b0d6adfaebb3ae4153f1e516df54e47e42dcc073d7b3d76111a8d86"}, - {file = "Pillow-10.0.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3400aae60685b06bb96f99a21e1ada7bc7a413d5f49bce739828ecd9391bb8f7"}, - {file = "Pillow-10.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:dbc02381779d412145331789b40cc7b11fdf449e5d94f6bc0b080db0a56ea3f0"}, - {file = "Pillow-10.0.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:9211e7ad69d7c9401cfc0e23d49b69ca65ddd898976d660a2fa5904e3d7a9baa"}, - {file = "Pillow-10.0.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:faaf07ea35355b01a35cb442dd950d8f1bb5b040a7787791a535de13db15ed90"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c9f72a021fbb792ce98306ffb0c348b3c9cb967dce0f12a49aa4c3d3fdefa967"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f7c16705f44e0504a3a2a14197c1f0b32a95731d251777dcb060aa83022cb2d"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:76edb0a1fa2b4745fb0c99fb9fb98f8b180a1bbceb8be49b087e0b21867e77d3"}, - {file = "Pillow-10.0.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:368ab3dfb5f49e312231b6f27b8820c823652b7cd29cfbd34090565a015e99ba"}, - {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:608bfdee0d57cf297d32bcbb3c728dc1da0907519d1784962c5f0c68bb93e5a3"}, - {file = "Pillow-10.0.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5c6e3df6bdd396749bafd45314871b3d0af81ff935b2d188385e970052091017"}, - {file = "Pillow-10.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:7be600823e4c8631b74e4a0d38384c73f680e6105a7d3c6824fcf226c178c7e6"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:92be919bbc9f7d09f7ae343c38f5bb21c973d2576c1d45600fce4b74bafa7ac0"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f8182b523b2289f7c415f589118228d30ac8c355baa2f3194ced084dac2dbba"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:38250a349b6b390ee6047a62c086d3817ac69022c127f8a5dc058c31ccef17f3"}, - {file = "Pillow-10.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88af2003543cc40c80f6fca01411892ec52b11021b3dc22ec3bc9d5afd1c5334"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c189af0545965fa8d3b9613cfdb0cd37f9d71349e0f7750e1fd704648d475ed2"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce7b031a6fc11365970e6a5686d7ba8c63e4c1cf1ea143811acbb524295eabed"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = 
"sha256:db24668940f82321e746773a4bc617bfac06ec831e5c88b643f91f122a785684"}, - {file = "Pillow-10.0.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:efe8c0681042536e0d06c11f48cebe759707c9e9abf880ee213541c5b46c5bf3"}, - {file = "Pillow-10.0.0.tar.gz", hash = "sha256:9c82b5b3e043c7af0d95792d0d20ccf68f61a1fec6b3530e718b688422727396"}, + {file = "Pillow-10.0.1-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:8f06be50669087250f319b706decf69ca71fdecd829091a37cc89398ca4dc17a"}, + {file = "Pillow-10.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:50bd5f1ebafe9362ad622072a1d2f5850ecfa44303531ff14353a4059113b12d"}, + {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6a90167bcca1216606223a05e2cf991bb25b14695c518bc65639463d7db722d"}, + {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f11c9102c56ffb9ca87134bd025a43d2aba3f1155f508eff88f694b33a9c6d19"}, + {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:186f7e04248103482ea6354af6d5bcedb62941ee08f7f788a1c7707bc720c66f"}, + {file = "Pillow-10.0.1-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:0462b1496505a3462d0f35dc1c4d7b54069747d65d00ef48e736acda2c8cbdff"}, + {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d889b53ae2f030f756e61a7bff13684dcd77e9af8b10c6048fb2c559d6ed6eaf"}, + {file = "Pillow-10.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:552912dbca585b74d75279a7570dd29fa43b6d93594abb494ebb31ac19ace6bd"}, + {file = "Pillow-10.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:787bb0169d2385a798888e1122c980c6eff26bf941a8ea79747d35d8f9210ca0"}, + {file = "Pillow-10.0.1-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:fd2a5403a75b54661182b75ec6132437a181209b901446ee5724b589af8edef1"}, + {file = "Pillow-10.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2d7e91b4379f7a76b31c2dda84ab9e20c6220488e50f7822e59dac36b0cd92b1"}, + {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19e9adb3f22d4c416e7cd79b01375b17159d6990003633ff1d8377e21b7f1b21"}, + {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93139acd8109edcdeffd85e3af8ae7d88b258b3a1e13a038f542b79b6d255c54"}, + {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:92a23b0431941a33242b1f0ce6c88a952e09feeea9af4e8be48236a68ffe2205"}, + {file = "Pillow-10.0.1-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:cbe68deb8580462ca0d9eb56a81912f59eb4542e1ef8f987405e35a0179f4ea2"}, + {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:522ff4ac3aaf839242c6f4e5b406634bfea002469656ae8358644fc6c4856a3b"}, + {file = "Pillow-10.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:84efb46e8d881bb06b35d1d541aa87f574b58e87f781cbba8d200daa835b42e1"}, + {file = "Pillow-10.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:898f1d306298ff40dc1b9ca24824f0488f6f039bc0e25cfb549d3195ffa17088"}, + {file = "Pillow-10.0.1-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:bcf1207e2f2385a576832af02702de104be71301c2696d0012b1b93fe34aaa5b"}, + {file = "Pillow-10.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d6c9049c6274c1bb565021367431ad04481ebb54872edecfcd6088d27edd6ed"}, + {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28444cb6ad49726127d6b340217f0627abc8732f1194fd5352dec5e6a0105635"}, + {file = 
"Pillow-10.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:de596695a75496deb3b499c8c4f8e60376e0516e1a774e7bc046f0f48cd620ad"}, + {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:2872f2d7846cf39b3dbff64bc1104cc48c76145854256451d33c5faa55c04d1a"}, + {file = "Pillow-10.0.1-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:4ce90f8a24e1c15465048959f1e94309dfef93af272633e8f37361b824532e91"}, + {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ee7810cf7c83fa227ba9125de6084e5e8b08c59038a7b2c9045ef4dde61663b4"}, + {file = "Pillow-10.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b1be1c872b9b5fcc229adeadbeb51422a9633abd847c0ff87dc4ef9bb184ae08"}, + {file = "Pillow-10.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:98533fd7fa764e5f85eebe56c8e4094db912ccbe6fbf3a58778d543cadd0db08"}, + {file = "Pillow-10.0.1-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:764d2c0daf9c4d40ad12fbc0abd5da3af7f8aa11daf87e4fa1b834000f4b6b0a"}, + {file = "Pillow-10.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fcb59711009b0168d6ee0bd8fb5eb259c4ab1717b2f538bbf36bacf207ef7a68"}, + {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:697a06bdcedd473b35e50a7e7506b1d8ceb832dc238a336bd6f4f5aa91a4b500"}, + {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f665d1e6474af9f9da5e86c2a3a2d2d6204e04d5af9c06b9d42afa6ebde3f21"}, + {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:2fa6dd2661838c66f1a5473f3b49ab610c98a128fc08afbe81b91a1f0bf8c51d"}, + {file = "Pillow-10.0.1-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:3a04359f308ebee571a3127fdb1bd01f88ba6f6fb6d087f8dd2e0d9bff43f2a7"}, + {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:723bd25051454cea9990203405fa6b74e043ea76d4968166dfd2569b0210886a"}, + {file = "Pillow-10.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:71671503e3015da1b50bd18951e2f9daf5b6ffe36d16f1eb2c45711a301521a7"}, + {file = "Pillow-10.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:44e7e4587392953e5e251190a964675f61e4dae88d1e6edbe9f36d6243547ff3"}, + {file = "Pillow-10.0.1-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:3855447d98cced8670aaa63683808df905e956f00348732448b5a6df67ee5849"}, + {file = "Pillow-10.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ed2d9c0704f2dc4fa980b99d565c0c9a543fe5101c25b3d60488b8ba80f0cce1"}, + {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f5bb289bb835f9fe1a1e9300d011eef4d69661bb9b34d5e196e5e82c4cb09b37"}, + {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a0d3e54ab1df9df51b914b2233cf779a5a10dfd1ce339d0421748232cea9876"}, + {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:2cc6b86ece42a11f16f55fe8903595eff2b25e0358dec635d0a701ac9586588f"}, + {file = "Pillow-10.0.1-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ca26ba5767888c84bf5a0c1a32f069e8204ce8c21d00a49c90dabeba00ce0145"}, + {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f0b4b06da13275bc02adfeb82643c4a6385bd08d26f03068c2796f60d125f6f2"}, + {file = "Pillow-10.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bc2e3069569ea9dbe88d6b8ea38f439a6aad8f6e7a6283a38edf61ddefb3a9bf"}, + {file = "Pillow-10.0.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:8b451d6ead6e3500b6ce5c7916a43d8d8d25ad74b9102a629baccc0808c54971"}, + {file = "Pillow-10.0.1-pp310-pypy310_pp73-macosx_10_10_x86_64.whl", hash = "sha256:32bec7423cdf25c9038fef614a853c9d25c07590e1a870ed471f47fb80b244db"}, + {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b7cf63d2c6928b51d35dfdbda6f2c1fddbe51a6bc4a9d4ee6ea0e11670dd981e"}, + {file = "Pillow-10.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f6d3d4c905e26354e8f9d82548475c46d8e0889538cb0657aa9c6f0872a37aa4"}, + {file = "Pillow-10.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:847e8d1017c741c735d3cd1883fa7b03ded4f825a6e5fcb9378fd813edee995f"}, + {file = "Pillow-10.0.1-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:7f771e7219ff04b79e231d099c0a28ed83aa82af91fd5fa9fdb28f5b8d5addaf"}, + {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459307cacdd4138edee3875bbe22a2492519e060660eaf378ba3b405d1c66317"}, + {file = "Pillow-10.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b059ac2c4c7a97daafa7dc850b43b2d3667def858a4f112d1aa082e5c3d6cf7d"}, + {file = "Pillow-10.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:d6caf3cd38449ec3cd8a68b375e0c6fe4b6fd04edb6c9766b55ef84a6e8ddf2d"}, + {file = "Pillow-10.0.1.tar.gz", hash = "sha256:d72967b06be9300fed5cfbc8b5bafceec48bf7cdc7dab66b1d2549035287191d"}, ] [package.extras] @@ -3324,13 +3481,13 @@ test = ["coverage[toml]", "pretend", "pytest", "pytest-cov"] [[package]] name = "pip-licenses" -version = "4.3.2" +version = "4.3.3" description = "Dump the software license list of Python packages installed with pip." optional = false python-versions = "~=3.8" files = [ - {file = "pip-licenses-4.3.2.tar.gz", hash = "sha256:27ce33be185a009f3128ea59fe4c1490f4f4da5eb53ed951b87ef3c621b583f9"}, - {file = "pip_licenses-4.3.2-py3-none-any.whl", hash = "sha256:3fede933c47e1f4bc5e91d7cfd1d9b9d4e37a3c03b2875352b12bbad41294cd6"}, + {file = "pip-licenses-4.3.3.tar.gz", hash = "sha256:d14447094135eb5e43e4d9e1e3bcdb17a05751a9199df2d07f043a542c241c7a"}, + {file = "pip_licenses-4.3.3-py3-none-any.whl", hash = "sha256:1b697cace3149d7d380307bb1f1e0505f0db98f25fada64d32b7e6240f37f72c"}, ] [package.dependencies] @@ -3375,13 +3532,13 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-co [[package]] name = "plotly" -version = "5.16.1" +version = "5.17.0" description = "An open-source, interactive data visualization library for Python" optional = false python-versions = ">=3.6" files = [ - {file = "plotly-5.16.1-py2.py3-none-any.whl", hash = "sha256:19cc34f339acd4e624177806c14df22f388f23fb70658b03aad959a0e650a0dc"}, - {file = "plotly-5.16.1.tar.gz", hash = "sha256:295ac25edeb18c893abb71dcadcea075b78fd6fdf07cee4217a4e1009667925b"}, + {file = "plotly-5.17.0-py2.py3-none-any.whl", hash = "sha256:7c84cdf11da162423da957bb093287134f2d6f170eb9a74f1459f825892247c3"}, + {file = "plotly-5.17.0.tar.gz", hash = "sha256:290d796bf7bab87aad184fe24b86096234c4c95dcca6ecbca02d02bdf17d3d97"}, ] [package.dependencies] @@ -4430,13 +4587,13 @@ files = [ [[package]] name = "rich" -version = "13.5.2" +version = "13.5.3" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false python-versions = ">=3.7.0" files = [ - {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, - {file = "rich-13.5.2.tar.gz", hash = 
"sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, + {file = "rich-13.5.3-py3-none-any.whl", hash = "sha256:9257b468badc3d347e146a4faa268ff229039d4c2d176ab0cffb4c4fbc73d5d9"}, + {file = "rich-13.5.3.tar.gz", hash = "sha256:87b43e0543149efa1253f485cd845bb7ee54df16c9617b8a893650ab84b4acb6"}, ] [package.dependencies] @@ -4568,28 +4725,28 @@ files = [ [[package]] name = "ruff" -version = "0.0.287" +version = "0.0.290" description = "An extremely fast Python linter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.0.287-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:1e0f9ee4c3191444eefeda97d7084721d9b8e29017f67997a20c153457f2eafd"}, - {file = "ruff-0.0.287-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:e9843e5704d4fb44e1a8161b0d31c1a38819723f0942639dfeb53d553be9bfb5"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8ca1ed11d759a29695aed2bfc7f914b39bcadfe2ef08d98ff69c873f639ad3a8"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1cf4d5ad3073af10f186ea22ce24bc5a8afa46151f6896f35c586e40148ba20b"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66d9d58bcb29afd72d2afe67120afcc7d240efc69a235853813ad556443dc922"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:06ac5df7dd3ba8bf83bba1490a72f97f1b9b21c7cbcba8406a09de1a83f36083"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bfb478e1146a60aa740ab9ebe448b1f9e3c0dfb54be3cc58713310eef059c30"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:00d579a011949108c4b4fa04c4f1ee066dab536a9ba94114e8e580c96be2aeb4"}, - {file = "ruff-0.0.287-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3a810a79b8029cc92d06c36ea1f10be5298d2323d9024e1d21aedbf0a1a13e5"}, - {file = "ruff-0.0.287-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:150007028ad4976ce9a7704f635ead6d0e767f73354ce0137e3e44f3a6c0963b"}, - {file = "ruff-0.0.287-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a24a280db71b0fa2e0de0312b4aecb8e6d08081d1b0b3c641846a9af8e35b4a7"}, - {file = "ruff-0.0.287-py3-none-musllinux_1_2_i686.whl", hash = "sha256:2918cb7885fa1611d542de1530bea3fbd63762da793751cc8c8d6e4ba234c3d8"}, - {file = "ruff-0.0.287-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:33d7b251afb60bec02a64572b0fd56594b1923ee77585bee1e7e1daf675e7ae7"}, - {file = "ruff-0.0.287-py3-none-win32.whl", hash = "sha256:022f8bed2dcb5e5429339b7c326155e968a06c42825912481e10be15dafb424b"}, - {file = "ruff-0.0.287-py3-none-win_amd64.whl", hash = "sha256:26bd0041d135a883bd6ab3e0b29c42470781fb504cf514e4c17e970e33411d90"}, - {file = "ruff-0.0.287-py3-none-win_arm64.whl", hash = "sha256:44bceb3310ac04f0e59d4851e6227f7b1404f753997c7859192e41dbee9f5c8d"}, - {file = "ruff-0.0.287.tar.gz", hash = "sha256:02dc4f5bf53ef136e459d467f3ce3e04844d509bc46c025a05b018feb37bbc39"}, + {file = "ruff-0.0.290-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:0e2b09ac4213b11a3520221083866a5816616f3ae9da123037b8ab275066fbac"}, + {file = "ruff-0.0.290-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4ca6285aa77b3d966be32c9a3cd531655b3d4a0171e1f9bf26d66d0372186767"}, + {file = "ruff-0.0.290-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:35e3550d1d9f2157b0fcc77670f7bb59154f223bff281766e61bdd1dd854e0c5"}, + {file = "ruff-0.0.290-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d748c8bd97874f5751aed73e8dde379ce32d16338123d07c18b25c9a2796574a"}, + {file = "ruff-0.0.290-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:982af5ec67cecd099e2ef5e238650407fb40d56304910102d054c109f390bf3c"}, + {file = "ruff-0.0.290-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bbd37352cea4ee007c48a44c9bc45a21f7ba70a57edfe46842e346651e2b995a"}, + {file = "ruff-0.0.290-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d9be6351b7889462912e0b8185a260c0219c35dfd920fb490c7f256f1d8313e"}, + {file = "ruff-0.0.290-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75cdc7fe32dcf33b7cec306707552dda54632ac29402775b9e212a3c16aad5e6"}, + {file = "ruff-0.0.290-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb07f37f7aecdbbc91d759c0c09870ce0fb3eed4025eebedf9c4b98c69abd527"}, + {file = "ruff-0.0.290-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2ab41bc0ba359d3f715fc7b705bdeef19c0461351306b70a4e247f836b9350ed"}, + {file = "ruff-0.0.290-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:150bf8050214cea5b990945b66433bf9a5e0cef395c9bc0f50569e7de7540c86"}, + {file = "ruff-0.0.290-py3-none-musllinux_1_2_i686.whl", hash = "sha256:75386ebc15fe5467248c039f5bf6a0cfe7bfc619ffbb8cd62406cd8811815fca"}, + {file = "ruff-0.0.290-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ac93eadf07bc4ab4c48d8bb4e427bf0f58f3a9c578862eb85d99d704669f5da0"}, + {file = "ruff-0.0.290-py3-none-win32.whl", hash = "sha256:461fbd1fb9ca806d4e3d5c745a30e185f7cf3ca77293cdc17abb2f2a990ad3f7"}, + {file = "ruff-0.0.290-py3-none-win_amd64.whl", hash = "sha256:f1f49f5ec967fd5778813780b12a5650ab0ebcb9ddcca28d642c689b36920796"}, + {file = "ruff-0.0.290-py3-none-win_arm64.whl", hash = "sha256:ae5a92dfbdf1f0c689433c223f8dac0782c2b2584bd502dfdbc76475669f1ba1"}, + {file = "ruff-0.0.290.tar.gz", hash = "sha256:949fecbc5467bb11b8db810a7fa53c7e02633856ee6bd1302b2f43adcd71b88d"}, ] [[package]] @@ -4674,23 +4831,23 @@ testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jar [[package]] name = "setuptools-scm" -version = "7.1.0" +version = "8.0.3" description = "the blessed package to manage your versions by scm tags" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "setuptools_scm-7.1.0-py3-none-any.whl", hash = "sha256:73988b6d848709e2af142aa48c986ea29592bbcfca5375678064708205253d8e"}, - {file = "setuptools_scm-7.1.0.tar.gz", hash = "sha256:6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27"}, + {file = "setuptools-scm-8.0.3.tar.gz", hash = "sha256:0169fd70197efda2f8c4d0b2a7a3d614431b488116f37b79d031e9e7ec884d8c"}, + {file = "setuptools_scm-8.0.3-py3-none-any.whl", hash = "sha256:813822234453438a13c78d05c8af29918fbc06f88efb33d38f065340bbb48c39"}, ] [package.dependencies] -packaging = ">=20.0" +packaging = ">=20" setuptools = "*" -typing-extensions = "*" [package.extras] -test = ["pytest (>=6.2)", "virtualenv (>20)"] -toml = ["setuptools (>=42)"] +docs = ["entangled-cli[rich]", "mkdocs", "mkdocs-entangled-plugin", "mkdocs-material", "mkdocstrings[python]", "pygments"] +rich = ["rich"] +test = ["pytest", "rich", "virtualenv (>20)"] [[package]] name = "shapely" @@ -4884,6 +5041,24 @@ sphinx = ">=1.8" code-style = ["pre-commit (==2.12.1)"] rtd = ["ipython", "myst-nb", "sphinx", 
"sphinx-book-theme", "sphinx-examples"] +[[package]] +name = "sphinx-notfound-page" +version = "1.0.0" +description = "Sphinx extension to build a 404 page with absolute URLs" +optional = false +python-versions = ">=3.8" +files = [ + {file = "sphinx_notfound_page-1.0.0-py3-none-any.whl", hash = "sha256:40a5741a6b07245a08fe55dbbd603ad6719e191b1419ab2e5337c706ebd16554"}, + {file = "sphinx_notfound_page-1.0.0.tar.gz", hash = "sha256:14cd388956de5cdf8710ab4ff31776ef8d85759c4f46014ee30f368e83bd3a3b"}, +] + +[package.dependencies] +sphinx = ">=5" + +[package.extras] +doc = ["sphinx-autoapi", "sphinx-rtd-theme", "sphinx-tabs", "sphinxemoji"] +test = ["tox"] + [[package]] name = "sphinx-rtd-theme" version = "1.3.0" @@ -5331,6 +5506,17 @@ exceptiongroup = "*" trio = ">=0.11" wsproto = ">=0.14" +[[package]] +name = "types-cachetools" +version = "5.3.0.6" +description = "Typing stubs for cachetools" +optional = false +python-versions = "*" +files = [ + {file = "types-cachetools-5.3.0.6.tar.gz", hash = "sha256:595f0342d246c8ba534f5a762cf4c2f60ecb61e8002b8b2277fd5cf791d4e851"}, + {file = "types_cachetools-5.3.0.6-py3-none-any.whl", hash = "sha256:f7f8a25bfe306f2e6bc2ad0a2f949d9e72f2d91036d509c36d3810bf728bc6e1"}, +] + [[package]] name = "types-pyopenssl" version = "23.2.0.2" @@ -5347,24 +5533,24 @@ cryptography = ">=35.0.0" [[package]] name = "types-pytz" -version = "2023.3.0.1" +version = "2023.3.1.1" description = "Typing stubs for pytz" optional = false python-versions = "*" files = [ - {file = "types-pytz-2023.3.0.1.tar.gz", hash = "sha256:1a7b8d4aac70981cfa24478a41eadfcd96a087c986d6f150d77e3ceb3c2bdfab"}, - {file = "types_pytz-2023.3.0.1-py3-none-any.whl", hash = "sha256:65152e872137926bb67a8fe6cc9cfd794365df86650c5d5fdc7b167b0f38892e"}, + {file = "types-pytz-2023.3.1.1.tar.gz", hash = "sha256:cc23d0192cd49c8f6bba44ee0c81e4586a8f30204970fc0894d209a6b08dab9a"}, + {file = "types_pytz-2023.3.1.1-py3-none-any.whl", hash = "sha256:1999a123a3dc0e39a2ef6d19f3f8584211de9e6a77fe7a0259f04a524e90a5cf"}, ] [[package]] name = "types-requests" -version = "2.31.0.2" +version = "2.31.0.4" description = "Typing stubs for requests" optional = false python-versions = "*" files = [ - {file = "types-requests-2.31.0.2.tar.gz", hash = "sha256:6aa3f7faf0ea52d728bb18c0a0d1522d9bfd8c72d26ff6f61bfc3d06a411cf40"}, - {file = "types_requests-2.31.0.2-py3-none-any.whl", hash = "sha256:56d181c85b5925cbc59f4489a57e72a8b2166f18273fd8ba7b6fe0c0b986f12a"}, + {file = "types-requests-2.31.0.4.tar.gz", hash = "sha256:a111041148d7e04bf100c476bc4db3ee6b0a1cd0b4018777f6a660b1c4f1318d"}, + {file = "types_requests-2.31.0.4-py3-none-any.whl", hash = "sha256:c7a9d6b62776f21b169a94a0e9d2dfcae62fa9149f53594ff791c3ae67325490"}, ] [package.dependencies] @@ -5405,13 +5591,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.7.1" -description = "Backported and Experimental Type Hints for Python 3.7+" +version = "4.8.0" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, - {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, + {file = "typing_extensions-4.8.0-py3-none-any.whl", hash = "sha256:8f92fc8806f9a6b641eaa5318da32b44d401efaac0f6678c9bc448ba3605faa0"}, + {file = "typing_extensions-4.8.0.tar.gz", hash = 
"sha256:df8e4339e9cb77357558cbdbceca33c303714cf861d1eef15e1070055ae8b7ef"}, ] [[package]] @@ -5458,13 +5644,13 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake [[package]] name = "urllib3" -version = "2.0.4" +version = "2.0.5" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.7" files = [ - {file = "urllib3-2.0.4-py3-none-any.whl", hash = "sha256:de7df1803967d2c2a98e4b11bb7d6bd9210474c46e8a0401514e3a42a75ebde4"}, - {file = "urllib3-2.0.4.tar.gz", hash = "sha256:8d22f86aae8ef5e410d4f539fde9ce6b2113a001bb4d189e0aed70642d602b11"}, + {file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, + {file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, ] [package.dependencies] @@ -5622,4 +5808,4 @@ ingest = ["selenium"] [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "f8728eee18c7402c68901a65631c2f3c0bd1a04fb1fb952f3746a12f47a9b9a4" +content-hash = "74a489166b43b622c3292733e264c1771a734de1d2cae0e29fbd80a480bd707f" diff --git a/pyproject.toml b/pyproject.toml index bd3f5a8..5749090 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,7 +6,7 @@ requires = ["poetry-core"] target-version = ["py311"] [tool.coverage.report] -exclude_also = ["if __name__ == .__main__.:"] +exclude_also = ["if __name__ == .__main__.:", "if not isinstance(engine, Engine):"] [tool.coverage.run] branch = true @@ -16,7 +16,7 @@ source = ["src"] [tool.mypy] disallow_untyped_defs = true -exclude = ".ipynb_checkpoints, .mypy_cache, .mytest_cache, build" +exclude = [".ipynb_checkpoints", ".mypy_cache", ".mytest_cache", "build", "venv", ".venv", "Jupyter"] follow_imports = "silent" ignore_missing_imports = true install_types = true @@ -35,30 +35,32 @@ readme = "README.md" version = "0.1.0" [tool.poetry.dependencies] -SQLAlchemy = {version = "^1.4.46", extras = ["mypy"]} +SQLAlchemy = {version = "^1.4.49", extras = ["mypy"]} aenum = "^3.1.15" -dash = "^2.11.1" -dash-bootstrap-components = "^1.4.2" +cachetools = "^5.3.1" +dash = "^2.13.0" +dash-bootstrap-components = "^1.5.0" deutschland = {git = "https://github.com/TrisNol/deutschland.git", branch = "hotfix/python-3.11-support"} loguru = "^0.7.0" -matplotlib = "^3.7.1" -plotly = "^5.14.1" +matplotlib = "^3.7.2" +plotly = "^5.16.1" psycopg2-binary = "^2.9.7" -pymongo = "^4.4.1" +pymongo = "^4.5.0" python = "^3.11" python-dotenv = "^1.0.0" seaborn = "^0.12.2" -selenium = "^4.10.0" -tqdm = "^4.65.0" +selenium = "^4.12.0" +tqdm = "^4.66.1" xmltodict = "^0.13.0" [tool.poetry.extras] ingest = ["selenium"] [tool.poetry.group.develop.dependencies] -black = {extras = ["jupyter"], version = "^23.9.0"} -jupyterlab = "^4.0.5" +black = {extras = ["jupyter"], version = "^23.9.1"} +jupyterlab = "^4.0.6" nbconvert = "^7.8.0" +openpyxl = "^3.1.2" pre-commit = "^3.4.0" rise = "^5.7.1" @@ -66,20 +68,23 @@ rise = "^5.7.1" jupyter = "^1.0.0" myst-parser = "^1.0.0" nbsphinx = "^0.9.2" -sphinx = "^6.2.1" +sphinx = "*" sphinx-copybutton = "^0.5.2" +sphinx-notfound-page = "^1.0.0" sphinx-rtd-theme = "^1.3.0" sphinx_autodoc_typehints = "*" sphinxcontrib-mermaid = "^0.9.2" sphinxcontrib-napoleon = "^0.7" [tool.poetry.group.lint.dependencies] -black = "^23.9.0" +black = "^23.9.1" +loguru-mypy = "^0.0.4" mypy = "^1.5.1" pandas-stubs = "^2.0.3.230814" pip-audit = "^2.6.1" pip-licenses = "^4.3.2" -ruff = "^0.0.287" +ruff = "^0.0.290" +types-cachetools = 
"^5.3.0.6" types-pyOpenSSL = "*" types-requests = "^2.31.0.2" types-setuptools = "*" @@ -92,6 +97,10 @@ pytest-cov = "^4.1.0" pytest-mock = "^3.11.1" pytest-repeat = "^0.9.1" +[tool.poetry.scripts] +data-transfer = "aki_prj23_transparenzregister.utils.data_transfer:transfer_data" +reset-sql = "aki_prj23_transparenzregister.utils.sql.connector:reset_all_tables" + [tool.ruff] exclude = [ ".bzr", @@ -129,7 +138,7 @@ unfixable = ["B"] builtins-ignorelist = ["id"] [tool.ruff.per-file-ignores] -"tests/*.py" = ["S101", "D100", "D101", "D107", "D103"] +"tests/*.py" = ["S101", "SLF001", "S311", "D103"] [tool.ruff.pydocstyle] convention = "google" diff --git a/src/aki_prj23_transparenzregister/config/config_providers.py b/src/aki_prj23_transparenzregister/config/config_providers.py index 82c5c27..be818c2 100644 --- a/src/aki_prj23_transparenzregister/config/config_providers.py +++ b/src/aki_prj23_transparenzregister/config/config_providers.py @@ -23,7 +23,6 @@ class ConfigProvider(metaclass=abc.ABCMeta): Returns: PostgreConnectionString: Connection details """ - raise NotImplementedError @abc.abstractmethod def get_mongo_connection_string(self) -> MongoConnection: @@ -35,7 +34,6 @@ class ConfigProvider(metaclass=abc.ABCMeta): Returns: MongoConnection: Connection details """ - raise NotImplementedError class JsonFileConfigProvider(ConfigProvider): @@ -44,7 +42,7 @@ class JsonFileConfigProvider(ConfigProvider): __data__: dict = {} def __init__(self, file_path: str): - """Constructor reading it's data from given .json file. + """Constructor reading its data from a given .json file. Args: file_path (str): PATH to .json file containing config @@ -78,7 +76,7 @@ class JsonFileConfigProvider(ConfigProvider): ) def get_mongo_connection_string(self) -> MongoConnection: - """Read MongodB connection string from .json file added in constructor. + """Read MongoDB connection string from .json file added in constructor. Returns: MongoConnection: Connection details @@ -124,7 +122,7 @@ class EnvironmentConfigProvider(ConfigProvider): ) def get_mongo_connection_string(self) -> MongoConnection: - """Read MongodB connection string from environment variables. + """Read MongoDB connection string from environment variables. Returns: MongoConnection: Connection details diff --git a/src/aki_prj23_transparenzregister/models/company.py b/src/aki_prj23_transparenzregister/models/company.py index 1f272fd..9c2e446 100644 --- a/src/aki_prj23_transparenzregister/models/company.py +++ b/src/aki_prj23_transparenzregister/models/company.py @@ -198,6 +198,8 @@ class Capital: @dataclass class Company: + """_summary_.""" + """Company dataclass.""" id: CompanyID @@ -212,9 +214,5 @@ class Company: founding_date: str | None = None def to_dict(self) -> dict: - """_summary_. 
- - Returns: - dict: _description_ - """ + """_summary_.""" return asdict(self) diff --git a/src/aki_prj23_transparenzregister/ui/assets/company_stats.css b/src/aki_prj23_transparenzregister/ui/assets/company_stats.css new file mode 100644 index 0000000..a51b778 --- /dev/null +++ b/src/aki_prj23_transparenzregister/ui/assets/company_stats.css @@ -0,0 +1,69 @@ +.company-header { + float: left; + background-color: var(--paynes-gray); + border: 1px solid; + width: 100%; +} + +.company-header .company-header-title { + color: white; + text-align: left; + margin: 0; + vertical-align: middle; + padding: 20px; +} + +.stats-wrapper { + float: left; + width: 100%; + background-color: white; + margin-top: 20px; + margin-right: 2%; +} + +.stats-wrapper .widget-large { + background-color: var(--ash-gray); + border: 1px solid; + border-radius: 10px; + width: 45%; + min-width: 600px; + display: inline-block; + vertical-align: middle; + margin-left: 2%; + overflow: hidden; +} + +.stats-wrapper .widget-large .widget-title { + color: var(--raisin-black); + text-align: center; + margin: 0; + padding-bottom: 10px; + padding-top: 10px; +} + +.stats-wrapper .widget-small { + background-color: var(--ash-gray); + border: 1px solid; + border-radius: 10px; + width: 15%; + min-width: 200px; + height: 150px; + display: inline-block; + vertical-align: middle; + margin-top: 10px; + margin-bottom: 10px; + margin-left: 2%; +} + +.stats-wrapper .widget-small .widget-title { + color: var(--raisin-black); + text-align: center; + margin: 0; + padding-bottom: 10px; + padding-top: 10px; +} + +.stats-wrapper .widget-small .widget-content { + color: var(--raisin-black); + text-align: center; +} diff --git a/src/aki_prj23_transparenzregister/ui/assets/header.css b/src/aki_prj23_transparenzregister/ui/assets/header.css new file mode 100644 index 0000000..9a3690e --- /dev/null +++ b/src/aki_prj23_transparenzregister/ui/assets/header.css @@ -0,0 +1,58 @@ +:root { + --light: #edefef; + --lavender-blush: #f3e8ee; + --ash-gray: #bacdb0; + --cambridge-blue: #729b79; + --paynes-gray: #475b63; + --raisin-black: #2e2c2f; +} + +.header-wrapper { + float:left; + background-color: var(--raisin-black); + border: 1px solid; + width: 100%; + overflow: visible; + min-height: 77px; +} + +.header-wrapper .header-title { + float: left; + text-align: left; + margin: 0; + padding-left: 15px; + padding-top: 20px; + padding-bottom: 20px; + vertical-align: middle; +} + +.header-wrapper .header-title .bi-house-door-fill { + color: white; + font-size: x-large; + display: inline-block; + vertical-align: middle; + padding-right: 15px; +} + +.header-wrapper .header-title .header-title-text { + color: white; + text-align: left; + margin: 0; + display: inline-block; + vertical-align: middle; +} + +.header-wrapper .header-search { + float: right; + width: 400px; + margin: 0; + padding-right: 10px; + padding-bottom: 20px; + padding-top: 20px; + vertical-align: middle; + overflow: visible !important; +} + +.header-wrapper .header-search .header-search-dropdown { + overflow: visible; +} diff --git a/src/aki_prj23_transparenzregister/ui/assets/tabs.css b/src/aki_prj23_transparenzregister/ui/assets/tabs.css new file mode 100644 index 0000000..2ddb1cb --- /dev/null +++ b/src/aki_prj23_transparenzregister/ui/assets/tabs.css @@ -0,0 +1,23 @@ +.tabs { + float: left; + margin-top: 20px; + border: 1px solid; + width: 100%; +} + +.tabs .tab-style { + border-bottom: 1px solid #d6d6d6 !important; + padding: 8px !important; + background-color: white !important; + color: 
var(--paynes-gray) !important; + font-weight: bold !important; +} + +.tabs .selected-tab-style { + border-bottom: 1px solid #d6d6d6 !important; + border-top: 1px solid #d6d6d6 !important; + padding: 8px !important; + color: white !important; + background-color: var(--paynes-gray) !important; + font-weight: bold !important; +} diff --git a/src/aki_prj23_transparenzregister/ui/company_finance_dash.py b/src/aki_prj23_transparenzregister/ui/company_finance_dash.py index ab6c2fa..f81a78f 100644 --- a/src/aki_prj23_transparenzregister/ui/company_finance_dash.py +++ b/src/aki_prj23_transparenzregister/ui/company_finance_dash.py @@ -1,390 +1,78 @@ """Dash.""" import dash_bootstrap_components as dbc -import pandas as pd -import plotly.graph_objs as go -from dash import Dash, Input, Output, callback, dash_table, dcc, html -from dash.exceptions import PreventUpdate -from sqlalchemy.engine import Engine +from dash import Dash, Input, Output, callback, html from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider -from aki_prj23_transparenzregister.utils.sql import connector, entities +from aki_prj23_transparenzregister.ui.ui_elements import ( + create_company_header, + create_company_stats, + create_header, + create_tabs, + get_company_data, + get_finance_data, +) +from aki_prj23_transparenzregister.utils.sql import connector if __name__ == "__main__": session = connector.get_session(JsonFileConfigProvider("./secrets.json")) - query_finance = session.query( - entities.AnnualFinanceStatement, entities.Company.name, entities.Company.id - ).join(entities.Company) - - query_company = session.query(entities.Company, entities.DistrictCourt.name).join( - entities.DistrictCourt - ) - engine = session.bind - if not isinstance(engine, Engine): - raise TypeError - - finance_df: pd.DataFrame = pd.read_sql(str(query_finance), engine) - company_df: pd.DataFrame = pd.read_sql(str(query_company), engine) - - select_company_df = company_df[["company_id", "company_name"]] - select_company_dropdown = select_company_df.to_dict("records") - options = [ - {"label": i["company_name"], "value": i["company_id"]} - for i in select_company_dropdown - ] - - colors = { - "light": "#edefef", - "lavender-blush": "#f3e8ee", - "ash-gray": "#bacdb0", - "cambridge-blue": "#729b79", - "paynes-gray": "#475b63", - "raisin-black": "#2e2c2f", - } - - def financials_figure( - finance_df: pd.DataFrame, company: str, metric: str - ) -> go.Figure: - """Creates plotly line chart for a specific company and a metric.""" - finance_df = finance_df.loc[finance_df["company_name"] == company] - # create figure - fig_line = go.Figure() - # add trace for company 1 - fig_line.add_trace( - go.Scatter( - x=finance_df["annual_finance_statement_date"], - y=finance_df[metric], - name=company, - line_color=colors["raisin-black"], - marker_color=colors["raisin-black"], - ) - ) - # set title and labels - fig_line.update_layout( - title=metric, - xaxis_title="Jahr", - yaxis_title="in Mio.€", - plot_bgcolor=colors["light"], - ) - return fig_line - - tab_style = { - "borderBottom": "1px solid #d6d6d6", - "padding": "6px", - "backgroundColor": "white", - "color": colors["paynes-gray"], - "fontWeight": "bold", - } - - tab_selected_style = { - "borderTop": "1px solid #d6d6d6", - "borderBottom": "1px solid #d6d6d6", - "padding": "6px", - "backgroundColor": colors["paynes-gray"], - "color": "white", - "fontWeight": "bold", - } - - # TBD: get data from database instead of mock data - company = 1 # selected company id - selected_company = 
company_df.loc[company_df["company_id"] == company] - - turnover = 123456 - stock = "1,23" - company_data = { - "col1": ["Unternehmen", "Straße", "Stadt"], - "col2": [ - selected_company["company_name"][0], - selected_company["company_street"][0], - str( - selected_company["company_zip_code"][0] - + " " - + selected_company["company_city"][0] - ), - ], - "col3": ["Branche", "Amtsgericht", "Gründungsjahr"], - "col4": [ - selected_company["company_sector"][0], - selected_company["district_court_name"][0], - "xxx", - ], - } - df_company_data = pd.DataFrame(data=company_data) + company_df = get_company_data(session) + finance_df = get_finance_data(session) + options = company_df["company_name"].to_dict() app = Dash( __name__, external_stylesheets=[dbc.icons.BOOTSTRAP] ) # use dbc for icons - - kennzahlen_layout = html.Div( - [ - dcc.Graph( - figure=financials_figure( - finance_df, str(company), "annual_finance_statement_ebit" - ) - ) - ] - ) + app.title = "Company Finance Data" app.layout = html.Div( - [ - # title header of page - html.Div( - style={ - "backgroundColor": colors["raisin-black"], - "border": "1px solid", - }, - children=[ - html.I( - className="bi bi-house-door-fill", - style={ - "fontSize": 24, - "paddingLeft": "10px", - "color": "white", - "display": "inline-block", - "verticalAlign": "middle", - }, - ), - html.H1( - children="Transparenzregister für Kapitalgesellschaften", - style={ - "color": "white", - "textAlign": "left", - "margin": "0", - "paddingLeft": "10px", - "paddingBottom": "20px", - "paddingTop": "20px", - "display": "inline-block", - "verticalAlign": "middle", - }, - ), - html.Div( - dcc.Dropdown( - id="select_company", - placeholder="Suche nach Unternehmen oder Person", - ), - style={ - "float": "right", - "width": "30%", - "margin": "0", - "paddingRight": "10px", - "paddingBottom": "20px", - "paddingTop": "20px", - "display": "inline-block", - "verticalAlign": "middle", - }, - ), - ], - ), - # header company name - html.Div( - style={"backgroundColor": colors["paynes-gray"], "border": "1px solid"}, - children=[ - html.H1( - children=selected_company["company_name"][0], - style={ - "color": "white", - "fontSize": 30, - "textAlign": "left", - "margin": "0", - "paddingLeft": "20px", - "paddingBottom": "20px", - "paddingTop": "20px", - }, - ) - ], - ), - html.Div(style={"height": "20px"}), - html.Div(style={"width": "2%", "display": "inline-block"}), - # table basic company information - html.Div( - style={ - "backgroundColor": colors["ash-gray"], - "border": "1px solid", - "border-radius": 10, - "width": "45%", - "height": "150px", - "display": "inline-block", - "vertical-align": "top", - }, - children=[ - html.H5( - children="Stammdaten", - style={ - "color": colors["raisin-black"], - "fontSize": 16, - "textAlign": "center", - "margin": "0", - "paddingBottom": "10px", - "paddingTop": "10px", - }, - ), - dash_table.DataTable( - df_company_data.to_dict("records"), - [{"name": i, "id": i} for i in df_company_data.columns], - style_table={ - "width": "80%", - "overflowX": "auto", - "marginLeft": "auto", - "marginRight": "auto", - "paddingBottom": "20px", - "color": colors["raisin-black"], - }, - # hide header of table - css=[ - { - "selector": "tr:first-child", - "rule": "display: none", - }, - ], - style_cell={"textAlign": "center"}, - style_cell_conditional=[ - {"if": {"column_id": c}, "fontWeight": "bold"} - for c in ["col1", "col3"] - ], - ), - ], - ), - html.Div(style={"width": "2%", "display": "inline-block"}), - html.Div( - style={ - "backgroundColor": 
colors["ash-gray"], - "border": "1px solid", - "border-radius": 10, - "width": "15%", - "height": "150px", - "display": "inline-block", - "vertical-align": "top", - }, - children=[ - html.H5( - children="Stimmung", - style={ - "color": colors["raisin-black"], - "fontSize": 16, - "textAlign": "center", - "margin": "0", - "paddingBottom": "10px", - "paddingTop": "10px", - }, - ), - ], - ), - html.Div(style={"width": "2%", "display": "inline-block"}), - html.Div( - style={ - "backgroundColor": colors["ash-gray"], - "border": "1px solid", - "border-radius": 10, - "width": "15%", - "height": "150px", - "display": "inline-block", - "vertical-align": "top", - }, - children=[ - html.H5( - children="Aktienkurs", - style={ - "color": colors["raisin-black"], - "fontSize": 16, - "textAlign": "center", - "margin": "0", - "paddingBottom": "10px", - "paddingTop": "10px", - }, - ), - html.H1( - children=stock, - style={ - "color": colors["raisin-black"], - "textAlign": "center", - }, - ), - ], - ), - html.Div(style={"width": "2%", "display": "inline-block"}), - html.Div( - style={ - "backgroundColor": colors["ash-gray"], - "border": "1px solid", - "border-radius": 10, - "width": "15%", - "height": "150px", - "display": "inline-block", - "vertical-align": "top", - }, - children=[ - html.H5( - children="Umsatz", - style={ - "color": colors["raisin-black"], - "fontSize": 16, - "textAlign": "center", - "margin": "0", - "paddingBottom": "10px", - "paddingTop": "10px", - }, - ), - html.H1( - children=turnover, - style={ - "color": colors["raisin-black"], - "textAlign": "center", - }, - ), - ], - ), - html.Div(style={"width": "2%", "display": "inline-block"}), - # ]), - html.Div( - style={ - "marginTop": "20px", - "border": "1px solid", - }, - children=[ - dcc.Tabs( - id="tabs", - value="tab-1", - children=[ - dcc.Tab( - label="Kennzahlen", - value="tab-1", - style=tab_style, - selected_style=tab_selected_style, - children=[kennzahlen_layout], - ), - dcc.Tab( - label="Beteiligte Personen", - value="tab-2", - style=tab_style, - selected_style=tab_selected_style, - ), - dcc.Tab( - label="Stimmung", - value="tab-3", - style=tab_style, - selected_style=tab_selected_style, - ), - dcc.Tab( - label="Verflechtungen", - value="tab-4", - style=tab_style, - selected_style=tab_selected_style, - ), - ], - ), - html.Div(id="tabs-example-content-1"), - ], - ), - ] + className="page_content", + children=[ + create_header(options), + html.Div(id="id-company-header"), + ], ) @callback( Output("select_company", "options"), Input("select_company", "search_value") ) def update_options(search_value: str) -> list: - """Update page based on selected company.""" + """Update dropdown options based on user input. + + Args: + search_value: The input string in the dropdown field entered by the user. + + Returns: + The available companies matching the input. + """ if not search_value: - raise PreventUpdate - return [o for o in options if search_value in o["label"]] + return [{"label": o, "value": key} for key, o in options.items()] + return [ + {"label": o, "value": key} + for key, o in options.items() + if search_value.upper() in o.upper() + ] + + @callback(Output("id-company-header", "children"), Input("select_company", "value")) + def update_output(value_chosen: int) -> html: + """Update page based on chosen company. + + Args: + value_chosen: Id of the selected company. + + Returns: + The html divs of the company page. 
+ """ + label = options.get(value_chosen) + if not label: + return "" + selected_company = str(label) + selected_company_stats = company_df.loc[value_chosen] + selected_finance_df = finance_df.loc[finance_df["company_id"] == value_chosen] + return ( + create_company_header(selected_company), + create_company_stats(selected_company_stats), + create_tabs(selected_finance_df), + ) app.run_server(debug=True) diff --git a/src/aki_prj23_transparenzregister/ui/ui_elements.py b/src/aki_prj23_transparenzregister/ui/ui_elements.py new file mode 100644 index 0000000..ca06b1a --- /dev/null +++ b/src/aki_prj23_transparenzregister/ui/ui_elements.py @@ -0,0 +1,331 @@ +"""Dash elements.""" + +import pandas as pd +import plotly.graph_objs as go +from dash import dash_table, dcc, html +from sqlalchemy.engine import Engine +from sqlalchemy.orm import Session + +from aki_prj23_transparenzregister.utils.sql import entities + +COLORS = { + "light": "#edefef", + "lavender-blush": "#f3e8ee", + "ash-gray": "#bacdb0", + "cambridge-blue": "#729b79", + "paynes-gray": "#475b63", + "raisin-black": "#2e2c2f", +} + + +def get_company_data(session: Session) -> pd.DataFrame: + """Creates a session to the database and get's all available company data. + + Args: + session: A session connecting to the database. + + Returns: + A dataframe containing all available company data including the corresponding district court. + """ + query_company = session.query(entities.Company, entities.DistrictCourt.name).join( + entities.DistrictCourt + ) + engine = session.bind + if not isinstance(engine, Engine): + raise TypeError + + return pd.read_sql(str(query_company), engine, index_col="company_id") + + +def get_finance_data(session: Session) -> pd.DataFrame: + """Creates a session to the database and get's all available company data. + + Args: + session: A session connecting to the database. + + Returns: + A dataframe containing all financial data of all companies. + """ + query_finance = session.query( + entities.AnnualFinanceStatement, entities.Company.name, entities.Company.id + ).join(entities.Company) + + engine = session.bind + if not isinstance(engine, Engine): + raise TypeError + + return pd.read_sql(str(query_finance), engine) + + +def create_header(options: dict) -> html: + """Creates header for dashboard. + + Args: + options: A dictionary with company names and ids for the dropdown. + + Returns: + The html div to create the page's header including the name of the page and the search for companies. + """ + return html.Div( + className="header-wrapper", + children=[ + html.Div( + className="header-title", + children=[ + html.I( + className="bi-house-door-fill", + ), + html.H1( + className="header-title-text", + children="Transparenzregister für Kapitalgesellschaften", + ), + ], + ), + html.Div( + className="header-search", + children=[ + html.Div( + className="header-search-dropdown", + children=[ + dcc.Dropdown( + id="select_company", + options=[ + {"label": o, "value": key} + for key, o in options.items() + ], + placeholder="Suche nach Unternehmen oder Person", + ), + ], + ), + ], + ), + ], + ) + + +def create_company_header(selected_company_name: str) -> html: + """Create company header based on selected company. + + Args: + selected_company_name: The company name that has been chosen in the dropdown. + + Returns: + The html div to create the company header. 
+ """ + return html.Div( + className="company-header", + children=[ + html.H1( + className="company-header-title", + id="id-company-header-title", + children=selected_company_name, + ), + ], + ) + + +def create_company_stats(selected_company_data: pd.Series) -> html: + """Create company stats. + + Args: + selected_company_data: A series containing all company information of the selected company. + + Returns: + The html div to create the company stats table and the three small widgets. + """ + company_data = { + "col1": ["Unternehmen", "Straße", "Stadt"], + "col2": [ + selected_company_data["company_name"], + selected_company_data["company_street"], + str( + selected_company_data["company_zip_code"] + + " " + + selected_company_data["company_city"] + ), + ], + "col3": ["Branche", "Amtsgericht", "Gründungsjahr"], + "col4": [ + selected_company_data["company_sector"], + selected_company_data["district_court_name"], + "xxx", + ], + } + df_company_data = pd.DataFrame(data=company_data) + return html.Div( + className="stats-wrapper", + children=[ + html.Div( + className="widget-large", + children=[ + html.H3( + className="widget-title", + children="Stammdaten", + ), + dash_table.DataTable( + df_company_data.to_dict("records"), + [{"name": i, "id": i} for i in df_company_data.columns], + style_table={ + "width": "90%", + "marginLeft": "auto", + "marginRight": "auto", + "paddingBottom": "20px", + "color": COLORS["raisin-black"], + }, + # hide header of table + css=[ + { + "selector": "tr:first-child", + "rule": "display: none", + }, + ], + style_cell={"textAlign": "center"}, + style_cell_conditional=[ + {"if": {"column_id": c}, "fontWeight": "bold"} + for c in ["col1", "col3"] + ], + style_data={ + "whiteSpace": "normal", + "height": "auto", + }, + ), + ], + ), + html.Div( + className="widget-small", + children=[ + html.H3( + className="widget-title", + children="Stimmung", + ), + ], + ), + html.Div( + className="widget-small", + children=[ + html.H3( + className="widget-title", + children="Aktienkurs", + ), + html.H1( + className="widget-content", + children="123", + ), + ], + ), + html.Div( + className="widget-small", + children=[ + html.H3( + className="widget-title", + children="Umsatz", + ), + html.H1( + className="widget-content", + children="1234", + ), + ], + ), + ], + ) + + +def create_tabs(selected_finance_df: pd.DataFrame) -> html: + """Create tabs for more company information. + + Args: + selected_company_id: Id of the chosen company in the dropdown. + selected_finance_df: A dataframe containing all available finance information of the companies. + + Returns: + The html div to create the tabs of the company page. + """ + return html.Div( + className="tabs", + children=[ + dcc.Tabs( + id="tabs", + value="tab-1", + children=[ + dcc.Tab( + label="Kennzahlen", + value="tab-1", + className="tab-style", + selected_className="selected-tab-style", + children=[kennzahlen_layout(selected_finance_df)], + ), + dcc.Tab( + label="Beteiligte Personen", + value="tab-2", + className="tab-style", + selected_className="selected-tab-style", + ), + dcc.Tab( + label="Stimmung", + value="tab-3", + className="tab-style", + selected_className="selected-tab-style", + ), + dcc.Tab( + label="Verflechtungen", + value="tab-4", + className="tab-style", + selected_className="selected-tab-style", + ), + ], + ), + html.Div(id="tabs-example-content-1"), + ], + ) + + +def kennzahlen_layout(selected_finance_df: pd.DataFrame) -> html: + """Create metrics tab. 
+ + Args: + selected_company_id: Id of the chosen company in the dropdown. + selected_finance_df: A dataframe containing all available finance information of the companies. + + Returns: + The html div to create the metrics tab of the company page. + """ + return html.Div( + [ + dcc.Graph( + figure=financials_figure( + selected_finance_df, "annual_finance_statement_ebit" + ) + ) + ] + ) + + +def financials_figure(selected_finance_df: pd.DataFrame, metric: str) -> go.Figure: + """Creates plotly line chart for a specific company and a metric. + + Args: + selected_finance_df: A dataframe containing all finance information of the selected company. + metric: The metric that should be visualized. + + Returns: + A plotly figure showing the available metric data of the company. + """ + # create figure + fig_line = go.Figure() + # add trace for company 1 + fig_line.add_trace( + go.Scatter( + x=selected_finance_df["annual_finance_statement_date"], + y=selected_finance_df[metric], + line_color=COLORS["raisin-black"], + marker_color=COLORS["raisin-black"], + ) + ) + # set title and labels + fig_line.update_layout( + title=metric, + xaxis_title="Jahr", + yaxis_title="in Mio.€", + plot_bgcolor=COLORS["light"], + ) + return fig_line diff --git a/src/aki_prj23_transparenzregister/utils/data_transfer.py b/src/aki_prj23_transparenzregister/utils/data_transfer.py new file mode 100644 index 0000000..f28712c --- /dev/null +++ b/src/aki_prj23_transparenzregister/utils/data_transfer.py @@ -0,0 +1,444 @@ +"""This module contains the data transfer and refinement functionalities between staging and production DB.""" +import sys +from datetime import date +from typing import Any + +import sqlalchemy as sa +from cachetools import LRUCache, cached +from loguru import logger +from sqlalchemy.orm import Session +from tqdm import tqdm + +from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider +from aki_prj23_transparenzregister.utils.enum_types import RelationTypeEnum +from aki_prj23_transparenzregister.utils.mongo.company_mongo_service import ( + CompanyMongoService, +) +from aki_prj23_transparenzregister.utils.mongo.connector import MongoConnector +from aki_prj23_transparenzregister.utils.sql import entities +from aki_prj23_transparenzregister.utils.sql.connector import ( + get_session, + reset_all_tables, +) +from aki_prj23_transparenzregister.utils.string_tools import simplify_string + + +class DataInvalidError(ValueError): + """This error is thrown if a db entry can't be parsed for the production db.""" + + def __init__(self, message: str) -> None: + """Argument of the error to be parsed along.""" + super().__init__(message) + + +def _refine_district_court_entry(name: str, city: str | None) -> tuple[str, str]: + """Refines the district court entry and tests for consistency. + + Args: + name: The name of the court. + city: The city where the cort is placed. + + Returns: + A tuple containing cort name and court city. + """ + if not name: + raise DataInvalidError("There is no court name.") + if not name.startswith("Amtsgericht "): + raise DataInvalidError( + f"The name of the district court does not start correctly: {name}" + ) + if not city or city not in name.split(" ", 1)[1]: + city = name.split(" ", 1)[1].strip() + return name, city + + +def _read_district_court_id(name: str, city: str, db: Session) -> int | None: + """Reads a district court id for a company if the district court is registered. + + Args: + name: The name of the court. 
+        city: The name of the city where the court is placed.
+        db: A session to connect to an SQL db via SQLAlchemy.
+
+
+    Returns:
+        The district court id as an int if the district court is known.
+        Otherwise, returns None.
+    """
+    return (
+        db.query(entities.DistrictCourt.id)
+        .filter(entities.DistrictCourt.name == name)
+        .filter(entities.DistrictCourt.city == city)
+        .scalar()
+    )
+
+
+def _read_person_id(
+    name: str, surname: str, date_of_birth: date, db: Session
+) -> int | None:
+    """Reads a person id if the person is already registered.
+
+    Args:
+        name: The first name of the person.
+        surname: The last name of the person.
+        date_of_birth: The date the person was born.
+        db: A session to connect to an SQL db via SQLAlchemy.
+
+    Returns:
+        The person id as an int if the person is known.
+        Otherwise, returns None.
+    """
+    return (
+        db.query(entities.Person.id)
+        .filter(entities.Person.name == name)
+        .filter(entities.Person.surname == surname)
+        .filter(entities.Person.date_of_birth == date_of_birth)
+        .scalar()
+    )
+
+
+@cached(cache=LRUCache(maxsize=1000), key=lambda name, city, db: hash((name, city)))  # type: ignore
+def get_district_court_id(name: str, city: str | None, db: Session) -> int:
+    """Determines the id of a district court.
+
+    Determines the id of a district court and adds an entry to the table if no entry could be found.
+    An LRU cache is used to increase the speed of this application.
+
+    Args:
+        name: The name of the district court.
+        city: The name where the court is located.
+        db: A session to connect to an SQL db via SQLAlchemy.
+
+    Returns:
+        The id / private key of a district court in the SQL database.
+    """
+    name, city = _refine_district_court_entry(name, city)
+    court_id = _read_district_court_id(name, city, db)
+    if court_id is not None:
+        return court_id
+    court = entities.DistrictCourt(name=name, city=city)
+    db.add(court)
+    db.commit()
+    return court.id  # type: ignore
+
+
+@cached(cache=LRUCache(maxsize=2000), key=lambda name, surname, date_of_birth, db: hash((name, surname, date_of_birth)))  # type: ignore
+def get_person_id(
+    name: str, surname: str, date_of_birth: date | str | None, db: Session
+) -> int:
+    """Identifies the id of a person.
+
+    Identifies the id of a person and adds an entry to the table if no entry could be found.
+    An LRU cache is used to increase the speed of this application.
+
+    Args:
+        name: The first name of the person.
+        surname: The last name of the person.
+        date_of_birth: The date the person was born.
+        db: A session to connect to an SQL db via SQLAlchemy.
+
+    Returns:
+        The id / private key of a person in the SQL database.
+    """
+    if isinstance(date_of_birth, str) and date_of_birth:
+        date_of_birth = date.fromisoformat(date_of_birth)
+    if not name or not surname or not date_of_birth:
+        raise DataInvalidError(
+            f'At least one of the three values name: "{name}", surname: "{surname}" or date_of_birth: "{date_of_birth}" is empty.'
+ ) + assert isinstance(date_of_birth, date) # noqa: S101 + person_id = _read_person_id(name, surname, date_of_birth, db) + if person_id is not None: + return person_id + person = entities.Person(name=name, surname=surname, date_of_birth=date_of_birth) + db.add(person) + db.commit() + return person.id # type: ignore + + +@cached(cache=LRUCache(maxsize=5000), key=lambda name, zip_code, city, db: hash((name, zip_code, city))) # type: ignore +def get_company_id( + name: str, zip_code: str | None, city: str | None, db: Session +) -> int: + """Queries the id of a company. + + Args: + name: The HR entry of the company. + zip_code: The zip code where the company can be found. + city: The city where the company is found in. + db: A session to connect to an SQL db via SQLAlchemy. + + Returns: + The id / privat key of a company. + """ + if not name: + raise DataInvalidError("The name must be given and contain at least one sign.") + zip_code = simplify_string(zip_code) + city = simplify_string(city) + company_id = ( + db.query(entities.Company.id) + .filter( + sa.or_(entities.Company.zip_code == zip_code, entities.Company.city == city) + ) + .filter(entities.Company.name == name) + .scalar() + ) + if company_id is None and zip_code is None and city is None: + company_id = ( + db.query(entities.Company.id) + .filter(entities.Company.name == name) + .scalar() # todo ensure uniqueness + ) + if company_id is None: + raise KeyError(f"No corresponding company could be found to {name}.") + return company_id + + +@logger.catch(level="WARNING", reraise=True) +def add_company(company: dict[str, Any], db: Session) -> None: + """Add a company with all its data found in the mongodb company entry. + + Args: + company: The company to add. + db: A session to connect to an SQL db via SQLAlchemy. + """ + court_id = get_district_court_id(**company["id"]["district_court"], db=db) + location = company["location"] + name = simplify_string(company.get("name")) + if not name: + raise DataInvalidError( + "The company name needs to be valid (not empty and not only whitespace)." + ) + last_update: date | None = ( + date.fromisoformat(company["last_update"]) if company["last_update"] else None + ) + company_entry = entities.Company( + court_id=court_id, + hr=company["id"]["hr_number"].strip().replace(" ", " ").replace(" ", " "), + name=name, + city=simplify_string(location.get("city")), + zip_code=simplify_string(location.get("zip_code")), + street=simplify_string(location.get("street")), + last_update=last_update, + ) + db.add(company_entry) + db.commit() + logger.debug(f"Added the company entry {company['name']} to the db.") + + +def add_companies(companies: list[dict[str, Any]], db: Session) -> None: + """Adds a company to the database. + + Args: + companies: The company to be added. + db: A session to connect to an SQL db via SQLAlchemy. + """ + data_invalid, error_count = 0, 0 + for company in tqdm(companies, desc="Companies added"): + try: + add_company(company, db) + except DataInvalidError: + data_invalid += 1 + except Exception: + error_count += 1 + db.rollback() + if error_count + data_invalid: + logger.warning( + f"When adding companies {error_count + data_invalid} problems occurred " + f"{data_invalid} where caused by invalid data." + ) + else: + logger.info("When adding companies no problems occurred.") + + +@logger.catch(level="WARNING", reraise=True) +def add_relationship( + relationship: dict[str, Any], company_id: int, db: Session +) -> None: + """Adds a relationship to a company. 
+
+    Args:
+        relationship: The relationship and the relationship partner.
+        company_id: The company id the relation is rooted in.
+        db: A session to connect to an SQL db via SQLAlchemy.
+    """
+    relation_type = RelationTypeEnum.get_enum_from_name(relationship.get("role"))
+    relation: entities.CompanyRelation | entities.PersonRelation
+    if "date_of_birth" in relationship:
+        name = relationship["name"]
+        person_id = get_person_id(
+            name["firstname"],
+            name["lastname"],
+            relationship["date_of_birth"],
+            db,
+        )
+        relation = entities.PersonRelation(
+            person_id=person_id,
+            company_id=company_id,
+            relation=relation_type,
+        )
+    else:
+        relation_to: int = get_company_id(
+            relationship["description"],
+            relationship["location"]["zip_code"],
+            relationship["location"]["city"],
+            db=db,
+        )
+        if company_id == relation_to:
+            raise DataInvalidError(
+                "For a valid relation both parties can't be the same entity."
+            )
+        relation = entities.CompanyRelation(
+            company_id=company_id,
+            relation=relation_type,
+            company2_id=relation_to,
+        )
+    db.add(relation)
+    db.commit()
+
+
+def add_relationships(companies: list[dict[str, dict]], db: Session) -> None:
+    """Adds the relationships of a list of companies to the database.
+
+    Args:
+        companies: Companies whose relationships should be added to the db.
+        db: A session to connect to an SQL db via SQLAlchemy.
+    """
+    total: int = sum(len(company.get("relationships", "")) for company in companies)
+    with tqdm(
+        total=total,
+        desc="Company connections added",
+    ) as pbar:
+        for company in companies:
+            relationships: list[dict[str, Any]] = company.get("relationships", "")  # type: ignore
+            if not relationships:
+                continue
+            try:
+                company_id: int = get_company_id(
+                    company["name"],  # type: ignore
+                    company["location"]["zip_code"],
+                    company["location"]["city"],
+                    db=db,
+                )
+            except Exception:
+                pbar.update(len(relationships))
+                db.rollback()
+                continue
+
+            for relationship in relationships:
+                try:
+                    add_relationship(relationship, company_id=company_id, db=db)
+                except Exception:
+                    db.rollback()
+                pbar.update()
+
+    logger.info("Company connections added.")
+
+
+# yearly_results
+def add_annual_report(company_id: int, year: int, report: dict, db: Session) -> None:
+    """Adds an annual financial report to the SQL database.
+
+    The added report is linked with the company.
+    # TODO add a link to the accountant.
+
+    Args:
+        company_id: The SQL id of the company.
+        year: The year of the result.
+        report: The financial report that should be added.
+        db: A session to connect to an SQL db via SQLAlchemy.
+    """
+    if not report.get("auditors") and not report.get("financials"):
+        company = db.query(entities.Company).get(company_id)
+        if company is None:
+            raise KeyError(f"The company with the id {company_id} could not be found.")
+        logger.debug(f"No financial data found for {company.name} in the year {year}.")
+        return
+    db.add(
+        entities.AnnualFinanceStatement(
+            company_id=company_id,
+            date=date(year, 1, 1),
+            **report.get("financials", {}),  # TODO can we have a date?
+        ),
+    )
+    for auditor in report.get("auditors", ""):
+        pass
+        _ = auditor
+        # person_id = get_person_id(person.get("name"))  # how to create a person relation?
+        # company relation?
+
+
+def add_annual_financial_reports(companies: list[dict], db: Session) -> None:
+    """Adds all the yearly results to the SQL db.
+
+    Args:
+        companies: The companies datadump from the MongoDB.
+        db: A session to connect to an SQL db via SQLAlchemy.
+    """
+    total: int = sum(len(company.get("yearly_results", "")) for company in companies)
+    with tqdm(
+        total=total,
+        desc="Annual financial statements added",
+    ) as pbar:
+        for company in companies:
+            yearly_results: dict[str, dict] = company.get("yearly_results", {})
+            if not yearly_results:
+                continue
+            try:
+                company_id: int = get_company_id(
+                    company["name"],
+                    company["location"]["zip_code"],
+                    company["location"]["city"],
+                    db=db,
+                )
+            except Exception:
+                logger.exception("The company could not be identified.")
+                pbar.update(len(yearly_results))
+                db.rollback()
+                raise
+            for year, report in yearly_results.items():
+                if not report:
+                    continue
+                try:
+                    year_int = int(year)
+                except ValueError:
+                    logger.warning(
+                        f"The company {company['name']} has a yearly result with an invalid year of \"{year}\".",
+                    )
+                    continue
+                add_annual_report(company_id, year_int, report, db=db)
+                pbar.update()
+    db.commit()
+    logger.info("Annual financial statements added.")
+
+
+def transfer_data(db: Session | None = None) -> None:
+    """This function transfers all the data from the staging environment (MongoDB) into the production environment (SQL)."""
+    logger.remove()
+    logger.add(
+        sys.stdout,
+        level="INFO",
+        catch=True,
+    )
+    logger.add("data-transfer.log", level="INFO", retention=5)
+
+    mongo_connector = MongoConnector(
+        JsonFileConfigProvider("./secrets.json").get_mongo_connection_string()
+    )
+    mongo_company = CompanyMongoService(mongo_connector)
+    companies: list[dict[str, Any]] = mongo_company.get_all()  # type: ignore
+    del mongo_company
+
+    if db is None:
+        db = get_session(JsonFileConfigProvider("./secrets.json"))
+
+    reset_all_tables(db)
+
+    add_companies(companies, db)
+    add_relationships(companies, db)
+    add_annual_financial_reports(companies, db)
+    db.close()
+
+
+if __name__ == "__main__":
+    transfer_data(get_session("sqlite:///local-test-data.db"))
diff --git a/src/aki_prj23_transparenzregister/utils/enum_types.py b/src/aki_prj23_transparenzregister/utils/enum_types.py
new file mode 100644
index 0000000..51e9799
--- /dev/null
+++ b/src/aki_prj23_transparenzregister/utils/enum_types.py
@@ -0,0 +1,69 @@
+"""Collection of enumeration types for the whole project."""
+import enum
+
+
+class RelationTypeEnum(enum.IntEnum):
+    """RelationTypeEnum."""
+
+    GESCHAEFTSFUEHRER = enum.auto()
+    KOMMANDITIST = enum.auto()
+    VORSTAND = enum.auto()
+    PROKURIST = enum.auto()
+    LIQUIDATOR = enum.auto()
+    INHABER = enum.auto()
+    PERSOENLICH_HAFTENDER_GESELLSCHAFTER = enum.auto()
+    PARTNER = enum.auto()
+    DIREKTOR = enum.auto()
+
+    RECHTSNACHFOLGER = enum.auto()
+    ORGANISATION = enum.auto()
+
+    @staticmethod
+    def get_enum_from_name(relation_name: str | None) -> "RelationTypeEnum":
+        """Translates relation name into a RelationTypeEnum.
+
+        If no translation can be found, a ValueError is raised.
+
+        Args:
+            relation_name: The name of the relation to be translated.
+
+        Returns:
+            The identified relation type.
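+
+        Example (illustrative; the method normalises suffixes like "(in)" and casing):
+            >>> RelationTypeEnum.get_enum_from_name("Geschäftsführer(in)") is RelationTypeEnum.GESCHAEFTSFUEHRER
+            True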
+ """ + if relation_name is None: + raise ValueError("A relation type needs to be given.") + relation_name = ( + relation_name.strip() + .replace("(in)", "") + .replace("(r)", "r") + .strip() + .lower() + ) + name = { + "geschäftsführer": RelationTypeEnum.GESCHAEFTSFUEHRER, + "kommanditist": RelationTypeEnum.KOMMANDITIST, + "vorstand": RelationTypeEnum.VORSTAND, + "vorstandsvorsitzender": RelationTypeEnum.VORSTAND, + "prokurist": RelationTypeEnum.PROKURIST, + "liquidator": RelationTypeEnum.LIQUIDATOR, + "inhaber": RelationTypeEnum.INHABER, + "persönlich haftender gesellschafter": RelationTypeEnum.PERSOENLICH_HAFTENDER_GESELLSCHAFTER, + "organisation": RelationTypeEnum.ORGANISATION, + "partner": RelationTypeEnum.PARTNER, + "direktor": RelationTypeEnum.DIREKTOR, + "geschäftsführender direktor": RelationTypeEnum.DIREKTOR, + "mitglied des leitungsorgans": RelationTypeEnum.VORSTAND, + "rechtsnachfolger": RelationTypeEnum.RECHTSNACHFOLGER, + }.get(relation_name) + if name is not None: + return name + raise ValueError(f'Relation type "{relation_name}" is not yet implemented!') + + +class SentimentTypeEnum(enum.Enum): + """SentimentTypeEnum.""" + + employee_voting = "employee_voting" + sustainability = "sustainability" + environmental_aspects = "environmental_aspects" + perception = "perception" diff --git a/src/aki_prj23_transparenzregister/utils/enumy_types.py b/src/aki_prj23_transparenzregister/utils/enumy_types.py deleted file mode 100644 index 30901de..0000000 --- a/src/aki_prj23_transparenzregister/utils/enumy_types.py +++ /dev/null @@ -1,27 +0,0 @@ -"""Collection of enumeration types for the whole project.""" -import enum - - -class RelationTypeEnum(enum.IntEnum): - """RelationTypeEnum.""" - - EXECUTIVE = enum.auto() - AUDITOR = enum.auto() - SUPERVISORY_BOARD = enum.auto() - MANAGING_DIRECTOR = enum.auto() - AUTHORIZED_REPRESENTATIVE = enum.auto() - FINAL_AUDITOR = enum.auto() - - PARTICIPATES_WITH = enum.auto() - HAS_SHARES_OF = enum.auto() - IS_SUPPLIED_BY = enum.auto() - WORKS_WITH = enum.auto() - - -class SentimentTypeEnum(enum.Enum): - """SentimentTypeEnum.""" - - employee_voting = "employee_voting" - sustainability = "sustainability" - environmental_aspects = "environmental_aspects" - perception = "perception" diff --git a/src/aki_prj23_transparenzregister/utils/sql/connector.py b/src/aki_prj23_transparenzregister/utils/sql/connector.py index b2ef367..b3e5d23 100644 --- a/src/aki_prj23_transparenzregister/utils/sql/connector.py +++ b/src/aki_prj23_transparenzregister/utils/sql/connector.py @@ -1,11 +1,13 @@ """Module containing connection utils for PostgreSQL DB.""" import re +import pandas as pd import sqlalchemy as sa from loguru import logger from sqlalchemy.engine import URL, Engine from sqlalchemy.orm import Session, declarative_base, sessionmaker from sqlalchemy.pool import SingletonThreadPool +from tqdm import tqdm from aki_prj23_transparenzregister.config.config_providers import ( ConfigProvider, @@ -81,6 +83,33 @@ def init_db(db: Session) -> None: Base.metadata.create_all(db.bind) +def reset_all_tables(db: Session) -> None: + """Drops all SQL tables and recreates them.""" + logger.info("Resetting all SQL tables.") + Base.metadata.drop_all(db.bind) + init_db(db) + + +@logger.catch(reraise=True) +def transfer_db(*, source: Session, destination: Session) -> None: + """Transfers the data from on db to another db. + + Args: + source: A session to a source db data should be copied from. + destination: A session to a db where the data should be copied to. 
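+
+    Example (illustrative sketch; the two connection targets are placeholders):
+        transfer_db(
+            source=get_session("sqlite:///local-test-data.db"),
+            destination=get_session(JsonFileConfigProvider("./secrets.json")),
+        )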
+ """ + reset_all_tables(destination) + init_db(destination) + sbind = source.bind + dbind = destination.bind + assert isinstance(sbind, Engine) # noqa: S101 + assert isinstance(dbind, Engine) # noqa: S101 + for table in tqdm(Base.metadata.sorted_tables): + pd.read_sql_table(str(table), sbind).to_sql( + str(table), dbind, if_exists="append", index=False + ) + + if __name__ == "__main__": """Main flow creating tables""" init_db(get_session(JsonFileConfigProvider("./secrets.json"))) diff --git a/src/aki_prj23_transparenzregister/utils/sql/entities.py b/src/aki_prj23_transparenzregister/utils/sql/entities.py index 2bb90f1..a1f05e6 100644 --- a/src/aki_prj23_transparenzregister/utils/sql/entities.py +++ b/src/aki_prj23_transparenzregister/utils/sql/entities.py @@ -3,7 +3,7 @@ from datetime import datetime import sqlalchemy as sa -from aki_prj23_transparenzregister.utils.enumy_types import ( +from aki_prj23_transparenzregister.utils.enum_types import ( RelationTypeEnum, SentimentTypeEnum, ) @@ -16,7 +16,6 @@ class DistrictCourt(Base): """DistrictCourt.""" __tablename__ = "district_court" - id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) city = sa.Column(sa.String(100), nullable=False) name = sa.Column(sa.String(100), nullable=False, unique=True) @@ -54,6 +53,7 @@ class Person(Base): __tablename__ = "person" __table_args__ = (sa.UniqueConstraint("name", "surname", "date_of_birth"),) + # TODO add a constraint that asks for a minlength of 2 for name and surname id = sa.Column(sa.Integer, primary_key=True) name = sa.Column(sa.String(100), nullable=False) @@ -69,16 +69,25 @@ class AnnualFinanceStatement(Base): id = sa.Column(sa.Integer, primary_key=True) company_id = sa.Column(sa.Integer, sa.ForeignKey("company.id")) - date = sa.Column(sa.DateTime(timezone=True), nullable=False) - total_volume = sa.Column(sa.Float) - ebit = sa.Column(sa.Float) - ebitda = sa.Column(sa.Float) - ebit_margin = sa.Column(sa.Float) - total_balance = sa.Column(sa.Float) - equity = sa.Column(sa.Float) - debt = sa.Column(sa.Float) - return_on_equity = sa.Column(sa.Float) - capital_turnover_rate = sa.Column(sa.Float) + date = sa.Column(sa.Date, nullable=False) + total_volume = sa.Column(sa.Float, default="NaN") + ebit = sa.Column(sa.Float, default="NaN") + ebitda = sa.Column(sa.Float, default="NaN") + ebit_margin = sa.Column(sa.Float, default="NaN") + total_balance = sa.Column(sa.Float, default="NaN") + equity = sa.Column(sa.Float, default="NaN") + debt = sa.Column(sa.Float, default="NaN") + return_on_equity = sa.Column(sa.Float, default="NaN") + capital_turnover_rate = sa.Column(sa.Float, default="NaN") + current_liabilities = sa.Column(sa.Float, default="NaN") + dividends = sa.Column(sa.Float, default="NaN") + net_income = sa.Column(sa.Float, default="NaN") + assets = sa.Column(sa.Float, default="NaN") + long_term_debt = sa.Column(sa.Float, default="NaN") + short_term_debt = sa.Column(sa.Float, default="NaN") + revenue = sa.Column(sa.Float, default="NaN") + cash_flow = sa.Column(sa.Float, default="NaN") + current_assets = sa.Column(sa.Float, default="NaN") # assets vs current assets # company: Mapped[Company] = relationship(Company) diff --git a/src/aki_prj23_transparenzregister/utils/string_tools.py b/src/aki_prj23_transparenzregister/utils/string_tools.py new file mode 100644 index 0000000..be399f0 --- /dev/null +++ b/src/aki_prj23_transparenzregister/utils/string_tools.py @@ -0,0 +1,18 @@ +"""Contains functions fot string manipulation.""" + + +def simplify_string(string_to_simplify: str | None) -> str | 
None: + """Simplifies a string to None if no valid sting is found. + + Args: + string_to_simplify: The string to simplify. + + Returns: + The simplified string or None if the string was empty. + """ + if string_to_simplify is not None: + if isinstance(string_to_simplify, str): + string_to_simplify = string_to_simplify.strip() + else: + raise TypeError("The string to simplify is not a string.") + return string_to_simplify if string_to_simplify else None diff --git a/tests/apps/enrich_company_financials_test.py b/tests/apps/enrich_company_financials_test.py index 22232bd..4368fe9 100644 --- a/tests/apps/enrich_company_financials_test.py +++ b/tests/apps/enrich_company_financials_test.py @@ -18,7 +18,8 @@ def test_import_enrich_company_financials() -> None: @patch( "aki_prj23_transparenzregister.apps.enrich_company_financials.CompanyMongoService" ) -def test_work(mock_compnay_service: Mock, mock_bundesanzeiger: Mock) -> None: +def test_work(mock_company_service: Mock, mock_bundesanzeiger: Mock) -> None: + """Tests the readout of the company financials.""" mock_bundesanzeiger.return_value = pd.DataFrame( [ { @@ -28,9 +29,8 @@ def test_work(mock_compnay_service: Mock, mock_bundesanzeiger: Mock) -> None: } ] ) - # mock_compnay_service.add_yearly_resreturn_value enrich_company_financials.work( {"_id": "", "name": "ABC AG", "location": {"city": "Haltern am See"}}, - mock_compnay_service, + mock_company_service, ) assert enrich_company_financials diff --git a/tests/config/config_providers_test.py b/tests/config/config_providers_test.py index 0d00cfd..60ebb3e 100644 --- a/tests/config/config_providers_test.py +++ b/tests/config/config_providers_test.py @@ -1,3 +1,4 @@ +"""Tests the config provers.""" import json from unittest.mock import mock_open, patch @@ -10,11 +11,13 @@ from aki_prj23_transparenzregister.config.config_providers import ( def test_json_provider_init_fail() -> None: + """Tests the file not found error if an unknown filepath is given for the JsonFileConfigProvider.""" with pytest.raises(FileNotFoundError): JsonFileConfigProvider("file-that-does-not-exist") def test_json_provider_init_no_json() -> None: + """Tests if a non json file throws the correct error.""" with patch("os.path.isfile") as mock_isfile, patch( "builtins.open", mock_open(read_data="fhdaofhdoas") ): @@ -24,6 +27,7 @@ def test_json_provider_init_no_json() -> None: def test_json_provider_init() -> None: + """Tests the JsonFileConfigProvider creation.""" data = {"hello": "world"} input_data = json.dumps(data) with patch("os.path.isfile") as mock_isfile: @@ -34,6 +38,7 @@ def test_json_provider_init() -> None: def test_json_provider_get_postgres() -> None: + """Tests if the config provider can return the postgre config string.""" data = { "postgres": { "username": "user", @@ -56,6 +61,7 @@ def test_json_provider_get_postgres() -> None: def test_json_provider_get_mongo() -> None: + """Tests the JsonConfigProvider for the mongo db.""" data = { "mongo": { "username": "user", diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..e3b4397 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,178 @@ +"""Global configurations and definitions for pytest.""" +import datetime +import os +from collections.abc import Generator +from inspect import getmembers, isfunction +from typing import Any + +import pytest +from sqlalchemy.engine import Engine +from sqlalchemy.orm import Session + +from aki_prj23_transparenzregister.utils import data_transfer +from aki_prj23_transparenzregister.utils.sql import 
entities +from aki_prj23_transparenzregister.utils.sql.connector import get_session, init_db + + +@pytest.fixture(autouse=True) +def _clear_caches() -> Generator[None, None, None]: + """A function that clears all caches after each test. + + All the modules containing the cached functions need to be listed in the modules tuple. + """ + yield + # https://stackoverflow.com/a/139198/11003343 + modules = (data_transfer,) + functions = [ + function + for module in modules + for name, function in getmembers(module, isfunction) + if function.__dict__.get("cache") is not None + ] + # https://cachetools.readthedocs.io/en/stable/?highlight=clear#memoizing-decorators + for function in functions: + function.cache.clear() # type: ignore + + +@pytest.fixture() +def empty_db() -> Generator[Session, None, None]: + """Generates a db Session to a sql_lite db.""" + if os.path.exists("test-db.db"): + os.remove("test-db.db") + db = get_session("sqlite:///test-db.db") + init_db(db) + yield db + db.close() + bind = db.bind + assert isinstance(bind, Engine) + bind.dispose() + os.remove("test-db.db") + + +@pytest.fixture() +def finance_statements() -> list[dict[str, Any]]: + """Creates a list of finance statements.""" + return [ + { + "id": 1, + "company_id": 1, + "date": datetime.date.fromisoformat("2023-01-01"), + "total_volume": 1000.0, + "ebit": 1000.0, + "ebitda": 1000.0, + "ebit_margin": 1000.0, + "total_balance": 1000.0, + "equity": 1000.0, + "debt": 1000.0, + "return_on_equity": 1000.0, + "capital_turnover_rate": 1000.0, + "current_liabilities": 1000.0, + "dividends": float("NaN"), + "net_income": float("NaN"), + "assets": 1000.0, + "long_term_debt": 1000.0, + "short_term_debt": 1000.0, + "revenue": 1000.0, + "cash_flow": 1000.0, + "current_assets": 1000.0, + }, + { + "id": 2, + "company_id": 1, + "date": datetime.date.fromisoformat("2022-01-01"), + "total_volume": 1100.0, + "ebit": 1100.0, + "ebitda": 1100.0, + "ebit_margin": 1100.0, + "total_balance": 1100.0, + "equity": 1100.0, + "debt": 1100.0, + "return_on_equity": 1100.0, + "capital_turnover_rate": 1100.0, + "current_liabilities": 1100.0, + "dividends": float("NaN"), + "net_income": float("NaN"), + "assets": 1100.0, + "long_term_debt": 1100.0, + "short_term_debt": 1100.0, + "revenue": 1100.0, + "cash_flow": 1100.0, + "current_assets": 1100.0, + }, + ] + + +@pytest.fixture() +def full_db(empty_db: Session, finance_statements: list[dict[str, Any]]) -> Session: + """Fills a db with some test data.""" + empty_db.add_all( + [ + entities.DistrictCourt(name="Amtsgericht Bochum", city="Bochum"), + entities.DistrictCourt(name="Amtsgericht Dortmund", city="Dortmund"), + entities.Person( + name="Max", + surname="Mustermann", + date_of_birth=datetime.date(2023, 1, 1), + ), + entities.Person( + name="Sabine", + surname="Mustermann", + date_of_birth=datetime.date(2023, 1, 1), + ), + entities.Person( + name="Some Firstname", + surname="Some Surname", + date_of_birth=datetime.date(2023, 1, 1), + ), + entities.Person( + name="Some Firstname", + surname="Some Surname", + date_of_birth=datetime.date(2023, 1, 2), + ), + entities.Person( + name="Other Firstname", + surname="Other Surname", + date_of_birth=datetime.date(2023, 1, 2), + ), + ] + ) + empty_db.commit() + empty_db.add_all( + [ + entities.Company( + hr="HRB 123", + court_id=2, + name="Some Company GmbH", + street="Sesamstr.", + zip_code="12345", + city="TV City", + last_update=datetime.date.fromisoformat("2023-01-01"), + ), + entities.Company( + hr="HRB 123", + court_id=1, + name="Other Company GmbH", + 
street="Sesamstr.", + zip_code="12345", + city="TV City", + last_update=datetime.date.fromisoformat("2023-01-01"), + ), + entities.Company( + hr="HRB 12", + court_id=2, + name="Third Company GmbH", + last_update=datetime.date.fromisoformat("2023-01-01"), + ), + ] + ) + empty_db.commit() + + empty_db.add_all( + [ + entities.AnnualFinanceStatement(**finance_statement) + for finance_statement in finance_statements + ] + ) + empty_db.commit() + # print(pd.read_sql_table("company", empty_db.bind).to_string()) + return empty_db diff --git a/tests/ui/ui_elements_test.py b/tests/ui/ui_elements_test.py new file mode 100644 index 0000000..21650df --- /dev/null +++ b/tests/ui/ui_elements_test.py @@ -0,0 +1,118 @@ +"""Tests for ui elements.""" + +import pandas as pd +from sqlalchemy.orm import Session + +from aki_prj23_transparenzregister.ui import ui_elements + + +def test_import() -> None: + """Checks if an import co ui_elements can be made.""" + assert ui_elements is not None + + +def test_get_company_data(full_db: Session) -> None: + """Checks if data from the company and district court tables can be accessed.""" + company_df = ui_elements.get_company_data(full_db) + + test_data = pd.DataFrame( + { + "company_id": {0: 1, 1: 2, 2: 3}, + "company_hr": {0: "HRB 123", 1: "HRB 123", 2: "HRB 12"}, + "company_court_id": {0: 2, 1: 1, 2: 2}, + "company_name": { + 0: "Some Company GmbH", + 1: "Other Company GmbH", + 2: "Third Company GmbH", + }, + "company_street": {0: "Sesamstr.", 1: "Sesamstr.", 2: None}, + "company_zip_code": {0: "12345", 1: "12345", 2: None}, + "company_city": {0: "TV City", 1: "TV City", 2: None}, + "company_last_update": { + 0: "2023-01-01", + 1: "2023-01-01", + 2: "2023-01-01", + }, + "company_sector": {0: None, 1: None, 2: None}, + "district_court_name": { + 0: "Amtsgericht Dortmund", + 1: "Amtsgericht Bochum", + 2: "Amtsgericht Dortmund", + }, + } + ) + test_data = test_data.set_index("company_id") + pd.testing.assert_frame_equal(company_df, test_data) + + +def test_get_finance_data(full_db: Session) -> None: + """Checks if data from the company and finance tables can be accessed.""" + finance_df = ui_elements.get_finance_data(full_db) + test_data = pd.DataFrame( + { + "annual_finance_statement_id": {0: 1, 1: 2}, + "annual_finance_statement_company_id": {0: 1, 1: 1}, + "annual_finance_statement_date": {0: "2023-01-01", 1: "2022-01-01"}, + "annual_finance_statement_total_volume": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_ebit": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_ebitda": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_ebit_margin": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_total_balance": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_equity": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_debt": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_return_on_equity": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_capital_turnover_rate": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_current_liabilities": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_dividends": {0: None, 1: None}, + "annual_finance_statement_net_income": {0: None, 1: None}, + "annual_finance_statement_assets": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_long_term_debt": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_short_term_debt": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_revenue": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_cash_flow": {0: 1000.0, 1: 1100.0}, + "annual_finance_statement_current_assets": {0: 1000.0, 
1: 1100.0}, + "company_name": {0: "Some Company GmbH", 1: "Some Company GmbH"}, + "company_id": {0: 1, 1: 1}, + } + ) + pd.testing.assert_frame_equal(finance_df, test_data) + + +def test_create_header() -> None: + """Checks if the header can be created.""" + options = {1: "a", 2: "b"} + ui_elements.create_header(options) + + +def test_create_company_header() -> None: + """Checks if the company header can be created.""" + selected_company = "Test GmbH" + ui_elements.create_company_header(selected_company) + + +def test_create_company_stats(full_db: Session) -> None: + """Checks if the company widgets can be created.""" + company_df = ui_elements.get_company_data(full_db) + value_chosen = 1 + selected_company_stats = company_df.loc[value_chosen] + ui_elements.create_company_stats(selected_company_stats) + + +def test_create_tabs(full_db: Session) -> None: + """Checks if the tabs of the company page can be created.""" + selected_company_id = 1 + finance_df = ui_elements.get_finance_data(full_db) + selected_finance_df = finance_df.loc[ + finance_df["company_id"] == selected_company_id + ] + ui_elements.create_tabs(selected_finance_df) + + +def test_kennzahlen_layout(full_db: Session) -> None: + """Checks if the financial metric layout of the company page can be created.""" + selected_company_id = 1 + finance_df = ui_elements.get_finance_data(full_db) + selected_finance_df = finance_df.loc[ + finance_df["company_id"] == selected_company_id + ] + ui_elements.kennzahlen_layout(selected_finance_df) diff --git a/tests/utils/data_extraction/bundesanzeiger_test.py b/tests/utils/data_extraction/bundesanzeiger_test.py index 30e8007..8829bbd 100644 --- a/tests/utils/data_extraction/bundesanzeiger_test.py +++ b/tests/utils/data_extraction/bundesanzeiger_test.py @@ -1,3 +1,4 @@ +"""Tests if the bundesanzeiger can be accessed and read.""" from unittest.mock import Mock, patch import pandas as pd diff --git a/tests/utils/data_transfer_test.py b/tests/utils/data_transfer_test.py new file mode 100644 index 0000000..b92bb28 --- /dev/null +++ b/tests/utils/data_transfer_test.py @@ -0,0 +1,1014 @@ +"""Test the transfer functions from mongodb to sql.""" +import random +import string +from datetime import date +from typing import Any + +import numpy as np +import pandas as pd +import pytest +import sqlalchemy as sa +from pytest_mock import MockerFixture +from sqlalchemy.engine import Engine +from sqlalchemy.orm import Session + +from aki_prj23_transparenzregister.utils import data_transfer +from aki_prj23_transparenzregister.utils.sql import entities + + +@pytest.mark.parametrize( + ("original", "expected"), + [ + ( + {"name": "Amtsgericht Herne", "city": "Herne"}, + {"name": "Amtsgericht Herne", "city": "Herne"}, + ), + ( + {"name": "Amtsgericht Herne", "city": ""}, + {"name": "Amtsgericht Herne", "city": "Herne"}, + ), + ( + {"name": "Amtsgericht Herne", "city": None}, + {"name": "Amtsgericht Herne", "city": "Herne"}, + ), + ( + {"name": "Amtsgericht Herne", "city": "Something Wrong"}, + {"name": "Amtsgericht Herne", "city": "Herne"}, + ), + ( + {"name": "Amtsgericht Herne", "city": "NoName"}, + {"name": "Amtsgericht Herne", "city": "Herne"}, + ), + ], +) +def test_refine_district_court_entry(original: dict, expected: dict) -> None: + """Tests the transformation/the cleaning of the district court entry.""" + assert data_transfer._refine_district_court_entry( + **{"name": "Amtsgericht Herne", "city": "Herne"} + ) == tuple(expected.values()) + + +@pytest.mark.parametrize( + "defect_data", + [ + {"name": "Wrong 
Herne", "city": "Herne"}, + {"name": "Wrong Herne", "city": "NoName"}, + {"city": "Herne", "name": None}, + {"city": "Herne", "name": ""}, + ], +) +def test_refine_district_court_entry_defect_data(defect_data: dict[str, str]) -> None: + """Tests if an error is thrown if the district court data can't be corrected.""" + with pytest.raises(data_transfer.DataInvalidError): + data_transfer._refine_district_court_entry(**defect_data) + + +@pytest.mark.repeat(3) +def test_empty_db_fixture(empty_db: Session) -> None: + """Checks if the db can be created.""" + assert isinstance(empty_db, Session) + + +@pytest.mark.parametrize( + ("name", "city", "id"), + [ + ("Amtsgericht Bochum", "Bochum", 1), + ("Amtsgericht Dortmund", "Dortmund", 2), + ("Amtsgericht Iserlohn", "Iserlohn", None), + ], +) +def test__read_district_court_id( + name: str, city: str, id: int | None, full_db: Session +) -> None: + """Tests if the district court id can be read.""" + assert data_transfer._read_district_court_id(name, city, full_db) == id + + +@pytest.mark.parametrize( + ("firstname", "surname", "date_str", "id"), + [ + ("Max", "Mustermann", "2023-01-01", 1), + ("Sabine", "Mustermann", "2023-01-01", 2), + ("Some Firstname", "Some Surname", "2023-01-01", 3), + ("Some Firstname", "Some Surname", "2023-01-02", 4), + ("Other Firstname", "Other Surname", "2023-01-02", 5), + (None, "Other Surname", "2023-01-02", None), + ("Does not exist", "Other Surname", "2023-01-02", None), + ("Other Firstname", "Does not exists", "2023-01-02", None), + ("Other Firstname", "Other Surname", "1900-01-02", None), + ("Other Firstname", None, "2023-01-02", None), + ], +) +def test__read_person_id( + firstname: str, surname: str, date_str: str, id: int | None, full_db: Session +) -> None: + """Tests if the person id can be read.""" + assert ( + data_transfer._read_person_id( + firstname, surname, date.fromisoformat(date_str), full_db + ) + == id + ) + + +@pytest.mark.parametrize( + ("name", "city", "id"), + [ + ("Amtsgericht Bochum", "Bochum", 1), + ("Amtsgericht Dortmund", "Dortmund", 2), + ("Amtsgericht Iserlohn", "Iserlohn", 3), + ("Amtsgericht Köln", "Köln", 3), + ], +) +def test_get_district_court_id(name: str, city: str, id: int, full_db: Session) -> None: + """Tests if a court id can be returned and the court automatically be added if not yet part of the db.""" + assert data_transfer.get_district_court_id(name, city, full_db) == id + + +@pytest.mark.parametrize( + ("firstname", "surname", "date_str", "id"), + [ + ("Max", "Mustermann", "2023-01-01", 1), + ("Sabine", "Mustermann", "2023-01-01", 2), + ("Some Firstname", "Some Surname", "2023-01-01", 3), + ("Some Firstname", "Some Surname", "2023-01-02", 4), + ("Other Firstname", "Other Surname", "2023-01-02", 5), + ("Does not exist", "Other Surname", "2023-01-02", 6), + ("Other Firstname", "Does not exists", "2023-01-02", 6), + ("Other Firstname", "Other Surname", "1900-01-02", 6), + ], +) +def test_get_person_id( + firstname: str, surname: str, date_str: str, id: int, full_db: Session +) -> None: + """Tests if a person id can be returned and the court automatically be added if not yet part of the db.""" + assert ( + data_transfer.get_person_id( + firstname, surname, date.fromisoformat(date_str), full_db + ) + == id + ) + + +@pytest.mark.parametrize( + ("firstname", "surname", "date_str"), + [ + ("", "Other Surname", "2023-01-02"), + ("Other Firstname", "", "2023-01-02"), + ("Other Firstname", "Other Surname", ""), + ], +) +def test_get_person_id_value_check( + firstname: str, surname: str, 
date_str: str | None, full_db: Session +) -> None: + """Tests if errors on adding persons can be found.""" + with pytest.raises( + data_transfer.DataInvalidError, match="At least one of the three values name:" + ): + data_transfer.get_person_id( + firstname, + surname, + date.fromisoformat(date_str) if date_str else None, + full_db, + ) + + +@pytest.mark.parametrize( + ("name", "zip_code", "city", "id"), + [ + ("Some Company GmbH", "", "", 1), + ("Some Company GmbH", "12345", "", 1), + ("Some Company GmbH", "12345", "TV City", 1), + ("Some Company GmbH", "", "TV City", 1), + ("Other Company GmbH", "", "", 2), + ("Other Company GmbH", "12345", "", 2), + ("Other Company GmbH", "12345", "TV City", 2), + ("Other Company GmbH", "", "TV City", 2), + ("Third Company GmbH", "", "", 3), + ], +) +def test_get_company_id( + name: str, zip_code: str, city: str, id: int | None, full_db: Session +) -> None: + """Tests if the company id can be returned correctly.""" + assert data_transfer.get_company_id(name, zip_code, city, full_db) == id + + +@pytest.mark.parametrize( + ("name", "zip_code", "city"), + [ + ("Does not exist", "", ""), + ("Does not exist", "41265", ""), + ("Does not exist", "", "Some City"), + ("Other Company GmbH", "TV City", "54321"), + ("Other Company GmbH", "OtherCity", "12345"), + ("Other Company GmbH", "OtherCity", "54321"), + ], +) +def test_get_company_id_not_found( + name: str, + zip_code: str, + city: str, + full_db: Session, +) -> None: + """Test the accessing of missing companies.""" + with pytest.raises(KeyError): + data_transfer.get_company_id(name, zip_code, city, full_db) + + +@pytest.mark.parametrize("name", ["", None]) +def test_get_company_id_nameless(name: str | None, full_db: Session) -> None: + """Test accessing a company without valid name.""" + with pytest.raises(data_transfer.DataInvalidError): + data_transfer.get_company_id(name, "zip_code", "city", full_db) # type: ignore + + +def get_random_string(length: int) -> str: + """Creates a random string of a defined length. + + Args: + length: The length of the string to generate. + + Returns: + The generated string. 
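+
+    Example (illustrative):
+        >>> len(get_random_string(8))
+        8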
+ """ + letters = string.digits + string.ascii_letters + " " + return "".join(random.choice(letters) for _ in range(length)) + + +def get_random_zip() -> str: + """Creates a random zip.""" + letters = string.digits + return "".join(random.choice(letters) for _ in range(5)) + + +def company_generator(seed: int) -> dict[str, Any]: + """Generates a random company entry.""" + random.seed(seed) + if random.choice([True, False]): + city = "Dortmund" + else: + city = get_random_string(random.randint(5, 30)) + return { + "id": { + "district_court": { + "name": f"Amtsgericht {city}", + "city": city if random.choice([True, False]) else None, + }, + "hr_number": get_random_string(7), + }, + "name": get_random_string(random.randint(3, 150)), + "location": { + "city": city if random.choice([True, False]) else None, + "zip_code": get_random_zip() if random.choice([True, False]) else None, + "street": get_random_string(20) if random.choice([True, False]) else None, + }, + "last_update": date(random.randint(2000, 2023), 1, 1).isoformat(), + } + + +@pytest.mark.parametrize("seed", list(range(70, 75))) +def test_add_company(seed: int, full_db: Session) -> None: + """Tests the addition of a company to the db.""" + company = company_generator(seed) + data_transfer.add_company(company, full_db) + + +@pytest.mark.parametrize("seed", list(range(5))) +@pytest.mark.parametrize("overwrite", ["", None, " "]) +def test_add_company_broken_name( + seed: int, overwrite: str | None, full_db: Session +) -> None: + """Tests what happens if a company has a broken / empty name.""" + company = company_generator(seed) + company["name"] = overwrite + if overwrite is None: + with pytest.raises( + data_transfer.DataInvalidError, + match="The company name needs to be valid ", + ): + data_transfer.add_company(company, full_db) + + +@pytest.mark.parametrize("seed", list(range(5))) +@pytest.mark.parametrize("overwrite", ["", None, " "]) +def test_add_company_broken_city( + seed: int, overwrite: str | None, full_db: Session +) -> None: + """Tests a broken / empty city entry.""" + company = company_generator(seed) + company["location"]["city"] = overwrite + data_transfer.add_company(company, full_db) + + +@pytest.mark.parametrize("seed", list(range(5))) +@pytest.mark.parametrize("overwrite", ["", None, " "]) +def test_add_company_broken_zip_code( + seed: int, overwrite: str | None, full_db: Session +) -> None: + """Tests how to add a company if the zip_code is broken / empty.""" + company = company_generator(seed) + company["location"]["zip_code"] = overwrite + data_transfer.add_company(company, full_db) + + +@pytest.mark.parametrize("seed", list(range(5))) +@pytest.mark.parametrize("overwrite", [None]) +def test_add_company_broken_date( + seed: int, overwrite: str | None, full_db: Session +) -> None: + """Tests how the company dadd function deals with a missing date.""" + company = company_generator(seed) + company["last_update"] = overwrite + with pytest.raises(sa.exc.IntegrityError): + data_transfer.add_company(company, full_db) + + +@pytest.mark.parametrize("seed", list(range(5))) +@pytest.mark.parametrize("overwrite", ["", None, " "]) +def test_add_company_broken_district_court( + seed: int, overwrite: str | None, full_db: Session +) -> None: + """Test a broken district court entry.""" + company = company_generator(seed) + company["id"]["district_court"]["name"] = overwrite + company["id"]["district_court"]["city"] = get_random_string(10) + with pytest.raises( + data_transfer.DataInvalidError, + match="There is no court name|The 
name of the district court does not start correctly", + ): + data_transfer.add_company(company, full_db) + + +@pytest.mark.parametrize("seed", list(range(0, 25, 5))) +def test_add_companies(seed: int, mocker: MockerFixture, full_db: Session) -> None: + """Test to add multiple companies.""" + rnd_generator = np.random.default_rng(seed) + companies: list[dict[str, Any]] = [ + company_generator(_) + for _ in set( + rnd_generator.integers(0, 1000, size=rnd_generator.integers(1, 30)).tolist() + ) + ] + spy_warning = mocker.spy(data_transfer.logger, "warning") + spy_info = mocker.spy(data_transfer.logger, "info") + spy_debug = mocker.spy(data_transfer.logger, "debug") + data_transfer.add_companies(companies, full_db) + spy_info.assert_called_once_with("When adding companies no problems occurred.") + spy_warning.assert_not_called() + assert spy_debug.call_count == len(companies) + + +@pytest.mark.parametrize("seed", list(range(1, 25, 5))) +def test_add_companies_duplicate( + seed: int, mocker: MockerFixture, full_db: Session +) -> None: + """Test to add multiple companies.""" + rnd_generator = np.random.default_rng(seed) + companies: list[dict[str, Any]] = [ + company_generator(_) + for _ in set( + rnd_generator.integers(0, 1000, size=rnd_generator.integers(4, 30)).tolist() + ) + ] + unique_companies = len(companies) + companies += companies[-3:] + spy_warning = mocker.spy(data_transfer.logger, "warning") + spy_info = mocker.spy(data_transfer.logger, "info") + spy_debug = mocker.spy(data_transfer.logger, "debug") + data_transfer.add_companies(companies, full_db) + spy_info.assert_not_called() + spy_warning.assert_called_once_with( + "When adding companies 3 problems occurred 0 where caused by invalid data." + ) + assert spy_debug.call_count == unique_companies + + +@pytest.mark.parametrize("seed", list(range(2, 25, 5))) +def test_add_companies_corrupted_data( + seed: int, mocker: MockerFixture, full_db: Session +) -> None: + """Test to add multiple companies.""" + rnd_generator = np.random.default_rng(seed) + companies: list[dict[str, Any]] = [ + company_generator(_) + for _ in set( + rnd_generator.integers(0, 1000, size=rnd_generator.integers(4, 30)).tolist() + ) + ] + companies[len(companies) // 2]["name"] = "" + spy_warning = mocker.spy(data_transfer.logger, "warning") + spy_info = mocker.spy(data_transfer.logger, "info") + spy_debug = mocker.spy(data_transfer.logger, "debug") + data_transfer.add_companies(companies, full_db) + spy_info.assert_not_called() + spy_warning.assert_called_once_with( + "When adding companies 1 problems occurred 1 where caused by invalid data." 
+ ) + assert spy_debug.call_count == len(companies) - 1 + + +@pytest.mark.parametrize("company_id", list(range(5))) +def test_add_relationship_no_relation(company_id: int, full_db: Session) -> None: + """Tests if an error is thrown if the relation type/role is not defined.""" + with pytest.raises(ValueError, match="A relation type needs to be given."): + data_transfer.add_relationship({}, company_id, full_db) + + +@pytest.mark.parametrize("company_id", list(range(5))) +def test_add_relationship_unknown_relation(company_id: int, full_db: Session) -> None: + """Tests if an error is thrown if the relation type/role is unknown.""" + with pytest.raises(ValueError, match="Relation type .* is not yet implemented!"): + data_transfer.add_relationship( + {"role": "something strange"}, company_id, full_db + ) + + +@pytest.mark.parametrize("company_id", [1, 2, 3]) +@pytest.mark.parametrize( + ("firstname", "surname", "date_of_birth"), + [ + ("Max", "Mustermann", "2023-01-01"), + ("Some Firstname", "Some Surname", "2023-01-01"), + ("Other Firstname", "Other Surname", "1900-01-02"), + ], +) +@pytest.mark.parametrize("role", ["Partner", "direktor", "liquidator"]) +def test_add_relationship_person( # noqa: PLR0913 + firstname: str, + surname: str, + date_of_birth: str, + full_db: Session, + company_id: int, + role: str, +) -> None: + """Tests if a personal relation can be added.""" + relation = { + "name": { + "firstname": firstname, + "lastname": surname, + }, + "date_of_birth": date.fromisoformat(date_of_birth), + "role": role, + } + data_transfer.add_relationship(relation, company_id, full_db) + + +@pytest.mark.parametrize("company_id", [1, 2, 3]) +@pytest.mark.parametrize( + ("firstname", "surname", "date_of_birth"), + [ + ("Max", None, "2023-01-01"), + (None, "Some Surname", "2023-01-01"), + ("Other Firstname", "Other Surname", None), + ], +) +@pytest.mark.parametrize("role", ["Partner"]) +def test_add_relationship_person_missing_data( # noqa: PLR0913 + firstname: str, + surname: str, + date_of_birth: str, + full_db: Session, + company_id: int, + role: str, + mocker: MockerFixture, +) -> None: + """Tests if a personal relation can be added.""" + mocker.spy(data_transfer.logger, "warning") + relation = { + "name": { + "firstname": firstname, + "lastname": surname, + }, + "date_of_birth": date_of_birth if date_of_birth else None, + "role": role, + } + with pytest.raises( + data_transfer.DataInvalidError, match="At least one of the three values name:" + ): + data_transfer.add_relationship(relation, company_id, full_db) + + +@pytest.mark.parametrize( + ("company_name", "city", "zip_code", "company_id"), + [ + ("Some Company GmbH", None, None, 2), + ("Some Company GmbH", None, "12345", 2), + ("Some Company GmbH", "TV City", None, 3), + ("Some Company GmbH", "TV City", "12345", 2), + ("Some Company GmbH", "Strange City", "12345", 2), + ("Some Company GmbH", "TV City", "?????", 2), + ("Third Company GmbH", None, None, 1), + ], +) +def test_add_relationship_company( + company_id: int, + company_name: str, + city: str | None, + zip_code: str | None, + full_db: Session, +) -> None: + """Tests if a relationship to another company can be added.""" + data_transfer.add_relationship( + { + "description": company_name, + "location": { + "zip_code": zip_code, + "city": city, + }, + "role": "organisation", + }, + company_id, + full_db, + ) + + +@pytest.mark.parametrize( + ("company_name", "city", "zip_code", "company_id"), + [ + ("Some Company GmbH", None, None, 1), + ("Some Company GmbH", "TV City", "12345", 1), + 
("Some Company GmbH", "TV City", None, 1), + ("Third Company GmbH", None, None, 3), + ], +) +def test_add_relationship_company_self_reference( + company_id: int, + company_name: str, + city: str | None, + zip_code: str | None, + full_db: Session, +) -> None: + """Tests if a company referencing a relationship with itself throws an error.""" + with pytest.raises( + data_transfer.DataInvalidError, + match="For a valid relation both parties can't be the same entity.", + ): + data_transfer.add_relationship( + { + "description": company_name, + "location": { + "zip_code": zip_code, + "city": city, + }, + "role": "organisation", + }, + company_id, + full_db, + ) + + +@pytest.mark.parametrize( + ("company_name", "city", "zip_code", "company_id"), + [ + ("Unknown GmbH", None, None, 2), + ("Some Company GmbH", "Strange city", "?????", 2), + ], +) +def test_add_relationship_company_unknown( + company_id: int, + company_name: str, + city: str | None, + zip_code: str | None, + full_db: Session, +) -> None: + """Tests if a relationship to another company can be added.""" + with pytest.raises( + KeyError, match=f"No corresponding company could be found to {company_name}." + ): + data_transfer.add_relationship( + { + "description": company_name, + "location": { + "zip_code": zip_code, + "city": city, + }, + "role": "organisation", + }, + company_id, + full_db, + ) + + +@pytest.mark.parametrize("empty_relations", [[], [{}], [{"relationship": []}]]) +def test_add_relationships_none(empty_relations: list, full_db: Session) -> None: + """Testing what happens if an empty relation is added.""" + data_transfer.add_relationships([], full_db) + + +@pytest.mark.parametrize( + "documents", + [ + [ + { + "_id": {"$oid": "649f16a2ecc"}, + "id": { + "hr_number": "HRB 123", + "district_court": { + "name": "Amtsgericht Dortmund", + "city": "Dortmund", + }, + }, + "location": { + "city": "TV City", + "zip_code": "12345", + "street": "Sesamstr.", + "house_number": "1", + }, + "name": "Some Company GmbH", + "last_update": "2023-05-04", + "relationships": [ + { + "name": {"firstname": "Second person", "lastname": "Köstser"}, + "date_of_birth": "1961-02-09", + "location": {"city": "Stuttgart"}, + "role": "Geschäftsführer", + }, + { + "name": {"firstname": "First Person", "lastname": "Jifpa"}, + "date_of_birth": "1976-04-20", + "location": {"city": "Stuttgart"}, + "role": "Geschäftsführer", + }, + { + "name": {"firstname": "", "lastname": "Jiapa"}, + "date_of_birth": "1976-04-20", + "location": {"city": "Stuttgart"}, + "role": "Geschäftsführer", + }, + { + "name": {"firstname": "Something", "lastname": ""}, + "date_of_birth": "12i3u", + "location": {"city": "Stuttgart"}, + "role": "Geschäftsführer", + }, + { + "name": {"firstname": "First Person", "lastname": "Jipha"}, + "date_of_birth": "1976-04-20", + }, + ], + "yearly_results": {}, + } + ] + ], +) +def test_relationships(documents: list[dict[str, Any]], full_db: Session) -> None: + """Testing to add lots of relations.""" + data_transfer.add_relationships(documents, full_db) + bind = full_db.bind + assert isinstance(bind, Engine) + pd.testing.assert_frame_equal( + pd.read_sql_table("company", bind), + pd.DataFrame( + { + "id": {0: 1, 1: 2, 2: 3}, + "hr": {0: "HRB 123", 1: "HRB 123", 2: "HRB 12"}, + "court_id": {0: 2, 1: 1, 2: 2}, + "name": { + 0: "Some Company GmbH", + 1: "Other Company GmbH", + 2: "Third Company GmbH", + }, + "street": {0: "Sesamstr.", 1: "Sesamstr.", 2: None}, + "zip_code": {0: "12345", 1: "12345", 2: None}, + "city": {0: "TV City", 1: "TV City", 2: None}, 
+ "last_update": { + 0: pd.Timestamp("2023-01-01 00:00:00"), + 1: pd.Timestamp("2023-01-01 00:00:00"), + 2: pd.Timestamp("2023-01-01 00:00:00"), + }, + "sector": {0: None, 1: None, 2: None}, + } + ), + ) + assert len(pd.read_sql_table("company_relation", bind).index) == 0 + pd.testing.assert_frame_equal( + pd.read_sql_table("person_relation", bind), + pd.DataFrame({"id": {0: 1, 1: 2}, "person_id": {0: 6, 1: 7}}), + ) + pd.testing.assert_frame_equal( + pd.read_sql_table("relation", bind), + pd.DataFrame( + { + "id": {0: 1, 1: 2}, + "company_id": {0: 1, 1: 1}, + "date_from": {0: pd.NaT, 1: pd.NaT}, + "date_to": {0: pd.NaT, 1: pd.NaT}, + "relation": {0: "GESCHAEFTSFUEHRER", 1: "GESCHAEFTSFUEHRER"}, + } + ), + ) + pd.testing.assert_frame_equal( + pd.read_sql_table("person", bind), + pd.DataFrame( + { + "id": {0: 1, 1: 2, 2: 3, 3: 4, 4: 5, 5: 6, 6: 7}, + "name": { + 0: "Max", + 1: "Sabine", + 2: "Some Firstname", + 3: "Some Firstname", + 4: "Other Firstname", + 5: "Second person", + 6: "First Person", + }, + "surname": { + 0: "Mustermann", + 1: "Mustermann", + 2: "Some Surname", + 3: "Some Surname", + 4: "Other Surname", + 5: "Köstser", + 6: "Jifpa", + }, + "date_of_birth": { + 0: pd.Timestamp("2023-01-01 00:00:00"), + 1: pd.Timestamp("2023-01-01 00:00:00"), + 2: pd.Timestamp("2023-01-01 00:00:00"), + 3: pd.Timestamp("2023-01-02 00:00:00"), + 4: pd.Timestamp("2023-01-02 00:00:00"), + 5: pd.Timestamp("1961-02-09 00:00:00"), + 6: pd.Timestamp("1976-04-20 00:00:00"), + }, + "works_for": { + 0: None, + 1: None, + 2: None, + 3: None, + 4: None, + 5: None, + 6: None, + }, + } + ), + ) + + +@pytest.mark.parametrize( + "companies", + [ + [], + [{}], + [ + { + "_id": {"$oid": "649f16a2ecc"}, + "id": { + "hr_number": "HRB 123", + "district_court": { + "name": "Amtsgericht Dortmund", + "city": "Dortmund", + }, + }, + "location": { + "city": "TV City", + "zip_code": "12345", + "street": "Sesamstr.", + "house_number": "1", + }, + "name": "Some Company GmbH", + "last_update": "2023-05-04", + "relationships": [], + "yearly_results": {2023: {}, "2023": {}}, + }, + ], + ], +) +def test_add_annual_financial_reports_no_call( + companies: list[dict], full_db: Session, mocker: MockerFixture +) -> None: + """Testing if financial reports are added correctly to the db.""" + spy_warning = mocker.spy(data_transfer.logger, "warning") + info_warning = mocker.spy(data_transfer.logger, "info") + mocker.patch("aki_prj23_transparenzregister.utils.data_transfer.add_annual_report") + data_transfer.add_annual_financial_reports(companies, full_db) + + input_args = mocker.call.args + input_kwargs = mocker.call.kwargs + assert len(input_args) == len(input_kwargs) + spy_warning.assert_not_called() + info_warning.assert_called_once() + + +@pytest.mark.parametrize( + "companies", + [ + [ + { + "_id": {"$oid": "649f16a2ecc"}, + "id": { + "hr_number": "HRB 123", + "district_court": { + "name": "Amtsgericht Dortmund", + "city": "Dortmund", + }, + }, + "location": { + "city": "TV City", + "zip_code": "12345", + "street": "Sesamstr.", + "house_number": "1", + }, + "name": "Some Company GmbH", + "last_update": "2023-05-04", + "relationships": [], + "yearly_results": {"i am not an int": {"auditor": {}}}, + } + ], + ], +) +def test_add_annual_financial_reports_defect_year( + companies: list[dict], full_db: Session, mocker: MockerFixture +) -> None: + """Testing if financial reports are added correctly to the db.""" + spy_warning = mocker.spy(data_transfer.logger, "warning") + info_warning = mocker.spy(data_transfer.logger, "info") + 
mocker.patch("aki_prj23_transparenzregister.utils.data_transfer.add_annual_report") + data_transfer.add_annual_financial_reports(companies, full_db) + + input_args = mocker.call.args + input_kwargs = mocker.call.kwargs + assert len(input_args) == len(input_kwargs) + spy_warning.assert_called_once() + info_warning.assert_called_once() + + +def test_add_annual_financial_reports(full_db: Session, mocker: MockerFixture) -> None: + """Testing if financial reports are added correctly to the db.""" + companies = [ + { + "_id": {"$oid": "649f16a2ecc"}, + "id": { + "hr_number": "HRB 123", + "district_court": { + "name": "Amtsgericht Dortmund", + "city": "Dortmund", + }, + }, + "location": { + "city": "TV City", + "zip_code": "12345", + "street": "Sesamstr.", + "house_number": "1", + }, + "name": "Some Company GmbH", + "last_update": "2023-05-04", + "relationships": [], + "yearly_results": { + 2023: {"some-text1": {}}, + "cast-me-to-int": {"some-text4": {}}, + 2025: {"some-text2": {}}, + "cast-me-too": {"some-text5": {}}, + 2024: {"some-text3": {}}, + }, + } + ] + + spy_warning = mocker.spy(data_transfer.logger, "warning") + info_warning = mocker.spy(data_transfer.logger, "info") + mocked = mocker.patch( + "aki_prj23_transparenzregister.utils.data_transfer.add_annual_report" + ) + data_transfer.add_annual_financial_reports(companies, full_db) + + spy_warning.assert_has_calls([]) + for input_args in mocked.call_args_list: + assert input_args.args[0] == 1 + assert isinstance(input_args.kwargs["db"], Session) + assert len(input_args.kwargs) == 1 + + for year, input_args in zip([2023, 2025, 2024], mocked.call_args_list, strict=True): + assert year == input_args.args[1] + + report: dict + for report, input_args in zip( + [{"some-text1": {}}, {"some-text2": {}}, {"some-text3": {}}], + mocked.call_args_list, + strict=True, + ): + assert report == input_args.args[2] + + for input_args in mocked.call_args_list: + assert isinstance(input_args.kwargs["db"], Session) + + info_warning.assert_called_once() + + +@pytest.mark.parametrize("year", list(range(2000, 2025, 5))) +@pytest.mark.parametrize("company_id", [1, 2, 3]) +@pytest.mark.parametrize( + "empty_report", + [{}, {"auditors": []}, {"financials": []}, {"auditors": [], "financials": []}], +) +def test_add_annual_report_empty( + year: int, + company_id: int, + empty_report: dict, + full_db: Session, + mocker: MockerFixture, +) -> None: + """Testing if the correct warning is thrown when the financial and auditor records are empty.""" + df_prior = pd.read_sql_table( + entities.AnnualFinanceStatement.__tablename__, full_db.bind # type: ignore + ) + spy_warning = mocker.spy(data_transfer.logger, "debug") + + data_transfer.add_annual_report(company_id, year, empty_report, full_db) + full_db.commit() + spy_warning.assert_called_once() + pd.testing.assert_frame_equal( + df_prior, + pd.read_sql_table(entities.AnnualFinanceStatement.__tablename__, full_db.bind), # type: ignore + ) + + +@pytest.mark.parametrize("year", [2015, 2023, 2024]) +@pytest.mark.parametrize("company_id", [7, 8, 9]) +@pytest.mark.parametrize( + "empty_report", + [{}, {"auditors": []}, {"financials": []}, {"auditors": [], "financials": []}], +) +def test_add_annual_report_to_unknown_company( + year: int, company_id: int, empty_report: dict, full_db: Session +) -> None: + """Tests if an error is thrown when the company id isn't registered in the db.""" + with pytest.raises( + KeyError, match="The company with the id .* could not be found." 
+    ):
+        data_transfer.add_annual_report(company_id, year, empty_report, full_db)
+
+
+@pytest.mark.parametrize("company_id", [1, 2, 3])
+@pytest.mark.parametrize("year", [2023, 2025, 2020])
+@pytest.mark.parametrize("short_term_debt", [2023.2, 2025.5, 2020.5, float("NaN")])
+def test_add_annual_report(
+    short_term_debt: float,
+    company_id: int,
+    year: int,
+    finance_statements: list[dict[str, Any]],
+    full_db: Session,
+) -> None:
+    """Tests the addition of annual financial records."""
+    data_transfer.add_annual_report(
+        company_id,
+        year,
+        {
+            "financials": {
+                "ebit": 123,
+                "ebitda": 235,
+                "short_term_debt": short_term_debt,
+            },
+            "auditors": {},
+        },
+        db=full_db,
+    )
+    full_db.commit()
+    df_prior = pd.read_sql_table(
+        entities.AnnualFinanceStatement.__tablename__, full_db.bind  # type: ignore
+    )
+    expected_results = pd.DataFrame(
+        finance_statements
+        + [
+            {
+                "id": 3,
+                "company_id": company_id,
+                "date": pd.to_datetime(date(year, 1, 1)),
+                "total_volume": float("NaN"),
+                "ebit": 123.0,
+                "ebitda": 235.0,
+                "ebit_margin": float("NaN"),
+                "total_balance": float("NaN"),
+                "equity": float("NaN"),
+                "debt": float("NaN"),
+                "return_on_equity": float("NaN"),
+                "capital_turnover_rate": float("NaN"),
+                "current_liabilities": float("NaN"),
+                "dividends": float("NaN"),
+                "net_income": float("NaN"),
+                "assets": float("NaN"),
+                "long_term_debt": float("NaN"),
+                "short_term_debt": short_term_debt,
+                "revenue": float("NaN"),
+                "cash_flow": float("NaN"),
+                "current_assets": float("NaN"),
+            }
+        ]
+    )
+
+    expected_results["date"] = pd.to_datetime(expected_results["date"])
+    pd.testing.assert_frame_equal(
+        expected_results,
+        df_prior,
+    )
+
+
+def test_add_annual_report_financial_key_error(full_db: Session) -> None:
+    """Tests if an error is thrown when financial data with an unknown financial record type is added."""
+    with pytest.raises(
+        TypeError, match="is an invalid keyword argument for AnnualFinanceStatement"
+    ):
+        data_transfer.add_annual_report(
+            2,
+            2023,
+            {"financials": {"something-strange": 123.12}, "auditors": {}},
+            db=full_db,
+        )
diff --git a/tests/utils/enum_types_test.py b/tests/utils/enum_types_test.py
new file mode 100644
index 0000000..f9f744c
--- /dev/null
+++ b/tests/utils/enum_types_test.py
@@ -0,0 +1,40 @@
+"""Tests for the enumeration types."""
+import pytest
+
+from aki_prj23_transparenzregister.utils import enum_types
+
+
+def test_import() -> None:
+    """Tests if enum_types can be imported."""
+    assert enum_types
+
+
+@pytest.mark.parametrize("relation_name", ["Vorstand", "Prokurist", "Direktor"])
+@pytest.mark.parametrize("changes", ["lower", "upper", None])
+def test_relation_type_enum_from_string(
+    relation_name: str, changes: str | None
+) -> None:
+    """Tests the transformation of a name to an enumeration type."""
+    if changes == "lower":
+        relation_name = relation_name.lower()
+    elif changes == "upper":
+        relation_name = relation_name.upper()
+
+    assert isinstance(
+        enum_types.RelationTypeEnum.get_enum_from_name(relation_name),
+        enum_types.RelationTypeEnum,
+    )
+
+
+@pytest.mark.parametrize("relation_name", ["does Not Exists", "Also not"])
+@pytest.mark.parametrize("changes", ["lower", "upper", None])
+def test_relation_type_enum_from_string_wrong(
+    relation_name: str, changes: str | None
+) -> None:
+    """Tests the transformation of a name to an enumeration type if no equivalent can be found."""
+    if changes == "lower":
+        relation_name = relation_name.lower()
+    elif changes == "upper":
+        relation_name = relation_name.upper()
+    with pytest.raises(ValueError, match='Relation type ".*" is not yet implemented!'):
+        enum_types.RelationTypeEnum.get_enum_from_name(relation_name)
diff --git a/tests/utils/mongo/mongo_test.py b/tests/utils/mongo/mongo_test.py
index 51a1abe..eb63a4b 100644
--- a/tests/utils/mongo/mongo_test.py
+++ b/tests/utils/mongo/mongo_test.py
@@ -1,3 +1,4 @@
+"""Tests for connecting to the mongodb."""
 from unittest.mock import patch
 
 from aki_prj23_transparenzregister.utils.mongo.connector import (
@@ -7,21 +8,25 @@ from aki_prj23_transparenzregister.utils.mongo.connector import (
 
 
 def test_get_conn_string_no_credentials() -> None:
+    """Tests the mongo connection string generation."""
     conn = MongoConnection("localhost", "", 27017, None, None)
     assert conn.get_conn_string() == "mongodb://localhost:27017"
 
 
 def test_get_conn_string_no_port_but_credentials() -> None:
+    """Tests the mongo connection string generation."""
     conn = MongoConnection("localhost", "", None, "admin", "password")
     assert conn.get_conn_string() == "mongodb+srv://admin:password@localhost"
 
 
 def test_get_conn_simple() -> None:
+    """Tests the mongo connection string generation."""
     conn = MongoConnection("localhost", "", None, None, None)
    assert conn.get_conn_string() == "mongodb+srv://localhost"
 
 
 def test_mongo_connector() -> None:
+    """Tests the MongoConnector."""
     with patch("pymongo.MongoClient") as mock_mongo_client:
         expected_result = 42
         mock_mongo_client.return_value = {"db": expected_result}
diff --git a/tests/utils/mongo/news_mongo_service_test.py b/tests/utils/mongo/news_mongo_service_test.py
index ddf1564..9257c51 100644
--- a/tests/utils/mongo/news_mongo_service_test.py
+++ b/tests/utils/mongo/news_mongo_service_test.py
@@ -1,3 +1,4 @@
+"""Tests for the mongo news service."""
 from unittest.mock import Mock, patch
 
 import pytest
@@ -50,6 +51,7 @@ def test_init(mock_mongo_connector: Mock, mock_collection: Mock) -> None:
 
 
 def test_get_all(mock_mongo_connector: Mock, mock_collection: Mock) -> None:
+    """Tests the get_all function from the mongo connector."""
     mock_mongo_connector.database = {"news": mock_collection}
     service = MongoNewsService(mock_mongo_connector)
 
@@ -60,6 +62,7 @@ def test_get_all(mock_mongo_connector: Mock, mock_collection: Mock) -> None:
 def test_get_by_id_with_result(
     mock_mongo_connector: Mock, mock_collection: Mock
 ) -> None:
+    """Tests the get_by_id_with_result function from the mongo connector."""
     mock_mongo_connector.database = {"news": mock_collection}
     service = MongoNewsService(mock_mongo_connector)
 
@@ -72,6 +75,7 @@ def test_get_by_id_with_result(
 
 
 def test_get_by_id_no_result(mock_mongo_connector: Mock, mock_collection: Mock) -> None:
+    """Test if the mongo connector can get an object by id."""
     mock_mongo_connector.database = {"news": mock_collection}
     service = MongoNewsService(mock_mongo_connector)
 
@@ -80,6 +84,7 @@ def test_get_by_id_no_result(mock_mongo_connector: Mock, mock_collection: Mock)
 
 
 def test_insert(mock_mongo_connector: Mock, mock_collection: Mock) -> None:
+    """Tests the insert function from the mongo connector."""
     mock_mongo_connector.database = {"news": mock_collection}
     service = MongoNewsService(mock_mongo_connector)
 
@@ -92,6 +97,7 @@ def test_insert(mock_mongo_connector: Mock, mock_collection: Mock) -> None:
 
 
 def test_transform_ingoing() -> None:
+    """Tests the transform_ingoing function from the mongo connector."""
     news = News("42", None, None, None, None)  # type: ignore
     result = MongoEntryTransformer.transform_ingoing(news)
     assert result["_id"] == "42"
@@ -99,6 +105,7 @@ def test_transform_ingoing() -> None:
 
 
 def test_transform_outgoing() -> None:
+    """Tests the transform_outgoing function from the mongo connector."""
     data = {
         "_id": "4711",
         "title": "Hello",
diff --git a/tests/utils/sql/connector_test.py b/tests/utils/sql/connector_test.py
index 658ba57..6dbd9ba 100644
--- a/tests/utils/sql/connector_test.py
+++ b/tests/utils/sql/connector_test.py
@@ -1,21 +1,27 @@
+"""Tests the sql connector."""
 import os.path
 from collections.abc import Generator
 from typing import Any
 from unittest.mock import Mock, patch
 
+import pandas as pd
 import pytest
 from sqlalchemy.engine import Engine
+from sqlalchemy.orm import Session
 
 from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider
 from aki_prj23_transparenzregister.config.config_template import PostgreConnectionString
 from aki_prj23_transparenzregister.utils.sql.connector import (
+    Base,
     get_pg_engine,
     get_session,
     init_db,
+    transfer_db,
 )
 
 
 def test_get_engine_pg() -> None:
+    """Tests the creation of a postgre engine."""
     conn_args = PostgreConnectionString("", "", "", "", 42)
     with patch(
         "aki_prj23_transparenzregister.utils.sql.connector.sa.create_engine"
@@ -25,6 +31,36 @@ def test_get_engine_pg() -> None:
     assert get_pg_engine(conn_args) == result
 
 
+@pytest.fixture()
+def destination_db() -> Generator[Session, None, None]:
+    """Generates a db Session to a sqlite db to copy data to."""
+    if os.path.exists("secondary.db"):
+        os.remove("secondary.db")
+    db = get_session("sqlite:///secondary.db")
+    init_db(db)
+    yield db
+    db.close()
+    bind = db.bind
+    assert isinstance(bind, Engine)
+    bind.dispose()
+    os.remove("secondary.db")
+
+
+def test_transfer_db(full_db: Session, destination_db: Session) -> None:
+    """Tests if the data transfer between two sql tables works."""
+    transfer_db(source=full_db, destination=destination_db)
+    sbind = full_db.bind
+    dbind = destination_db.bind
+    assert isinstance(sbind, Engine)
+    assert isinstance(dbind, Engine)
+
+    for table in Base.metadata.sorted_tables:
+        pd.testing.assert_frame_equal(
+            pd.read_sql_table(str(table), dbind),
+            pd.read_sql_table(str(table), sbind),
+        )
+
+
 @pytest.fixture()
 def delete_sqlite_table() -> Generator[str, None, None]:
     """Cleans a path before and deletes the table after a test.
diff --git a/tests/utils/sql/entities_test.py b/tests/utils/sql/entities_test.py
index 14bc361..bcd3068 100644
--- a/tests/utils/sql/entities_test.py
+++ b/tests/utils/sql/entities_test.py
@@ -1,4 +1,8 @@
-def test_import() -> None:
-    from aki_prj23_transparenzregister.utils.sql import entities
+"""Tests for the sql entities."""
+from aki_prj23_transparenzregister.utils.sql import entities
+
+
+def test_import() -> None:
+    """Tests if the entities can be imported."""
 
     assert entities
diff --git a/tests/utils/string_tools_test.py b/tests/utils/string_tools_test.py
new file mode 100644
index 0000000..26a7b1b
--- /dev/null
+++ b/tests/utils/string_tools_test.py
@@ -0,0 +1,35 @@
+"""Tests for the string tool module."""
+from typing import Any
+
+import pytest
+
+from aki_prj23_transparenzregister.utils import string_tools
+
+
+def test_import() -> None:
+    """Tests if the import is possible."""
+    assert string_tools
+
+
+@pytest.mark.parametrize(
+    ("value", "expected"),
+    [
+        ("None ", "None"),
+        (" ", None),
+        ("", None),
+        ("\t", None),
+        ("\n", None),
+        (" Some String ", "Some String"),
+        ("Some String", "Some String"),
+    ],
+)
+def test_simplify_string(value: str | None, expected: str | None) -> None:
+    """Tests the string simplification."""
+    assert string_tools.simplify_string(value) == expected
+
+
+@pytest.mark.parametrize("value", [0, 0.1, True, ("1",), {}, set()])
+def test_simplify_string_type_error(value: Any) -> None:
+    """Tests if the type error is thrown when the value is the wrong type."""
+    with pytest.raises(TypeError):
+        assert string_tools.simplify_string(value)