Add a cli interface to choose a configuration (#163)

- [x] add a cli to the webserver to take env variables into account 
- [x] add a cli to the data processing that takes environmental variables
as a valid source into account
- [x] rework the cli for the reset sql command
- [x] rework the cli for the copying of sql data from one db to another
This commit is contained in:
Philipp Horstenkamp 2023-10-02 20:31:42 +02:00 committed by GitHub
parent 2abe12f027
commit d2d4a436f8
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
22 changed files with 650 additions and 304 deletions

View File

@ -27,4 +27,5 @@ FROM base as web-server
RUN pip install --find-links=dist aki-prj23-transparenzregister[web-server] --no-cache-dir && \ RUN pip install --find-links=dist aki-prj23-transparenzregister[web-server] --no-cache-dir && \
rm dist/ -R rm dist/ -R
CMD python -m aki_prj23_transparenzregister.ui.company_finance_dash ENTRYPOINT ["webserver", "ENV"]
CMD ["--level", "DEBUG"]

View File

@ -16,8 +16,10 @@ See the [CONTRIBUTING.md](CONTRIBUTING.md) about how code should be formatted an
The project currently has the following entrypoints available: The project currently has the following entrypoints available:
- **data-transfer** > Transfers all the data from the mongodb into the sql db to make it available as production data. - **data-processing** > Transfers all the data from the mongodb into the sql db to make it available as production data.
- **reset-sql** > Resets all sql tables in the connected db. - **reset-sql** > Resets all sql tables in the connected db.
- **copy-sql** > Copies the content of a db to another db.
- **webserver** > Starts the webserver showing the analysis results.
## DB Connection settings ## DB Connection settings
@ -26,13 +28,15 @@ To connect to the Mongo db see [connect]
Create a `secrets.json` in the root of this repo with the following structure (values to be replaced by desired config): Create a `secrets.json` in the root of this repo with the following structure (values to be replaced by desired config):
The sqlite db is an alternative to the postgres section.
```json ```json
{ {
"sqlite": "path-to-sqlite.db",
"postgres": { "postgres": {
"username": "postgres", "username": "username",
"password": "postgres", "password": "password",
"host": "localhost", "host": "localhost",
"database": "postgres", "database": "db-name",
"port": 5432 "port": 5432
}, },
"mongo": { "mongo": {
@ -61,6 +65,8 @@ PYTHON_MONGO_PASSWORD=password
PYTHON_MONGO_PORT=27017 PYTHON_MONGO_PORT=27017
PYTHON_MONGO_DATABASE=transparenzregister PYTHON_MONGO_DATABASE=transparenzregister
PYTHON_SQLITE_PATH=PathToSQLite3.db # An override path to an SQLite db
PYTHON_DASH_LOGIN_USERNAME=some-login-to-webgui PYTHON_DASH_LOGIN_USERNAME=some-login-to-webgui
PYTHON_DASH_LOGIN_PW=some-pw-to-login-to-webgui PYTHON_DASH_LOGIN_PW=some-pw-to-login-to-webgui

116
poetry.lock generated
View File

@ -719,63 +719,63 @@ test-no-images = ["pytest", "pytest-cov", "wurlitzer"]
[[package]] [[package]]
name = "coverage" name = "coverage"
version = "7.3.1" version = "7.3.2"
description = "Code coverage measurement for Python" description = "Code coverage measurement for Python"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{file = "coverage-7.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:cd0f7429ecfd1ff597389907045ff209c8fdb5b013d38cfa7c60728cb484b6e3"}, {file = "coverage-7.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d872145f3a3231a5f20fd48500274d7df222e291d90baa2026cc5152b7ce86bf"},
{file = "coverage-7.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:966f10df9b2b2115da87f50f6a248e313c72a668248be1b9060ce935c871f276"}, {file = "coverage-7.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:310b3bb9c91ea66d59c53fa4989f57d2436e08f18fb2f421a1b0b6b8cc7fffda"},
{file = "coverage-7.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0575c37e207bb9b98b6cf72fdaaa18ac909fb3d153083400c2d48e2e6d28bd8e"}, {file = "coverage-7.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f47d39359e2c3779c5331fc740cf4bce6d9d680a7b4b4ead97056a0ae07cb49a"},
{file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:245c5a99254e83875c7fed8b8b2536f040997a9b76ac4c1da5bff398c06e860f"}, {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa72dbaf2c2068404b9870d93436e6d23addd8bbe9295f49cbca83f6e278179c"},
{file = "coverage-7.3.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c96dd7798d83b960afc6c1feb9e5af537fc4908852ef025600374ff1a017392"}, {file = "coverage-7.3.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:beaa5c1b4777f03fc63dfd2a6bd820f73f036bfb10e925fce067b00a340d0f3f"},
{file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:de30c1aa80f30af0f6b2058a91505ea6e36d6535d437520067f525f7df123887"}, {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dbc1b46b92186cc8074fee9d9fbb97a9dd06c6cbbef391c2f59d80eabdf0faa6"},
{file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:50dd1e2dd13dbbd856ffef69196781edff26c800a74f070d3b3e3389cab2600d"}, {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:315a989e861031334d7bee1f9113c8770472db2ac484e5b8c3173428360a9148"},
{file = "coverage-7.3.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b9c0c19f70d30219113b18fe07e372b244fb2a773d4afde29d5a2f7930765136"}, {file = "coverage-7.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d1bc430677773397f64a5c88cb522ea43175ff16f8bfcc89d467d974cb2274f9"},
{file = "coverage-7.3.1-cp310-cp310-win32.whl", hash = "sha256:770f143980cc16eb601ccfd571846e89a5fe4c03b4193f2e485268f224ab602f"}, {file = "coverage-7.3.2-cp310-cp310-win32.whl", hash = "sha256:a889ae02f43aa45032afe364c8ae84ad3c54828c2faa44f3bfcafecb5c96b02f"},
{file = "coverage-7.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:cdd088c00c39a27cfa5329349cc763a48761fdc785879220d54eb785c8a38520"}, {file = "coverage-7.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:c0ba320de3fb8c6ec16e0be17ee1d3d69adcda99406c43c0409cb5c41788a611"},
{file = "coverage-7.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:74bb470399dc1989b535cb41f5ca7ab2af561e40def22d7e188e0a445e7639e3"}, {file = "coverage-7.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ac8c802fa29843a72d32ec56d0ca792ad15a302b28ca6203389afe21f8fa062c"},
{file = "coverage-7.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:025ded371f1ca280c035d91b43252adbb04d2aea4c7105252d3cbc227f03b375"}, {file = "coverage-7.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:89a937174104339e3a3ffcf9f446c00e3a806c28b1841c63edb2b369310fd074"},
{file = "coverage-7.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a6191b3a6ad3e09b6cfd75b45c6aeeffe7e3b0ad46b268345d159b8df8d835f9"}, {file = "coverage-7.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e267e9e2b574a176ddb983399dec325a80dbe161f1a32715c780b5d14b5f583a"},
{file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7eb0b188f30e41ddd659a529e385470aa6782f3b412f860ce22b2491c89b8593"}, {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2443cbda35df0d35dcfb9bf8f3c02c57c1d6111169e3c85fc1fcc05e0c9f39a3"},
{file = "coverage-7.3.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c8f0df9dfd8ff745bccff75867d63ef336e57cc22b2908ee725cc552689ec8"}, {file = "coverage-7.3.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4175e10cc8dda0265653e8714b3174430b07c1dca8957f4966cbd6c2b1b8065a"},
{file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:7eb3cd48d54b9bd0e73026dedce44773214064be93611deab0b6a43158c3d5a0"}, {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0cbf38419fb1a347aaf63481c00f0bdc86889d9fbf3f25109cf96c26b403fda1"},
{file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:ac3c5b7e75acac31e490b7851595212ed951889918d398b7afa12736c85e13ce"}, {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:5c913b556a116b8d5f6ef834038ba983834d887d82187c8f73dec21049abd65c"},
{file = "coverage-7.3.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5b4ee7080878077af0afa7238df1b967f00dc10763f6e1b66f5cced4abebb0a3"}, {file = "coverage-7.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1981f785239e4e39e6444c63a98da3a1db8e971cb9ceb50a945ba6296b43f312"},
{file = "coverage-7.3.1-cp311-cp311-win32.whl", hash = "sha256:229c0dd2ccf956bf5aeede7e3131ca48b65beacde2029f0361b54bf93d36f45a"}, {file = "coverage-7.3.2-cp311-cp311-win32.whl", hash = "sha256:43668cabd5ca8258f5954f27a3aaf78757e6acf13c17604d89648ecc0cc66640"},
{file = "coverage-7.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:c6f55d38818ca9596dc9019eae19a47410d5322408140d9a0076001a3dcb938c"}, {file = "coverage-7.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10c39c0452bf6e694511c901426d6b5ac005acc0f78ff265dbe36bf81f808a2"},
{file = "coverage-7.3.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:5289490dd1c3bb86de4730a92261ae66ea8d44b79ed3cc26464f4c2cde581fbc"}, {file = "coverage-7.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:4cbae1051ab791debecc4a5dcc4a1ff45fc27b91b9aee165c8a27514dd160836"},
{file = "coverage-7.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ca833941ec701fda15414be400c3259479bfde7ae6d806b69e63b3dc423b1832"}, {file = "coverage-7.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12d15ab5833a997716d76f2ac1e4b4d536814fc213c85ca72756c19e5a6b3d63"},
{file = "coverage-7.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cd694e19c031733e446c8024dedd12a00cda87e1c10bd7b8539a87963685e969"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c7bba973ebee5e56fe9251300c00f1579652587a9f4a5ed8404b15a0471f216"},
{file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aab8e9464c00da5cb9c536150b7fbcd8850d376d1151741dd0d16dfe1ba4fd26"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fe494faa90ce6381770746077243231e0b83ff3f17069d748f645617cefe19d4"},
{file = "coverage-7.3.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87d38444efffd5b056fcc026c1e8d862191881143c3aa80bb11fcf9dca9ae204"}, {file = "coverage-7.3.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6e9589bd04d0461a417562649522575d8752904d35c12907d8c9dfeba588faf"},
{file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:8a07b692129b8a14ad7a37941a3029c291254feb7a4237f245cfae2de78de037"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d51ac2a26f71da1b57f2dc81d0e108b6ab177e7d30e774db90675467c847bbdf"},
{file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:2829c65c8faaf55b868ed7af3c7477b76b1c6ebeee99a28f59a2cb5907a45760"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:99b89d9f76070237975b315b3d5f4d6956ae354a4c92ac2388a5695516e47c84"},
{file = "coverage-7.3.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1f111a7d85658ea52ffad7084088277135ec5f368457275fc57f11cebb15607f"}, {file = "coverage-7.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:fa28e909776dc69efb6ed975a63691bc8172b64ff357e663a1bb06ff3c9b589a"},
{file = "coverage-7.3.1-cp312-cp312-win32.whl", hash = "sha256:c397c70cd20f6df7d2a52283857af622d5f23300c4ca8e5bd8c7a543825baa5a"}, {file = "coverage-7.3.2-cp312-cp312-win32.whl", hash = "sha256:289fe43bf45a575e3ab10b26d7b6f2ddb9ee2dba447499f5401cfb5ecb8196bb"},
{file = "coverage-7.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:5ae4c6da8b3d123500f9525b50bf0168023313963e0e2e814badf9000dd6ef92"}, {file = "coverage-7.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:7dbc3ed60e8659bc59b6b304b43ff9c3ed858da2839c78b804973f613d3e92ed"},
{file = "coverage-7.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca70466ca3a17460e8fc9cea7123c8cbef5ada4be3140a1ef8f7b63f2f37108f"}, {file = "coverage-7.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f94b734214ea6a36fe16e96a70d941af80ff3bfd716c141300d95ebc85339738"},
{file = "coverage-7.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f2781fd3cabc28278dc982a352f50c81c09a1a500cc2086dc4249853ea96b981"}, {file = "coverage-7.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:af3d828d2c1cbae52d34bdbb22fcd94d1ce715d95f1a012354a75e5913f1bda2"},
{file = "coverage-7.3.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6407424621f40205bbe6325686417e5e552f6b2dba3535dd1f90afc88a61d465"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630b13e3036e13c7adc480ca42fa7afc2a5d938081d28e20903cf7fd687872e2"},
{file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:04312b036580ec505f2b77cbbdfb15137d5efdfade09156961f5277149f5e344"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9eacf273e885b02a0273bb3a2170f30e2d53a6d53b72dbe02d6701b5296101c"},
{file = "coverage-7.3.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9ad38204887349853d7c313f53a7b1c210ce138c73859e925bc4e5d8fc18e7"}, {file = "coverage-7.3.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8f17966e861ff97305e0801134e69db33b143bbfb36436efb9cfff6ec7b2fd9"},
{file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:53669b79f3d599da95a0afbef039ac0fadbb236532feb042c534fbb81b1a4e40"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b4275802d16882cf9c8b3d057a0839acb07ee9379fa2749eca54efbce1535b82"},
{file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:614f1f98b84eb256e4f35e726bfe5ca82349f8dfa576faabf8a49ca09e630086"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:72c0cfa5250f483181e677ebc97133ea1ab3eb68645e494775deb6a7f6f83901"},
{file = "coverage-7.3.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:f1a317fdf5c122ad642db8a97964733ab7c3cf6009e1a8ae8821089993f175ff"}, {file = "coverage-7.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cb536f0dcd14149425996821a168f6e269d7dcd2c273a8bff8201e79f5104e76"},
{file = "coverage-7.3.1-cp38-cp38-win32.whl", hash = "sha256:defbbb51121189722420a208957e26e49809feafca6afeef325df66c39c4fdb3"}, {file = "coverage-7.3.2-cp38-cp38-win32.whl", hash = "sha256:307adb8bd3abe389a471e649038a71b4eb13bfd6b7dd9a129fa856f5c695cf92"},
{file = "coverage-7.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:f4f456590eefb6e1b3c9ea6328c1e9fa0f1006e7481179d749b3376fc793478e"}, {file = "coverage-7.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:88ed2c30a49ea81ea3b7f172e0269c182a44c236eb394718f976239892c0a27a"},
{file = "coverage-7.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f12d8b11a54f32688b165fd1a788c408f927b0960984b899be7e4c190ae758f1"}, {file = "coverage-7.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b631c92dfe601adf8f5ebc7fc13ced6bb6e9609b19d9a8cd59fa47c4186ad1ce"},
{file = "coverage-7.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f09195dda68d94a53123883de75bb97b0e35f5f6f9f3aa5bf6e496da718f0cb6"}, {file = "coverage-7.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d3d9df4051c4a7d13036524b66ecf7a7537d14c18a384043f30a303b146164e9"},
{file = "coverage-7.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c6601a60318f9c3945be6ea0f2a80571f4299b6801716f8a6e4846892737ebe4"}, {file = "coverage-7.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f7363d3b6a1119ef05015959ca24a9afc0ea8a02c687fe7e2d557705375c01f"},
{file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07d156269718670d00a3b06db2288b48527fc5f36859425ff7cec07c6b367745"}, {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2f11cc3c967a09d3695d2a6f03fb3e6236622b93be7a4b5dc09166a861be6d25"},
{file = "coverage-7.3.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:636a8ac0b044cfeccae76a36f3b18264edcc810a76a49884b96dd744613ec0b7"}, {file = "coverage-7.3.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:149de1d2401ae4655c436a3dced6dd153f4c3309f599c3d4bd97ab172eaf02d9"},
{file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5d991e13ad2ed3aced177f524e4d670f304c8233edad3210e02c465351f785a0"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:3a4006916aa6fee7cd38db3bfc95aa9c54ebb4ffbfc47c677c8bba949ceba0a6"},
{file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:586649ada7cf139445da386ab6f8ef00e6172f11a939fc3b2b7e7c9082052fa0"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9028a3871280110d6e1aa2df1afd5ef003bab5fb1ef421d6dc748ae1c8ef2ebc"},
{file = "coverage-7.3.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4aba512a15a3e1e4fdbfed2f5392ec221434a614cc68100ca99dcad7af29f3f8"}, {file = "coverage-7.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f805d62aec8eb92bab5b61c0f07329275b6f41c97d80e847b03eb894f38d083"},
{file = "coverage-7.3.1-cp39-cp39-win32.whl", hash = "sha256:6bc6f3f4692d806831c136c5acad5ccedd0262aa44c087c46b7101c77e139140"}, {file = "coverage-7.3.2-cp39-cp39-win32.whl", hash = "sha256:d1c88ec1a7ff4ebca0219f5b1ef863451d828cccf889c173e1253aa84b1e07ce"},
{file = "coverage-7.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:553d7094cb27db58ea91332e8b5681bac107e7242c23f7629ab1316ee73c4981"}, {file = "coverage-7.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4767da59464bb593c07afceaddea61b154136300881844768037fd5e859353f"},
{file = "coverage-7.3.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:220eb51f5fb38dfdb7e5d54284ca4d0cd70ddac047d750111a68ab1798945194"}, {file = "coverage-7.3.2-pp38.pp39.pp310-none-any.whl", hash = "sha256:ae97af89f0fbf373400970c0a21eef5aa941ffeed90aee43650b81f7d7f47637"},
{file = "coverage-7.3.1.tar.gz", hash = "sha256:6cb7fe1581deb67b782c153136541e20901aa312ceedaf1467dcb35255787952"}, {file = "coverage-7.3.2.tar.gz", hash = "sha256:be32ad29341b0170e795ca590e1c07e81fc061cb5b10c74ce7203491484404ef"},
] ]
[package.extras] [package.extras]
@ -5828,13 +5828,13 @@ dev = ["flake8", "flake8-annotations", "flake8-bandit", "flake8-bugbear", "flake
[[package]] [[package]]
name = "urllib3" name = "urllib3"
version = "2.0.5" version = "2.0.6"
description = "HTTP library with thread-safe connection pooling, file post, and more." description = "HTTP library with thread-safe connection pooling, file post, and more."
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{file = "urllib3-2.0.5-py3-none-any.whl", hash = "sha256:ef16afa8ba34a1f989db38e1dbbe0c302e4289a47856990d0682e374563ce35e"}, {file = "urllib3-2.0.6-py3-none-any.whl", hash = "sha256:7a7c7003b000adf9e7ca2a377c9688bbc54ed41b985789ed576570342a375cd2"},
{file = "urllib3-2.0.5.tar.gz", hash = "sha256:13abf37382ea2ce6fb744d4dad67838eec857c9f4f57009891805e0b5e123594"}, {file = "urllib3-2.0.6.tar.gz", hash = "sha256:b19e1a85d206b56d7df1d5e683df4a7725252a964e3993648dd0fb5a1c157564"},
] ]
[package.dependencies] [package.dependencies]
@ -5987,11 +5987,11 @@ files = [
] ]
[extras] [extras]
ingest = ["deutschland", "selenium"] ingest = ["deutschland", "selenium", "xmltodict"]
processing = [] processing = []
web-server = ["dash", "dash-auth", "dash-bootstrap-components", "matplotlib", "seaborn"] web-server = ["dash", "dash-auth", "dash-bootstrap-components", "matplotlib", "seaborn"]
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = "^3.11" python-versions = "^3.11"
content-hash = "54b48197ec857377c4cbb8a122f7dc76cc714bb437c31631984caa44f3adb943" content-hash = "9be00fc417a83776eef560907a9c6b853dbe8e1a75909b65f3db2f613f10db65"

View File

@ -6,7 +6,7 @@ requires = ["poetry-core"]
target-version = ["py311"] target-version = ["py311"]
[tool.coverage.report] [tool.coverage.report]
exclude_also = ["if __name__ == .__main__.:", "if not isinstance(engine, Engine):"] exclude_also = ["if __name__ == .__main__.:", "if not isinstance(engine, Engine):", "@overload"]
[tool.coverage.run] [tool.coverage.run]
branch = true branch = true
@ -69,7 +69,7 @@ tqdm = "^4.66.1"
xmltodict = "^0.13.0" xmltodict = "^0.13.0"
[tool.poetry.extras] [tool.poetry.extras]
ingest = ["selenium", "deutschland"] ingest = ["selenium", "deutschland", "xmltodict"]
processing = [] processing = []
web-server = ["dash", "dash-auth", "dash-bootstrap-components", "matplotlib", "seaborn"] web-server = ["dash", "dash-auth", "dash-bootstrap-components", "matplotlib", "seaborn"]
@ -119,8 +119,8 @@ pytest-repeat = "^0.9.1"
[tool.poetry.scripts] [tool.poetry.scripts]
copy-sql = "aki_prj23_transparenzregister.utils.sql.copy_sql:copy_db_cli" copy-sql = "aki_prj23_transparenzregister.utils.sql.copy_sql:copy_db_cli"
data-transfer = {reference = "aki_prj23_transparenzregister.utils.data_transfer:transfer_data", extras = ["processing"], type = "console"} data-processing = "aki_prj23_transparenzregister.utils.data_transfer:transfer_data_cli"
reset-sql = {reference = "aki_prj23_transparenzregister.utils.sql.connector:reset_all_tables", extras = ["processing"], type = "console"} reset-sql = "aki_prj23_transparenzregister.utils.sql.connector:reset_all_tables_cli"
webserver = "aki_prj23_transparenzregister.ui.app:main" webserver = "aki_prj23_transparenzregister.ui.app:main"
[tool.ruff] [tool.ruff]

View File

@ -4,20 +4,31 @@ import abc
import errno import errno
import json import json
import os import os
import re
from pathlib import Path from pathlib import Path
from typing import Any from typing import Any, Final
from dotenv import load_dotenv from dotenv import load_dotenv
from aki_prj23_transparenzregister.config.config_template import PostgreConnectionString from aki_prj23_transparenzregister.config.config_template import (
from aki_prj23_transparenzregister.utils.mongo.connector import MongoConnection MongoConnection,
PostgreConnectionString,
SQLConnectionString,
SQLiteConnectionString,
)
HELP_TEXT_CONFIG: Final[str] = (
"Database configuration. "
"Either give the paths to a *.json containing the secrets. "
"Alternativly specify the use of enviromental vairables by entering the ENV or the einviromental prefix ending with a '_'."
)
class ConfigProvider(metaclass=abc.ABCMeta): class ConfigProvider(metaclass=abc.ABCMeta):
"""Generic abstract class for a wrapper providing the config options for PostgreSQL and MongoDB.""" """Generic abstract class for a wrapper providing the config options for PostgreSQL and MongoDB."""
@abc.abstractmethod @abc.abstractmethod
def get_postgre_connection_string(self) -> PostgreConnectionString: def get_sql_connection_string(self) -> SQLConnectionString:
"""Get PostgreSQL connection string. """Get PostgreSQL connection string.
Raises: Raises:
@ -41,7 +52,7 @@ class ConfigProvider(metaclass=abc.ABCMeta):
def __str__(self) -> str: def __str__(self) -> str:
connections = [] connections = []
try: try:
pg_str = str(self.get_postgre_connection_string()) pg_str = str(self.get_sql_connection_string())
except KeyError: except KeyError:
pass pass
else: else:
@ -80,12 +91,15 @@ class JsonFileConfigProvider(ConfigProvider):
except Exception as error: except Exception as error:
raise TypeError("File content is not a valid JSON object") from error raise TypeError("File content is not a valid JSON object") from error
def get_postgre_connection_string(self) -> PostgreConnectionString: def get_sql_connection_string(self) -> SQLConnectionString:
"""Read PostgreSQL connection string from .json file added in constructor. """Read PostgreSQL connection string from .json file added in constructor.
Returns: Returns:
PostgreConnectionString: Connection details PostgreConnectionString: Connection details
""" """
if "sqlite" in self.__data__:
return SQLiteConnectionString(self.__data__["sqlite"]) # type: ignore
details = self.__data__["postgres"] details = self.__data__["postgres"]
return PostgreConnectionString( return PostgreConnectionString(
details["username"], details["username"],
@ -123,16 +137,18 @@ class EnvironmentConfigProvider(ConfigProvider):
prefix (str, optional): Variable prefix. Defaults to "PYTHON_". prefix (str, optional): Variable prefix. Defaults to "PYTHON_".
""" """
load_dotenv() load_dotenv()
relevant_keys = [key for key in os.environ if key.startswith(prefix)] for key in os.environ:
for key in relevant_keys: if key.startswith(prefix):
self.__data__[key.replace(prefix, "")] = os.environ.get(key) self.__data__[key.replace(prefix, "")] = os.environ.get(key)
def get_postgre_connection_string(self) -> PostgreConnectionString: def get_sql_connection_string(self) -> SQLConnectionString:
"""Read PostgreSQL connection string from environment variables. """Read PostgreSQL connection string from environment variables.
Returns: Returns:
PostgreConnectionString: Connection details PostgreConnectionString: Connection details
""" """
if "SQLITE_PATH" in self.__data__:
return SQLiteConnectionString(self.__data__["SQLITE_PATH"])
return PostgreConnectionString( return PostgreConnectionString(
self.__data__["POSTGRES_USERNAME"], self.__data__["POSTGRES_USERNAME"],
self.__data__["POSTGRES_PASSWORD"], self.__data__["POSTGRES_PASSWORD"],
@ -154,3 +170,23 @@ class EnvironmentConfigProvider(ConfigProvider):
self.__data__["MONGO_USERNAME"], self.__data__["MONGO_USERNAME"],
self.__data__["MONGO_PASSWORD"], self.__data__["MONGO_PASSWORD"],
) )
def get_config_provider(config_place: str | Path | None) -> ConfigProvider:
"""Collects the configuration and passes them to a parser.
Args:
config_place: The place where a configuration is placed.
Returns:
An config provider.
"""
if not config_place or (
isinstance(config_place, str) and config_place.upper() == "ENV"
):
return EnvironmentConfigProvider()
if isinstance(config_place, str) and re.fullmatch(r"[A-Z]+_", config_place):
return EnvironmentConfigProvider(config_place)
if isinstance(config_place, Path) or re.fullmatch(r".*\.json", config_place):
return JsonFileConfigProvider(config_place)
raise ValueError("No configuration found.")

View File

@ -3,7 +3,12 @@ from dataclasses import dataclass
@dataclass @dataclass
class PostgreConnectionString: class SQLConnectionString:
"""An SQL Connection args wrapper."""
@dataclass
class PostgreConnectionString(SQLConnectionString):
"""PostgreSQL Connection String args wrapper.""" """PostgreSQL Connection String args wrapper."""
username: str username: str
@ -14,3 +19,44 @@ class PostgreConnectionString:
def __str__(self) -> str: def __str__(self) -> str:
return f"Postgre configuration: username: {self.username}, password {self.password}, host {self.host}:{self.port}, database {self.database}." return f"Postgre configuration: username: {self.username}, password {self.password}, host {self.host}:{self.port}, database {self.database}."
@dataclass
class SQLiteConnectionString(SQLConnectionString):
"""SQLite Connection String wrapper."""
connect_to: str
def __str__(self) -> str:
return f"sqlite:///{self.connect_to}"
@dataclass
class MongoConnection:
"""Wrapper for MongoDB connection string."""
hostname: str
database: str
port: int | None
username: str | None
password: str | None
def __str__(self) -> str:
return f"Mongo configuration: username: {self.username}, password {self.password}, host {self.hostname}:{self.port}, database {self.database}."
def get_conn_string(self) -> str:
"""Transforms the information of the object to a MongoDB connection string.
Returns:
str: Connection string
"""
if self.username is not None and self.password is not None:
connection_string = (
f"mongodb+srv://{self.username}:{self.password}@{self.hostname}"
)
else:
connection_string = f"mongodb+srv://{self.hostname}"
if self.port is not None:
connection_string += f":{self.port}"
connection_string = connection_string.replace("mongodb+srv", "mongodb")
return connection_string

View File

@ -1,13 +1,22 @@
"""Main Dash app.""" """Main Dash app."""
import argparse
import sys
import dash import dash
import dash_bootstrap_components as dbc import dash_bootstrap_components as dbc
from cachetools import TTLCache, cached from cachetools import TTLCache, cached
from dash import Dash, Input, Output, dcc, html from dash import Dash, Input, Output, dcc, html
from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider from aki_prj23_transparenzregister.config.config_providers import (
HELP_TEXT_CONFIG,
get_config_provider,
)
from aki_prj23_transparenzregister.ui import ui_elements from aki_prj23_transparenzregister.ui import ui_elements
from aki_prj23_transparenzregister.ui.session_handler import SessionHandler from aki_prj23_transparenzregister.ui.session_handler import SessionHandler
from aki_prj23_transparenzregister.utils.logger_config import (
add_logger_options_to_argparse,
configer_logger,
)
from aki_prj23_transparenzregister.utils.sql import connector from aki_prj23_transparenzregister.utils.sql import connector
app = Dash( app = Dash(
@ -85,9 +94,23 @@ def update_options(search_value: str) -> list:
def main() -> None: def main() -> None:
"""The main application starting the Dashboard.""" """The main application starting the Dashboard."""
SessionHandler.session = connector.get_session( parser = argparse.ArgumentParser(
JsonFileConfigProvider("./secrets.json") prog="Transparenzregister Webserver",
description="Starts an Dash Webserver that shows our Analysis.",
epilog="Example: webserver --log-level ERROR --log-path print.log",
) )
parser.add_argument(
"config",
metavar="config",
default="ENV",
help=HELP_TEXT_CONFIG,
)
add_logger_options_to_argparse(parser)
parsed = parser.parse_args(sys.argv[1:])
configer_logger(namespace=parsed)
config = parsed.config
SessionHandler.session = connector.get_session(get_config_provider(config))
app.run(debug=False) app.run(debug=False)

View File

@ -0,0 +1 @@
"""A package containing the pages of the Dashboard."""

View File

@ -1,4 +1,6 @@
"""This module contains the data transfer and refinement functionalities between staging and production DB.""" """This module contains the data transfer and refinement functionalities between staging and production DB."""
import argparse
import sys
from datetime import date from datetime import date
from functools import lru_cache from functools import lru_cache
from typing import Any, Final, Literal from typing import Any, Final, Literal
@ -11,10 +13,17 @@ from loguru import logger
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from tqdm import tqdm from tqdm import tqdm
from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider from aki_prj23_transparenzregister.config.config_providers import (
HELP_TEXT_CONFIG,
ConfigProvider,
get_config_provider,
)
from aki_prj23_transparenzregister.models.company import CapitalTypeEnum, CurrencyEnum from aki_prj23_transparenzregister.models.company import CapitalTypeEnum, CurrencyEnum
from aki_prj23_transparenzregister.utils.enum_types import RelationTypeEnum from aki_prj23_transparenzregister.utils.enum_types import RelationTypeEnum
from aki_prj23_transparenzregister.utils.logger_config import configer_logger from aki_prj23_transparenzregister.utils.logger_config import (
add_logger_options_to_argparse,
configer_logger,
)
from aki_prj23_transparenzregister.utils.mongo.company_mongo_service import ( from aki_prj23_transparenzregister.utils.mongo.company_mongo_service import (
CompanyMongoService, CompanyMongoService,
) )
@ -427,10 +436,7 @@ def add_annual_report(company_id: int, year: int, report: dict, db: Session) ->
), ),
) )
for auditor in report.get("auditors", ""): for auditor in report.get("auditors", ""):
pass
_ = auditor _ = auditor
# person_id = get_person_id(person.get("name")) # how to create a person relation?
# company relation?
def add_annual_financial_reports(companies: list[dict], db: Session) -> None: def add_annual_financial_reports(companies: list[dict], db: Session) -> None:
@ -478,20 +484,17 @@ def add_annual_financial_reports(companies: list[dict], db: Session) -> None:
logger.info("Company connections added.") logger.info("Company connections added.")
def transfer_data(db: Session | None = None) -> None: def transfer_data(config_provider: ConfigProvider) -> None:
"""This functions transfers all the data from a production environment to a staging environment.""" """This functions transfers all the data from a production environment to a staging environment.
configer_logger("info", "data-transfer.log")
mongo_connector = MongoConnector( Args:
JsonFileConfigProvider("./secrets.json").get_mongo_connection_string() config_provider: The configuration that defines the connection to sql and mongodb.
) """
mongo_connector = MongoConnector(config_provider.get_mongo_connection_string())
mongo_company = CompanyMongoService(mongo_connector) mongo_company = CompanyMongoService(mongo_connector)
companies: list[dict[str, Any]] = mongo_company.get_all() # type: ignore companies: list[dict[str, Any]] = mongo_company.get_all() # type: ignore
del mongo_company del mongo_company
db = get_session(config_provider)
if db is None:
db = get_session(JsonFileConfigProvider("./secrets.json"))
reset_all_tables(db) reset_all_tables(db)
add_companies(companies, db) add_companies(companies, db)
@ -500,5 +503,20 @@ def transfer_data(db: Session | None = None) -> None:
db.close() db.close()
if __name__ == "__main__": def transfer_data_cli() -> None:
transfer_data(get_session("sqlite:///local-test-data.db")) """A cli interface for the data transfer."""
parser = argparse.ArgumentParser(
prog="Process data",
description="Copy data from one SQL database to another.",
epilog="Example: 'data-processing secrets.json' or 'data-processing ENV_VARS_'",
)
parser.add_argument(
"config",
metavar="config",
default="ENV",
help=HELP_TEXT_CONFIG,
)
add_logger_options_to_argparse(parser)
parsed = parser.parse_args(sys.argv[1:])
configer_logger(namespace=parsed)
transfer_data(get_config_provider(parsed.config))

View File

@ -1,22 +1,68 @@
"""Configures the logger.""" """Configures the logger."""
import sys import sys
from argparse import ArgumentParser, Namespace
from pathlib import Path from pathlib import Path
from typing import Literal from typing import Literal, overload
from loguru import logger from loguru import logger
@overload
def configer_logger( def configer_logger(
*,
level: Literal["info", "debug", "warning", "error"], level: Literal["info", "debug", "warning", "error"],
path: str | Path, path: str | Path,
) -> None:
...
@overload
def configer_logger(*, namespace: Namespace | None) -> None:
...
def configer_logger(
*,
level: Literal["info", "debug", "warning", "error"] | None = None,
path: str | Path | None = None,
namespace: Namespace | None = None,
) -> None: ) -> None:
"""Configures the logger. """Configures the logger.
Args: Args:
level: Defines the logging level that should be used. level: Defines the logging level that should be used.
path: The path where the logs should be saved. path: The path where the logs should be saved.
namespace: A ArgParsed namespace containing the log arguments.
""" """
logger.remove() logger.remove()
logger.add(sys.stdout, level=level.upper(), catch=True) level_combined: str = str(
if path: level if level else (namespace.level if namespace else "info")
logger.add(path, level=level.upper(), retention=5) ).upper()
logger.add(sys.stdout, level=level_combined, catch=True)
if path or (namespace and namespace.log_path):
logger.add(
path if path else namespace.log_path if namespace else "",
level=level_combined,
retention=5,
)
def add_logger_options_to_argparse(parser: ArgumentParser) -> None:
"""Adds logging options to the ArgumentParser.
Args:
parser: The parser to whom the arguments for logging should be added.
"""
parser.add_argument(
"--level",
choices=["info", "debug", "error", "warning"],
default="info",
metavar="level",
help="The log level for the output.",
type=str.lower,
)
parser.add_argument(
"--log-path",
metavar="log_path",
help="A path to write the log to.",
)

View File

@ -1,38 +1,8 @@
"""Mongo Wrapper.""" """Mongo Wrapper."""
from dataclasses import dataclass
import pymongo import pymongo
from aki_prj23_transparenzregister.config.config_template import MongoConnection
@dataclass
class MongoConnection:
"""Wrapper for MongoDB connection string."""
hostname: str
database: str
port: int | None
username: str | None
password: str | None
def __str__(self) -> str:
return f"Mongo configuration: username: {self.username}, password {self.password}, host {self.hostname}:{self.port}, database {self.database}."
def get_conn_string(self) -> str:
"""Transforms the information of the object to a MongoDB connection string.
Returns:
str: Connection string
"""
if self.username is not None and self.password is not None:
connection_string = (
f"mongodb+srv://{self.username}:{self.password}@{self.hostname}"
)
else:
connection_string = f"mongodb+srv://{self.hostname}"
if self.port is not None:
connection_string += f":{self.port}"
connection_string = connection_string.replace("mongodb+srv", "mongodb")
return connection_string
class MongoConnector: class MongoConnector:

View File

@ -1,5 +1,6 @@
"""Module containing connection utils for PostgreSQL DB.""" """Module containing connection utils for PostgreSQL DB."""
import re import argparse
import sys
import sqlalchemy as sa import sqlalchemy as sa
from loguru import logger from loguru import logger
@ -8,18 +9,29 @@ from sqlalchemy.orm import Session, declarative_base, sessionmaker
from sqlalchemy.pool import SingletonThreadPool from sqlalchemy.pool import SingletonThreadPool
from aki_prj23_transparenzregister.config.config_providers import ( from aki_prj23_transparenzregister.config.config_providers import (
HELP_TEXT_CONFIG,
ConfigProvider, ConfigProvider,
JsonFileConfigProvider, JsonFileConfigProvider,
get_config_provider,
)
from aki_prj23_transparenzregister.config.config_template import (
PostgreConnectionString,
SQLConnectionString,
SQLiteConnectionString,
)
from aki_prj23_transparenzregister.utils.logger_config import (
add_logger_options_to_argparse,
configer_logger,
) )
from aki_prj23_transparenzregister.config.config_template import PostgreConnectionString
def get_pg_engine(conn_args: PostgreConnectionString) -> Engine: def get_engine(conn_args: SQLConnectionString) -> Engine:
"""Creates an engine connected to a Postgres instance. """Creates an engine connected to a Postgres instance.
Returns: Returns:
sqlalchemy.engine: connection engine sqlalchemy.engine: connection engine
""" """
if isinstance(conn_args, PostgreConnectionString):
url = URL.create( url = URL.create(
drivername="postgresql", drivername="postgresql",
username=conn_args.username, username=conn_args.username,
@ -29,24 +41,18 @@ def get_pg_engine(conn_args: PostgreConnectionString) -> Engine:
port=conn_args.port, port=conn_args.port,
) )
return sa.create_engine(url) return sa.create_engine(url)
if isinstance(conn_args, SQLiteConnectionString):
def get_sqlite_engine(connect_to: str) -> Engine:
"""Creates an engine connected to a sqlite instance.
Returns:
sqlalchemy.engine: connection engine
"""
return sa.create_engine( return sa.create_engine(
connect_to, str(conn_args),
connect_args={"check_same_thread": True}, connect_args={"check_same_thread": True},
poolclass=SingletonThreadPool, poolclass=SingletonThreadPool,
) )
raise TypeError("The type of the configuration is invalid.")
def get_session( def get_session(
connect_to: ConfigProvider | str, connect_to: ConfigProvider,
) -> Session: # pragma: no cover ) -> Session:
"""Creates a sql session. """Creates a sql session.
Args: Args:
@ -55,23 +61,10 @@ def get_session(
Returns: Returns:
A session to connect to an SQL db via SQLAlchemy. A session to connect to an SQL db via SQLAlchemy.
""" """
engine: Engine sql_connector_config = connect_to.get_sql_connection_string()
if isinstance(connect_to, str) and re.fullmatch(r".*\.json$", connect_to): return sessionmaker(
logger.debug(connect_to) autocommit=False, autoflush=False, bind=get_engine(sql_connector_config)
connect_to = JsonFileConfigProvider(connect_to) )()
if isinstance(connect_to, ConfigProvider):
pg_string = connect_to.get_postgre_connection_string()
logger.debug(str(connect_to))
engine = get_pg_engine(pg_string)
elif isinstance(connect_to, str) and re.fullmatch(
r"sqlite:\/{3}[A-Za-z].*", connect_to
):
engine = get_sqlite_engine(connect_to)
logger.debug(f"Connection to sqlite3 {connect_to}")
else:
raise TypeError("No valid connection is defined!")
return sessionmaker(autocommit=False, autoflush=False, bind=engine)()
Base = declarative_base() Base = declarative_base()
@ -93,6 +86,25 @@ def reset_all_tables(db: Session) -> None:
init_db(db) init_db(db)
def reset_all_tables_cli() -> None:
"""Resets all tables via a cli."""
parser = argparse.ArgumentParser(
prog="Reset SQL",
description="Copy data from one SQL database to another.",
epilog="Example: 'reset-sql secrets.json' or 'reset-sql ENV_VARS_'",
)
parser.add_argument(
"config",
metavar="config",
default="ENV",
help=HELP_TEXT_CONFIG,
)
add_logger_options_to_argparse(parser)
parsed = parser.parse_args(sys.argv[1:])
configer_logger(namespace=parsed)
reset_all_tables(get_session(get_config_provider(parsed.config)))
if __name__ == "__main__": if __name__ == "__main__":
"""Main flow creating tables""" """Main flow creating tables"""
init_db(get_session(JsonFileConfigProvider("./secrets.json"))) init_db(get_session(JsonFileConfigProvider("./secrets.json")))

View File

@ -7,7 +7,11 @@ from loguru import logger
from sqlalchemy.engine import Engine from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from aki_prj23_transparenzregister.utils.logger_config import configer_logger from aki_prj23_transparenzregister.config.config_providers import get_config_provider
from aki_prj23_transparenzregister.utils.logger_config import (
add_logger_options_to_argparse,
configer_logger,
)
from aki_prj23_transparenzregister.utils.sql.connector import ( from aki_prj23_transparenzregister.utils.sql.connector import (
Base, Base,
get_session, get_session,
@ -36,48 +40,30 @@ def transfer_db_function(*, source: Session, destination: Session) -> None:
) )
def copy_db_cli(args: list[str] | None = None) -> None: def copy_db_cli() -> None:
"""CLI interfaces to copy a db from source to destination. """CLI interfaces to copy a db from source to destination."""
Args:
args: The args ar automaticlly collected from the cli if none are given. They should only be given for testing.
"""
parser = argparse.ArgumentParser( parser = argparse.ArgumentParser(
prog="copy-sql", prog="copy-sql",
description="Copy data from one SQL database to another.", description="Copy data from one SQL database to another.",
epilog="Example: copy-sql source.db destination.json", epilog="Example: copy-sql source.db destination.json",
) )
parser.add_argument( parser.add_argument(
"source", "source-configuration",
metavar="source", metavar="source",
help="Source database configuration.", help="Source database configuration. Define the path to an sql lite db or a secrets.json",
) )
parser.add_argument( parser.add_argument(
"destination", "destination-configuration",
metavar="destination", metavar="destination",
help="Destination database configuration.", help="Destination database configuration. Define the path to an sql lite db or a secrets.json",
)
parser.add_argument(
"--log-level",
choices=["info", "debug", "error", "warning"],
default="info",
metavar="log-level",
help="The log level for the output.",
)
parser.add_argument(
"--log-path",
metavar="log_path",
help="A path to write the log to.",
) )
add_logger_options_to_argparse(parser)
if not args: parsed = parser.parse_args(sys.argv[1:])
args = sys.argv[1:] configer_logger(namespace=parsed)
source = get_session(get_config_provider(parsed.source))
parsed = parser.parse_args(args)
configer_logger(level=parsed.log_level, path=parsed.log_path)
source = get_session(parsed.source)
logger.info(f"Connecting to {source.bind} as a source to copy from.") logger.info(f"Connecting to {source.bind} as a source to copy from.")
destination = get_session(parsed.destination) destination = get_session(get_config_provider(parsed.destination))
logger.info(f"Connecting to {destination.bind} as a destination to copy to.") logger.info(f"Connecting to {destination.bind} as a destination to copy to.")
transfer_db_function( transfer_db_function(
source=source, source=source,

View File

@ -1,12 +1,19 @@
"""Tests the config provers.""" """Tests the config provers."""
import json import json
from pathlib import Path
from unittest.mock import mock_open, patch from unittest.mock import mock_open, patch
import pytest import pytest
from _pytest.monkeypatch import MonkeyPatch
from aki_prj23_transparenzregister.config.config_providers import ( from aki_prj23_transparenzregister.config.config_providers import (
EnvironmentConfigProvider, EnvironmentConfigProvider,
JsonFileConfigProvider, JsonFileConfigProvider,
get_config_provider,
)
from aki_prj23_transparenzregister.config.config_template import (
PostgreConnectionString,
SQLiteConnectionString,
) )
@ -19,7 +26,7 @@ def test_json_provider_init_fail() -> None:
def test_json_provider_init_no_json() -> None: def test_json_provider_init_no_json() -> None:
"""Tests if a non json file throws the correct error.""" """Tests if a non json file throws the correct error."""
with patch("os.path.isfile") as mock_isfile, patch( with patch("os.path.isfile") as mock_isfile, patch(
"builtins.open", mock_open(read_data="fhdaofhdoas") "builtins.open", mock_open(read_data="fh1da23of4!hdo4s")
): ):
mock_isfile.return_value = True mock_isfile.return_value = True
with pytest.raises(TypeError): with pytest.raises(TypeError):
@ -30,20 +37,47 @@ def test_json_provider_init() -> None:
"""Tests the JsonFileConfigProvider creation.""" """Tests the JsonFileConfigProvider creation."""
data = {"hello": "world"} data = {"hello": "world"}
input_data = json.dumps(data) input_data = json.dumps(data)
with patch("os.path.isfile") as mock_isfile: with patch("os.path.isfile") as mock_isfile, patch(
"builtins.open", mock_open(read_data=input_data)
):
mock_isfile.return_value = True mock_isfile.return_value = True
with patch("builtins.open", mock_open(read_data=input_data)):
provider = JsonFileConfigProvider("someWhere") provider = JsonFileConfigProvider("someWhere")
assert provider.__data__ == data assert provider.__data__ == data
def test_json_provider_get_postgres() -> None: @pytest.mark.parametrize("as_path", [True, False])
def test_get_config_provider_json(as_path: bool) -> None:
"""Tests the JsonFileConfigProvider creation by the factory method."""
data = {"hello": "world"}
input_data = json.dumps(data)
with patch("os.path.isfile") as mock_isfile:
mock_isfile.return_value = True
with patch("builtins.open", mock_open(read_data=input_data)):
config_provider = get_config_provider(
Path("some_where.json") if as_path else "some_where.json"
)
assert isinstance(config_provider, JsonFileConfigProvider)
assert config_provider.__data__ == data
@pytest.mark.parametrize("args", ["ENV", "env", "", None, "PYTHON_", "ELSE_"])
def test_get_config_provider_env(args: str | None) -> None:
env_provider = get_config_provider(args)
assert isinstance(env_provider, EnvironmentConfigProvider)
def test_get_config_provider_fail() -> None:
with pytest.raises(ValueError, match="No configuration found"):
get_config_provider("something-else")
def test_json_provider_get_sql() -> None:
"""Tests if the config provider can return the postgre config string.""" """Tests if the config provider can return the postgre config string."""
data = { data = {
"postgres": { "postgres": {
"username": "user", "username": "user",
"password": "pass", "password": "pass",
"host": "locahost", "host": "localhost",
"database": "postgres", "database": "postgres",
"port": 420, "port": 420,
} }
@ -52,7 +86,8 @@ def test_json_provider_get_postgres() -> None:
with patch("os.path.isfile") as mock_isfile: with patch("os.path.isfile") as mock_isfile:
mock_isfile.return_value = True mock_isfile.return_value = True
with patch("builtins.open", mock_open(read_data=input_data)): with patch("builtins.open", mock_open(read_data=input_data)):
config = JsonFileConfigProvider("someWhere").get_postgre_connection_string() config = JsonFileConfigProvider("someWhere").get_sql_connection_string()
assert isinstance(config, PostgreConnectionString)
assert config.username == data["postgres"]["username"] assert config.username == data["postgres"]["username"]
assert config.password == data["postgres"]["password"] assert config.password == data["postgres"]["password"]
assert config.host == data["postgres"]["host"] assert config.host == data["postgres"]["host"]
@ -67,14 +102,39 @@ def test_json_provider_get_postgres() -> None:
assert "Mongo" not in str(JsonFileConfigProvider("someWhere")) assert "Mongo" not in str(JsonFileConfigProvider("someWhere"))
@pytest.mark.parametrize("additional_data", [True, False])
def test_json_provider_get_sqlit3(additional_data: bool) -> None:
"""Tests if the config provider can return the sqlite config string."""
data = {
"sqlite": "some.db",
}
if additional_data:
data |= {
"postgres": { # type: ignore
"username": "user",
"password": "pass",
"host": "localhost",
"database": "postgres",
"port": 420,
}
}
input_data = json.dumps(data)
with patch("os.path.isfile") as mock_isfile:
mock_isfile.return_value = True
with patch("builtins.open", mock_open(read_data=input_data)):
assert JsonFileConfigProvider(
"someWhere"
).get_sql_connection_string() == SQLiteConnectionString("some.db")
def test_json_provider_get_mongo() -> None: def test_json_provider_get_mongo() -> None:
"""Tests the JsonConfigProvider for the mongo db.""" """Tests the JsonConfigProvider for the mongo db."""
data = { data = {
"mongo": { "mongo": {
"username": "user", "username": "user",
"password": "pass", "password": "pass",
"host": "locahost", "host": "localhost",
"database": "postgres", "database": "mongo",
"port": 420, "port": 420,
} }
} }
@ -98,35 +158,59 @@ def test_json_provider_get_mongo() -> None:
assert "Postgre" not in str(JsonFileConfigProvider("someWhere")) assert "Postgre" not in str(JsonFileConfigProvider("someWhere"))
def test_env_provider_constructor() -> None: def test_env_provider_constructor(monkeypatch: MonkeyPatch) -> None:
with patch("aki_prj23_transparenzregister.config.config_providers.os") as mock_os: env_configs = {"PYTHON_TEST": "test", "NOT_PYTHON_TEST": ""}
keys = {"PYTHON_TEST": "test", "NOT_PYTHON_TEST": ""} for key, value in env_configs.items():
mock_os.environ = keys monkeypatch.setenv(key, value)
provider = EnvironmentConfigProvider() provider = EnvironmentConfigProvider()
assert provider.__data__ == {"TEST": "test"} assert "TEST" in provider.__data__
assert provider.__data__["TEST"] == "test"
assert "NOT_PYTHON_TEST" not in provider.__data__
def test_env_provider_postgres() -> None: def test_env_provider_postgres(monkeypatch: MonkeyPatch) -> None:
provider = EnvironmentConfigProvider() env_configs = {
env_data = { "PYTHON_POSTGRES_USERNAME": "postgres-user",
"POSTGRES_USERNAME": "postgres", "PYTHON_POSTGRES_PASSWORD": "postgres-pw",
"POSTGRES_PASSWORD": "postgres", "PYTHON_POSTGRES_HOST": "localhost",
"POSTGRES_HOST": "localhost", "PYTHON_POSTGRES_DATABASE": "postgres",
"POSTGRES_DATABASE": "postgres", "PYTHON_POSTGRES_PORT": "5432",
"POSTGRES_PORT": "5432",
} }
provider.__data__ = env_data for env_config in env_configs.items():
conn_string = provider.get_postgre_connection_string() monkeypatch.setenv(*env_config)
assert conn_string.database == env_data["POSTGRES_DATABASE"]
assert conn_string.host == env_data["POSTGRES_HOST"]
assert conn_string.password == env_data["POSTGRES_PASSWORD"]
assert conn_string.port == env_data["POSTGRES_PORT"]
assert conn_string.username == env_data["POSTGRES_USERNAME"]
def test_env_provider_mongodb() -> None:
provider = EnvironmentConfigProvider() provider = EnvironmentConfigProvider()
conn_string = provider.get_sql_connection_string()
assert isinstance(conn_string, PostgreConnectionString)
assert conn_string.database == env_configs["PYTHON_POSTGRES_DATABASE"]
assert conn_string.host == env_configs["PYTHON_POSTGRES_HOST"]
assert conn_string.password == env_configs["PYTHON_POSTGRES_PASSWORD"]
assert conn_string.port == env_configs["PYTHON_POSTGRES_PORT"]
assert conn_string.username == env_configs["PYTHON_POSTGRES_USERNAME"]
@pytest.mark.parametrize("additional_args", [True, False])
def test_env_provider_sqlite(additional_args: bool, monkeypatch: MonkeyPatch) -> None:
env_configs = {"PYTHON_SQLITE_PATH": "some.db"}
if additional_args:
env_configs |= {
"PYTHON_POSTGRES_USERNAME": "postgres-user",
"PYTHON_POSTGRES_PASSWORD": "postgres-pw",
"PYTHON_POSTGRES_HOST": "localhost",
"PYTHON_POSTGRES_DATABASE": "postgres",
"PYTHON_POSTGRES_PORT": "5432",
}
for env_config in env_configs.items():
monkeypatch.setenv(*env_config)
provider = EnvironmentConfigProvider()
assert provider.get_sql_connection_string() == SQLiteConnectionString("some.db")
@pytest.mark.parametrize("prefix", ["", "NON_PYTHON", "ELSE"])
def test_env_provider_mongodb(prefix: str, monkeypatch: MonkeyPatch) -> None:
env_data = { env_data = {
"MONGO_USERNAME": "username", "MONGO_USERNAME": "username",
"MONGO_HOST": "localhost", "MONGO_HOST": "localhost",
@ -134,6 +218,13 @@ def test_env_provider_mongodb() -> None:
"MONGO_PORT": 27017, "MONGO_PORT": 27017,
"MONGO_DATABASE": "transparenzregister", "MONGO_DATABASE": "transparenzregister",
} }
for key, value in env_data.items():
monkeypatch.setenv(f"{prefix if prefix else 'PYTHON_'}{key}", str(value))
if prefix:
provider = EnvironmentConfigProvider(prefix)
else:
provider = EnvironmentConfigProvider()
provider.__data__ = env_data provider.__data__ = env_data
conn_string = provider.get_mongo_connection_string() conn_string = provider.get_mongo_connection_string()

View File

@ -0,0 +1,34 @@
"""Test for config templates."""
from aki_prj23_transparenzregister.config.config_template import (
MongoConnection,
SQLiteConnectionString,
)
def test_sqlite_connection_string() -> None:
"""Tests if the sqlite protocol is correctly added to a path."""
assert str(SQLiteConnectionString("some-path-to.db")) == "sqlite:///some-path-to.db"
assert (
str(SQLiteConnectionString("other-path-to.db")) == "sqlite:///other-path-to.db"
)
assert SQLiteConnectionString("some-path-to.db") == SQLiteConnectionString(
"some-path-to.db"
)
def test_get_conn_string_no_credentials() -> None:
"""Tests the mongo connection string generation."""
conn = MongoConnection("localhost", "", 27017, None, None)
assert conn.get_conn_string() == "mongodb://localhost:27017"
def test_get_conn_string_no_port_but_credentials() -> None:
"""Tests the mongo connection string generation."""
conn = MongoConnection("localhost", "", None, "admin", "password")
assert conn.get_conn_string() == "mongodb+srv://admin:password@localhost"
def test_get_conn_simple() -> None:
"""Tests the mongo connection string generation."""
conn = MongoConnection("localhost", "", None, None, None)
assert conn.get_conn_string() == "mongodb+srv://localhost"

View File

@ -7,11 +7,15 @@ from typing import Any
import pytest import pytest
from sqlalchemy.engine import Engine from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session from sqlalchemy.orm import Session, sessionmaker
from aki_prj23_transparenzregister.config.config_template import SQLiteConnectionString
from aki_prj23_transparenzregister.utils import data_transfer from aki_prj23_transparenzregister.utils import data_transfer
from aki_prj23_transparenzregister.utils.sql import entities from aki_prj23_transparenzregister.utils.sql import entities
from aki_prj23_transparenzregister.utils.sql.connector import get_session, init_db from aki_prj23_transparenzregister.utils.sql.connector import (
get_engine,
init_db,
)
@pytest.fixture(autouse=True) @pytest.fixture(autouse=True)
@ -39,7 +43,11 @@ def empty_db() -> Generator[Session, None, None]:
"""Generates a db Session to a sql_lite db.""" """Generates a db Session to a sql_lite db."""
if os.path.exists("test-db.db"): if os.path.exists("test-db.db"):
os.remove("test-db.db") os.remove("test-db.db")
db = get_session("sqlite:///test-db.db") db = sessionmaker(
autocommit=False,
autoflush=False,
bind=get_engine(SQLiteConnectionString("test-db.db")),
)()
init_db(db) init_db(db)
yield db yield db
db.close() db.close()

View File

@ -1,7 +1,11 @@
"""Test for the main app dashboard.""" """Test for the main app dashboard."""
import sys
from collections.abc import Generator from collections.abc import Generator
from unittest.mock import MagicMock
import pytest import pytest
from _pytest.monkeypatch import MonkeyPatch
from pytest_mock import MockerFixture
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from aki_prj23_transparenzregister.ui import app from aki_prj23_transparenzregister.ui import app
@ -31,3 +35,27 @@ def test_go_to_company_page() -> None:
"""Checks if the go_to_company_page callback yields a result.""" """Checks if the go_to_company_page callback yields a result."""
output = app.go_to_company_page(1) output = app.go_to_company_page(1)
assert output == "/Unternehmensdetails/1" assert output == "/Unternehmensdetails/1"
def test_main_of_app(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr(sys, "argv", [sys.argv[0]])
with pytest.raises(SystemExit):
app.main()
def test_main_of_app_help(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr(sys, "argv", [sys.argv[0], "-h"])
with pytest.raises(SystemExit):
app.main()
@pytest.mark.parametrize("upper", [True, False])
def test_main_of_app_env(
monkeypatch: MonkeyPatch, upper: bool, mocker: MockerFixture
) -> None:
MagicMock()
monkeypatch.setattr(sys, "argv", [sys.argv[0], "ENV" if upper else "env"])
mocked = mocker.patch("aki_prj23_transparenzregister.ui.app.Dash.run")
mocked.return_value = None
app.main()
mocked.assert_called_once()

View File

@ -1,6 +1,7 @@
"""Test the transfer functions from mongodb to sql.""" """Test the transfer functions from mongodb to sql."""
import random import random
import string import string
import sys
from datetime import date from datetime import date
from typing import Any from typing import Any
@ -8,13 +9,13 @@ import numpy as np
import pandas as pd import pandas as pd
import pytest import pytest
import sqlalchemy as sa import sqlalchemy as sa
from _pytest.monkeypatch import MonkeyPatch
from pytest_mock import MockerFixture from pytest_mock import MockerFixture
from sqlalchemy.engine import Engine from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session from sqlalchemy.orm import Session
from aki_prj23_transparenzregister.models.company import CapitalTypeEnum, CurrencyEnum from aki_prj23_transparenzregister.models.company import CapitalTypeEnum, CurrencyEnum
from aki_prj23_transparenzregister.utils import data_transfer from aki_prj23_transparenzregister.utils import data_transfer
from aki_prj23_transparenzregister.utils.data_transfer import norm_capital
from aki_prj23_transparenzregister.utils.sql import entities from aki_prj23_transparenzregister.utils.sql import entities
@ -1025,7 +1026,9 @@ def test_add_annual_report_financial_key_error(full_db: Session) -> None:
@pytest.mark.parametrize("currency", ["", "EUR"]) @pytest.mark.parametrize("currency", ["", "EUR"])
def test_norm_capital_eur(currency: str, capital_type: str) -> None: def test_norm_capital_eur(currency: str, capital_type: str) -> None:
"""Tests if eur entries can be converted / normed correctly.""" """Tests if eur entries can be converted / normed correctly."""
assert norm_capital({"value": 5, "currency": currency, "type": capital_type}) == { assert data_transfer.norm_capital(
{"value": 5, "currency": currency, "type": capital_type}
) == {
"capital_value": 5.0, "capital_value": 5.0,
"capital_currency": CurrencyEnum("EUR"), "capital_currency": CurrencyEnum("EUR"),
"capital_type": CapitalTypeEnum(capital_type), "capital_type": CapitalTypeEnum(capital_type),
@ -1036,7 +1039,7 @@ def test_norm_capital_eur(currency: str, capital_type: str) -> None:
@pytest.mark.parametrize("currency", ["DM", "DEM"]) @pytest.mark.parametrize("currency", ["DM", "DEM"])
def test_norm_capital_dm(currency: str, capital_type: CapitalTypeEnum) -> None: def test_norm_capital_dm(currency: str, capital_type: CapitalTypeEnum) -> None:
"""Tests if dm entries can be converted / normed correctly.""" """Tests if dm entries can be converted / normed correctly."""
assert norm_capital( assert data_transfer.norm_capital(
capital={"value": 5, "currency": currency, "type": capital_type} capital={"value": 5, "currency": currency, "type": capital_type}
) == { ) == {
"capital_value": 2.56, "capital_value": 2.56,
@ -1047,7 +1050,7 @@ def test_norm_capital_dm(currency: str, capital_type: CapitalTypeEnum) -> None:
def test_norm_capital_fail() -> None: def test_norm_capital_fail() -> None:
"""Tests if the entry is dropped if it isn't complete.""" """Tests if the entry is dropped if it isn't complete."""
assert norm_capital({"something": "something"}) == {} # type: ignore assert data_transfer.norm_capital({"something": "something"}) == {} # type: ignore
@pytest.mark.parametrize( @pytest.mark.parametrize(
@ -1062,3 +1065,30 @@ def test_norm_capital_fail() -> None:
) )
def test_get_geocodes(zip_code: str | None, results: dict) -> None: def test_get_geocodes(zip_code: str | None, results: dict) -> None:
assert data_transfer.get_geocodes(zip_code) == results assert data_transfer.get_geocodes(zip_code) == results
def test_transfer_data_cli(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr(sys, "argv", [sys.argv[0]])
with pytest.raises(SystemExit):
data_transfer.transfer_data_cli()
def test_transfer_data_cli_help(monkeypatch: MonkeyPatch) -> None:
monkeypatch.setattr(sys, "argv", [sys.argv[0], "-h"])
with pytest.raises(SystemExit):
data_transfer.transfer_data_cli()
@pytest.mark.parametrize("upper", [True, False])
def test_transfer_data_cli_env(
monkeypatch: MonkeyPatch, upper: bool, mocker: MockerFixture
) -> None:
monkeypatch.setattr(sys, "argv", [sys.argv[0], "ENV" if upper else "env"])
mocker.patch(
"aki_prj23_transparenzregister.utils.data_transfer.transfer_data", lambda _: _
)
spy = mocker.spy(data_transfer, "transfer_data")
# with pytest.raises(KeyError):
data_transfer.transfer_data_cli()
spy.assert_called_once()

View File

@ -1,15 +1,21 @@
"""Smoke-test over the logger config.""" """Smoke-test over the logger config."""
from argparse import ArgumentParser
from pathlib import Path from pathlib import Path
import pytest import pytest
from aki_prj23_transparenzregister.utils.logger_config import configer_logger from aki_prj23_transparenzregister.utils.logger_config import (
add_logger_options_to_argparse,
configer_logger,
)
@pytest.mark.parametrize("parser", [True, False])
@pytest.mark.parametrize("path", [None, "test-log.log", ""]) @pytest.mark.parametrize("path", [None, "test-log.log", ""])
@pytest.mark.parametrize("upper", [True, False]) @pytest.mark.parametrize("upper", [True, False])
@pytest.mark.parametrize("level", ["info", "debug", "error", "warning"]) @pytest.mark.parametrize("level", ["info", "debug", "error"])
def test_configer_logger( def test_configer_logger(
parser: bool,
level: str, level: str,
upper: bool, upper: bool,
path: Path | str | None, path: Path | str | None,
@ -17,10 +23,32 @@ def test_configer_logger(
"""Tests the configuration of the logger. """Tests the configuration of the logger.
Args: Args:
parser: If the arguments should be given via the parser or not.
level: The log-level to configure. level: The log-level to configure.
upper: If the upper variant of the level should be used. upper: If the upper variant of the level should be used.
path: The path where to save the log. path: The path where to save the log.
""" """
if level.upper(): if level.upper():
level = level.upper() level = level.upper()
configer_logger(level, path) # type: ignore if parser:
args_parser = ArgumentParser()
add_logger_options_to_argparse(args_parser)
configer_logger(
namespace=args_parser.parse_args(
[
"--level",
level if level else "",
"--log-path",
str(path) if path else "",
]
)
)
else:
configer_logger(level=level, path=path) # type: ignore
def test_add_logger_options_to_argparse() -> None:
    """A test checking if the ArgumentParser is modified.

    Previously this test only verified that the call did not raise; it now
    also checks that the added options can actually be parsed.
    """
    args_parser = ArgumentParser()
    add_logger_options_to_argparse(args_parser)
    # The helper is expected to register --level and --log-path (see the
    # parser branch of test_configer_logger).
    namespace = args_parser.parse_args(["--level", "DEBUG", "--log-path", ""])
    assert hasattr(namespace, "level")

View File

@ -1,30 +1,12 @@
"""Tests for connecting to the mongodb.""" """Tests for connecting to the mongodb."""
from unittest.mock import patch from unittest.mock import patch
from aki_prj23_transparenzregister.config.config_template import MongoConnection
from aki_prj23_transparenzregister.utils.mongo.connector import ( from aki_prj23_transparenzregister.utils.mongo.connector import (
MongoConnection,
MongoConnector, MongoConnector,
) )
def test_get_conn_string_no_credentials() -> None:
    """Tests the mongo connection string generation without credentials."""
    connection = MongoConnection("localhost", "", 27017, None, None)
    expected = "mongodb://localhost:27017"
    assert connection.get_conn_string() == expected
def test_get_conn_string_no_port_but_credentials() -> None:
    """Tests the mongo connection string generation with credentials only."""
    connection = MongoConnection("localhost", "", None, "admin", "password")
    expected = "mongodb+srv://admin:password@localhost"
    assert connection.get_conn_string() == expected
def test_get_conn_simple() -> None:
    """Tests the mongo connection string generation with host only."""
    connection = MongoConnection("localhost", "", None, None, None)
    expected = "mongodb+srv://localhost"
    assert connection.get_conn_string() == expected
def test_mongo_connector() -> None: def test_mongo_connector() -> None:
"""Tests the MongoConnector.""" """Tests the MongoConnector."""
with patch("pymongo.MongoClient") as mock_mongo_client: with patch("pymongo.MongoClient") as mock_mongo_client:

View File

@ -1,16 +1,14 @@
"""Tests the sql connector.""" """Tests the sql connector."""
import os.path import os.path
from collections.abc import Generator from collections.abc import Generator
from typing import Any
from unittest.mock import Mock, patch from unittest.mock import Mock, patch
import pytest import pytest
from sqlalchemy.engine import Engine
from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider
from aki_prj23_transparenzregister.config.config_template import PostgreConnectionString from aki_prj23_transparenzregister.config.config_template import PostgreConnectionString
from aki_prj23_transparenzregister.utils.sql.connector import ( from aki_prj23_transparenzregister.utils.sql.connector import (
get_pg_engine, get_engine,
get_session, get_session,
init_db, init_db,
) )
@ -24,7 +22,13 @@ def test_get_engine_pg() -> None:
) as mock_create_engine: ) as mock_create_engine:
result = "someThing" result = "someThing"
mock_create_engine.return_value = result mock_create_engine.return_value = result
assert get_pg_engine(conn_args) == result assert get_engine(conn_args) == result
def test_get_engine_fail() -> None:
    """Tests what happens if the wrong type is given to the engine factory."""
    expected_message = "The type of the configuration is invalid."
    with pytest.raises(TypeError, match=expected_message):
        get_engine(None)  # type: ignore
@pytest.fixture() @pytest.fixture()
@ -44,29 +48,10 @@ def delete_sqlite_table() -> Generator[str, None, None]:
os.remove(sqlite_test_path) os.remove(sqlite_test_path)
def test_get_sqlite_init(delete_sqlite_table: str) -> None:
    """Tests if a sql table file can be initiated."""
    assert not os.path.exists(delete_sqlite_table)
    db_session = get_session(f"sqlite:///{delete_sqlite_table}")
    init_db(db_session)
    db_session.close()
    bound_engine = db_session.bind
    assert isinstance(bound_engine, Engine)
    bound_engine.dispose()
    assert os.path.exists(delete_sqlite_table)
@pytest.mark.parametrize("connection", ["faulty-name", 0, 9.2, True])
def test_get_invalid_connection(connection: Any) -> None:
"""Tests if an error is thrown on a faulty connections."""
with pytest.raises(TypeError):
get_session(connection)
def test_init_pd_db() -> None: def test_init_pd_db() -> None:
"""Tests if a pg sql database can be connected and initiated to.""" """Tests if a pg sql database can be connected and initiated to."""
with patch( with patch(
"aki_prj23_transparenzregister.utils.sql.connector.get_pg_engine" "aki_prj23_transparenzregister.utils.sql.connector.get_engine"
) as mock_get_engine, patch( ) as mock_get_engine, patch(
"aki_prj23_transparenzregister.utils.sql.connector.declarative_base" "aki_prj23_transparenzregister.utils.sql.connector.declarative_base"
) as mock_declarative_base: ) as mock_declarative_base:
@ -77,6 +62,6 @@ def test_init_pd_db() -> None:
mock_declarative_base.return_value = mock_value mock_declarative_base.return_value = mock_value
mock_value = Mock(spec=JsonFileConfigProvider) mock_value = Mock(spec=JsonFileConfigProvider)
mock_value.get_postgre_connection_string.return_value = "" mock_value.get_sql_connection_string.return_value = ""
init_db(get_session(mock_value)) init_db(get_session(mock_value))

View File

@ -1,13 +1,20 @@
"""Test if the sql db can be copied.""" """Test if the sql db can be copied."""
import os import os
import sys
from collections.abc import Generator from collections.abc import Generator
import pandas as pd import pandas as pd
import pytest import pytest
from _pytest.monkeypatch import MonkeyPatch
from sqlalchemy.engine import Engine from sqlalchemy.engine import Engine
from sqlalchemy.orm import Session from sqlalchemy.orm import Session, sessionmaker
from aki_prj23_transparenzregister.utils.sql.connector import Base, get_session, init_db from aki_prj23_transparenzregister.config.config_template import SQLiteConnectionString
from aki_prj23_transparenzregister.utils.sql.connector import (
Base,
get_engine,
init_db,
)
from aki_prj23_transparenzregister.utils.sql.copy_sql import ( from aki_prj23_transparenzregister.utils.sql.copy_sql import (
copy_db_cli, copy_db_cli,
transfer_db_function, transfer_db_function,
@ -19,7 +26,13 @@ def destination_db() -> Generator[Session, None, None]:
"""Generates a db Session to a sqlite db to copy data to.""" """Generates a db Session to a sqlite db to copy data to."""
if os.path.exists("secondary.db"): if os.path.exists("secondary.db"):
os.remove("secondary.db") os.remove("secondary.db")
db = get_session("sqlite:///secondary.db")
db = sessionmaker(
autocommit=False,
autoflush=False,
bind=get_engine(SQLiteConnectionString("secondary.db")),
)()
init_db(db) init_db(db)
yield db yield db
db.close() db.close()
@ -44,13 +57,15 @@ def test_transfer_db(full_db: Session, destination_db: Session) -> None:
) )
def test_copy_db_cli_help1(monkeypatch: MonkeyPatch) -> None:
    """Tests if the help argument exits the software gracefully.

    Args:
        monkeypatch: Fixture used to fake the ``-h`` command line.
    """
    with monkeypatch.context() as m, pytest.raises(SystemExit):  # noqa: PT012
        m.setattr(sys, "argv", [sys.argv[0], "-h"])
        copy_db_cli()
def test_copy_db_cli_help2(monkeypatch: MonkeyPatch) -> None:
    """Tests if the help argument exits gracefully among other arguments.

    Args:
        monkeypatch: Fixture used to fake a command line containing ``-h``.
    """
    with monkeypatch.context() as m, pytest.raises(SystemExit):  # noqa: PT012
        m.setattr(sys, "argv", [sys.argv[0], "eskse", "-h", "asdf"])
        copy_db_cli()