diff --git a/.github/workflows/delete-branch.yaml b/.github/workflows/delete-branch.yaml
new file mode 100644
index 0000000..67dbbc8
--- /dev/null
+++ b/.github/workflows/delete-branch.yaml
@@ -0,0 +1,11 @@
+name: Branch Deleted
+on: delete
+jobs:
+  delete:
+    if: github.event.ref_type == 'branch'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Clean up
+        run: |
+          echo "Clean up for branch ${{ github.event.ref }}"
+      # uses: bots-house/ghcr-delete-image-action@v1.1.0
diff --git a/.github/workflows/documentation.yaml b/.github/workflows/documentation.yaml
index 3fcb11b..fe0bb2d 100644
--- a/.github/workflows/documentation.yaml
+++ b/.github/workflows/documentation.yaml
@@ -5,7 +5,7 @@ on:
     branches:
       - main
   pull_request:
-    types: [opened, reopened, synchronize]
+    types: [reopened, opened, synchronize]

 jobs:
   doc-build:
@@ -23,7 +23,7 @@ jobs:
         with:
           version: 1.4.2
           virtualenvs-create: false
-      - run: poetry install --only doc,root,develop
+      - run: poetry install --only doc,root,develop --all-extras
       - name: Doc-Build
         run: |
           cd documentations
diff --git a/.github/workflows/lint-actions.yaml b/.github/workflows/lint-actions.yaml
index 06c8968..eb76d79 100644
--- a/.github/workflows/lint-actions.yaml
+++ b/.github/workflows/lint-actions.yaml
@@ -37,7 +37,7 @@ jobs:
           version: 1.4.2
           virtualenvs-create: false
           virtualenvs-path: ~/local/share/virtualenvs
-      - run: poetry install --without develop,doc
+      - run: poetry install --without develop,doc --all-extras
      - name: Run mypy
        run: |
          mypy src tests
@@ -69,7 +69,6 @@ jobs:
        uses: actions/checkout@v3
      - name: Install root, pip-licenses & pip-audit
        run: |
-          poetry install --only-root
          poetry run pip install pip-licenses pip-audit
      - name: Poetry export
        run: poetry export -f requirements.txt --output requirements.txt
diff --git a/.github/workflows/test-and-build-action.yaml b/.github/workflows/test-and-build-action.yaml
index b69a149..d503fda 100644
--- a/.github/workflows/test-and-build-action.yaml
+++ b/.github/workflows/test-and-build-action.yaml
@@ -1,9 +1,12 @@
 name: Test & Build

 on:
+  push:
   pull_request:
     types: [reopened, opened, synchronize]
-  push:
+  release:
+    types:
+      - created

 jobs:
   test:
@@ -27,7 +30,7 @@ jobs:
         with:
           path: ~/.local/share/virtualenvs
           key: venv
-      - run: poetry install --without develop,doc,lint
+      - run: poetry install --without develop,doc,lint --all-extras
       - name: Run test suite
         run: |
           poetry run pytest --junit-xml=unit-test-results.xml --cov-report "xml:coverage.xml" --cov=src tests/
@@ -85,7 +88,7 @@ jobs:
           version: 1.4.2
           virtualenvs-path: ~/local/share/virtualenvs
       - run: |
-          poetry install --only test
+          poetry install --only test --all-extras
       - uses: actions/download-artifact@v3
         with:
           name: code-coverage-report
@@ -99,9 +102,9 @@ jobs:
           path: htmlcov/

   build:
+    if: ${{ github.event_name == 'push' }}
     runs-on: ubuntu-latest
     needs: test
-    if: ${{ github.event_name == 'push' }}
     steps:
       - name: Setup Python
         uses: actions/setup-python@v4
@@ -120,10 +123,87 @@ jobs:
       - name: Check out repository code
         uses: actions/checkout@v3
       - run: |
-          poetry install --without develop,doc,lint,test
           poetry build
       - name: Archive builds
         uses: actions/upload-artifact@v3
         with:
-          name: builds
+          name: wheel-builds
           path: dist/
+
+  docker-build:
+    if: ${{ github.event_name == 'push' }}
+    runs-on: ubuntu-latest
+    needs: [build]
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v3
+
+      - uses: actions/download-artifact@v3
+        with:
+          name: wheel-builds
+          path: dist
+
+      - id: tag_or_main
+        run: |
+          if [ "$GITHUB_EVENT_NAME" = "release" ]; then
+            echo "TAG_OR_MAIN=$(echo $GITHUB_REF | awk -F / '{print $NF}')" >> $GITHUB_ENV
+            echo "LATEST=latest" >> $GITHUB_ENV
+          elif [ "$GITHUB_EVENT_NAME" = "push" ] && [ "$GITHUB_REF" = "refs/heads/main" ]; then
+            echo "TAG_OR_MAIN=main" >> $GITHUB_ENV
+            echo "LATEST=main" >> $GITHUB_ENV
+          else
+            echo "TAG_OR_MAIN=experimental" >> $GITHUB_ENV
+            echo "LATEST=experimental" >> $GITHUB_ENV
+          fi
+
+
+      - run: |
+          echo "Print ${{ env.TAG_OR_MAIN }}"
+
+      - name: Build All prior to push
+        run: |
+          docker build --target ingest .
+          docker build --target data-transformation .
+          docker build --target web-server .
+          docker image ls
+
+      - name: Exit workflow on pull request
+        if: ${{ github.event_name == 'pull_request' }}
+        run: exit 0
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build and push ingest
+        uses: docker/build-push-action@v4
+        with:
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/ingest:${{ env.TAG_OR_MAIN }}
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/ingest:${{ env.LATEST }}
+          target: ingest
+          context: .
+
+      - name: Build and push data-transformation
+        uses: docker/build-push-action@v4
+        with:
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/data-transformation:${{ env.TAG_OR_MAIN }}
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/data-transformation:${{ env.LATEST }}
+          target: data-transformation
+          context: .
+
+      - name: Build and push web-server
+        uses: docker/build-push-action@v4
+        with:
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/web-server:${{ env.TAG_OR_MAIN }}
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/web-server:${{ env.LATEST }}
+          target: web-server
+          context: .
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..e4f5041
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,30 @@
+FROM python:3.11-slim as base
+LABEL AUTHOR="AKI Projektseminar 23"
+
+ARG APP_HOME="transparenzregister"
+WORKDIR /${APP_HOME}/
+USER root
+RUN apt update -y && \
+    apt install git sqlite3 iputils-ping traceroute -y && \
+    rm -rf /var/lib/apt/lists/*
+
+COPY dist/*.whl dist/
+
+RUN pip install --find-links=dist aki-prj23-transparenzregister --no-cache-dir
+
+FROM base as ingest
+
+RUN pip install --find-links=dist aki-prj23-transparenzregister[ingest] --no-cache-dir && \
+    rm dist/ -R
+
+FROM base as data-transformation
+
+RUN pip install --find-links=dist aki-prj23-transparenzregister[processing] --no-cache-dir && \
+    rm dist/ -R
+
+FROM base as web-server
+
+RUN pip install --find-links=dist aki-prj23-transparenzregister[web-server] --no-cache-dir && \
+    rm dist/ -R
+
+CMD python -m aki_prj23_transparenzregister.ui.company_finance_dash
diff --git a/README.md b/README.md
index 679df46..03414c2 100644
--- a/README.md
+++ b/README.md
@@ -54,11 +54,20 @@ PYTHON_POSTGRES_PASSWORD=postgres
 PYTHON_POSTGRES_HOST=localhost
 PYTHON_POSTGRES_DATABASE=postgres
 PYTHON_POSTGRES_PORT=5432
+
 PYTHON_MONGO_USERNAME=username
 PYTHON_MONGO_HOST=localhost
 PYTHON_MONGO_PASSWORD=password
 PYTHON_MONGO_PORT=27017
 PYTHON_MONGO_DATABASE=transparenzregister
+
+PYTHON_DASH_LOGIN_USERNAME=some-login-to-webgui
+PYTHON_DASH_LOGIN_PW=some-pw-to-login-to-webgui
+
+CR=ghcr.io/fhswf/aki_prj23_transparenzregister
+TAG=latest
+
+HTTP_PORT=80
 ```

 The prefix `PYTHON_` can be customized by setting a different `prefix` when constructing the ConfigProvider.
diff --git a/docker-compose.yml b/docker-compose.yml
index c7efb62..b4d1446 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,27 +1,75 @@
 version: '3.8'
 services:
-  postgres:
-    image: postgres
-    restart: always
+
+  ingest:
+    image: ${CR:-ghcr.io/fhswf/aki_prj23_transparenzregister}/ingest:${TAG:-latest}
+    depends_on:
+      - postgres
+      - mongodb
     environment:
-      POSTGRES_PASSWORD: postgres
-    volumes:
-      - postgres_data:/var/lib/postgresql/data
-    ports:
-      - 5432:5432
+      # mongo access
+      PYTHON_MONGO_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      PYTHON_MONGO_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      PYTHON_MONGO_HOST: ${PYTHON_MONGO_HOST:-mongodb}
+      PYTHON_MONGO_PORT: ${PYTHON_MONGO_PORT:-27017}
+      PYTHON_MONGO_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+
   mongodb:
     image: mongo:4.4.6
-    container_name: mongodb
     restart: unless-stopped
     environment:
-      MONGO_INITDB_ROOT_USERNAME: username
-      MONGO_INITDB_ROOT_PASSWORD: password
-      MONGO_INITDB_DATABASE: transparenzregister
-    ports:
-      - 27017:27017
+      MONGO_INITDB_ROOT_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      MONGO_INITDB_ROOT_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      MONGO_INITDB_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
     volumes:
       - mongo_data:/data/db

+  postgres:
+    image: postgres:15.4
+    restart: always
+    environment:
+      POSTGRES_USER: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      POSTGRES_DB: ${PYTHON_POSTGRES_DATABASE:-db}
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+
+  data-transformation:
+    image: ${CR:-ghcr.io/fhswf/aki_prj23_transparenzregister}/data-transformation:${TAG:-latest}
+    depends_on:
+      - postgres
+      - mongodb
+    environment:
+      # postgres access
+      PYTHON_POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      PYTHON_POSTGRES_USERNAME: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      PYTHON_POSTGRES_HOST: ${PYTHON_POSTGRES_HOST:-postgres}
+      PYTHON_POSTGRES_DATABASE: ${PYTHON_POSTGRES_DATABASE:-db}
+      PYTHON_POSTGRES_PORT: ${PYTHON_POSTGRES_PORT:-5432}
+      # mongo access
+      PYTHON_MONGO_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      PYTHON_MONGO_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      PYTHON_MONGO_HOST: ${PYTHON_MONGO_HOST:-mongodb}
+      PYTHON_MONGO_PORT: ${PYTHON_MONGO_PORT:-27017}
+      PYTHON_MONGO_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+
+  web-server:
+    image: ${CR:-ghcr.io/fhswf/aki_prj23_transparenzregister}/web-server:${TAG:-latest}
+    depends_on:
+      - postgres
+    ports:
+      - ${HTTP_PORT}:${HTTP_PORT}
+    environment:
+      PYTHON_POSTGRES_USERNAME: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      PYTHON_POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      PYTHON_POSTGRES_HOST: ${PYTHON_POSTGRES_HOST:-postgres}
+      PYTHON_POSTGRES_DATABASE: ${PYTHON_POSTGRES_DATABASE:-db}
+      PYTHON_POSTGRES_PORT: ${PYTHON_POSTGRES_PORT:-5432}
+      PYTHON_DASH_LOGIN_PW: ${PYTHON_DASH_LOGIN_PW}
+      PYTHON_DASH_LOGIN_USERNAME: ${PYTHON_DASH_LOGIN_USERNAME}
+      PORT: ${HTTP_PORT}
+      HOST: 0.0.0.0
+
 volumes:
   postgres_data:
   mongo_data:
diff --git a/local-docker-compose.yml b/local-docker-compose.yml
new file mode 100644
index 0000000..5b06255
--- /dev/null
+++ b/local-docker-compose.yml
@@ -0,0 +1,101 @@
+version: '3.8'
+
+name: local-aki_prj23_transparenzregister
+
+services:
+
+  ingest:
+    image: ingestor:local-build
+    build:
+      context: .
+      target: ingest
+    environment:
+      # mongo access
+      PYTHON_MONGO_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      PYTHON_MONGO_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      PYTHON_MONGO_HOST: ${PYTHON_MONGO_HOST:-mongodb}
+      PYTHON_MONGO_PORT: ${PYTHON_MONGO_PORT:-27017}
+      PYTHON_MONGO_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+
+  mongodb:
+    image: mongo:4.4.6
+    restart: unless-stopped
+    environment:
+      MONGO_INITDB_ROOT_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      MONGO_INITDB_ROOT_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      MONGO_INITDB_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+    ports:
+      - ${PYTHON_MONGO_PORT}:27017
+    volumes:
+      - mongo_data:/data/db
+    deploy:
+      replicas: 1 # set to one to activate
+
+  postgres:
+    image: postgres:15.4
+    restart: always
+    environment:
+      POSTGRES_USER: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      POSTGRES_DB: ${PYTHON_POSTGRES_DATABASE:-db}
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+    ports:
+      - ${PYTHON_POSTGRES_PORT:-5432}:5432
+    deploy:
+      replicas: 1 # set to one to activate
+
+  data-transformation:
+    image: data-transformation:local-build
+    depends_on:
+      - postgres
+      - mongodb
+    build:
+      context: .
+      target: data-transformation
+    environment:
+      # postgres access
+      PYTHON_POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      PYTHON_POSTGRES_USERNAME: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      PYTHON_POSTGRES_HOST: ${PYTHON_POSTGRES_HOST:-postgres}
+      PYTHON_POSTGRES_DATABASE: ${PYTHON_POSTGRES_DATABASE:-db}
+      PYTHON_POSTGRES_PORT: ${PYTHON_POSTGRES_PORT:-5432}
+      # mongo access
+      PYTHON_MONGO_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      PYTHON_MONGO_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      PYTHON_MONGO_HOST: ${PYTHON_MONGO_HOST:-mongodb}
+      PYTHON_MONGO_PORT: ${PYTHON_MONGO_PORT:-27017}
+      PYTHON_MONGO_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+
+  web-server:
+    image: web-server:local-build
+    build:
+      context: .
+      target: web-server
+    depends_on:
+      - postgres
+    ports:
+      - 8888:8888
+    environment:
+      PYTHON_POSTGRES_USERNAME: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      PYTHON_POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      PYTHON_POSTGRES_HOST: ${PYTHON_POSTGRES_HOST:-postgres}
+      PYTHON_POSTGRES_DATABASE: ${PYTHON_POSTGRES_DATABASE:-db}
+      PYTHON_POSTGRES_PORT: ${PYTHON_POSTGRES_PORT:-5432}
+      PYTHON_DASH_LOGIN_PW: ${PYTHON_DASH_LOGIN_PW}
+      PYTHON_DASH_LOGIN_USERNAME: ${PYTHON_DASH_LOGIN_USERNAME}
+      PORT: 8888
+      HOST: 0.0.0.0
+
+volumes:
+  postgres_data:
+  mongo_data:
+
+networks:
+  default:
+    driver: bridge
+    ipam:
+      driver: default
+      config:
+        - subnet: 192.168.7.0/24
+          gateway: 192.168.7.1
diff --git a/poetry.lock b/poetry.lock
index c73d2fa..e9fedd0 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand.

 [[package]]
 name = "aenum"
@@ -875,6 +875,20 @@ dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"]
 diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"]
 testing = ["beautifulsoup4 (>=4.8.2)", "cryptography (<3.4)", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"]

+[[package]]
+name = "dash-auth"
+version = "2.0.0"
+description = "Dash Authorization Package."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "dash_auth-2.0.0-py3-none-any.whl", hash = "sha256:32e60a700937d2c59e983a17d9422752da0f4eaabfd7a8babcae1b4266b9a95f"},
+    {file = "dash_auth-2.0.0.tar.gz", hash = "sha256:d9eafb8f3e1f1580bcf90023f8a5915f8f44ee07d80e35490b32f3548da49cc5"},
+]
+
+[package.dependencies]
+dash = ">=1.1.1"
+
 [[package]]
 name = "dash-bootstrap-components"
 version = "1.5.0"
@@ -5803,9 +5817,11 @@ files = [
 ]

 [extras]
-ingest = ["selenium"]
+ingest = ["deutschland", "selenium"]
+processing = []
+web-server = ["dash", "dash-auth", "dash-bootstrap-components", "matplotlib", "seaborn"]

 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "2496706146d1d83ba9f22d7d4ddc9de7019803cc9c6ebeccb2372610ec1cf736"
+content-hash = "e6a72339834d20e81bd534e754e23ffe9427f69da658195060e5a671fbfb5cbe"
diff --git a/pyproject.toml b/pyproject.toml
index 104b700..9b3d6b6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -16,7 +16,7 @@ source = ["src"]

 [tool.mypy]
 disallow_untyped_defs = true
-exclude = [".ipynb_checkpoints", ".mypy_cache", ".mytest_cache", "build", "venv", ".venv", "Jupyter"]
+exclude = ["\\.ipynb_checkpoints", "\\.mypy_cache", "\\.mytest_cache", "build", "venv", "\\.venv", "Jupyter"]
 follow_imports = "silent"
 ignore_missing_imports = true
 install_types = true
@@ -35,15 +35,15 @@ readme = "README.md"
 version = "0.1.0"

 [tool.poetry.dependencies]
-SQLAlchemy = {version = "^1.4.49", extras = ["mypy"]}
 aenum = "^3.1.15"
+SQLAlchemy = "^1.4.49"
 cachetools = "^5.3.1"
 dash = "^2.13.0"
+dash-auth = "^2.0.0"
 dash-bootstrap-components = "^1.5.0"
 deutschland = {git = "https://github.com/TrisNol/deutschland.git", branch = "hotfix/python-3.11-support"}
 loguru = "^0.7.0"
 matplotlib = "^3.7.2"
-plotly = "^5.16.1"
 psycopg2-binary = "^2.9.7"
 pymongo = "^4.5.0"
 python = "^3.11"
@@ -54,7 +54,9 @@ tqdm = "^4.66.1"
 xmltodict = "^0.13.0"

 [tool.poetry.extras]
-ingest = ["selenium"]
+ingest = ["selenium", "deutschland"]
+processing = []
+web-server = ["dash", "dash-auth", "dash-bootstrap-components", "matplotlib", "seaborn"]

 [tool.poetry.group.develop.dependencies]
 black = {extras = ["jupyter"], version = "^23.9.1"}
@@ -77,6 +79,7 @@ sphinxcontrib-mermaid = "^0.9.2"
 sphinxcontrib-napoleon = "^0.7"

 [tool.poetry.group.lint.dependencies]
+SQLAlchemy = {version = "^1.4.46", extras = ["mypy"]}
 black = "^23.9.1"
 loguru-mypy = "^0.0.4"
 mypy = "^1.5.1"
@@ -99,8 +102,8 @@ pytest-repeat = "^0.9.1"

 [tool.poetry.scripts]
 copy-sql = "aki_prj23_transparenzregister.utils.sql.copy_sql:copy_db_cli"
-data-transfer = "aki_prj23_transparenzregister.utils.data_transfer:transfer_data"
-reset-sql = "aki_prj23_transparenzregister.utils.sql.connector:reset_all_tables"
+data-transfer = {reference = "aki_prj23_transparenzregister.utils.data_transfer:transfer_data", extras = ["processing"], type = "console"}
+reset-sql = {reference = "aki_prj23_transparenzregister.utils.sql.connector:reset_all_tables", extras = ["processing"], type = "console"}

 [tool.ruff]
 exclude = [
@@ -127,7 +130,7 @@ exclude = [
     "venv"
 ]
 # Never enforce `E501` (line length violations).
-ignore = ["E501"]
+ignore = ["E501", "D105"]
 line-length = 88
 # Enable flake8-bugbear (`B`) rules.
 select = ["E", "F", "B", "I", "S", "RSE", "RET", "SLF", "SIM", "TID", "PD", "PL", "PLE", "PLR", "PLW", "NPY", "UP", "D", "N", "A", "C4", "T20", "PT"]
diff --git a/rebuild-and-start.bat b/rebuild-and-start.bat
new file mode 100644
index 0000000..97bf80b
--- /dev/null
+++ b/rebuild-and-start.bat
@@ -0,0 +1,4 @@
+poetry build
+docker network prune -f
+docker-compose -f local-docker-compose.yml build
+docker-compose -f local-docker-compose.yml up
diff --git a/src/aki_prj23_transparenzregister/config/config_providers.py b/src/aki_prj23_transparenzregister/config/config_providers.py
index 51a76fc..b6c3345 100644
--- a/src/aki_prj23_transparenzregister/config/config_providers.py
+++ b/src/aki_prj23_transparenzregister/config/config_providers.py
@@ -5,6 +5,7 @@ import errno
 import json
 import os
 from pathlib import Path
+from typing import Any

 from dotenv import load_dotenv

@@ -37,11 +38,28 @@ class ConfigProvider(metaclass=abc.ABCMeta):
             MongoConnection: Connection details
         """

+    def __str__(self) -> str:
+        connections = []
+        try:
+            pg_str = str(self.get_postgre_connection_string())
+        except KeyError:
+            pass
+        else:
+            connections.append(pg_str)
+        try:
+            mongo_str = str(self.get_mongo_connection_string())
+        except KeyError:
+            pass
+        else:
+            connections.append(mongo_str)
+
+        return f"Connection definition: {', '.join(connections)}"
+

 class JsonFileConfigProvider(ConfigProvider):
     """Config provider based on .json file."""

-    __data__: dict = {}
+    __data__: dict[str, dict[str, Any]] = {}

     def __init__(self, file_path: str | Path):
         """Constructor reading its data from a given .json file.
@@ -55,7 +73,7 @@ class JsonFileConfigProvider(ConfigProvider):
         """
         if not os.path.isfile(file_path):
             raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), file_path)
-        with open(file_path) as file:
+        with open(file_path, mode="r") as file:
             try:
                 data = json.loads(file.read())
                 self.__data__ = data
@@ -96,7 +114,7 @@ class JsonFileConfigProvider(ConfigProvider):
 class EnvironmentConfigProvider(ConfigProvider):
     """Config provider based on .json file."""

-    __data__: dict = {}
+    __data__: dict[str, Any] = {}

     def __init__(self, prefix: str = "PYTHON_"):
         """Reads secrets from local environment while also ingesting .env files if available.
diff --git a/src/aki_prj23_transparenzregister/config/config_template.py b/src/aki_prj23_transparenzregister/config/config_template.py
index 5884729..a8d8548 100644
--- a/src/aki_prj23_transparenzregister/config/config_template.py
+++ b/src/aki_prj23_transparenzregister/config/config_template.py
@@ -11,3 +11,6 @@ class PostgreConnectionString:
     host: str
     database: str
     port: int | None
+
+    def __str__(self) -> str:
+        return f"Postgre configuration: username: {self.username}, password {self.password}, host {self.host}:{self.port}, database {self.database}."
diff --git a/src/aki_prj23_transparenzregister/ui/company_finance_dash.py b/src/aki_prj23_transparenzregister/ui/company_finance_dash.py
index f81a78f..9a621d5 100644
--- a/src/aki_prj23_transparenzregister/ui/company_finance_dash.py
+++ b/src/aki_prj23_transparenzregister/ui/company_finance_dash.py
@@ -3,7 +3,10 @@
 import dash_bootstrap_components as dbc
 from dash import Dash, Input, Output, callback, html

-from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider
+from aki_prj23_transparenzregister.config.config_providers import (
+    EnvironmentConfigProvider,
+)
+from aki_prj23_transparenzregister.ui.protection import add_auth
 from aki_prj23_transparenzregister.ui.ui_elements import (
     create_company_header,
     create_company_stats,
@@ -13,9 +16,11 @@ from aki_prj23_transparenzregister.ui.ui_elements import (
     get_finance_data,
 )
 from aki_prj23_transparenzregister.utils.sql import connector
+from aki_prj23_transparenzregister.utils.sql.connector import init_db

 if __name__ == "__main__":
-    session = connector.get_session(JsonFileConfigProvider("./secrets.json"))
+    session = connector.get_session(EnvironmentConfigProvider())
+    init_db(session)
     company_df = get_company_data(session)
     finance_df = get_finance_data(session)
     options = company_df["company_name"].to_dict()
@@ -23,6 +28,7 @@ if __name__ == "__main__":
     app = Dash(
         __name__, external_stylesheets=[dbc.icons.BOOTSTRAP]
     )  # use dbc for icons
+    add_auth(app)
     app.title = "Company Finance Data"

     app.layout = html.Div(
@@ -75,4 +81,4 @@ if __name__ == "__main__":
             create_tabs(selected_finance_df),
         )

-    app.run_server(debug=True)
+    app.run_server(debug=False)
diff --git a/src/aki_prj23_transparenzregister/ui/company_stats_dash.py b/src/aki_prj23_transparenzregister/ui/company_stats_dash.py
index a48375c..44fd66e 100644
--- a/src/aki_prj23_transparenzregister/ui/company_stats_dash.py
+++ b/src/aki_prj23_transparenzregister/ui/company_stats_dash.py
@@ -3,18 +3,25 @@
 import pandas as pd
 from dash import Dash, Input, Output, callback, dash_table, dcc, html

-from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider
+from aki_prj23_transparenzregister.config.config_providers import (
+    EnvironmentConfigProvider,
+)
+from aki_prj23_transparenzregister.ui.protection import add_auth
 from aki_prj23_transparenzregister.utils.sql import entities
 from aki_prj23_transparenzregister.utils.sql.connector import (
     get_session,
+    init_db,
 )

 if __name__ == "__main__":
-    session = get_session(JsonFileConfigProvider("./secrets.json"))
+    session = get_session(EnvironmentConfigProvider())
+    init_db(session)
     query = session.query(entities.Company)
     companies_df: pd.DataFrame = pd.read_sql(str(query), session.bind)  # type: ignore

     app = Dash(__name__)
+    app.title = "Company stats Dashboard"
+    add_auth(app)

     app.layout = html.Div(
         [
@@ -41,4 +48,4 @@ if __name__ == "__main__":
             columns=[{"id": c, "name": c} for c in companies_df.columns],
         )

-    app.run(debug=True)
+    app.run(debug=False)
diff --git a/src/aki_prj23_transparenzregister/ui/protection.py b/src/aki_prj23_transparenzregister/ui/protection.py
new file mode 100644
index 0000000..5bd4509
--- /dev/null
+++ b/src/aki_prj23_transparenzregister/ui/protection.py
@@ -0,0 +1,19 @@
+"""Adds and imports basic auth."""
+import os
+
+import dash_auth
+from dash import Dash
+from loguru import logger
+
+
+def add_auth(app: Dash) -> None:
+    """Checks if password protection is configured and adds it to the given Dash app.
+
+    Args:
+        app: The app a basic auth should be added to.
+    """
+    if os.getenv("PYTHON_DASH_LOGIN_USERNAME") and os.getenv("PYTHON_DASH_LOGIN_PW"):
+        login = os.getenv("PYTHON_DASH_LOGIN_USERNAME")
+        pw = os.getenv("PYTHON_DASH_LOGIN_PW")
+        logger.info("Starting app in password protected mode!")
+        dash_auth.BasicAuth(app, {login: pw})
diff --git a/src/aki_prj23_transparenzregister/utils/mongo/connector.py b/src/aki_prj23_transparenzregister/utils/mongo/connector.py
index 18763cf..11d8b90 100644
--- a/src/aki_prj23_transparenzregister/utils/mongo/connector.py
+++ b/src/aki_prj23_transparenzregister/utils/mongo/connector.py
@@ -14,6 +14,9 @@ class MongoConnection:
     username: str | None
     password: str | None

+    def __str__(self) -> str:
+        return f"Mongo configuration: username: {self.username}, password {self.password}, host {self.hostname}:{self.port}, database {self.database}."
+
     def get_conn_string(self) -> str:
         """Transforms the information of the object to a MongoDB connection string.
diff --git a/src/aki_prj23_transparenzregister/utils/sql/__init__.py b/src/aki_prj23_transparenzregister/utils/sql/__init__.py
index 6a09042..c85f788 100644
--- a/src/aki_prj23_transparenzregister/utils/sql/__init__.py
+++ b/src/aki_prj23_transparenzregister/utils/sql/__init__.py
@@ -1 +1 @@
-"""PostgreSQL related modules."""
+"""SQL related modules."""
diff --git a/src/aki_prj23_transparenzregister/utils/sql/connector.py b/src/aki_prj23_transparenzregister/utils/sql/connector.py
index 922bc24..441ea9e 100644
--- a/src/aki_prj23_transparenzregister/utils/sql/connector.py
+++ b/src/aki_prj23_transparenzregister/utils/sql/connector.py
@@ -45,7 +45,7 @@ def get_sqlite_engine(connect_to: str) -> Engine:


 def get_session(
-    connect_to: JsonFileConfigProvider | str,
+    connect_to: ConfigProvider | str,
 ) -> Session:  # pragma: no cover
     """Creates a sql session.
@@ -60,13 +60,15 @@ def get_session(
         logger.debug(connect_to)
         connect_to = JsonFileConfigProvider(connect_to)
     if isinstance(connect_to, ConfigProvider):
-        engine = get_pg_engine(connect_to.get_postgre_connection_string())
+        pg_string = connect_to.get_postgre_connection_string()
+        logger.debug(str(connect_to))
+        engine = get_pg_engine(pg_string)
     elif isinstance(connect_to, str) and re.fullmatch(
         r"sqlite:\/{3}[A-Za-z].*", connect_to
     ):
         engine = get_sqlite_engine(connect_to)
-        logger.info(f"Connection to sqlite3 {connect_to}")
+        logger.debug(f"Connection to sqlite3 {connect_to}")
     else:
         raise TypeError("No valid connection is defined!")
     return sessionmaker(autocommit=False, autoflush=False, bind=engine)()
diff --git a/tests/config/config_providers_test.py b/tests/config/config_providers_test.py
index 60ebb3e..3a06f51 100644
--- a/tests/config/config_providers_test.py
+++ b/tests/config/config_providers_test.py
@@ -58,6 +58,13 @@ def test_json_provider_get_postgres() -> None:
     assert config.host == data["postgres"]["host"]
     assert config.database == data["postgres"]["database"]
     assert config.port == data["postgres"]["port"]
+    assert isinstance(str(config), str)
+    assert "Mongo" not in str(config)
+    assert "Postgre" in str(config)
+
+    assert isinstance(str(JsonFileConfigProvider("someWhere")), str)
+    assert str(config) in str(JsonFileConfigProvider("someWhere"))
+    assert "Mongo" not in str(JsonFileConfigProvider("someWhere"))


 def test_json_provider_get_mongo() -> None:
@@ -82,6 +89,14 @@ def test_json_provider_get_mongo() -> None:
     assert config.database == data["mongo"]["database"]
     assert config.port == data["mongo"]["port"]

+    assert isinstance(str(config), str)
+    assert "Mongo" in str(config)
+    assert "Postgre" not in str(config)
+
+    assert isinstance(str(JsonFileConfigProvider("someWhere")), str)
+    assert str(config) in str(JsonFileConfigProvider("someWhere"))
+    assert "Postgre" not in str(JsonFileConfigProvider("someWhere"))
+

 def test_env_provider_constructor() -> None:
     with patch("aki_prj23_transparenzregister.config.config_providers.os") as mock_os:
diff --git a/tests/ui/protection_test.py b/tests/ui/protection_test.py
new file mode 100644
index 0000000..bec8979
--- /dev/null
+++ b/tests/ui/protection_test.py
@@ -0,0 +1,26 @@
+"""Tests the addition of an authentication framework."""
+from _pytest.monkeypatch import MonkeyPatch
+from dash import Dash
+from pytest_mock import MockerFixture
+
+from aki_prj23_transparenzregister.ui import protection
+
+
+def test_no_auth(monkeypatch: MonkeyPatch, mocker: MockerFixture) -> None:
+    """Tests if no authentication is added."""
+    monkeypatch.setenv("PYTHON_DASH_LOGIN_USERNAME", "")
+    monkeypatch.setenv("PYTHON_DASH_LOGIN_PW", "")
+
+    basic_auth_mocker = mocker.spy(protection.dash_auth, "BasicAuth")
+    protection.add_auth(Dash())
+    basic_auth_mocker.assert_not_called()
+
+
+def test_add_auth(monkeypatch: MonkeyPatch, mocker: MockerFixture) -> None:
+    """Tests if an authentication is added."""
+    monkeypatch.setenv("PYTHON_DASH_LOGIN_USERNAME", "some-login")
+    monkeypatch.setenv("PYTHON_DASH_LOGIN_PW", "some-pw")
+
+    basic_auth_mocker = mocker.spy(protection.dash_auth, "BasicAuth")
+    protection.add_auth(Dash())
+    basic_auth_mocker.assert_called_once()
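Taken together, the dashboard entry points in this diff now share one startup pattern: read configuration from `PYTHON_*` environment variables, initialise the database, and wrap the Dash app in optional basic auth. The sketch below restates that pattern in one place; it is illustrative only (not part of the diff) and assumes the package is installed with the `web-server` extra.

```python
"""Minimal sketch of the startup pattern used by the dashboards in this diff."""
from dash import Dash

from aki_prj23_transparenzregister.config.config_providers import EnvironmentConfigProvider
from aki_prj23_transparenzregister.ui.protection import add_auth
from aki_prj23_transparenzregister.utils.sql.connector import get_session, init_db

if __name__ == "__main__":
    # Configuration is taken from PYTHON_* environment variables (see the README/.env example).
    session = get_session(EnvironmentConfigProvider())
    init_db(session)

    app = Dash(__name__)
    # Basic auth is enabled only if PYTHON_DASH_LOGIN_USERNAME and PYTHON_DASH_LOGIN_PW are set.
    add_auth(app)
    app.run(debug=False)
```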