Mirror of https://github.com/fhswf/aki_prj23_transparenzregister.git (synced 2025-04-20 23:02:54 +02:00)
Build a first set of Docker containers in the pipeline and place them in the GitHub registry (#142)

- added a Dockerfile for the three containers
- added a workflow step to build the containers and push them to the registry
- added a docker-compose.yaml that uses the built images
- added a docker compose file to build the images locally, plus a script for the prebuild steps
parent 5c8d20f4c2
commit 091e67de79
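The three images produced by this change end up in the GitHub Container Registry under the repository path used throughout this commit. A minimal sketch of pulling one of them, assuming the workflow has already run on the main branch (the tag names follow the workflow's TAG_OR_MAIN logic further below):

```
docker pull ghcr.io/fhswf/aki_prj23_transparenzregister/web-server:main
```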
.github/workflows/delete-branch.yaml (vendored, new file, +11)
@@ -0,0 +1,11 @@
+name: Branch Deleted
+on: delete
+jobs:
+  delete:
+    if: github.event.ref_type == 'branch'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Clean up
+        run: |
+          echo "Clean up for branch ${{ github.event.ref }}"
+          # uses: bots-house/ghcr-delete-image-action@v1.1.0
.github/workflows/documentation.yaml (vendored, 4 changed lines)
@@ -5,7 +5,7 @@ on:
     branches:
       - main
   pull_request:
-    types: [opened, reopened, synchronize]
+    types: [reopened, opened, synchronize]
 
 jobs:
   doc-build:
@@ -23,7 +23,7 @@ jobs:
         with:
           version: 1.4.2
           virtualenvs-create: false
-      - run: poetry install --only doc,root,develop
+      - run: poetry install --only doc,root,develop --all-extras
      - name: Doc-Build
        run: |
          cd documentations
.github/workflows/lint-actions.yaml (vendored, 3 changed lines)
@@ -37,7 +37,7 @@ jobs:
           version: 1.4.2
           virtualenvs-create: false
           virtualenvs-path: ~/local/share/virtualenvs
-      - run: poetry install --without develop,doc
+      - run: poetry install --without develop,doc --all-extras
       - name: Run mypy
         run: |
           mypy src tests
@@ -69,7 +69,6 @@ jobs:
         uses: actions/checkout@v3
       - name: Install root, pip-licenses & pip-audit
         run: |
-          poetry install --only-root
           poetry run pip install pip-licenses pip-audit
       - name: Poetry export
         run: poetry export -f requirements.txt --output requirements.txt
.github/workflows/test-and-build-action.yaml (vendored, 92 changed lines)
@@ -1,9 +1,12 @@
 name: Test & Build
 
 on:
+  push:
   pull_request:
     types: [reopened, opened, synchronize]
-  push:
+  release:
+    types:
+      - created
 
 jobs:
   test:
@@ -27,7 +30,7 @@ jobs:
         with:
           path: ~/.local/share/virtualenvs
           key: venv
-      - run: poetry install --without develop,doc,lint
+      - run: poetry install --without develop,doc,lint --all-extras
       - name: Run test suite
         run: |
           poetry run pytest --junit-xml=unit-test-results.xml --cov-report "xml:coverage.xml" --cov=src tests/
@@ -85,7 +88,7 @@ jobs:
           version: 1.4.2
           virtualenvs-path: ~/local/share/virtualenvs
       - run: |
-          poetry install --only test
+          poetry install --only test --all-extras
       - uses: actions/download-artifact@v3
         with:
           name: code-coverage-report
@@ -99,9 +102,9 @@ jobs:
           path: htmlcov/
 
   build:
+    if: ${{ github.event_name == 'push' }}
     runs-on: ubuntu-latest
     needs: test
-    if: ${{ github.event_name == 'push' }}
     steps:
       - name: Setup Python
         uses: actions/setup-python@v4
@@ -120,10 +123,87 @@ jobs:
       - name: Check out repository code
         uses: actions/checkout@v3
       - run: |
-          poetry install --without develop,doc,lint,test
           poetry build
       - name: Archive builds
         uses: actions/upload-artifact@v3
         with:
-          name: builds
+          name: wheel-builds
           path: dist/
 
+  docker-build:
+    if: ${{ github.event_name == 'push' }}
+    runs-on: ubuntu-latest
+    needs: [build]
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v3
+
+      - uses: actions/download-artifact@v3
+        with:
+          name: wheel-builds
+          path: dist
+
+      - id: tag_or_main
+        run: |
+          if [ "$GITHUB_EVENT_NAME" = "release" ]; then
+            echo "TAG_OR_MAIN=$(echo $GITHUB_REF | awk -F / '{print $NF}')" >> $GITHUB_ENV
+            echo "LATEST=latest" >> $GITHUB_ENV
+          elif [ "$GITHUB_EVENT_NAME" = "push" ] && [ "$GITHUB_REF" = "refs/heads/main" ]; then
+            echo "TAG_OR_MAIN=main" >> $GITHUB_ENV
+            echo "LATEST=main" >> $GITHUB_ENV
+          else
+            echo "TAG_OR_MAIN=experimental" >> $GITHUB_ENV
+            echo "LATEST=experimental" >> $GITHUB_ENV
+          fi
+
+
+      - run: |
+          echo "Print ${{ env.TAG_OR_MAIN }}"
+
+      - name: Build All prior to push
+        run: |
+          docker build --target ingest .
+          docker build --target data-transformation .
+          docker build --target web-server .
+          docker image ls
+
+      - name: Exit workflow on pull request
+        if: ${{ github.event_name == 'pull_request' }}
+        run: exit 0
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@v2
+        with:
+          registry: ghcr.io
+          username: ${{ github.actor }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build and push ingest
+        uses: docker/build-push-action@v4
+        with:
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/ingest:${{ env.TAG_OR_MAIN }}
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/ingest:${{ env.LATEST }}
+          target: ingest
+          context: .
+
+      - name: Build and push data-transformation
+        uses: docker/build-push-action@v4
+        with:
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/data-transformation:${{ env.TAG_OR_MAIN }}
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/data-transformation:${{ env.LATEST }}
+          target: data-transformation
+          context: .
+
+      - name: Build and push web-server
+        uses: docker/build-push-action@v4
+        with:
+          push: true
+          tags: |
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/web-server:${{ env.TAG_OR_MAIN }}
+            ghcr.io/${{ github.repository_owner }}/${{ github.event.repository.name }}/web-server:${{ env.LATEST }}
+          target: web-server
+          context: .
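The tag derivation in the docker-build job can be traced by hand. A small sketch with a hypothetical release ref (the ref value is illustrative; the awk call is the one from the step above, which takes the last path segment of the ref):

```
GITHUB_EVENT_NAME=release
GITHUB_REF=refs/tags/v0.1.0                                        # hypothetical example value
if [ "$GITHUB_EVENT_NAME" = "release" ]; then
  echo "TAG_OR_MAIN=$(echo $GITHUB_REF | awk -F / '{print $NF}')"  # -> TAG_OR_MAIN=v0.1.0
  echo "LATEST=latest"
fi
```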
Dockerfile (new file, +30)
@@ -0,0 +1,30 @@
+FROM python:3.11-slim as base
+LABEL AUTHOR="AKI Projektseminar 23"
+
+ARG APP_HOME="transparenzregister"
+WORKDIR /${APP_HOME}/
+USER root
+RUN apt update -y && \
+    apt install git sqlite3 iputils-ping traceroute -y && \
+    rm -rf /var/lib/apt/lists/*
+
+COPY dist/*.whl dist/
+
+RUN pip install --find-links=dist aki-prj23-transparenzregister --no-cache-dir
+
+FROM base as ingest
+
+RUN pip install --find-links=dist aki-prj23-transparenzregister[ingest] --no-cache-dir && \
+    rm dist/ -R
+
+FROM base as data-transformation
+
+RUN pip install --find-links=dist aki-prj23-transparenzregister[processing] --no-cache-dir && \
+    rm dist/ -R
+
+FROM base as web-server
+
+RUN pip install --find-links=dist aki-prj23-transparenzregister[web-server] --no-cache-dir && \
+    rm dist/ -R
+
+CMD python -m aki_prj23_transparenzregister.ui.company_finance_dash
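Because every stage installs the package from wheels in dist/, the images can only be built after a prior poetry build. A minimal local sketch using the stage and image names defined in this commit:

```
poetry build                                                  # produces the dist/*.whl consumed by COPY dist/*.whl
docker build --target web-server -t web-server:local-build .
```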
@@ -54,11 +54,20 @@ PYTHON_POSTGRES_PASSWORD=postgres
 PYTHON_POSTGRES_HOST=localhost
 PYTHON_POSTGRES_DATABASE=postgres
 PYTHON_POSTGRES_PORT=5432
 
 PYTHON_MONGO_USERNAME=username
 PYTHON_MONGO_HOST=localhost
 PYTHON_MONGO_PASSWORD=password
 PYTHON_MONGO_PORT=27017
 PYTHON_MONGO_DATABASE=transparenzregister
+
+PYTHON_DASH_LOGIN_USERNAME=some-login-to-webgui
+PYTHON_DASH_LOGIN_PW=some-pw-to-login-to-webgui
+
+CR=ghcr.io/fhswf/aki_prj23_transparenzregister
+TAG=latest
+
+HTTP_PORT=80
 ```
 
 The prefix `PYTHON_` can be customized by setting a different `prefix` when constructing the ConfigProvider.
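A quick sketch for checking that the values above are picked up by the compose files: Docker Compose interpolates a `.env` file placed next to the compose file, and `config` prints the file with the `${...}` placeholders resolved.

```
docker compose config
```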
@@ -1,27 +1,75 @@
 version: '3.8'
 services:
-  postgres:
-    image: postgres
-    restart: always
+  ingest:
+    image: ${CR:-ghcr.io/fhswf/aki_prj23_transparenzregister}/ingest:${TAG:-latest}
+    depends_on:
+      - postgres
+      - mongodb
     environment:
-      POSTGRES_PASSWORD: postgres
-    volumes:
-      - postgres_data:/var/lib/postgresql/data
-    ports:
-      - 5432:5432
+      # mongo access
+      PYTHON_MONGO_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      PYTHON_MONGO_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      PYTHON_MONGO_HOST: ${PYTHON_MONGO_HOST:-mongodb}
+      PYTHON_MONGO_PORT: ${PYTHON_MONGO_PORT:-27017}
+      PYTHON_MONGO_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
 
   mongodb:
     image: mongo:4.4.6
-    container_name: mongodb
     restart: unless-stopped
     environment:
-      MONGO_INITDB_ROOT_USERNAME: username
-      MONGO_INITDB_ROOT_PASSWORD: password
-      MONGO_INITDB_DATABASE: transparenzregister
-    ports:
-      - 27017:27017
+      MONGO_INITDB_ROOT_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      MONGO_INITDB_ROOT_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      MONGO_INITDB_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
     volumes:
       - mongo_data:/data/db
+
+  postgres:
+    image: postgres:15.4
+    restart: always
+    environment:
+      POSTGRES_USER: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      POSTGRES_DB: ${PYTHON_POSTGRES_DATABASE:-db}
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+
+  data-transformation:
+    image: ${CR:-ghcr.io/fhswf/aki_prj23_transparenzregister}/data-transformation:${TAG:-latest}
+    depends_on:
+      - postgres
+      - mongodb
+    environment:
+      # postgres access
+      PYTHON_POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      PYTHON_POSTGRES_USERNAME: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      PYTHON_POSTGRES_HOST: ${PYTHON_POSTGRES_HOST:-postgres}
+      PYTHON_POSTGRES_DATABASE: ${PYTHON_POSTGRES_DATABASE:-db}
+      PYTHON_POSTGRES_PORT: ${PYTHON_POSTGRES_PORT:-5432}
+      # mongo access
+      PYTHON_MONGO_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      PYTHON_MONGO_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      PYTHON_MONGO_HOST: ${PYTHON_MONGO_HOST:-mongodb}
+      PYTHON_MONGO_PORT: ${PYTHON_MONGO_PORT:-27017}
+      PYTHON_MONGO_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+
+  web-server:
+    image: ${CR:-ghcr.io/fhswf/aki_prj23_transparenzregister}/web-server:${TAG:-latest}
+    depends_on:
+      - postgres
+    ports:
+      - ${HTTP_PORT}:${HTTP_PORT}
+    environment:
+      PYTHON_POSTGRES_USERNAME: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      PYTHON_POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      PYTHON_POSTGRES_HOST: ${PYTHON_POSTGRES_HOST:-postgres}
+      PYTHON_POSTGRES_DATABASE: ${PYTHON_POSTGRES_DATABASE:-db}
+      PYTHON_POSTGRES_PORT: ${PYTHON_POSTGRES_PORT:-5432}
+      PYTHON_DASH_LOGIN_PW: ${PYTHON_DASH_LOGIN_PW}
+      PYTHON_DASH_LOGIN_USERNAME: ${PYTHON_DASH_LOGIN_USERNAME}
+      PORT: ${HTTP_PORT}
+      HOST: 0.0.0.0
+
 volumes:
   postgres_data:
   mongo_data:
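A minimal sketch of starting this stack from the published images. The two password variables carry no default (`:?error`), and HTTP_PORT has no fallback either, so all three must be provided; the values here are placeholders:

```
export PYTHON_POSTGRES_PASSWORD=change-me   # placeholder
export PYTHON_MONGO_PASSWORD=change-me      # placeholder
export HTTP_PORT=80
docker compose pull
docker compose up -d
```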
local-docker-compose.yml (new file, +101)
@@ -0,0 +1,101 @@
+version: '3.8'
+
+name: local-aki_prj23_transparenzregister
+
+services:
+
+  ingest:
+    image: ingestor:local-build
+    build:
+      context: .
+      target: ingest
+    environment:
+      # mongo access
+      PYTHON_MONGO_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      PYTHON_MONGO_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      PYTHON_MONGO_HOST: ${PYTHON_MONGO_HOST:-mongodb}
+      PYTHON_MONGO_PORT: ${PYTHON_MONGO_PORT:-27017}
+      PYTHON_MONGO_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+
+  mongodb:
+    image: mongo:4.4.6
+    restart: unless-stopped
+    environment:
+      MONGO_INITDB_ROOT_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      MONGO_INITDB_ROOT_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      MONGO_INITDB_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+    ports:
+      - ${PYTHON_MONGO_PORT}:27017
+    volumes:
+      - mongo_data:/data/db
+    deploy:
+      replicas: 1 # set to one to activate
+
+  postgres:
+    image: postgres:15.4
+    restart: always
+    environment:
+      POSTGRES_USER: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      POSTGRES_DB: ${PYTHON_POSTGRES_DATABASE:-db}
+    volumes:
+      - postgres_data:/var/lib/postgresql/data
+    ports:
+      - ${PYTHON_POSTGRES_PORT:-5432}:5432
+    deploy:
+      replicas: 1 # set to one to activate
+
+  data-transformation:
+    image: data-transformation:local-build
+    depends_on:
+      - postgres
+      - mongodb
+    build:
+      context: .
+      target: data-transformation
+    environment:
+      # postgres access
+      PYTHON_POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      PYTHON_POSTGRES_USERNAME: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      PYTHON_POSTGRES_HOST: ${PYTHON_POSTGRES_HOST:-postgres}
+      PYTHON_POSTGRES_DATABASE: ${PYTHON_POSTGRES_DATABASE:-db}
+      PYTHON_POSTGRES_PORT: ${PYTHON_POSTGRES_PORT:-5432}
+      # mongo access
+      PYTHON_MONGO_PASSWORD: ${PYTHON_MONGO_PASSWORD:?error}
+      PYTHON_MONGO_USERNAME: ${PYTHON_MONGO_USERNAME:-root}
+      PYTHON_MONGO_HOST: ${PYTHON_MONGO_HOST:-mongodb}
+      PYTHON_MONGO_PORT: ${PYTHON_MONGO_PORT:-27017}
+      PYTHON_MONGO_DATABASE: ${PYTHON_MONGO_DATABASE:-transparenzregister}
+
+  web-server:
+    image: web-server:local-build
+    build:
+      context: .
+      target: web-server
+    depends_on:
+      - postgres
+    ports:
+      - 8888:8888
+    environment:
+      PYTHON_POSTGRES_USERNAME: ${PYTHON_POSTGRES_USERNAME:-postgres}
+      PYTHON_POSTGRES_PASSWORD: ${PYTHON_POSTGRES_PASSWORD:?error}
+      PYTHON_POSTGRES_HOST: ${PYTHON_POSTGRES_HOST:-postgres}
+      PYTHON_POSTGRES_DATABASE: ${PYTHON_POSTGRES_DATABASE:-db}
+      PYTHON_POSTGRES_PORT: ${PYTHON_POSTGRES_PORT:-5432}
+      PYTHON_DASH_LOGIN_PW: ${PYTHON_DASH_LOGIN_PW}
+      PYTHON_DASH_LOGIN_USERNAME: ${PYTHON_DASH_LOGIN_USERNAME}
+      PORT: 8888
+      HOST: 0.0.0.0
+
+volumes:
+  postgres_data:
+  mongo_data:
+
+networks:
+  default:
+    driver: bridge
+    ipam:
+      driver: default
+      config:
+        - subnet: 192.168.7.0/24
+          gateway: 192.168.7.1
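With the build sections above, a single image can also be rebuilt in isolation; a sketch:

```
docker compose -f local-docker-compose.yml build web-server
```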
poetry.lock (generated, 22 changed lines)
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand.
+# This file is automatically @generated by Poetry 1.5.0 and should not be changed by hand.
 
 [[package]]
 name = "aenum"
@@ -875,6 +875,20 @@ dev = ["PyYAML (>=5.4.1)", "coloredlogs (>=15.0.1)", "fire (>=0.4.0)"]
 diskcache = ["diskcache (>=5.2.1)", "multiprocess (>=0.70.12)", "psutil (>=5.8.0)"]
 testing = ["beautifulsoup4 (>=4.8.2)", "cryptography (<3.4)", "dash-testing-stub (>=0.0.2)", "lxml (>=4.6.2)", "multiprocess (>=0.70.12)", "percy (>=2.0.2)", "psutil (>=5.8.0)", "pytest (>=6.0.2)", "requests[security] (>=2.21.0)", "selenium (>=3.141.0,<=4.2.0)", "waitress (>=1.4.4)"]
+
+[[package]]
+name = "dash-auth"
+version = "2.0.0"
+description = "Dash Authorization Package."
+optional = false
+python-versions = ">=3.6"
+files = [
+    {file = "dash_auth-2.0.0-py3-none-any.whl", hash = "sha256:32e60a700937d2c59e983a17d9422752da0f4eaabfd7a8babcae1b4266b9a95f"},
+    {file = "dash_auth-2.0.0.tar.gz", hash = "sha256:d9eafb8f3e1f1580bcf90023f8a5915f8f44ee07d80e35490b32f3548da49cc5"},
+]
+
+[package.dependencies]
+dash = ">=1.1.1"
 
 [[package]]
 name = "dash-bootstrap-components"
 version = "1.5.0"
@@ -5803,9 +5817,11 @@ files = [
 ]
 
 [extras]
-ingest = ["selenium"]
+ingest = ["deutschland", "selenium"]
+processing = []
+web-server = ["dash", "dash-auth", "dash-bootstrap-components", "matplotlib", "seaborn"]
 
 [metadata]
 lock-version = "2.0"
 python-versions = "^3.11"
-content-hash = "2496706146d1d83ba9f22d7d4ddc9de7019803cc9c6ebeccb2372610ec1cf736"
+content-hash = "e6a72339834d20e81bd534e754e23ffe9427f69da658195060e5a671fbfb5cbe"
@@ -16,7 +16,7 @@ source = ["src"]
 
 [tool.mypy]
 disallow_untyped_defs = true
-exclude = [".ipynb_checkpoints", ".mypy_cache", ".mytest_cache", "build", "venv", ".venv", "Jupyter"]
+exclude = ["\\.ipynb_checkpoints", "\\.mypy_cache", "\\.mytest_cache", "build", "venv", "\\.venv", "Jupyter"]
 follow_imports = "silent"
 ignore_missing_imports = true
 install_types = true
@@ -35,15 +35,15 @@ readme = "README.md"
 version = "0.1.0"
 
 [tool.poetry.dependencies]
-SQLAlchemy = {version = "^1.4.49", extras = ["mypy"]}
 aenum = "^3.1.15"
+SQLAlchemy = "^1.4.49"
 cachetools = "^5.3.1"
 dash = "^2.13.0"
+dash-auth = "^2.0.0"
 dash-bootstrap-components = "^1.5.0"
 deutschland = {git = "https://github.com/TrisNol/deutschland.git", branch = "hotfix/python-3.11-support"}
 loguru = "^0.7.0"
 matplotlib = "^3.7.2"
-plotly = "^5.16.1"
 psycopg2-binary = "^2.9.7"
 pymongo = "^4.5.0"
 python = "^3.11"
@@ -54,7 +54,9 @@ tqdm = "^4.66.1"
 xmltodict = "^0.13.0"
 
 [tool.poetry.extras]
-ingest = ["selenium"]
+ingest = ["selenium", "deutschland"]
+processing = []
+web-server = ["dash", "dash-auth", "dash-bootstrap-components", "matplotlib", "seaborn"]
 
 [tool.poetry.group.develop.dependencies]
 black = {extras = ["jupyter"], version = "^23.9.1"}
@@ -77,6 +79,7 @@ sphinxcontrib-mermaid = "^0.9.2"
 sphinxcontrib-napoleon = "^0.7"
 
 [tool.poetry.group.lint.dependencies]
+SQLAlchemy = {version = "^1.4.46", extras = ["mypy"]}
 black = "^23.9.1"
 loguru-mypy = "^0.0.4"
 mypy = "^1.5.1"
@@ -99,8 +102,8 @@ pytest-repeat = "^0.9.1"
 
 [tool.poetry.scripts]
 copy-sql = "aki_prj23_transparenzregister.utils.sql.copy_sql:copy_db_cli"
-data-transfer = "aki_prj23_transparenzregister.utils.data_transfer:transfer_data"
-reset-sql = "aki_prj23_transparenzregister.utils.sql.connector:reset_all_tables"
+data-transfer = {reference = "aki_prj23_transparenzregister.utils.data_transfer:transfer_data", extras = ["processing"], type = "console"}
+reset-sql = {reference = "aki_prj23_transparenzregister.utils.sql.connector:reset_all_tables", extras = ["processing"], type = "console"}
 
 [tool.ruff]
 exclude = [
@@ -127,7 +130,7 @@ exclude = [
     "venv"
 ]
 # Never enforce `E501` (line length violations).
-ignore = ["E501"]
+ignore = ["E501", "D105"]
 line-length = 88
 # Enable flake8-bugbear (`B`) rules.
 select = ["E", "F", "B", "I", "S", "RSE", "RET", "SLF", "SIM", "TID", "PD", "PL", "PLE", "PLR", "PLW", "NPY", "UP", "D", "N", "A", "C4", "T20", "PT"]
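With the dependencies split into extras like this, a plain install no longer pulls the optional packages; the CI jobs above therefore pass --all-extras, and a single component can be selected explicitly. A sketch:

```
poetry install --all-extras            # everything, as used in the CI jobs
poetry install --extras web-server     # only the dash/web dependencies
```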
rebuild-and-start.bat (new file, +4)
@@ -0,0 +1,4 @@
+poetry build
+docker network prune -f
+docker-compose -f local-docker-compose.yml build
+docker-compose -f local-docker-compose.yml up
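The batch file is Windows-only; an equivalent sketch for a POSIX shell, under the same assumptions (Poetry and Docker Compose installed, run from the repository root):

```
#!/bin/sh
poetry build
docker network prune -f
docker-compose -f local-docker-compose.yml build
docker-compose -f local-docker-compose.yml up
```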
@@ -5,6 +5,7 @@ import errno
 import json
 import os
 from pathlib import Path
+from typing import Any
 
 from dotenv import load_dotenv
 
@@ -37,11 +38,28 @@ class ConfigProvider(metaclass=abc.ABCMeta):
             MongoConnection: Connection details
         """
 
+    def __str__(self) -> str:
+        connections = []
+        try:
+            pg_str = str(self.get_postgre_connection_string())
+        except KeyError:
+            pass
+        else:
+            connections.append(pg_str)
+        try:
+            mongo_str = str(self.get_mongo_connection_string())
+        except KeyError:
+            pass
+        else:
+            connections.append(mongo_str)
+
+        return f"Connection definition: {', '.join(connections)}"
+
+
 class JsonFileConfigProvider(ConfigProvider):
     """Config provider based on .json file."""
 
-    __data__: dict = {}
+    __data__: dict[str, dict[str, Any]] = {}
 
     def __init__(self, file_path: str | Path):
         """Constructor reading its data from a given .json file.
@@ -55,7 +73,7 @@ class JsonFileConfigProvider(ConfigProvider):
         """
         if not os.path.isfile(file_path):
             raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), file_path)
-        with open(file_path) as file:
+        with open(file_path, mode="r") as file:
             try:
                 data = json.loads(file.read())
                 self.__data__ = data
@@ -96,7 +114,7 @@ class JsonFileConfigProvider(ConfigProvider):
 class EnvironmentConfigProvider(ConfigProvider):
     """Config provider based on .json file."""
 
-    __data__: dict = {}
+    __data__: dict[str, Any] = {}
 
     def __init__(self, prefix: str = "PYTHON_"):
         """Reads secrets from local environment while also ingesting .env files if available.
@@ -11,3 +11,6 @@ class PostgreConnectionString:
     host: str
     database: str
     port: int | None
+
+    def __str__(self) -> str:
+        return f"Postgre configuration: username: {self.username}, password {self.password}, host {self.host}:{self.port}, database {self.database}."
@@ -3,7 +3,10 @@
 import dash_bootstrap_components as dbc
 from dash import Dash, Input, Output, callback, html
 
-from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider
+from aki_prj23_transparenzregister.config.config_providers import (
+    EnvironmentConfigProvider,
+)
+from aki_prj23_transparenzregister.ui.protection import add_auth
 from aki_prj23_transparenzregister.ui.ui_elements import (
     create_company_header,
     create_company_stats,
@@ -13,9 +16,11 @@ from aki_prj23_transparenzregister.ui.ui_elements import (
     get_finance_data,
 )
 from aki_prj23_transparenzregister.utils.sql import connector
+from aki_prj23_transparenzregister.utils.sql.connector import init_db
 
 if __name__ == "__main__":
-    session = connector.get_session(JsonFileConfigProvider("./secrets.json"))
+    session = connector.get_session(EnvironmentConfigProvider())
+    init_db(session)
     company_df = get_company_data(session)
     finance_df = get_finance_data(session)
     options = company_df["company_name"].to_dict()
@@ -23,6 +28,7 @@ if __name__ == "__main__":
     app = Dash(
         __name__, external_stylesheets=[dbc.icons.BOOTSTRAP]
     )  # use dbc for icons
+    add_auth(app)
     app.title = "Company Finance Data"
 
     app.layout = html.Div(
@@ -75,4 +81,4 @@ if __name__ == "__main__":
             create_tabs(selected_finance_df),
         )
 
-    app.run_server(debug=True)
+    app.run_server(debug=False)
@@ -3,18 +3,25 @@
 import pandas as pd
 from dash import Dash, Input, Output, callback, dash_table, dcc, html
 
-from aki_prj23_transparenzregister.config.config_providers import JsonFileConfigProvider
+from aki_prj23_transparenzregister.config.config_providers import (
+    EnvironmentConfigProvider,
+)
+from aki_prj23_transparenzregister.ui.protection import add_auth
 from aki_prj23_transparenzregister.utils.sql import entities
 from aki_prj23_transparenzregister.utils.sql.connector import (
     get_session,
+    init_db,
 )
 
 if __name__ == "__main__":
-    session = get_session(JsonFileConfigProvider("./secrets.json"))
+    session = get_session(EnvironmentConfigProvider())
+    init_db(session)
     query = session.query(entities.Company)
 
     companies_df: pd.DataFrame = pd.read_sql(str(query), session.bind)  # type: ignore
     app = Dash(__name__)
+    app.title = "Company stats Dashboard"
+    add_auth(app)
 
     app.layout = html.Div(
         [
@@ -41,4 +48,4 @@ if __name__ == "__main__":
                 columns=[{"id": c, "name": c} for c in companies_df.columns],
             )
 
-    app.run(debug=True)
+    app.run(debug=False)
src/aki_prj23_transparenzregister/ui/protection.py (new file, +19)
@@ -0,0 +1,19 @@
+"""Adds and imports basic auth."""
+import os
+
+import dash_auth
+from dash import Dash
+from loguru import logger
+
+
+def add_auth(app: Dash) -> None:
+    """Checks if there is a password protection configured and adds it to a given dash app.
+
+    Args:
+        app: The app a basic auth should be added to.
+    """
+    if os.getenv("PYTHON_DASH_LOGIN_USERNAME") and os.getenv("PYTHON_DASH_LOGIN_PW"):
+        login = os.getenv("PYTHON_DASH_LOGIN_USERNAME")
+        pw = os.getenv("PYTHON_DASH_LOGIN_PW")
+        logger.info("Staring app in password protected mode!")
+        dash_auth.BasicAuth(app, {login: pw})
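add_auth only activates when both variables are non-empty, so a password-protected local run is a matter of exporting them before starting one of the dashboards. A sketch with the placeholder credentials from the README snippet; it also assumes the package is installed and a reachable Postgres is configured via the PYTHON_POSTGRES_* variables:

```
export PYTHON_DASH_LOGIN_USERNAME=some-login-to-webgui   # placeholder
export PYTHON_DASH_LOGIN_PW=some-pw-to-login-to-webgui   # placeholder
python -m aki_prj23_transparenzregister.ui.company_finance_dash
```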
@@ -14,6 +14,9 @@ class MongoConnection:
     username: str | None
     password: str | None
 
+    def __str__(self) -> str:
+        return f"Mongo configuration: username: {self.username}, password {self.password}, host {self.hostname}:{self.port}, database {self.database}."
+
     def get_conn_string(self) -> str:
         """Transforms the information of the object to a MongoDB connection string.
 
@@ -1 +1 @@
-"""PostgreSQL related modules."""
+"""SQL related modules."""
@@ -45,7 +45,7 @@ def get_sqlite_engine(connect_to: str) -> Engine:
 
 
 def get_session(
-    connect_to: JsonFileConfigProvider | str,
+    connect_to: ConfigProvider | str,
 ) -> Session:  # pragma: no cover
     """Creates a sql session.
 
@@ -60,13 +60,15 @@ def get_session(
         logger.debug(connect_to)
         connect_to = JsonFileConfigProvider(connect_to)
     if isinstance(connect_to, ConfigProvider):
-        engine = get_pg_engine(connect_to.get_postgre_connection_string())
+        pg_string = connect_to.get_postgre_connection_string()
+        logger.debug(str(connect_to))
+        engine = get_pg_engine(pg_string)
+
     elif isinstance(connect_to, str) and re.fullmatch(
         r"sqlite:\/{3}[A-Za-z].*", connect_to
     ):
         engine = get_sqlite_engine(connect_to)
-        logger.info(f"Connection to sqlite3 {connect_to}")
+        logger.debug(f"Connection to sqlite3 {connect_to}")
     else:
         raise TypeError("No valid connection is defined!")
     return sessionmaker(autocommit=False, autoflush=False, bind=engine)()
@@ -58,6 +58,13 @@ def test_json_provider_get_postgres() -> None:
     assert config.host == data["postgres"]["host"]
     assert config.database == data["postgres"]["database"]
     assert config.port == data["postgres"]["port"]
+    assert isinstance(str(config), str)
+    assert "Mongo" not in str(config)
+    assert "Postgre" in str(config)
+
+    assert isinstance(str(JsonFileConfigProvider("someWhere")), str)
+    assert str(config) in str(JsonFileConfigProvider("someWhere"))
+    assert "Mongo" not in str(JsonFileConfigProvider("someWhere"))
 
 
 def test_json_provider_get_mongo() -> None:
@@ -82,6 +89,14 @@ def test_json_provider_get_mongo() -> None:
     assert config.database == data["mongo"]["database"]
     assert config.port == data["mongo"]["port"]
 
+    assert isinstance(str(config), str)
+    assert "Mongo" in str(config)
+    assert "Postgre" not in str(config)
+
+    assert isinstance(str(JsonFileConfigProvider("someWhere")), str)
+    assert str(config) in str(JsonFileConfigProvider("someWhere"))
+    assert "Postgre" not in str(JsonFileConfigProvider("someWhere"))
+
 
 def test_env_provider_constructor() -> None:
     with patch("aki_prj23_transparenzregister.config.config_providers.os") as mock_os:
tests/ui/protection_test.py (new file, +26)
@@ -0,0 +1,26 @@
+"""Tests the addition of an authentication framework."""
+from _pytest.monkeypatch import MonkeyPatch
+from dash import Dash
+from pytest_mock import MockerFixture
+
+from aki_prj23_transparenzregister.ui import protection
+
+
+def test_no_auth(monkeypatch: MonkeyPatch, mocker: MockerFixture) -> None:
+    """Tests if no authentication is added."""
+    monkeypatch.setenv("PYTHON_DASH_LOGIN_USERNAME", "")
+    monkeypatch.setenv("PYTHON_DASH_LOGIN_PW", "")
+
+    basic_auth_mocker = mocker.spy(protection.dash_auth, "BasicAuth")
+    protection.add_auth(Dash())
+    basic_auth_mocker.assert_not_called()
+
+
+def test_add_auth(monkeypatch: MonkeyPatch, mocker: MockerFixture) -> None:
+    """Tests if an authentication is added."""
+    monkeypatch.setenv("PYTHON_DASH_LOGIN_USERNAME", "some-login")
+    monkeypatch.setenv("PYTHON_DASH_LOGIN_PW", "some-pw")
+
+    basic_auth_mocker = mocker.spy(protection.dash_auth, "BasicAuth")
+    protection.add_auth(Dash())
+    basic_auth_mocker.assert_called_once()
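These tests rely on pytest-mock's spy helper; a sketch of running just this file locally, assuming the test dependencies are installed (e.g. via poetry install --only test --all-extras as in the workflow above):

```
poetry run pytest tests/ui/protection_test.py
```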