Merge branch 'main' into feature/Verflechtungsanalyse
.github/workflows/documentation.yaml (vendored, new file: +61 lines)
@@ -0,0 +1,61 @@
+name: Documentation-Action
+
+on:
+  push:
+    branches:
+      - main
+  pull_request:
+    branches:
+      - '*'
+
+jobs:
+  doc-build:
+    name: Build
+    runs-on: ubuntu-latest
+    steps:
+      - run: sudo apt install pandoc -y
+      - uses: actions/checkout@v3
+      - name: Setup Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.11
+      - name: Install and configure Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.4.2
+          virtualenvs-create: false
+      - run: poetry install --only doc,root,develop
+      - name: Doc-Build
+        run: |
+          cd documentations
+          sphinx-apidoc -o . ../src/aki_prj23_transparenzregister -feP
+          make html
+      - name: Package artifact
+        uses: actions/upload-pages-artifact@v1
+        with:
+          path: documentations/_build/html/
+
+  doc-deploy:
+    name: Deployment
+    runs-on: ubuntu-latest
+    needs: doc-build
+    permissions:
+      pages: write
+      id-token: write
+    concurrency:
+      group: pages
+      cancel-in-progress: false
+    if: github.ref == 'refs/heads/main'
+    environment:
+      name: github-pages
+      url: ${{ steps.deployment.outputs.page_url }}
+    steps:
+      - run: echo "Deployment URL = ${{ steps.deployment.outputs.page_url }}"
+      - uses: actions/download-artifact@v3
+        with:
+          name: github-pages
+      - name: Deploy to GitHub Pages
+        id: deployment
+        uses: actions/deploy-pages@v2
+        with:
+          artifact_name: github-pages
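The Doc-Build job above is three shell steps plus dependency setup; for local debugging the same sequence can be scripted. A minimal sketch, assuming poetry and pandoc are already installed and the repository layout matches the workflow:

# Local reproduction of the Doc-Build job (sketch; assumes poetry and
# pandoc are on PATH and the repo layout matches the workflow above).
import subprocess

def build_docs() -> None:
    # Same dependency groups as the workflow step.
    subprocess.run(["poetry", "install", "--only", "doc,root,develop"], check=True)
    # Generate API stubs for the package, then build the HTML tree.
    subprocess.run(
        ["sphinx-apidoc", "-o", ".", "../src/aki_prj23_transparenzregister", "-feP"],
        cwd="documentations",
        check=True,
    )
    subprocess.run(["make", "html"], cwd="documentations", check=True)

if __name__ == "__main__":
    build_docs()
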
.github/workflows/lint-actions.yaml (vendored, new file: +82 lines)
@@ -0,0 +1,82 @@
+name: Python-Lint
+
+on:
+  push:
+    paths:
+      - '*.py'
+      - poetry.lock
+      - pyproject.toml
+  pull_request:
+
+jobs:
+  run-linters:
+    name: Black & mypy
+    runs-on: ubuntu-latest
+    steps:
+      - name: Set up python
+        id: setup-python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+      - name: Check out Git repository
+        uses: actions/checkout@v3
+      - name: Install and configure Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.4.2
+          virtualenvs-create: false
+          virtualenvs-path: ~/local/share/virtualenvs
+      - run: poetry install --without develop,doc,test
+      - name: Run linters
+        uses: wearerequired/lint-action@v2
+        with:
+          black: true
+          mypy: true
+
+  ruff:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: chartboost/ruff-action@v1
+        with:
+          version: 0.0.277
+
+  python-requirements:
+    name: Check Python Requirements
+    runs-on: ubuntu-latest
+    steps:
+      - name: Set up python
+        id: setup-python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.11'
+      - name: Install and configure Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.4.2
+          virtualenvs-path: ~/local/share/virtualenvs
+      - name: Cache pipenv
+        id: cache-pipenv
+        uses: actions/cache@v3
+        with:
+          path: ~/.local/share/virtualenvs
+          key: venv
+      - name: Check out Git repository
+        uses: actions/checkout@v3
+      - name: Poetry export
+        run: poetry export -f requirements.txt --output requirements.txt
+      - name: Check license
+        run: |
+          poetry run pip install pip-licenses
+          poetry run pip-licenses --format=markdown --output-file=license-summary.md
+      - name: Archive license summary
+        uses: actions/upload-artifact@v3
+        with:
+          name: license-summary
+          path: |
+            license-summary.md
+            requirements.txt
+      - name: Check requirements security with pip-audit
+        uses: pypa/gh-action-pip-audit@v1.0.0
+        with:
+          inputs: requirements.txt
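The python-requirements job above can likewise be reproduced locally. A sketch, under the assumption that poetry, pip-licenses and pip-audit are available on PATH:

# Local equivalent of the export / license / audit steps (sketch).
import subprocess

# Pin the dependency tree exactly as the lockfile resolves it.
subprocess.run(
    ["poetry", "export", "-f", "requirements.txt", "--output", "requirements.txt"],
    check=True,
)
# Dump a markdown license summary of everything installed.
subprocess.run(
    ["pip-licenses", "--format=markdown", "--output-file=license-summary.md"],
    check=True,
)
# pip-audit checks every pin against known vulnerability databases.
subprocess.run(["pip-audit", "-r", "requirements.txt"], check=True)
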
.github/workflows/test-and-build-action.yaml (vendored, new file: +131 lines)
@@ -0,0 +1,131 @@
+name: Test & Build
+
+on:
+  pull_request:
+  pull_request_target:
+  push:
+    paths:
+      - '*.py'
+      - poetry.lock
+      - pyproject.toml
+
+jobs:
+  test:
+    runs-on: ubuntu-latest
+    timeout-minutes: 10
+
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v3
+      - name: Setup Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.11
+      - name: Install and configure Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.4.2
+          virtualenvs-path: ~/local/share/virtualenvs
+      - id: cache-pipenv
+        uses: actions/cache@v3
+        with:
+          path: ~/.local/share/virtualenvs
+          key: venv
+      - run: poetry install --without develop,doc,lint
+      - name: Run test suite
+        run: |
+          poetry run pytest --junit-xml=unit-test-results.xml --cov-report "xml:coverage.xml" --cov=src tests/
+      - name: Archive code coverage results
+        uses: actions/upload-artifact@v3
+        with:
+          name: code-coverage-report
+          path: |
+            coverage.xml
+            .coverage
+      - name: Archive code coverage results
+        uses: actions/upload-artifact@v3
+        with:
+          name: test-report
+          path: |
+            unit-test-results.xml
+          if-no-files-found: error
+
+  coverage_pull_request:
+    if: ${{ github.event_name == 'pull_request' }}
+    runs-on: ubuntu-latest
+    needs: test
+    steps:
+      - uses: actions/download-artifact@v3
+        with:
+          name: code-coverage-report
+      - name: Get Cover
+        uses: orgoro/coverage@v3.1
+        with:
+          coverageFile: coverage.xml
+          token: ${{ secrets.GITHUB_TOKEN }}
+          thresholdAll: 0.8
+          thresholdNew: 0.8
+          thresholdModified: 0.8
+
+  coverage_report:
+    runs-on: ubuntu-latest
+    needs: test
+    steps:
+      - name: Check out repository code
+        uses: actions/checkout@v3
+      - name: Setup Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.11
+      - id: cache-pipenv
+        uses: actions/cache@v3
+        with:
+          path: ~/.local/share/virtualenvs
+          key: venv
+      - name: Install and configure Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.4.2
+          virtualenvs-path: ~/local/share/virtualenvs
+      - run: |
+          poetry install --only test
+      - uses: actions/download-artifact@v3
+        with:
+          name: code-coverage-report
+      - name: Make Coverage Report
+        run: |
+          poetry run coverage html
+      - name: Archive builds
+        uses: actions/upload-artifact@v3
+        with:
+          name: Coverage Report HTML
+          path: htmlcov/
+
+  build:
+    runs-on: ubuntu-latest
+    needs: test
+    steps:
+      - name: Setup Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: 3.11
+      - name: Install and configure Poetry
+        uses: snok/install-poetry@v1
+        with:
+          version: 1.4.2
+          virtualenvs-path: ~/local/share/virtualenvs
+      - id: cache-pipenv
+        uses: actions/cache@v3
+        with:
+          path: ~/.local/share/virtualenvs
+          key: venv
+      - name: Check out repository code
+        uses: actions/checkout@v3
+      - run: |
+          poetry install --without develop,doc,lint,test
+          poetry build
+      - name: Archive builds
+        uses: actions/upload-artifact@v3
+        with:
+          name: builds
+          path: dist/
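The coverage_pull_request job above gates pull requests at 0.8 line coverage via orgoro/coverage. A rough local equivalent that reads the Cobertura coverage.xml written by pytest-cov in the test job:

# Local check mirroring the thresholdAll: 0.8 gate (sketch; reads the
# Cobertura XML that pytest-cov wrote as coverage.xml).
import sys
import xml.etree.ElementTree as ET

THRESHOLD = 0.8

# The Cobertura root element carries the aggregate line-rate attribute.
line_rate = float(ET.parse("coverage.xml").getroot().attrib["line-rate"])
print(f"total line coverage: {line_rate:.1%}")
if line_rate < THRESHOLD:
    sys.exit(f"coverage {line_rate:.1%} is below the {THRESHOLD:.0%} gate")
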
.gitignore (vendored: +5 lines)
@@ -1,5 +1,6 @@
 # Temporary hosting of Deutschland/Bundesanzeiger library
 **/deutschland
+*secrets.json

 # Snyk
 .dccache
@@ -209,3 +210,7 @@ replay_pid*

 /handelsregister.db
 /handelsregister.png
+/documentations/_build/
+/documentations/aki_prj23_transparenzregister.*
+/documentations/modules.rst
+/unit-test-results.xml

.pre-commit-config.yaml
@@ -12,7 +12,7 @@ repos:
       - id: check-xml
       - id: check-ast
       - id: check-added-large-files
-        args: [--enforce-all]
+        args: [--enforce-all --maxkb=50000]
       - id: name-tests-test
       - id: detect-private-key
       - id: check-case-conflict
@@ -23,6 +23,13 @@ repos:
       - id: debug-statements
       - id: pretty-format-json
+
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    # Ruff version.
+    rev: v0.0.277
+    hooks:
+      - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]

   - repo: https://github.com/psf/black
     rev: 23.3.0
     hooks:
@@ -33,7 +40,7 @@ repos:


   - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
-    rev: v2.8.0
+    rev: v2.9.0
     hooks:
       - id: pretty-format-ini
         args: [--autofix]
@@ -44,56 +51,31 @@ repos:
         exclude: (^poetry.lock$)


-  - repo: https://github.com/domdfcoding/flake2lint
-    rev: v0.4.2
-    hooks:
-      - id: flake2lint
-
-  - repo: https://github.com/PyCQA/flake8
-    rev: 6.0.0
-    hooks:
-      - id: flake8
-        args: [--config=tox.ini]
-
   - repo: https://github.com/pre-commit/mirrors-mypy
-    rev: v1.2.0
+    rev: v1.4.1
     hooks:
       - id: mypy
+        additional_dependencies:
+          - pandas==2.*
+          - pandas-stubs==2.0.*
+          - types-requests

   - repo: https://github.com/frnmst/md-toc
     rev: 8.1.9
     hooks:
       - id: md-toc

-  - repo: https://gitlab.com/smop/pre-commit-hooks
-    rev: v1.0.0
-    hooks: []
-    # - id: check-poetry
+  - repo: https://github.com/python-poetry/poetry
+    rev: 1.5.0
+    hooks:
+      - id: poetry-check

   - repo: https://github.com/Lucas-C/pre-commit-hooks-java
     rev: 1.3.10
-    hooks: []
-    # - id: validate-html
-
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.3.2
     hooks:
-      - id: pyupgrade
-        args: [--py311-plus]
+      - id: validate-html

-  - repo: https://github.com/pylint-dev/pylint
-    rev: v3.0.0a6
-    hooks: []
-    # - id: pylint
-    #   args: [--disable=import-error]
-
-  - repo: https://github.com/MarcoGorelli/absolufy-imports
-    rev: v0.3.1
+  - repo: https://github.com/python-jsonschema/check-jsonschema
+    rev: 0.23.2
     hooks:
-      - id: absolufy-imports
-
-  - repo: https://github.com/pycqa/isort
-    rev: 5.12.0
-    hooks:
-      - id: isort
-        name: isort (python)
+      - id: check-github-workflows
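A side note on the new additional_dependencies of the mirrors-mypy hook above: pre-commit runs mypy in an isolated virtualenv, so stub packages have to be declared there for pandas and requests imports to be type-checked rather than treated as Any. A minimal illustration of what the stubs enable (hypothetical snippet, not code from the repository):

# With pandas-stubs installed in the hook environment, mypy can check
# pandas-heavy code like this instead of falling back to Any (illustrative).
import pandas as pd

def positive_share(scores: pd.DataFrame) -> float:
    # mypy now understands the boolean Series and its .mean() result.
    return float((scores["+"] > 0.5).mean())
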
.vscode/settings.json (vendored, new file: +3 lines)
@@ -0,0 +1,3 @@
+{
+    "files.eol": "\n"
+}
FinBert notebook (file path not shown):
@@ -2,7 +2,11 @@
  "cells": [
   {
    "cell_type": "markdown",
-   "metadata": {},
+   "metadata": {
+    "slideshow": {
+     "slide_type": "slide"
+    }
+   },
    "source": [
     "# FinBert\n",
     "\n",
@@ -19,6 +23,11 @@
   },
   {
    "cell_type": "markdown",
+   "metadata": {
+    "slideshow": {
+     "slide_type": "slide"
+    }
+   },
    "source": [
     "## Libraries\n",
     "\n",
@@ -31,23 +40,22 @@
     "* torchaudio\n",
     "* sentencepiece\n",
     "* sacremoses"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
   },
   {
    "cell_type": "code",
    "execution_count": 25,
    "metadata": {
     "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:08.554998Z",
-     "end_time": "2023-05-01T13:16:13.740927Z"
+     "end_time": "2023-05-01T13:16:13.740927Z",
+     "start_time": "2023-05-01T13:16:08.554998Z"
     },
-    "collapsed": false,
    "jupyter": {
     "outputs_hidden": false
    },
+    "slideshow": {
+     "slide_type": "skip"
+    },
    "tags": []
   },
   "outputs": [
@@ -108,26 +116,30 @@
   },
   {
    "cell_type": "markdown",
+   "metadata": {
+    "slideshow": {
+     "slide_type": "slide"
+    }
+   },
    "source": [
     "### Importing and creation of models and tokenizer"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
   },
   {
    "cell_type": "code",
    "execution_count": 26,
    "metadata": {
-    "collapsed": false,
+    "ExecuteTime": {
+     "end_time": "2023-05-01T13:16:15.121662Z",
+     "start_time": "2023-05-01T13:16:13.743921Z"
+    },
    "jupyter": {
     "outputs_hidden": false
    },
-    "tags": [],
-    "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:13.743921Z",
-     "end_time": "2023-05-01T13:16:15.121662Z"
-    }
+    "slideshow": {
+     "slide_type": "subslide"
+    },
+    "tags": []
   },
   "outputs": [],
   "source": [
@@ -145,30 +157,39 @@
   },
   {
    "cell_type": "markdown",
+   "metadata": {
+    "slideshow": {
+     "slide_type": "slide"
+    }
+   },
    "source": [
     "### Analyze a single sentiment"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
  {
   "cell_type": "code",
   "execution_count": 27,
   "metadata": {
-    "collapsed": false,
+    "ExecuteTime": {
+     "end_time": "2023-05-01T13:16:15.194193Z",
+     "start_time": "2023-05-01T13:16:15.122665Z"
+    },
    "jupyter": {
     "outputs_hidden": false
    },
-    "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:15.122665Z",
-     "end_time": "2023-05-01T13:16:15.194193Z"
-    }
+    "slideshow": {
+     "slide_type": "-"
+    }
   },
   "outputs": [
    {
     "data": {
-      "text/plain": "+ 0.034084\n0 0.932933\n- 0.032982\ndtype: float32"
+      "text/plain": [
+       "+ 0.034084\n",
+       "0 0.932933\n",
+       "- 0.032982\n",
+       "dtype: float32"
+      ]
    },
    "execution_count": 27,
    "metadata": {},
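For orientation, the cells in the hunks above amount to the standard transformers sequence-classification pattern. A self-contained sketch of scoring one headline; the model id ProsusAI/finbert is an assumption, since the hunks do not show which FinBert checkpoint the notebook loads:

# Single-sentence FinBert scoring, as in the "Analyze a single sentiment"
# cell (sketch; the ProsusAI/finbert checkpoint is assumed).
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("ProsusAI/finbert")
model = AutoModelForSequenceClassification.from_pretrained("ProsusAI/finbert")

inputs = tokenizer("Stocks rallied and the British pound gained.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
# Softmax over the three classes gives the +/0/- probabilities seen above.
print(torch.softmax(logits, dim=-1))
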
@@ -192,34 +213,29 @@
   },
   {
    "cell_type": "markdown",
+   "metadata": {
+    "slideshow": {
+     "slide_type": "slide"
+    }
+   },
    "source": [
     "### Creating test data"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
  {
   "cell_type": "code",
-   "execution_count": 28,
+   "execution_count": null,
   "metadata": {
-    "tags": [],
    "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:15.198186Z",
-     "end_time": "2023-05-01T13:16:15.208856Z"
-    }
+     "end_time": "2023-05-01T13:16:15.208856Z",
+     "start_time": "2023-05-01T13:16:15.198186Z"
+    },
+    "slideshow": {
+     "slide_type": "skip"
+    },
+    "tags": []
   },
-   "outputs": [
-    {
-     "data": {
-      "text/plain": " text lan\n0 Microsoft fails to hit profit expectations en\n1 Am Aktienmarkt überwieg weiter die Zuversicht,... de\n2 Stocks rallied and the British pound gained. en\n3 Meyer Burger bedient ab sofort australischen M... de\n4 Meyer Burger enters Australian market and exhi... en\n5 J&T Express Vietnam hilft lokalen Handwerksdör... de\n6 7 Experten empfehlen die Aktie zum Kauf, 1 Exp... de\n7 Microsoft aktie fällt. de\n8 Microsoft aktie steigt. de"
-     },
-     "execution_count": 28,
-     "metadata": {},
-     "output_type": "execute_result"
-    }
-   ],
+   "outputs": [],
   "source": [
    "text_df = pd.DataFrame(\n",
    " [\n",
@@ -248,44 +264,270 @@
    " {\"text\": \"Microsoft aktie fällt.\", \"lan\": \"de\"},\n",
    " {\"text\": \"Microsoft aktie steigt.\", \"lan\": \"de\"},\n",
    " ]\n",
-    ")\n",
+    ")"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 28,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-05-01T13:16:15.208856Z",
+     "start_time": "2023-05-01T13:16:15.198186Z"
+    },
+    "tags": []
+   },
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       " text lan\n",
+       "0 Microsoft fails to hit profit expectations en\n",
+       "1 Am Aktienmarkt überwieg weiter die Zuversicht,... de\n",
+       "2 Stocks rallied and the British pound gained. en\n",
+       "3 Meyer Burger bedient ab sofort australischen M... de\n",
+       "4 Meyer Burger enters Australian market and exhi... en\n",
+       "5 J&T Express Vietnam hilft lokalen Handwerksdör... de\n",
+       "6 7 Experten empfehlen die Aktie zum Kauf, 1 Exp... de\n",
+       "7 Microsoft aktie fällt. de\n",
+       "8 Microsoft aktie steigt. de"
+      ]
+     },
+     "execution_count": 28,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
    "text_df"
   ]
  },
  {
   "cell_type": "markdown",
-   "source": [],
-   "metadata": {
-    "collapsed": false
-   }
+   "metadata": {},
+   "source": []
  },
  {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "### Analyze multiple Sentiments"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
  {
   "cell_type": "code",
   "execution_count": 29,
   "metadata": {
-    "collapsed": false,
+    "ExecuteTime": {
+     "end_time": "2023-05-01T13:16:16.132009Z",
+     "start_time": "2023-05-01T13:16:15.211858Z"
+    },
    "jupyter": {
     "outputs_hidden": false
-    },
-    "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:15.211858Z",
-     "end_time": "2023-05-01T13:16:16.132009Z"
    }
   },
   "outputs": [
    {
     "data": {
-      "text/plain": " text lan + 0 \n0 Microsoft fails to hit profit expectations en 0.034084 0.932933 \\\n1 Am Aktienmarkt überwieg weiter die Zuversicht,... de 0.053528 0.027950 \n2 Stocks rallied and the British pound gained. en 0.898361 0.034474 \n3 Meyer Burger bedient ab sofort australischen M... de 0.116597 0.012790 \n4 Meyer Burger enters Australian market and exhi... en 0.187527 0.008846 \n5 J&T Express Vietnam hilft lokalen Handwerksdör... de 0.066277 0.020608 \n6 7 Experten empfehlen die Aktie zum Kauf, 1 Exp... de 0.050346 0.022004 \n7 Microsoft aktie fällt. de 0.066061 0.016440 \n8 Microsoft aktie steigt. de 0.041449 0.018471 \n\n - \n0 0.032982 \n1 0.918522 \n2 0.067165 \n3 0.870613 \n4 0.803627 \n5 0.913115 \n6 0.927650 \n7 0.917498 \n8 0.940080 ",
+      "text/plain": [
+       " text lan + 0 \n",
+       "0 Microsoft fails to hit profit expectations en 0.034084 0.932933 \\\n",
+       "1 Am Aktienmarkt überwieg weiter die Zuversicht,... de 0.053528 0.027950 \n",
+       "2 Stocks rallied and the British pound gained. en 0.898361 0.034474 \n",
+       "3 Meyer Burger bedient ab sofort australischen M... de 0.116597 0.012790 \n",
+       "4 Meyer Burger enters Australian market and exhi... en 0.187527 0.008846 \n",
+       "5 J&T Express Vietnam hilft lokalen Handwerksdör... de 0.066277 0.020608 \n",
+       "6 7 Experten empfehlen die Aktie zum Kauf, 1 Exp... de 0.050346 0.022004 \n",
+       "7 Microsoft aktie fällt. de 0.066061 0.016440 \n",
+       "8 Microsoft aktie steigt. de 0.041449 0.018471 \n",
+       "\n",
+       " - \n",
+       "0 0.032982 \n",
+       "1 0.918522 \n",
+       "2 0.067165 \n",
+       "3 0.870613 \n",
+       "4 0.803627 \n",
+       "5 0.913115 \n",
+       "6 0.927650 \n",
+       "7 0.917498 \n",
+       "8 0.940080 "
+      ]
    },
    "execution_count": 29,
    "metadata": {},
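The analyse_sentiments helper that produces the +/0/- columns above is defined outside the shown hunks. A hypothetical reimplementation consistent with that output, reusing the tokenizer/model names from the earlier sketch (all of it an assumption, not repository code):

# Hypothetical reimplementation of the analyse_sentiments helper: score
# every row of a text_df-style frame and append one column per label.
import pandas as pd
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("ProsusAI/finbert")  # assumed checkpoint
model = AutoModelForSequenceClassification.from_pretrained("ProsusAI/finbert")

def analyse_sentiments(df: pd.DataFrame) -> pd.DataFrame:
    batch = tokenizer(
        list(df["text"]), return_tensors="pt", padding=True, truncation=True
    )
    with torch.no_grad():
        probs = torch.softmax(model(**batch).logits, dim=-1)
    # Take the label order from the model config instead of hard-coding it.
    labels = [model.config.id2label[i] for i in range(probs.shape[1])]
    df[labels] = probs.numpy()
    return df
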
@@ -304,19 +546,18 @@
   },
   {
    "cell_type": "markdown",
+   "metadata": {},
    "source": [
     "## Conclusion about FinBert\n",
     "\n",
     "The current form of this model can't be used for the german language.\n",
     "It could be used if the text is translated beforehand. But it is questionable if that will work well.\n",
     "Another way would be to retrain the same model with translated text from this models' data. But I do not believe this to be feasible."
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
  {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "# Translating sentiments before analysing them with FinBert\n",
    "\n",
@@ -326,14 +567,17 @@
    "[Translator: Helsinki-NLP/opus-mt-de-en](https://huggingface.co/Helsinki-NLP/opus-mt-de-en)\n",
    "https://huggingface.co/docs/transformers/main/en/model_doc/marian#transformers.MarianMTModel\n",
    "\n"
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  },
  {
   "cell_type": "code",
   "execution_count": 30,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-05-01T13:16:19.308043Z",
+     "start_time": "2023-05-01T13:16:16.135009Z"
+    }
+   },
   "outputs": [],
   "source": [
    "from transformers import AutoTokenizer, AutoModelForSeq2SeqLM\n",
@@ -341,18 +585,17 @@
    "translation_tokenizer = AutoTokenizer.from_pretrained(\"Helsinki-NLP/opus-mt-de-en\")\n",
    "\n",
    "translation_model = AutoModelForSeq2SeqLM.from_pretrained(\"Helsinki-NLP/opus-mt-de-en\")"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:16.135009Z",
-     "end_time": "2023-05-01T13:16:19.308043Z"
-    }
-   }
+   ]
  },
  {
   "cell_type": "code",
   "execution_count": 31,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-05-01T13:16:19.928232Z",
+     "start_time": "2023-05-01T13:16:19.310046Z"
+    }
+   },
   "outputs": [
    {
     "name": "stderr",
@@ -364,7 +607,9 @@
   },
   {
    "data": {
-    "text/plain": "'J&T Express Vietnam helps local craft villages increase their reach.'"
+    "text/plain": [
+     "'J&T Express Vietnam helps local craft villages increase their reach.'"
+    ]
   },
   "execution_count": 31,
   "metadata": {},
@@ -385,18 +630,17 @@
    ")\n",
    "tf = translate_sentiment(headline)\n",
    "tf"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:19.310046Z",
-     "end_time": "2023-05-01T13:16:19.928232Z"
-    }
-   }
+   ]
  },
  {
   "cell_type": "code",
   "execution_count": 32,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-05-01T13:16:23.381261Z",
+     "start_time": "2023-05-01T13:16:19.933234Z"
+    }
+   },
   "outputs": [
    {
     "name": "stdout",
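The translation step in the cells above uses the Helsinki-NLP/opus-mt-de-en MarianMT checkpoint that the notebook names; condensed into a runnable sketch:

# German-to-English translation ahead of FinBert, matching the notebook's
# translation_tokenizer / translation_model cells above.
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

translation_tokenizer = AutoTokenizer.from_pretrained("Helsinki-NLP/opus-mt-de-en")
translation_model = AutoModelForSeq2SeqLM.from_pretrained("Helsinki-NLP/opus-mt-de-en")

batch = translation_tokenizer(
    ["Microsoft Aktie steigt."], return_tensors="pt", padding=True
)
tokens = translation_model.generate(**batch)
# The decoded English string is what then gets scored by FinBert.
print(translation_tokenizer.batch_decode(tokens, skip_special_tokens=True))
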
@@ -412,8 +656,112 @@
   },
   {
    "data": {
-    "text/plain": " lan orig \n0 en NaN \\\n1 de_translated Am Aktienmarkt überwieg weiter die Zuversicht,... \n2 en NaN \n3 de_translated Meyer Burger bedient ab sofort australischen M... \n4 en NaN \n5 de_translated J&T Express Vietnam hilft lokalen Handwerksdör... \n6 de_translated 7 Experten empfehlen die Aktie zum Kauf, 1 Exp... \n7 de_translated Microsoft aktie fällt. \n8 de_translated Microsoft aktie steigt. \n\n text \n0 Microsoft fails to hit profit expectations \n1 On the stock market, confidence continued to p... \n2 Stocks rallied and the British pound gained. \n3 Meyer Burger is now serving the Australian mar... \n4 Meyer Burger enters Australian market and exhi... \n5 J&T Express Vietnam helps local craft villages... \n6 7 experts recommend the stock for purchase, 1 ... \n7 Microsoft Aktie falls. \n8 Microsoft share is rising. ",
+    "text/plain": [
+     " lan orig \n",
+     "0 en NaN \\\n",
+     "1 de_translated Am Aktienmarkt überwieg weiter die Zuversicht,... \n",
+     "2 en NaN \n",
+     "3 de_translated Meyer Burger bedient ab sofort australischen M... \n",
+     "4 en NaN \n",
+     "5 de_translated J&T Express Vietnam hilft lokalen Handwerksdör... \n",
+     "6 de_translated 7 Experten empfehlen die Aktie zum Kauf, 1 Exp... \n",
+     "7 de_translated Microsoft aktie fällt. \n",
+     "8 de_translated Microsoft aktie steigt. \n",
+     "\n",
+     " text \n",
+     "0 Microsoft fails to hit profit expectations \n",
+     "1 On the stock market, confidence continued to p... \n",
+     "2 Stocks rallied and the British pound gained. \n",
+     "3 Meyer Burger is now serving the Australian mar... \n",
+     "4 Meyer Burger enters Australian market and exhi... \n",
+     "5 J&T Express Vietnam helps local craft villages... \n",
+     "6 7 experts recommend the stock for purchase, 1 ... \n",
+     "7 Microsoft Aktie falls. \n",
+     "8 Microsoft share is rising. "
+    ]
   },
   "execution_count": 32,
   "metadata": {},
@@ -443,23 +791,167 @@
    "\n",
    "translated_df = translate_sentiments(text_df.copy())\n",
    "translated_df"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:19.933234Z",
-     "end_time": "2023-05-01T13:16:23.381261Z"
-    }
-   }
+   ]
  },
  {
   "cell_type": "code",
   "execution_count": 33,
+   "metadata": {
+    "ExecuteTime": {
+     "end_time": "2023-05-01T13:16:24.076261Z",
+     "start_time": "2023-05-01T13:16:23.383269Z"
+    }
+   },
   "outputs": [
    {
     "data": {
-      "text/plain": " lan orig \n0 en NaN \\\n1 de_translated Am Aktienmarkt überwieg weiter die Zuversicht,... \n2 en NaN \n3 de_translated Meyer Burger bedient ab sofort australischen M... \n4 en NaN \n5 de_translated J&T Express Vietnam hilft lokalen Handwerksdör... \n6 de_translated 7 Experten empfehlen die Aktie zum Kauf, 1 Exp... \n7 de_translated Microsoft aktie fällt. \n8 de_translated Microsoft aktie steigt. \n\n text + 0 \n0 Microsoft fails to hit profit expectations 0.034084 0.932933 \\\n1 On the stock market, confidence continued to p... 0.919673 0.018426 \n2 Stocks rallied and the British pound gained. 0.898361 0.034474 \n3 Meyer Burger is now serving the Australian mar... 0.221019 0.006844 \n4 Meyer Burger enters Australian market and exhi... 0.187527 0.008846 \n5 J&T Express Vietnam helps local craft villages... 0.891114 0.007633 \n6 7 experts recommend the stock for purchase, 1 ... 0.040850 0.016722 \n7 Microsoft Aktie falls. 0.027456 0.889160 \n8 Microsoft share is rising. 0.952216 0.019054 \n\n - \n0 0.032982 \n1 0.061901 \n2 0.067165 \n3 0.772137 \n4 0.803627 \n5 0.101254 \n6 0.942427 \n7 0.083384 \n8 0.028730 ",
+      "text/plain": [
+       " lan orig \n",
+       "0 en NaN \\\n",
+       "1 de_translated Am Aktienmarkt überwieg weiter die Zuversicht,... \n",
+       "2 en NaN \n",
+       "3 de_translated Meyer Burger bedient ab sofort australischen M... \n",
+       "4 en NaN \n",
+       "5 de_translated J&T Express Vietnam hilft lokalen Handwerksdör... \n",
+       "6 de_translated 7 Experten empfehlen die Aktie zum Kauf, 1 Exp... \n",
+       "7 de_translated Microsoft aktie fällt. \n",
+       "8 de_translated Microsoft aktie steigt. \n",
+       "\n",
+       " text + 0 \n",
+       "0 Microsoft fails to hit profit expectations 0.034084 0.932933 \\\n",
+       "1 On the stock market, confidence continued to p... 0.919673 0.018426 \n",
+       "2 Stocks rallied and the British pound gained. 0.898361 0.034474 \n",
+       "3 Meyer Burger is now serving the Australian mar... 0.221019 0.006844 \n",
+       "4 Meyer Burger enters Australian market and exhi... 0.187527 0.008846 \n",
+       "5 J&T Express Vietnam helps local craft villages... 0.891114 0.007633 \n",
+       "6 7 experts recommend the stock for purchase, 1 ... 0.040850 0.016722 \n",
+       "7 Microsoft Aktie falls. 0.027456 0.889160 \n",
+       "8 Microsoft share is rising. 0.952216 0.019054 \n",
+       "\n",
+       " - \n",
+       "0 0.032982 \n",
+       "1 0.061901 \n",
+       "2 0.067165 \n",
+       "3 0.772137 \n",
+       "4 0.803627 \n",
+       "5 0.101254 \n",
+       "6 0.942427 \n",
+       "7 0.083384 \n",
+       "8 0.028730 "
+      ]
    },
    "execution_count": 33,
    "metadata": {},
@@ -469,30 +961,22 @@
   "source": [
    "sentiments = analyse_sentiments(translated_df)\n",
    "sentiments"
-   ],
-   "metadata": {
-    "collapsed": false,
-    "ExecuteTime": {
-     "start_time": "2023-05-01T13:16:23.383269Z",
-     "end_time": "2023-05-01T13:16:24.076261Z"
-    }
-   }
+   ]
  },
  {
   "cell_type": "markdown",
+   "metadata": {},
   "source": [
    "## Conclusion about a translated FinBert\n",
    "\n",
    "When translating a german text to english before using FinBert the results look much better and could be used for our project.\n",
    "The big problem is that it will take even more CPU.\n",
    "It should probably be combined with a language recognition and could be used to take multiple languages in since there are many variances of this translation model."
-   ],
-   "metadata": {
-    "collapsed": false
-   }
+   ]
  }
 ],
 "metadata": {
+  "celltoolbar": "Slideshow",
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
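The conclusion cell above suggests pairing the translator with language recognition. A sketch of that routing with the langdetect package, which is one possible library rather than the notebook's choice:

# Possible language-recognition front end for the translate-then-score
# pipeline (sketch; langdetect is an assumption, not the notebook's pick).
from langdetect import detect

def needs_translation(text: str) -> bool:
    # Route everything that is not already English through opus-mt first.
    return detect(text) != "en"

print(needs_translation("Microsoft aktie steigt."))  # True
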
@ -8,14 +8,6 @@
|
|||||||
"# Daten Extraktion aus dem Bundesanzeiger"
|
"# Daten Extraktion aus dem Bundesanzeiger"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
|
||||||
"attachments": {},
|
|
||||||
"cell_type": "markdown",
|
|
||||||
"metadata": {},
|
|
||||||
"source": [
|
|
||||||
"In order to run this notebooks, download the `deutschland` library source code from: [TrisNol/deutschland](https://github.com/TrisNol/deutschland/tree/feat/bundesanzeiger-raw-report) and place it in the `Jupyter/API-tests/Bundesanzeiger/deutschland` directory. Since the PR adding the required features to the main repo has not been completet as of of yet (see: [PR](https://github.com/bundesAPI/deutschland/pull/88)) we have to include it in another way..."
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"attachments": {},
|
"attachments": {},
|
||||||
"cell_type": "markdown",
|
"cell_type": "markdown",
|
||||||
@ -26,18 +18,9 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 5,
|
"execution_count": 32,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [
|
"outputs": [],
|
||||||
{
|
|
||||||
"name": "stderr",
|
|
||||||
"output_type": "stream",
|
|
||||||
"text": [
|
|
||||||
"C:\\Users\\trist\\AppData\\Roaming\\Python\\Python310\\site-packages\\requests\\__init__.py:102: RequestsDependencyWarning: urllib3 (1.26.12) or chardet (5.1.0)/charset_normalizer (2.0.12) doesn't match a supported version!\n",
|
|
||||||
" warnings.warn(\"urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported \"\n"
|
|
||||||
]
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
"source": [
|
||||||
"import pandas as pd\n",
|
"import pandas as pd\n",
|
||||||
"from deutschland.bundesanzeiger import Bundesanzeiger"
|
"from deutschland.bundesanzeiger import Bundesanzeiger"
|
||||||
@ -45,26 +28,28 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 6,
|
"execution_count": 33,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [
|
"outputs": [
|
||||||
{
|
{
|
||||||
"name": "stdout",
|
"name": "stdout",
|
||||||
"output_type": "stream",
|
"output_type": "stream",
|
||||||
"text": [
|
"text": [
|
||||||
"dict_keys(['040860c00ef9020cfb6db2f58a163256', '9eb401c5af2f0289b8207233bf852b81', 'b14d979ea9f42367d413589050bd04e5', 'ecb8c1011456ea0d40f87e850fc216bf', '3a7c6c1f1d1b89bf5ceb165f0ee88053', '03b2e6aac2f2da2c0c5e8f23de9caec4', 'a5f8dc87fa797e7d2f8fb88c49a23c36', '9a3a8a3e84290ee650cbccf32323b3d7', '6c0fcc20a58aaa18a9d13f35a51e3996', 'bf276d441c339e787e22385d2b69b277', '90a79d28f3c11a2122d2827d2bf6acda', '88c785ce3b3c580dcc285661c7790cca', 'd3064baa8246c3ed02e30b5038200edc', '5bf92eed2808b484c005409764b825b7', 'fece6303c991a280850be1900ff78f8f', '26b0624c60cdbf647f3d45f4917ec6ea', '9f98bee55f598908cca60b6a47e5d49d', '99267bb7474e6d1d5d9e091ba5ef3ee8', '102738ef4b91408ed043d84fe785b50b', '94711f3e509518d073e1760d97550347'])\n"
|
"dict_keys(['c1051233030a8e0232523052fd4a2310', '57d129e6fd7505d567fa13919e5e6bdd'])\n"
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"source": [
|
"source": [
|
||||||
"ba = Bundesanzeiger()\n",
|
"ba = Bundesanzeiger()\n",
|
||||||
"reports = ba.get_reports(\"Atos IT-Dienstleistung und Beratung GmbH\")\n",
|
"reports = ba.get_reports(\n",
|
||||||
|
" \"Volkswagen Economy Service Erdle Bernhard Erdle GmbH\"\n",
|
||||||
|
") # \"Atos IT-Dienstleistung und Beratung GmbH\")\n",
|
||||||
"print(reports.keys())"
|
"print(reports.keys())"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 7,
|
"execution_count": 34,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
@ -75,7 +60,7 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 8,
|
"execution_count": 35,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [
|
"outputs": [
|
||||||
{
|
{
|
||||||
@ -109,42 +94,18 @@
|
|||||||
" <tbody>\n",
|
" <tbody>\n",
|
||||||
" <tr>\n",
|
" <tr>\n",
|
||||||
" <th>0</th>\n",
|
" <th>0</th>\n",
|
||||||
" <td>2023-03-17</td>\n",
|
" <td>2023-05-25</td>\n",
|
||||||
" <td>Aufsichtsrat</td>\n",
|
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
" <td>Volkswagen Economy Service Erdle Bernhard Erdl...</td>\n",
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstl...</td>\n",
|
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nVolkswagen Economy Se...</td>\n",
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
||||||
" </tr>\n",
|
" </tr>\n",
|
||||||
" <tr>\n",
|
" <tr>\n",
|
||||||
" <th>1</th>\n",
|
" <th>1</th>\n",
|
||||||
" <td>2022-03-25</td>\n",
|
" <td>2023-05-24</td>\n",
|
||||||
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
" <td>Volkswagen Economy Service Erdle Bernhard Erdl...</td>\n",
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun...</td>\n",
|
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nVolkswagen Economy Se...</td>\n",
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>2</th>\n",
|
|
||||||
" <td>2021-03-11</td>\n",
|
|
||||||
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun...</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>3</th>\n",
|
|
||||||
" <td>2020-03-24</td>\n",
|
|
||||||
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtosIT-Dienstleistung...</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>4</th>\n",
|
|
||||||
" <td>2018-12-11</td>\n",
|
|
||||||
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun...</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
||||||
" </tr>\n",
|
" </tr>\n",
|
||||||
" </tbody>\n",
|
" </tbody>\n",
|
||||||
@ -153,35 +114,23 @@
|
|||||||
],
|
],
|
||||||
"text/plain": [
|
"text/plain": [
|
||||||
" date name \\\n",
|
" date name \\\n",
|
||||||
"0 2023-03-17 Aufsichtsrat \n",
|
"0 2023-05-25 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
||||||
"1 2022-03-25 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
"1 2023-05-24 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
||||||
"2 2021-03-11 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
|
||||||
"3 2020-03-24 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
|
||||||
"4 2018-12-11 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
|
||||||
"\n",
|
"\n",
|
||||||
" company \\\n",
|
" company \\\n",
|
||||||
"0 Atos IT-Dienstleistung und Beratung GmbH \n",
|
"0 Volkswagen Economy Service Erdle Bernhard Erdl... \n",
|
||||||
"1 Atos IT-Dienstleistung und Beratung GmbH \n",
|
"1 Volkswagen Economy Service Erdle Bernhard Erdl... \n",
|
||||||
"2 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"3 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"4 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"\n",
|
"\n",
|
||||||
" report \\\n",
|
" report \\\n",
|
||||||
"0 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstl... \n",
|
"0 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nVolkswagen Economy Se... \n",
|
||||||
"1 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun... \n",
|
"1 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nVolkswagen Economy Se... \n",
|
||||||
"2 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun... \n",
|
|
||||||
"3 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtosIT-Dienstleistung... \n",
|
|
||||||
"4 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun... \n",
|
|
||||||
"\n",
|
"\n",
|
||||||
" raw_report \n",
|
" raw_report \n",
|
||||||
"0 <div class=\"publication_container\">\\n <div cla... \n",
|
"0 <div class=\"publication_container\">\\n <div cla... \n",
|
||||||
"1 <div class=\"publication_container\">\\n <div cla... \n",
|
"1 <div class=\"publication_container\">\\n <div cla... "
|
||||||
"2 <div class=\"publication_container\">\\n <div cla... \n",
|
|
||||||
"3 <div class=\"publication_container\">\\n <div cla... \n",
|
|
||||||
"4 <div class=\"publication_container\">\\n <div cla... "
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"execution_count": 8,
|
"execution_count": 35,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"output_type": "execute_result"
|
"output_type": "execute_result"
|
||||||
}
|
}
|
||||||
@ -193,7 +142,7 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 9,
|
"execution_count": 36,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [
|
"outputs": [
|
||||||
{
|
{
|
||||||
@ -228,46 +177,19 @@
|
|||||||
" <tbody>\n",
|
" <tbody>\n",
|
||||||
" <tr>\n",
|
" <tr>\n",
|
||||||
" <th>0</th>\n",
|
" <th>0</th>\n",
|
||||||
" <td>2023-03-17</td>\n",
|
" <td>2023-05-25</td>\n",
|
||||||
" <td>Aufsichtsrat</td>\n",
|
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
" <td>Volkswagen Economy Service Erdle Bernhard Erdl...</td>\n",
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstl...</td>\n",
|
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nVolkswagen Economy Se...</td>\n",
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
||||||
" <td>Aufsichtsrat</td>\n",
|
" <td>Jahresabschluss</td>\n",
|
||||||
" </tr>\n",
|
" </tr>\n",
|
||||||
" <tr>\n",
|
" <tr>\n",
|
||||||
" <th>1</th>\n",
|
" <th>1</th>\n",
|
||||||
" <td>2022-03-25</td>\n",
|
" <td>2023-05-24</td>\n",
|
||||||
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
" <td>Volkswagen Economy Service Erdle Bernhard Erdl...</td>\n",
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun...</td>\n",
|
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nVolkswagen Economy Se...</td>\n",
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" <td>Jahresabschluss</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>2</th>\n",
|
|
||||||
" <td>2021-03-11</td>\n",
|
|
||||||
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun...</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" <td>Jahresabschluss</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>3</th>\n",
|
|
||||||
" <td>2020-03-24</td>\n",
|
|
||||||
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtosIT-Dienstleistung...</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" <td>Jahresabschluss</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>4</th>\n",
|
|
||||||
" <td>2018-12-11</td>\n",
|
|
||||||
" <td>Jahresabschluss zum Geschäftsjahr vom 01.01.20...</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td>\\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun...</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
||||||
" <td>Jahresabschluss</td>\n",
|
" <td>Jahresabschluss</td>\n",
|
||||||
" </tr>\n",
|
" </tr>\n",
|
||||||
@ -277,35 +199,23 @@
|
|||||||
],
|
],
|
||||||
"text/plain": [
|
"text/plain": [
|
||||||
" date name \\\n",
|
" date name \\\n",
|
||||||
"0 2023-03-17 Aufsichtsrat \n",
|
"0 2023-05-25 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
||||||
"1 2022-03-25 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
"1 2023-05-24 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
||||||
"2 2021-03-11 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
|
||||||
"3 2020-03-24 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
|
||||||
"4 2018-12-11 Jahresabschluss zum Geschäftsjahr vom 01.01.20... \n",
|
|
||||||
"\n",
|
"\n",
|
||||||
" company \\\n",
|
" company \\\n",
|
||||||
"0 Atos IT-Dienstleistung und Beratung GmbH \n",
|
"0 Volkswagen Economy Service Erdle Bernhard Erdl... \n",
|
||||||
"1 Atos IT-Dienstleistung und Beratung GmbH \n",
|
"1 Volkswagen Economy Service Erdle Bernhard Erdl... \n",
|
||||||
"2 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"3 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"4 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"\n",
|
"\n",
|
||||||
" report \\\n",
|
" report \\\n",
|
||||||
"0 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstl... \n",
|
"0 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nVolkswagen Economy Se... \n",
|
||||||
"1 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun... \n",
|
"1 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nVolkswagen Economy Se... \n",
|
||||||
"2 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun... \n",
|
|
||||||
"3 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtosIT-Dienstleistung... \n",
|
|
||||||
"4 \\n\\n\\n\\n \\n\\n\\n\\n\\n\\n\\n\\nAtos IT-Dienstleistun... \n",
|
|
||||||
"\n",
|
"\n",
|
||||||
" raw_report type \n",
|
" raw_report type \n",
|
||||||
"0 <div class=\"publication_container\">\\n <div cla... Aufsichtsrat \n",
|
"0 <div class=\"publication_container\">\\n <div cla... Jahresabschluss \n",
|
||||||
"1 <div class=\"publication_container\">\\n <div cla... Jahresabschluss \n",
|
"1 <div class=\"publication_container\">\\n <div cla... Jahresabschluss "
|
||||||
"2 <div class=\"publication_container\">\\n <div cla... Jahresabschluss \n",
|
|
||||||
"3 <div class=\"publication_container\">\\n <div cla... Jahresabschluss \n",
|
|
||||||
"4 <div class=\"publication_container\">\\n <div cla... Jahresabschluss "
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"execution_count": 9,
|
"execution_count": 36,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"output_type": "execute_result"
|
"output_type": "execute_result"
|
||||||
}
|
}
|
||||||
@ -317,21 +227,9 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 10,
|
"execution_count": 37,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [
|
"outputs": [
|
||||||
{
|
|
||||||
"name": "stderr",
|
|
||||||
"output_type": "stream",
|
|
||||||
"text": [
|
|
||||||
"C:\\Users\\trist\\AppData\\Local\\Temp\\ipykernel_6460\\963182859.py:2: SettingWithCopyWarning: \n",
|
|
||||||
"A value is trying to be set on a copy of a slice from a DataFrame.\n",
|
|
||||||
"Try using .loc[row_indexer,col_indexer] = value instead\n",
|
|
||||||
"\n",
|
|
||||||
"See the caveats in the documentation: https://pandas.pydata.org/pandas-docs/stable/user_guide/indexing.html#returning-a-view-versus-a-copy\n",
|
|
||||||
" df_jahresabschluss['jahr'] = df_jahresabschluss.name.apply(\n"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
{
|
||||||
"data": {
|
"data": {
|
||||||
"text/html": [
|
"text/html": [
|
||||||
@ -361,61 +259,34 @@
|
|||||||
" </thead>\n",
|
" </thead>\n",
|
||||||
" <tbody>\n",
|
" <tbody>\n",
|
||||||
" <tr>\n",
|
" <tr>\n",
|
||||||
" <th>1</th>\n",
|
" <th>0</th>\n",
|
||||||
" <td>2022-03-25</td>\n",
|
" <td>2023-05-25</td>\n",
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
" <td>Volkswagen Economy Service Erdle Bernhard Erdl...</td>\n",
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
||||||
" <td>2020</td>\n",
|
" <td>2020</td>\n",
|
||||||
" </tr>\n",
|
" </tr>\n",
|
||||||
" <tr>\n",
|
" <tr>\n",
|
||||||
" <th>2</th>\n",
|
" <th>1</th>\n",
|
||||||
" <td>2021-03-11</td>\n",
|
" <td>2023-05-24</td>\n",
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
" <td>Volkswagen Economy Service Erdle Bernhard Erdl...</td>\n",
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
||||||
" <td>2019</td>\n",
|
" <td>2019</td>\n",
|
||||||
" </tr>\n",
|
" </tr>\n",
|
||||||
" <tr>\n",
|
|
||||||
" <th>3</th>\n",
|
|
||||||
" <td>2020-03-24</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" <td>2018</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>4</th>\n",
|
|
||||||
" <td>2018-12-11</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" <td>2017</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>6</th>\n",
|
|
||||||
" <td>2018-01-03</td>\n",
|
|
||||||
" <td>Atos IT-Dienstleistung und Beratung GmbH</td>\n",
|
|
||||||
" <td><div class=\"publication_container\">\\n <div cla...</td>\n",
|
|
||||||
" <td>2016</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" </tbody>\n",
|
" </tbody>\n",
|
||||||
"</table>\n",
|
"</table>\n",
|
||||||
"</div>"
|
"</div>"
|
||||||
],
|
],
|
||||||
"text/plain": [
|
"text/plain": [
|
||||||
" date company \\\n",
|
" date company \\\n",
|
||||||
"1 2022-03-25 Atos IT-Dienstleistung und Beratung GmbH \n",
|
"0 2023-05-25 Volkswagen Economy Service Erdle Bernhard Erdl... \n",
|
||||||
"2 2021-03-11 Atos IT-Dienstleistung und Beratung GmbH \n",
|
"1 2023-05-24 Volkswagen Economy Service Erdle Bernhard Erdl... \n",
|
||||||
"3 2020-03-24 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"4 2018-12-11 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"6 2018-01-03 Atos IT-Dienstleistung und Beratung GmbH \n",
|
|
||||||
"\n",
|
"\n",
|
||||||
" raw_report jahr \n",
|
" raw_report jahr \n",
|
||||||
"1 <div class=\"publication_container\">\\n <div cla... 2020 \n",
|
"0 <div class=\"publication_container\">\\n <div cla... 2020 \n",
|
||||||
"2 <div class=\"publication_container\">\\n <div cla... 2019 \n",
|
"1 <div class=\"publication_container\">\\n <div cla... 2019 "
|
||||||
"3 <div class=\"publication_container\">\\n <div cla... 2018 \n",
|
|
||||||
"4 <div class=\"publication_container\">\\n <div cla... 2017 \n",
|
|
||||||
"6 <div class=\"publication_container\">\\n <div cla... 2016 "
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"execution_count": 10,
|
"execution_count": 37,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"output_type": "execute_result"
|
"output_type": "execute_result"
|
||||||
}
|
}
|
||||||
@ -439,7 +310,7 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 11,
|
"execution_count": 38,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
@ -449,11 +320,12 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 12,
|
"execution_count": 39,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"sample_report = df_jahresabschluss.iloc[0].raw_report"
|
"sample_report = df_jahresabschluss.iloc[0].raw_report\n",
|
||||||
|
"sample_report_content = df_jahresabschluss.iloc[0].raw_report"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -466,45 +338,20 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 14,
|
"execution_count": 40,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [],
|
"outputs": [],
|
||||||
"source": [
|
"source": [
|
||||||
"import re\n",
|
"import re\n",
|
||||||
|
"from dataclasses import dataclass\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"@dataclass\n",
|
||||||
|
"class Auditor:\n",
|
||||||
|
" name: str\n",
|
||||||
|
" company: str\n",
|
||||||
"\n",
|
"\n",
|
||||||
"\n",
|
"\n",
|
||||||
"def extract_auditors(report: str) -> list:\n",
|
|
||||||
" auditor_regex = r\"[a-z A-Z,.'-]+, Wirtschaftsprüfer\"\n",
|
|
||||||
" hits = re.findall(auditor_regex, report)\n",
|
|
||||||
" return [hit.replace(\", Wirtschaftsprüfer\", \"\").lstrip() for hit in hits]"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 15,
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [
|
|
||||||
{
|
|
||||||
"data": {
|
|
||||||
"text/plain": [
|
|
||||||
"['Eckhard Lewe', 'Renate Hermsdorf']"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"execution_count": 15,
|
|
||||||
"metadata": {},
|
|
||||||
"output_type": "execute_result"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"source": [
|
|
||||||
"extract_auditors(sample_report)"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"cell_type": "code",
|
|
||||||
"execution_count": 16,
|
|
||||||
"metadata": {},
|
|
||||||
"outputs": [],
|
|
||||||
"source": [
|
|
||||||
"def extract_auditor_company(report: str) -> str:\n",
|
"def extract_auditor_company(report: str) -> str:\n",
|
||||||
" soup = BeautifulSoup(report, features=\"html.parser\")\n",
|
" soup = BeautifulSoup(report, features=\"html.parser\")\n",
|
||||||
" temp = soup.find_all(\"b\")\n",
|
" temp = soup.find_all(\"b\")\n",
|
||||||
@ -512,27 +359,37 @@
|
|||||||
" br = elem.findChildren(\"br\")\n",
|
" br = elem.findChildren(\"br\")\n",
|
||||||
" if len(br) > 0:\n",
|
" if len(br) > 0:\n",
|
||||||
" return elem.text.split(\"\\n\")[1].strip()\n",
|
" return elem.text.split(\"\\n\")[1].strip()\n",
|
||||||
" return None"
|
" return None\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"def extract_auditors(report: str) -> list:\n",
|
||||||
|
" auditor_company = extract_auditor_company(report)\n",
|
||||||
|
" auditor_regex = r\"[a-z A-Z,.'-]+, Wirtschaftsprüfer\"\n",
|
||||||
|
" hits = re.findall(auditor_regex, report)\n",
|
||||||
|
" return [\n",
|
||||||
|
" Auditor(hit.replace(\", Wirtschaftsprüfer\", \"\").lstrip(), auditor_company)\n",
|
||||||
|
" for hit in hits\n",
|
||||||
|
" ]"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 17,
|
"execution_count": 41,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [
|
"outputs": [
|
||||||
{
|
{
|
||||||
"data": {
|
"data": {
|
||||||
"text/plain": [
|
"text/plain": [
|
||||||
"'Warth & Klein Grant Thornton AG'"
|
"[]"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"execution_count": 17,
|
"execution_count": 41,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"output_type": "execute_result"
|
"output_type": "execute_result"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
"source": [
|
"source": [
|
||||||
"extract_auditor_company(sample_report)"
|
"extract_auditors(sample_report)"
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
@ -561,97 +418,177 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 18,
|
"execution_count": 42,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [
|
"outputs": [
|
||||||
{
|
{
|
||||||
"data": {
|
"data": {
|
||||||
"text/html": [
|
|
||||||
"<div>\n",
|
|
||||||
"<style scoped>\n",
|
|
||||||
" .dataframe tbody tr th:only-of-type {\n",
|
|
||||||
" vertical-align: middle;\n",
|
|
||||||
" }\n",
|
|
||||||
"\n",
|
|
||||||
" .dataframe tbody tr th {\n",
|
|
||||||
" vertical-align: top;\n",
|
|
||||||
" }\n",
|
|
||||||
"\n",
|
|
||||||
" .dataframe thead th {\n",
|
|
||||||
" text-align: right;\n",
|
|
||||||
" }\n",
|
|
||||||
"</style>\n",
|
|
||||||
"<table border=\"1\" class=\"dataframe\">\n",
|
|
||||||
" <thead>\n",
|
|
||||||
" <tr style=\"text-align: right;\">\n",
|
|
||||||
" <th></th>\n",
|
|
||||||
" <th>Unnamed: 0</th>\n",
|
|
||||||
" <th>Anhang</th>\n",
|
|
||||||
" <th>2020 TEUR</th>\n",
|
|
||||||
" <th>Vorjahr TEUR</th>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" </thead>\n",
|
|
||||||
" <tbody>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>0</th>\n",
|
|
||||||
" <td>1. Umsatzerlöse</td>\n",
|
|
||||||
" <td>(1)</td>\n",
|
|
||||||
" <td>69.819</td>\n",
|
|
||||||
" <td>77.429</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>1</th>\n",
|
|
||||||
" <td>2. Veränderung des Bestandes an unfertigen Lei...</td>\n",
|
|
||||||
" <td>NaN</td>\n",
|
|
||||||
" <td>-41.000</td>\n",
|
|
||||||
" <td>-66.000</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>2</th>\n",
|
|
||||||
" <td>3. Sonstige betriebliche Erträge</td>\n",
|
|
||||||
" <td>(2)</td>\n",
|
|
||||||
" <td>489.000</td>\n",
|
|
||||||
" <td>1.816</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>3</th>\n",
|
|
||||||
" <td>4. Materialaufwand</td>\n",
|
|
||||||
" <td>NaN</td>\n",
|
|
||||||
" <td>NaN</td>\n",
|
|
||||||
" <td>NaN</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" <tr>\n",
|
|
||||||
" <th>4</th>\n",
|
|
||||||
" <td>a) Aufwendungen für bezogene Waren</td>\n",
|
|
||||||
" <td>NaN</td>\n",
|
|
||||||
" <td>-1.220</td>\n",
|
|
||||||
" <td>-3.003</td>\n",
|
|
||||||
" </tr>\n",
|
|
||||||
" </tbody>\n",
|
|
||||||
"</table>\n",
|
|
||||||
"</div>"
|
|
||||||
],
|
|
||||||
"text/plain": [
|
"text/plain": [
|
||||||
" Unnamed: 0 Anhang 2020 TEUR \\\n",
|
"{'net_income': 23484.67, 'equity': 65083.84, 'current_assets': 357613.61}"
|
||||||
"0 1. Umsatzerlöse (1) 69.819 \n",
|
|
||||||
"1 2. Veränderung des Bestandes an unfertigen Lei... NaN -41.000 \n",
|
|
||||||
"2 3. Sonstige betriebliche Erträge (2) 489.000 \n",
|
|
||||||
"3 4. Materialaufwand NaN NaN \n",
|
|
||||||
"4 a) Aufwendungen für bezogene Waren NaN -1.220 \n",
|
|
||||||
"\n",
|
|
||||||
" Vorjahr TEUR \n",
|
|
||||||
"0 77.429 \n",
|
|
||||||
"1 -66.000 \n",
|
|
||||||
"2 1.816 \n",
|
|
||||||
"3 NaN \n",
|
|
||||||
"4 -3.003 "
|
|
||||||
]
|
]
|
||||||
},
|
},
|
||||||
"execution_count": 18,
|
"execution_count": 42,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"output_type": "execute_result"
|
"output_type": "execute_result"
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
"source": [
|
||||||
|
"def extract_kpis(report_content) -> dict:\n",
|
||||||
|
" \"\"\"\n",
|
||||||
|
" Source: https://github.com/bundesAPI/deutschland/pull/87/files#diff-f5b9db5384cf523fcc677056065041e7793bfc4da9cf74c4eebd6fab732739bd\n",
|
||||||
|
" Extracts Key Performance Indicators (KPIs) from the financial reports.\n",
|
||||||
|
" Args:\n",
|
||||||
|
" reports (dict): A dictionary containing the financial reports with their hash as keys and report details as values.\n",
|
||||||
|
" Returns:\n",
|
||||||
|
" dict: A dictionary containing the extracted KPIs with their report hash as keys and KPIs as values.\n",
|
||||||
|
" \"\"\"\n",
|
||||||
|
"\n",
|
||||||
|
" # Define KPI patterns to search for\n",
|
||||||
|
" kpi_patterns = {\n",
|
||||||
|
" \"revenue\": r\"(?:revenue|umsatz|erlöse)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"net_income\": r\"(?:net income|jahresüberschuss|nettoeinkommen|Ergebnis nach Steuern)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"ebit\": r\"(?:ebit|operating income)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"ebitda\": r\"(?:ebitda)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"gross_profit\": r\"(?:gross profit|bruttogewinn)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"operating_profit\": r\"(?:operating profit|betriebsgewinn)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"assets\": r\"(?:total assets|bilanzsumme)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"liabilities\": r\"(?:total liabilities|gesamtverbindlichkeiten)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"equity\": r\"(?:shareholders'? equity|eigenkapital)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"current_assets\": r\"(?:current assets|umlaufvermögen)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"current_liabilities\": r\"(?:current liabilities|kurzfristige verbindlichkeiten)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"long_term_debt\": r\"(?:long[-\\s]?term debt|langfristige verbindlichkeiten)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"short_term_debt\": r\"(?:short[-\\s]?term debt|kurzfristige verbindlichkeiten)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"cash_and_cash_equivalents\": r\"(?:cash (?:and cash equivalents)?|barmittel)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"dividends\": r\"(?:dividends?|dividende)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" \"cash_flow\": r\"(?:cash flow|cashflow|cash flow from operating activities)[:\\s]*([\\d,.]+[mmb]?)\",\n",
|
||||||
|
" }\n",
|
||||||
|
"\n",
|
||||||
|
" report_kpis = {}\n",
|
||||||
|
" for kpi, pattern in kpi_patterns.items():\n",
|
||||||
|
" match = re.search(pattern, report_content, flags=re.IGNORECASE | re.UNICODE)\n",
|
||||||
|
" if match:\n",
|
||||||
|
" value = match.group(1)\n",
|
||||||
|
"\n",
|
||||||
|
" # Clean and validate the extracted number\n",
|
||||||
|
" try:\n",
|
||||||
|
" if not value: # Check if value is empty\n",
|
||||||
|
" cleaned_value = None\n",
|
||||||
|
" else:\n",
|
||||||
|
" multiplier = 1\n",
|
||||||
|
" if value[-1].lower() == \"m\":\n",
|
||||||
|
" value = value[:-1]\n",
|
||||||
|
" multiplier = 1_000_000\n",
|
||||||
|
" elif value[-1].lower() == \"b\":\n",
|
||||||
|
" value = value[:-1]\n",
|
||||||
|
" multiplier = 1_000_000_000\n",
|
||||||
|
"\n",
|
||||||
|
" # Remove commas after checking for multipliers\n",
|
||||||
|
" value = value.replace(\".\", \"\").replace(\",\", \".\").strip()\n",
|
||||||
|
" cleaned_value = float(value) * multiplier\n",
|
||||||
|
" except ValueError:\n",
|
||||||
|
" cleaned_value = None\n",
|
||||||
|
"\n",
|
||||||
|
" if cleaned_value is not None:\n",
|
||||||
|
" report_kpis[kpi] = cleaned_value\n",
|
||||||
|
" return report_kpis\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"extract_kpis(\n",
|
||||||
|
" BeautifulSoup(sample_report, features=\"html.parser\").get_text().replace(\"\\n\", \" \")\n",
|
||||||
|
")"
|
||||||
|
]
|
||||||
|
},
|
||||||
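The cleaning step above converts German-formatted figures, where "." groups thousands and "," marks the decimal, and the order matters: the m/b multiplier suffix has to be read before the separators are rewritten. A small worked sketch of the same logic:

value = "1.234,5m"  # German notation for 1,234.5 million
multiplier = 1_000_000 if value[-1].lower() == "m" else 1
value = value[:-1] if value[-1].lower() in "mb" else value
value = value.replace(".", "").replace(",", ".").strip()  # -> "1234.5"
print(float(value) * multiplier)  # 1234500000.0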
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 43,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import os\n",
|
||||||
|
"\n",
|
||||||
|
"with open(\"./temp.txt\", \"w\") as file:\n",
|
||||||
|
" file.write(\n",
|
||||||
|
" BeautifulSoup(sample_report, features=\"html.parser\")\n",
|
||||||
|
" .get_text()\n",
|
||||||
|
" .replace(\"\\n\", \" \")\n",
|
||||||
|
" )"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 46,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"MultiIndex([('Aktiva', 'Unnamed: 0_level_1'),\n",
|
||||||
|
" ('Aktiva', '31.12.2020 EUR'),\n",
|
||||||
|
" ('Aktiva', '31.12.2019 EUR')],\n",
|
||||||
|
" )\n",
|
||||||
|
"Aktiva Unnamed: 0_level_1 object\n",
|
||||||
|
" 31.12.2020 EUR object\n",
|
||||||
|
" 31.12.2019 EUR object\n",
|
||||||
|
"dtype: object\n",
|
||||||
|
"MultiIndex([('Passiva', 'Unnamed: 0_level_1'),\n",
|
||||||
|
" ('Passiva', '31.12.2020 EUR'),\n",
|
||||||
|
" ('Passiva', '31.12.2019 EUR')],\n",
|
||||||
|
" )\n",
|
||||||
|
"Passiva Unnamed: 0_level_1 object\n",
|
||||||
|
" 31.12.2020 EUR object\n",
|
||||||
|
" 31.12.2019 EUR object\n",
|
||||||
|
"dtype: object\n",
|
||||||
|
"Index(['Angaben zur Identifikation der Gesellschaft laut Registergericht', 'Angaben zur Identifikation der Gesellschaft laut Registergericht.1'], dtype='object')\n",
|
||||||
|
"Angaben zur Identifikation der Gesellschaft laut Registergericht object\n",
|
||||||
|
"Angaben zur Identifikation der Gesellschaft laut Registergericht.1 object\n",
|
||||||
|
"dtype: object\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"{}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 46,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"def parse_tables(report: str) -> list:\n",
|
||||||
|
" result = {}\n",
|
||||||
|
" soup = BeautifulSoup(report, features=\"html.parser\")\n",
|
||||||
|
" for table in soup.find_all(\"table\", {\"class\": \"std_table\"}):\n",
|
||||||
|
" df = pd.read_html(StringIO(str(table)))[0]\n",
|
||||||
|
" print(df.columns)\n",
|
||||||
|
" print(df.dtypes)\n",
|
||||||
|
" return result\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"parse_tables(sample_report)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 45,
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"ename": "KeyError",
|
||||||
|
"evalue": "'Passiva'",
|
||||||
|
"output_type": "error",
|
||||||
|
"traceback": [
|
||||||
|
"\u001b[1;31m---------------------------------------------------------------------------\u001b[0m",
|
||||||
|
"\u001b[1;31mKeyError\u001b[0m Traceback (most recent call last)",
|
||||||
|
"\u001b[1;32mc:\\Users\\trist\\Documents\\Code\\M.Sc\\aki_prj23_transparenzregister\\Jupyter\\API-tests\\Bundesanzeiger\\notebook.ipynb Cell 21\u001b[0m in \u001b[0;36m1\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/trist/Documents/Code/M.Sc/aki_prj23_transparenzregister/Jupyter/API-tests/Bundesanzeiger/notebook.ipynb#X26sZmlsZQ%3D%3D?line=10'>11</a>\u001b[0m \u001b[39mreturn\u001b[39;00m result\n\u001b[0;32m <a href='vscode-notebook-cell:/c%3A/Users/trist/Documents/Code/M.Sc/aki_prj23_transparenzregister/Jupyter/API-tests/Bundesanzeiger/notebook.ipynb#X26sZmlsZQ%3D%3D?line=13'>14</a>\u001b[0m bilanz \u001b[39m=\u001b[39m get_bilanz(sample_report)\n\u001b[1;32m---> <a href='vscode-notebook-cell:/c%3A/Users/trist/Documents/Code/M.Sc/aki_prj23_transparenzregister/Jupyter/API-tests/Bundesanzeiger/notebook.ipynb#X26sZmlsZQ%3D%3D?line=14'>15</a>\u001b[0m bilanz[\u001b[39m\"\u001b[39;49m\u001b[39mPassiva\u001b[39;49m\u001b[39m\"\u001b[39;49m]\u001b[39m.\u001b[39mhead()\n",
|
||||||
|
"\u001b[1;31mKeyError\u001b[0m: 'Passiva'"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
"source": [
|
"source": [
|
||||||
"def get_bilanz(report: str) -> any:\n",
|
"def get_bilanz(report: str) -> any:\n",
|
||||||
" result = {}\n",
|
" result = {}\n",
|
||||||
@ -672,30 +609,30 @@
|
|||||||
},
|
},
|
||||||
{
|
{
|
||||||
"cell_type": "code",
|
"cell_type": "code",
|
||||||
"execution_count": 19,
|
"execution_count": null,
|
||||||
"metadata": {},
|
"metadata": {},
|
||||||
"outputs": [
|
"outputs": [
|
||||||
{
|
{
|
||||||
"name": "stdout",
|
"name": "stdout",
|
||||||
"output_type": "stream",
|
"output_type": "stream",
|
||||||
"text": [
|
"text": [
|
||||||
"Index(['Gesellschafterbeschluss', 'Shareholder Resolution'], dtype='object')\n",
|
"Int64Index([0, 1], dtype='int64')\n",
|
||||||
"Index(['Unnamed: 0', 'Anhang', '31.12.2020 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', 'Anhang', '31.12.2021 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', 'Anhang', '2020 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', 'Anhang', '2021 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
||||||
"Index(['Aufgliederung nach Tätigkeitsbereichen', '2020 TEUR',\n",
|
"Index(['Aufgliederung nach Tätigkeitsbereichen', '2021 TEUR',\n",
|
||||||
" 'Vorjahr TEUR'],\n",
|
" 'Vorjahr TEUR'],\n",
|
||||||
" dtype='object')\n",
|
" dtype='object')\n",
|
||||||
"Index(['Aufgliederung nach Inland und Ausland', '2020 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
"Index(['Aufgliederung nach Inland und Ausland', '2021 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', '31.12.2020 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', '31.12.2021 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', '31.12.2020 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', '31.12.2021 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', '31.12.2020'], dtype='object')\n",
|
"Index(['Unnamed: 0', '31.12.2021'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', 'TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', 'TEUR'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', 'TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', 'TEUR'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', 'TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', 'TEUR'], dtype='object')\n",
|
||||||
"Int64Index([0, 1, 2], dtype='int64')\n",
|
"Int64Index([0, 1, 2], dtype='int64')\n",
|
||||||
"Index(['Unnamed: 0', 'TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', 'TEUR'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', '31.12.2020 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
"Index(['Unnamed: 0', '31.12.2021 TEUR', 'Vorjahr TEUR'], dtype='object')\n",
|
||||||
"Index(['Unnamed: 0', '2020 Anzahl MA', 'Vorjahr Anzahl MA'], dtype='object')\n",
|
"Index(['Unnamed: 0', '2021 Anzahl MA', 'Vorjahr Anzahl MA'], dtype='object')\n",
|
||||||
"MultiIndex([('Art des Geschäfts', 'Unnamed: 0_level_1'),\n",
|
"MultiIndex([('Art des Geschäfts', 'Unnamed: 0_level_1'),\n",
|
||||||
" ('Art der Beziehung', 'Gesellschafterin TEUR'),\n",
|
" ('Art der Beziehung', 'Gesellschafterin TEUR'),\n",
|
||||||
" ('Art der Beziehung', 'Verbundene Unternehmen TEUR')],\n",
|
" ('Art der Beziehung', 'Verbundene Unternehmen TEUR')],\n",
|
||||||
@ -707,24 +644,23 @@
|
|||||||
" ('Anschaffungs- oder Herstellungskosten', ...),\n",
|
" ('Anschaffungs- oder Herstellungskosten', ...),\n",
|
||||||
" ('Anschaffungs- oder Herstellungskosten', ...)],\n",
|
" ('Anschaffungs- oder Herstellungskosten', ...)],\n",
|
||||||
" )\n",
|
" )\n",
|
||||||
"MultiIndex([('Unnamed: 0_level_0', 'Unnamed: 0_level_1'),\n",
|
"MultiIndex([('Unnamed: 0_level_0', ...),\n",
|
||||||
" ( 'Abschreibungen', 'Stand 01.01.2020 EUR'),\n",
|
" ( 'Abschreibungen', ...),\n",
|
||||||
" ( 'Abschreibungen', 'Abschreibungen des Geschäftsjahres EUR'),\n",
|
" ( 'Abschreibungen', ...),\n",
|
||||||
" ( 'Abschreibungen', 'Abgänge Umbuchung EUR'),\n",
|
" ( 'Abschreibungen', ...),\n",
|
||||||
" ( 'Abschreibungen', 'Stand 31.12.2020 EUR')],\n",
|
" ( 'Abschreibungen', ...)],\n",
|
||||||
" )\n",
|
" )\n",
|
||||||
"MultiIndex([('Unnamed: 0_level_0', 'Unnamed: 0_level_1'),\n",
|
"MultiIndex([('Unnamed: 0_level_0', 'Unnamed: 0_level_1'),\n",
|
||||||
" ( 'Buchwerte', 'Stand 31.12.2020 EUR'),\n",
|
" ( 'Buchwerte', 'Stand 31.12.2021 EUR'),\n",
|
||||||
" ( 'Buchwerte', 'Stand 31.12.2019 EUR')],\n",
|
" ( 'Buchwerte', 'Stand 31.12.2020 EUR')],\n",
|
||||||
" )\n",
|
" )\n",
|
||||||
"Index(['Nichtfinanzieller Leistungsindikator', 'Unnamed: 1', '2020', '2019',\n",
|
"Index(['Nichtfinanzieller Leistungsindikator', 'Unnamed: 1', '2021', '2020',\n",
|
||||||
" '2018'],\n",
|
" '2019'],\n",
|
||||||
" dtype='object')\n",
|
" dtype='object')\n",
|
||||||
"Index(['Gewinn- und Verlustrechnung', '2020 TEUR', 'Vorjahr TEUR',\n",
|
"Index(['Gewinn- und Verlustrechnung', '2021 TEUR', 'Vorjahr TEUR',\n",
|
||||||
" 'Veränderung TEUR'],\n",
|
" 'Veränderung TEUR'],\n",
|
||||||
" dtype='object')\n",
|
" dtype='object')\n",
|
||||||
"Index(['Bilanz', '31.12.2020 TEUR', 'Vorjahr TEUR', 'Veränderung TEUR'], dtype='object')\n",
|
"Index(['Bilanz', '31.12.2021 TEUR', 'Vorjahr TEUR', 'Veränderung TEUR'], dtype='object')\n"
|
||||||
"Index(['Bericht des Aufsichtsrats', 'Report of the Supervisory Board'], dtype='object')\n"
|
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
],
|
],
|
||||||
|
1
Jupyter/API-tests/News/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
data/
|
879
Jupyter/API-tests/News/notebook.ipynb
Normal file
1
Jupyter/API-tests/News/requirements.txt
Normal file
@ -0,0 +1 @@
|
|||||||
|
pymongo
|
1
Jupyter/API-tests/Unternehmensregister/.gitignore
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
data/*
|
192
Jupyter/API-tests/Unternehmensregister/main.py
Normal file
@ -0,0 +1,192 @@
|
|||||||
|
"""Unternehmensregister Scraping."""
|
||||||
|
import glob
|
||||||
|
import logging
|
||||||
|
import multiprocessing
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from selenium import webdriver
|
||||||
|
from selenium.webdriver.common.by import By
|
||||||
|
from selenium.webdriver.support import expected_conditions as ec
|
||||||
|
from selenium.webdriver.support.ui import WebDriverWait
|
||||||
|
from tqdm import tqdm
|
||||||
|
|
||||||
|
logger = logging.getLogger()
|
||||||
|
|
||||||
|
|
||||||
|
def scrape(query: str, download_dir: list[str]) -> None:
|
||||||
|
"""Fetch results from Unternehmensregister for given query.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
query (str): Search Query (RegEx supported)
|
||||||
|
download_dir (list[str]): Directory to place output files in
|
||||||
|
"""
|
||||||
|
download_path = os.path.join(str(Path.cwd()), *download_dir)
|
||||||
|
options = webdriver.ChromeOptions()
|
||||||
|
preferences = {
|
||||||
|
"profile.default_content_settings.popups": 0,
|
||||||
|
"safebrowsing.enabled": True,
|
||||||
|
"download": {
|
||||||
|
"directory_upgrade": True,
|
||||||
|
"prompt_for_download": False,
|
||||||
|
"extensions_to_open": "",
|
||||||
|
"default_directory": download_path,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
options.add_argument("--headless=new")
|
||||||
|
options.add_experimental_option("prefs", preferences)
|
||||||
|
|
||||||
|
driver = webdriver.Chrome(options=options)
|
||||||
|
|
||||||
|
driver.get("https://www.unternehmensregister.de/ureg/")
|
||||||
|
# Accept Cookies
|
||||||
|
driver.find_elements(
|
||||||
|
By.XPATH, '//button[text()="Nur technisch notwendige Cookies akzeptieren"]'
|
||||||
|
)[0].click()
|
||||||
|
# Enter search query
|
||||||
|
driver.find_elements(By.ID, "globalSearchForm:extendedResearchCompanyName")[
|
||||||
|
0
|
||||||
|
].send_keys(query)
|
||||||
|
# Trigger search
|
||||||
|
driver.find_elements(By.ID, "globalSearchForm:btnExecuteSearchOld")[0].click()
|
||||||
|
# Wait for results
|
||||||
|
wait = WebDriverWait(driver, 15)
|
||||||
|
wait.until(
|
||||||
|
lambda driver: driver.current_url != "https://www.unternehmensregister.de/ureg/"
|
||||||
|
)
|
||||||
|
|
||||||
|
num_pages = int(
|
||||||
|
driver.find_element(By.XPATH, '//*[@class="page_count"]').text.split(" ")[0]
|
||||||
|
)
|
||||||
|
|
||||||
|
processed_companies = []
|
||||||
|
|
||||||
|
for _ in tqdm(range(num_pages)):
|
||||||
|
# Find all "Registerinformationen"
|
||||||
|
companies_tab = driver.find_elements(
|
||||||
|
By.LINK_TEXT, "Registerinformationen des Registergerichts"
|
||||||
|
)
|
||||||
|
company_names = [
|
||||||
|
elem.text
|
||||||
|
for elem in driver.find_elements(
|
||||||
|
By.XPATH, '//div[@class="company_result"]/span/b'
|
||||||
|
)
|
||||||
|
]
|
||||||
|
for index, company_link in enumerate(companies_tab):
|
||||||
|
company_name = company_names[index]
|
||||||
|
if company_name in processed_companies:
|
||||||
|
continue
|
||||||
|
# Go to intermediary page
|
||||||
|
company_link.click()
|
||||||
|
# Trigger next redirect
|
||||||
|
driver.find_element(By.LINK_TEXT, "Registerinformationen anzeigen").click()
|
||||||
|
# Trigger SI download
|
||||||
|
driver.find_element(By.LINK_TEXT, "SI").click()
|
||||||
|
# Show shopping cart
|
||||||
|
wait.until(
|
||||||
|
ec.visibility_of_element_located(
|
||||||
|
(By.LINK_TEXT, "Dokumentenkorb ansehen")
|
||||||
|
)
|
||||||
|
)
|
||||||
|
driver.find_element(By.LINK_TEXT, "Dokumentenkorb ansehen").click()
|
||||||
|
# Get document
|
||||||
|
elems = driver.find_elements(By.TAG_NAME, "input")
|
||||||
|
elems[-2].click()
|
||||||
|
|
||||||
|
wait.until(
|
||||||
|
ec.visibility_of_element_located((By.ID, "paymentFormOverview:btnNext"))
|
||||||
|
)
|
||||||
|
driver.find_element(By.ID, "paymentFormOverview:btnNext").click()
|
||||||
|
|
||||||
|
wait.until(
|
||||||
|
ec.visibility_of_element_located((By.LINK_TEXT, "Zum Dokumentenkorb"))
|
||||||
|
)
|
||||||
|
driver.find_element(By.LINK_TEXT, "Zum Dokumentenkorb").click()
|
||||||
|
|
||||||
|
num_files = get_num_files(download_path)
|
||||||
|
driver.find_element(By.CLASS_NAME, "download-wrapper").click()
|
||||||
|
|
||||||
|
try:
|
||||||
|
wait.until(lambda _: wait_for_download_condition(download_path, num_files))
|
||||||
|
file_name = "".join(e for e in company_name if e.isalnum()) + ".xml"
|
||||||
|
rename_latest_file(
|
||||||
|
download_path,
|
||||||
|
file_name,
|
||||||
|
)
|
||||||
|
processed_companies.append(company_name)
|
||||||
|
except Exception:
|
||||||
|
logger.exception("Exception caught in scraping")
|
||||||
|
finally:
|
||||||
|
for _ in range(6):
|
||||||
|
driver.back()
|
||||||
|
driver.find_element(By.XPATH, '//*[@class="fas fa-angle-right"]').click()
|
||||||
|
driver.close()
|
||||||
|
|
||||||
|
|
||||||
|
def wait_for_download_condition(
|
||||||
|
path: str, num_files: int, pattern: str = "*.xml"
|
||||||
|
) -> bool:
|
||||||
|
"""Selenium wait condition monitoring number of files in a dir.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path (str): Directory path
|
||||||
|
num_files (int): Current number of files
|
||||||
|
pattern (str, optional): File pattern. Defaults to "*.xml".
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True once the number of files in the directory exceeds num_files
|
||||||
|
"""
|
||||||
|
return len(glob.glob1(path, pattern)) > num_files
|
||||||
|
|
||||||
|
|
||||||
|
def get_num_files(path: str, pattern: str = "*.xml") -> int:
|
||||||
|
"""Get number of files in directory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path (str): Directory to scan
|
||||||
|
pattern (str, optional): File pattern. Defaults to "*.xml".
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int: Number of files matching pattern
|
||||||
|
"""
|
||||||
|
return len(glob.glob1(path, pattern))
|
||||||
|
|
||||||
|
|
||||||
|
def rename_latest_file(path: str, filename: str, pattern: str = "*.xml") -> None:
|
||||||
|
"""Rename file in dir with latest change date.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
path (str): Dir to check
|
||||||
|
filename (str): Name of file
|
||||||
|
pattern (str, optional): File pattern. Defaults to "*.xml".
|
||||||
|
"""
|
||||||
|
list_of_files = [os.path.join(path, file) for file in glob.glob1(path, pattern)]
|
||||||
|
latest_download = max(list_of_files, key=os.path.getctime)
|
||||||
|
os.rename(latest_download, os.path.join(path, filename))
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
"""Main procedure"""
|
||||||
|
import pandas as pd
|
||||||
|
|
||||||
|
df_relevant_companies = pd.read_excel(
|
||||||
|
"./data/study_id42887_top-100-unternehmen-deutschland.xlsx",
|
||||||
|
sheet_name="Toplist",
|
||||||
|
skiprows=1,
|
||||||
|
)
|
||||||
|
df_relevant_companies = df_relevant_companies[df_relevant_companies["Name"].notna()]
|
||||||
|
|
||||||
|
batch_size = 5
|
||||||
|
pool = multiprocessing.Pool(processes=batch_size)
|
||||||
|
params = [
|
||||||
|
(query, ["data", "Unternehmensregister", "scraping", query.strip()])
|
||||||
|
for query in df_relevant_companies.Name
|
||||||
|
]
|
||||||
|
# Map the process_handler function to the parameter list using the Pool
|
||||||
|
pool.starmap(scrape, params)
|
||||||
|
|
||||||
|
# Close the Pool to prevent any more tasks from being submitted
|
||||||
|
pool.close()
|
||||||
|
|
||||||
|
# Wait for all the processes to complete
|
||||||
|
pool.join()
|
4322
Jupyter/API-tests/Unternehmensregister/notebook.ipynb
Normal file
10
Jupyter/API-tests/Unternehmensregister/requirements.txt
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
ocrmypdf
|
||||||
|
pytesseract
|
||||||
|
opencv-python
|
||||||
|
pdf2image
|
||||||
|
bs4
|
||||||
|
selenium
|
||||||
|
xmltodict
|
||||||
|
tqdm
|
||||||
|
openpyxl
|
||||||
|
pandas
|
28
Jupyter/API-tests/docker-compose.yml
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
version: '3.8'
|
||||||
|
services:
|
||||||
|
mongodb:
|
||||||
|
image: mongo:6.0.6
|
||||||
|
container_name: mongodb
|
||||||
|
restart: unless-stopped
|
||||||
|
environment:
|
||||||
|
MONGO_INITDB_ROOT_USERNAME: root
|
||||||
|
MONGO_INITDB_ROOT_PASSWORD: pR0R0v2e2
|
||||||
|
MONGO_INITDB_DATABASE: transparenzregister
|
||||||
|
ports:
|
||||||
|
- 27017:27017
|
||||||
|
volumes:
|
||||||
|
- mongodb_data:/data/db
|
||||||
|
|
||||||
|
mongo-express:
|
||||||
|
image: mongo-express:1.0.0-alpha
|
||||||
|
container_name: mongo-express
|
||||||
|
restart: unless-stopped
|
||||||
|
ports:
|
||||||
|
- 8081:8081
|
||||||
|
environment:
|
||||||
|
ME_CONFIG_MONGODB_SERVER: mongodb
|
||||||
|
ME_CONFIG_MONGODB_ADMINUSERNAME: root
|
||||||
|
ME_CONFIG_MONGODB_ADMINPASSWORD: pR0R0v2e2
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
mongodb_data:
|
236
Jupyter/connection-counter.ipynb
Normal file
@ -0,0 +1,236 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 54,
|
||||||
|
"metadata": {
|
||||||
|
"collapsed": true,
|
||||||
|
"ExecuteTime": {
|
||||||
|
"end_time": "2023-06-03T01:36:32.345509400Z",
|
||||||
|
"start_time": "2023-06-03T01:36:32.332130700Z"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"from typing import Final\n",
|
||||||
|
"\n",
|
||||||
|
"import numpy as np\n",
|
||||||
|
"import pandas as pd"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 1,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": " Company 1 Connection Weight Company 2\n0 21 83 58\n1 37 88 86\n2 40 6 83\n3 60 35 2\n4 11 22 10\n.. ... ... ...\n695 62 37 11\n696 10 24 27\n697 97 40 55\n698 14 87 66\n699 50 55 82\n\n[693 rows x 3 columns]",
|
||||||
|
"text/html": "<div>\n<style scoped>\n .dataframe tbody tr th:only-of-type {\n vertical-align: middle;\n }\n\n .dataframe tbody tr th {\n vertical-align: top;\n }\n\n .dataframe thead th {\n text-align: right;\n }\n</style>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Company 1</th>\n <th>Connection Weight</th>\n <th>Company 2</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>0</th>\n <td>21</td>\n <td>83</td>\n <td>58</td>\n </tr>\n <tr>\n <th>1</th>\n <td>37</td>\n <td>88</td>\n <td>86</td>\n </tr>\n <tr>\n <th>2</th>\n <td>40</td>\n <td>6</td>\n <td>83</td>\n </tr>\n <tr>\n <th>3</th>\n <td>60</td>\n <td>35</td>\n <td>2</td>\n </tr>\n <tr>\n <th>4</th>\n <td>11</td>\n <td>22</td>\n <td>10</td>\n </tr>\n <tr>\n <th>...</th>\n <td>...</td>\n <td>...</td>\n <td>...</td>\n </tr>\n <tr>\n <th>695</th>\n <td>62</td>\n <td>37</td>\n <td>11</td>\n </tr>\n <tr>\n <th>696</th>\n <td>10</td>\n <td>24</td>\n <td>27</td>\n </tr>\n <tr>\n <th>697</th>\n <td>97</td>\n <td>40</td>\n <td>55</td>\n </tr>\n <tr>\n <th>698</th>\n <td>14</td>\n <td>87</td>\n <td>66</td>\n </tr>\n <tr>\n <th>699</th>\n <td>50</td>\n <td>55</td>\n <td>82</td>\n </tr>\n </tbody>\n</table>\n<p>693 rows × 3 columns</p>\n</div>"
|
||||||
|
},
|
||||||
|
"execution_count": 1,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"from typing import Final\n",
|
||||||
|
"\n",
|
||||||
|
"import numpy as np\n",
|
||||||
|
"import pandas as pd\n",
|
||||||
|
"\n",
|
||||||
|
"number_of_entries = 100\n",
|
||||||
|
"number_of_contacts = 10\n",
|
||||||
|
"ids: Final = [_ for _ in range(number_of_entries)]\n",
|
||||||
|
"companies = pd.DataFrame(columns=[], index=pd.Index(ids, name=\"company_id\"))\n",
|
||||||
|
"companies\n",
|
||||||
|
"\n",
|
||||||
|
"\n",
|
||||||
|
"id1 = (\n",
|
||||||
|
" pd.Series(ids * number_of_contacts, name=\"Company 1\")\n",
|
||||||
|
" .sample(frac=0.7, random_state=42)\n",
|
||||||
|
" .reset_index(drop=True)\n",
|
||||||
|
")\n",
|
||||||
|
"id2 = (\n",
|
||||||
|
" pd.Series(ids * number_of_contacts, name=\"Company 2\")\n",
|
||||||
|
" .sample(frac=0.7, random_state=43)\n",
|
||||||
|
" .reset_index(drop=True)\n",
|
||||||
|
")\n",
|
||||||
|
"connections = (\n",
|
||||||
|
" pd.DataFrame(\n",
|
||||||
|
" [\n",
|
||||||
|
" id1,\n",
|
||||||
|
" pd.Series(\n",
|
||||||
|
" np.random.randint(0, 100, size=(max(len(id1), len(id2)))),\n",
|
||||||
|
" name=\"Connection Weight\",\n",
|
||||||
|
" ),\n",
|
||||||
|
" id2,\n",
|
||||||
|
" ]\n",
|
||||||
|
" )\n",
|
||||||
|
" .T.dropna()\n",
|
||||||
|
" .astype(int)\n",
|
||||||
|
")\n",
|
||||||
|
"connections = connections.loc[(connections[\"Company 1\"] != connections[\"Company 2\"])]\n",
|
||||||
|
"connections"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"collapsed": false,
|
||||||
|
"ExecuteTime": {
|
||||||
|
"end_time": "2023-06-03T10:15:42.647508100Z",
|
||||||
|
"start_time": "2023-06-03T10:15:40.656713900Z"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 69,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": " Company 1 Connection Weight Company 2\n0 21 36 58\n1 37 59 86\n2 40 26 83\n3 60 21 2\n4 11 2 10\n.. ... ... ...\n695 62 45 11\n696 10 64 27\n697 97 24 55\n698 14 51 66\n699 50 93 82\n\n[693 rows x 3 columns]",
|
||||||
|
"text/html": "<div>\n<style scoped>\n .dataframe tbody tr th:only-of-type {\n vertical-align: middle;\n }\n\n .dataframe tbody tr th {\n vertical-align: top;\n }\n\n .dataframe thead th {\n text-align: right;\n }\n</style>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Company 1</th>\n <th>Connection Weight</th>\n <th>Company 2</th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>0</th>\n <td>21</td>\n <td>36</td>\n <td>58</td>\n </tr>\n <tr>\n <th>1</th>\n <td>37</td>\n <td>59</td>\n <td>86</td>\n </tr>\n <tr>\n <th>2</th>\n <td>40</td>\n <td>26</td>\n <td>83</td>\n </tr>\n <tr>\n <th>3</th>\n <td>60</td>\n <td>21</td>\n <td>2</td>\n </tr>\n <tr>\n <th>4</th>\n <td>11</td>\n <td>2</td>\n <td>10</td>\n </tr>\n <tr>\n <th>...</th>\n <td>...</td>\n <td>...</td>\n <td>...</td>\n </tr>\n <tr>\n <th>695</th>\n <td>62</td>\n <td>45</td>\n <td>11</td>\n </tr>\n <tr>\n <th>696</th>\n <td>10</td>\n <td>64</td>\n <td>27</td>\n </tr>\n <tr>\n <th>697</th>\n <td>97</td>\n <td>24</td>\n <td>55</td>\n </tr>\n <tr>\n <th>698</th>\n <td>14</td>\n <td>51</td>\n <td>66</td>\n </tr>\n <tr>\n <th>699</th>\n <td>50</td>\n <td>93</td>\n <td>82</td>\n </tr>\n </tbody>\n</table>\n<p>693 rows × 3 columns</p>\n</div>"
|
||||||
|
},
|
||||||
|
"execution_count": 69,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"id1 = (\n",
|
||||||
|
" pd.Series(ids * number_of_contacts, name=\"Company 1\")\n",
|
||||||
|
" .sample(frac=0.7, random_state=42)\n",
|
||||||
|
" .reset_index(drop=True)\n",
|
||||||
|
")\n",
|
||||||
|
"id2 = (\n",
|
||||||
|
" pd.Series(ids * number_of_contacts, name=\"Company 2\")\n",
|
||||||
|
" .sample(frac=0.7, random_state=43)\n",
|
||||||
|
" .reset_index(drop=True)\n",
|
||||||
|
")\n",
|
||||||
|
"connections = (\n",
|
||||||
|
" pd.DataFrame(\n",
|
||||||
|
" [\n",
|
||||||
|
" id1,\n",
|
||||||
|
" pd.Series(\n",
|
||||||
|
" np.random.randint(0, 100, size=(max(len(id1), len(id2)))),\n",
|
||||||
|
" name=\"Connection Weight\",\n",
|
||||||
|
" ),\n",
|
||||||
|
" id2,\n",
|
||||||
|
" ]\n",
|
||||||
|
" )\n",
|
||||||
|
" .T.dropna()\n",
|
||||||
|
" .astype(int)\n",
|
||||||
|
")\n",
|
||||||
|
"connections = connections.loc[(connections[\"Company 1\"] != connections[\"Company 2\"])]\n",
|
||||||
|
"connections"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"collapsed": false,
|
||||||
|
"ExecuteTime": {
|
||||||
|
"end_time": "2023-06-03T01:40:08.441882700Z",
|
||||||
|
"start_time": "2023-06-03T01:40:08.406876900Z"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 73,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": " Company 2\nCompany 1 \n0 6\n1 6\n2 5\n3 9\n4 7\n... ...\n95 7\n96 8\n97 7\n98 6\n99 8\n\n[100 rows x 1 columns]",
|
||||||
|
"text/html": "<div>\n<style scoped>\n .dataframe tbody tr th:only-of-type {\n vertical-align: middle;\n }\n\n .dataframe tbody tr th {\n vertical-align: top;\n }\n\n .dataframe thead th {\n text-align: right;\n }\n</style>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Company 2</th>\n </tr>\n <tr>\n <th>Company 1</th>\n <th></th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>0</th>\n <td>6</td>\n </tr>\n <tr>\n <th>1</th>\n <td>6</td>\n </tr>\n <tr>\n <th>2</th>\n <td>5</td>\n </tr>\n <tr>\n <th>3</th>\n <td>9</td>\n </tr>\n <tr>\n <th>4</th>\n <td>7</td>\n </tr>\n <tr>\n <th>...</th>\n <td>...</td>\n </tr>\n <tr>\n <th>95</th>\n <td>7</td>\n </tr>\n <tr>\n <th>96</th>\n <td>8</td>\n </tr>\n <tr>\n <th>97</th>\n <td>7</td>\n </tr>\n <tr>\n <th>98</th>\n <td>6</td>\n </tr>\n <tr>\n <th>99</th>\n <td>8</td>\n </tr>\n </tbody>\n</table>\n<p>100 rows × 1 columns</p>\n</div>"
|
||||||
|
},
|
||||||
|
"execution_count": 73,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
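"# degree per company: count the outgoing connections of each Company 1\n",
|
||||||
|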
"connections[[\"Company 1\", \"Company 2\"]].groupby(\"Company 1\").count()"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"collapsed": false,
|
||||||
|
"ExecuteTime": {
|
||||||
|
"end_time": "2023-06-03T01:44:23.433333600Z",
|
||||||
|
"start_time": "2023-06-03T01:44:23.424841700Z"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 72,
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": " Analysis-d0 Analysis-d1\ncompany_id \n0 1 6\n1 1 6\n2 1 5\n3 1 9\n4 1 7\n... ... ...\n95 1 7\n96 1 8\n97 1 7\n98 1 6\n99 1 8\n\n[100 rows x 2 columns]",
|
||||||
|
"text/html": "<div>\n<style scoped>\n .dataframe tbody tr th:only-of-type {\n vertical-align: middle;\n }\n\n .dataframe tbody tr th {\n vertical-align: top;\n }\n\n .dataframe thead th {\n text-align: right;\n }\n</style>\n<table border=\"1\" class=\"dataframe\">\n <thead>\n <tr style=\"text-align: right;\">\n <th></th>\n <th>Analysis-d0</th>\n <th>Analysis-d1</th>\n </tr>\n <tr>\n <th>company_id</th>\n <th></th>\n <th></th>\n </tr>\n </thead>\n <tbody>\n <tr>\n <th>0</th>\n <td>1</td>\n <td>6</td>\n </tr>\n <tr>\n <th>1</th>\n <td>1</td>\n <td>6</td>\n </tr>\n <tr>\n <th>2</th>\n <td>1</td>\n <td>5</td>\n </tr>\n <tr>\n <th>3</th>\n <td>1</td>\n <td>9</td>\n </tr>\n <tr>\n <th>4</th>\n <td>1</td>\n <td>7</td>\n </tr>\n <tr>\n <th>...</th>\n <td>...</td>\n <td>...</td>\n </tr>\n <tr>\n <th>95</th>\n <td>1</td>\n <td>7</td>\n </tr>\n <tr>\n <th>96</th>\n <td>1</td>\n <td>8</td>\n </tr>\n <tr>\n <th>97</th>\n <td>1</td>\n <td>7</td>\n </tr>\n <tr>\n <th>98</th>\n <td>1</td>\n <td>6</td>\n </tr>\n <tr>\n <th>99</th>\n <td>1</td>\n <td>8</td>\n </tr>\n </tbody>\n</table>\n<p>100 rows × 2 columns</p>\n</div>"
|
||||||
|
},
|
||||||
|
"execution_count": 72,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
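"# Analysis-d0 is the company itself, Analysis-d1 counts the direct (first-degree) connections\n",
|
||||||
|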
"companies[\"Analysis-d0\"] = 1\n",
|
||||||
|
"companies[\"Analysis-d1\"] = connections[[\"Company 1\", \"Company 2\"]].groupby(\"Company 1\").count()\n",
|
||||||
|
"connection_sum = connections.join(connections.set_index(\"Company 2\"), on=)\n",
|
||||||
|
"# for tiers in range(5):\n",
|
||||||
|
"companies"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"collapsed": false,
|
||||||
|
"ExecuteTime": {
|
||||||
|
"end_time": "2023-06-03T01:43:25.341850700Z",
|
||||||
|
"start_time": "2023-06-03T01:43:25.318015500Z"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"companies"
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"collapsed": false,
|
||||||
|
"ExecuteTime": {
|
||||||
|
"start_time": "2023-06-03T01:36:32.382091200Z"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"outputs": [],
|
||||||
|
"source": [],
|
||||||
|
"metadata": {
|
||||||
|
"collapsed": false,
|
||||||
|
"ExecuteTime": {
|
||||||
|
"start_time": "2023-06-03T01:36:32.385093700Z"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3",
|
||||||
|
"language": "python",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"codemirror_mode": {
|
||||||
|
"name": "ipython",
|
||||||
|
"version": 2
|
||||||
|
},
|
||||||
|
"file_extension": ".py",
|
||||||
|
"mimetype": "text/x-python",
|
||||||
|
"name": "python",
|
||||||
|
"nbconvert_exporter": "python",
|
||||||
|
"pygments_lexer": "ipython2",
|
||||||
|
"version": "2.7.6"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 0
|
||||||
|
}
|
4974
Jupyter/mongoDB/01_How_to_connect_to_MongoDB_StagingDB.ipynb
Normal file
316
Jupyter/mongoDB/02_How_to_write_to_MongoDB_StagingDB.ipynb
Normal file
@ -0,0 +1,316 @@
|
|||||||
|
{
|
||||||
|
"cells": [
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 1,
|
||||||
|
"id": "3730fd2c",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"import configurationFH as conf #hier wird die Configuration für das K8 cluster verwendet\n",
|
||||||
|
"import aki_prj23_transparenzregister.utils.mongo as mongo"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "305f2cc1",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"Für die ersten Experimente zum Lesen und Schreiben von Daten, bieten sich die Tutorials von PyMongo an:\n",
|
||||||
|
"\n",
|
||||||
|
"https://pymongo.readthedocs.io/en/stable/tutorial.html"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "e7c41906",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"# Erstellen eines Connection-Objekts\n",
|
||||||
|
"## utils.mongo.MongoConnection\n",
|
||||||
|
"Dem Connection Object werden die Credentials und Name der Datenbank übergeben, um ein Verbindungsobjekt zu erzeugen."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 2,
|
||||||
|
"id": "efecf29d",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"#Create Connection-Object\n",
|
||||||
|
"MongoObject=mongo.MongoConnection(conf.HOSTNAME,conf.DATABASE,conf.PORT,conf.USERNAME,conf.PASSWORD)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "6975afde",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"# Verbindungsherstellung\n",
|
||||||
|
"## utils.mongo.MongoConnector\n",
|
||||||
|
"Das erzeugte Objekt stellt mit der pymongo-Bilbiothek eine Verbindung zur Datenbank her."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 3,
|
||||||
|
"id": "e755c39b",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"#create Connection\n",
|
||||||
|
"MongoConnect=mongo.MongoConnector(MongoObject)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "97d3ef0c",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"# Erzeuge eine Collection-Instanz\n",
|
||||||
|
"Jede Mongo Datenbank kann mehrere Collections beinhalten. Um auf diese unabhängigen Collections zuzugreifen, muss eine Instanz erstellt werden."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 4,
|
||||||
|
"id": "34e19d26",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"#create Object for news-collection\n",
|
||||||
|
"CollNews=MongoConnect.database[\"news\"]"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "ed7a99f7",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"# Auslesen der Newsartikel\n",
|
||||||
|
"\n",
|
||||||
|
"Mit der find()-Methode wird eine Anfrage an die Datenbank gesendet, welche alle Dokumente der gewählten Collection zurückgibt.\n",
|
||||||
|
"Der cursor verweist somit auf das erste Element der Collection.\n",
|
||||||
|
"Mit der find-Methode können auch einzelne Elemente der Collection gefunden werden, die definierte Attribute erfüllen.\n",
|
||||||
|
"\n",
|
||||||
|
"Der Code führt eine Abfrage in der MongoDB-Sammlung \"news\" durch, erhält die Ergebnisse in einem Cursor und wandelt den Cursor dann in eine Python-Liste um, um auf die Daten einfacher zugreifen zu können. Die Liste NewsList enthält nun alle Dokumente aus der \"news\"-Sammlung als Python-Listenobjekte. Jedes Element in der Liste entspricht einem Dokument aus der Datenbank."
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 5,
|
||||||
|
"id": "495d5448",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"#get all documents from collection\n",
|
||||||
|
"cursor=CollNews.find({})\n",
|
||||||
|
"NewsList=list(cursor)"
|
||||||
|
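"\n",
|
||||||
|
"# find() also accepts a filter document, e.g. a (hypothetical) title search:\n",
|
||||||
|
"# bahn_news = list(CollNews.find({\"title\": {\"$regex\": \"Bahn\"}}))"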
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 6,
|
||||||
|
"id": "192c5199",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"918"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 6,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"len(NewsList)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "3de93755",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"# Auswählen eines Artikels"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 7,
|
||||||
|
"id": "4978218a",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"{'_id': 'tagesschau_90fb6340-abc5-40bb-85c3-3cfd98aa795a',\n",
|
||||||
|
" 'title': 'GDL kündigt neue Streiks an',\n",
|
||||||
|
" 'date': '2023-06-28T16:46:37.324+02:00',\n",
|
||||||
|
" 'text': ' Die Deutsche Bahn soll wieder bestreikt werden. Die Lokführergewerkschaft GDL hat neue Arbeitskämpfe angekündigt, nachdem die Tarifverhandlungen mit der Bahn gescheitert waren. Ab Freitag könnten die Arbeitsniederlegungen beginnen. Die Lokführergewerkschaft GDL hat erneute Streiks bei der Deutschen Bahn angekündigt. Zum genauen Zeitpunkt und Dauer machte GDL-Chef Claus Weselsky noch keine Angaben. Er teilte lediglich mit, dass am Mittwoch und Donnerstag nicht gestreikt werde. Die GDL werde den Streik \"rechtzeitig ankündigen\". Vor der Entscheidung der GDL-Spitzengremien für die Wiederaufnahme von\\xa0Streiks hatte Weselsky von rund 100 Stunden Arbeitskampf gesprochen. Das wären mehr als vier Tage. Die GDL hatte im Herbst bereits sechs Mal bundesweit zu Arbeitsniederlegungen aufgerufen und damit den Bahnverkehr stark eingeschränkt. Die\\xa0GDL warf der Bahn vor der Streik-Entscheidung vor, in der laufenden Tarifrunde auf Zeit zu spielen. Der Konzern habe in der jüngsten Runde am 11. Februar zuvor gemachte Zusagen zurückgenommen. Die Bahn habe die Chance, den Arbeitskampf noch abzuwenden, wenn sie ein Positionspapier der GDL doch noch unterzeichne, so Weselsky. Das bundeseigene Unternehmen hatte zuvor eine Unterschrift unter das Dokument abgelehnt. Damit erfüllte sie ein von der GDL gesetztes Ultimatum nicht. In dem Papier sind Grundpositionen der Gewerkschaft festgeschrieben. Aus Sicht der Bahn enthält es aber nicht den Sachstand, sondern Maximalforderungen der GDL. \"Es gibt keinerlei Veranlassung, der Verhandlungsstand ist eigentlich so, dass wir zu Ergebnissen kommen könnten\", sagte Bahn-Personalvorstand Ulrich Weber. \"Von daher halte ich diese angekündigten Streiks für unverhältnismäßig.\" Da sich die GDL noch nicht auf einen Termin und eine Dauer festgelegt hat, habe er die Hoffnung, den Arbeitskampf in Gesprächen mit der Gewerkschaft noch abwenden zu können. Neben verkürzten Arbeitszeiten und fünf Prozent mehr Geld verlangt die GDL auch für ihre Mitglieder außerhalb der Lokführer-Berufsgruppe eigenständige Tarifverträge. Diese waren bislang von der konkurrierenden DGB-Gewerkschaft\\xa0EVG ausgehandelt worden. Die Bahn will nach eigenem Bekunden in den parallel geführten Verhandlungen unterschiedliche Regelungen für ein und dieselbe Berufsgruppe verhindern. Die Eisenbahn- und Verkehrsgewerkschaft (EVG) will ebenfalls voneinander abweichende Tarifverträge vermeiden. Dieses Thema im Programm: v',\n",
|
||||||
|
" 'source_url': 'https://www.tagesschau.de/wirtschaft/gdl-bahn-streik-105.html',\n",
|
||||||
|
" 'company': '[Deutsche Bahn, GDL]'}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 7,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"NewsList[212]"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "markdown",
|
||||||
|
"id": "0ccba849",
|
||||||
|
"metadata": {},
|
||||||
|
"source": [
|
||||||
|
"# Erweitern des JSONS"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 8,
|
||||||
|
"id": "9904c9c3",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"name": "stdout",
|
||||||
|
"output_type": "stream",
|
||||||
|
"text": [
|
||||||
|
"{'_id': 'tagesschau_90fb6340-abc5-40bb-85c3-3cfd98aa795a', 'title': 'GDL kündigt neue Streiks an', 'date': '2023-06-28T16:46:37.324+02:00', 'text': ' Die Deutsche Bahn soll wieder bestreikt werden. Die Lokführergewerkschaft GDL hat neue Arbeitskämpfe angekündigt, nachdem die Tarifverhandlungen mit der Bahn gescheitert waren. Ab Freitag könnten die Arbeitsniederlegungen beginnen. Die Lokführergewerkschaft GDL hat erneute Streiks bei der Deutschen Bahn angekündigt. Zum genauen Zeitpunkt und Dauer machte GDL-Chef Claus Weselsky noch keine Angaben. Er teilte lediglich mit, dass am Mittwoch und Donnerstag nicht gestreikt werde. Die GDL werde den Streik \"rechtzeitig ankündigen\". Vor der Entscheidung der GDL-Spitzengremien für die Wiederaufnahme von\\xa0Streiks hatte Weselsky von rund 100 Stunden Arbeitskampf gesprochen. Das wären mehr als vier Tage. Die GDL hatte im Herbst bereits sechs Mal bundesweit zu Arbeitsniederlegungen aufgerufen und damit den Bahnverkehr stark eingeschränkt. Die\\xa0GDL warf der Bahn vor der Streik-Entscheidung vor, in der laufenden Tarifrunde auf Zeit zu spielen. Der Konzern habe in der jüngsten Runde am 11. Februar zuvor gemachte Zusagen zurückgenommen. Die Bahn habe die Chance, den Arbeitskampf noch abzuwenden, wenn sie ein Positionspapier der GDL doch noch unterzeichne, so Weselsky. Das bundeseigene Unternehmen hatte zuvor eine Unterschrift unter das Dokument abgelehnt. Damit erfüllte sie ein von der GDL gesetztes Ultimatum nicht. In dem Papier sind Grundpositionen der Gewerkschaft festgeschrieben. Aus Sicht der Bahn enthält es aber nicht den Sachstand, sondern Maximalforderungen der GDL. \"Es gibt keinerlei Veranlassung, der Verhandlungsstand ist eigentlich so, dass wir zu Ergebnissen kommen könnten\", sagte Bahn-Personalvorstand Ulrich Weber. \"Von daher halte ich diese angekündigten Streiks für unverhältnismäßig.\" Da sich die GDL noch nicht auf einen Termin und eine Dauer festgelegt hat, habe er die Hoffnung, den Arbeitskampf in Gesprächen mit der Gewerkschaft noch abwenden zu können. Neben verkürzten Arbeitszeiten und fünf Prozent mehr Geld verlangt die GDL auch für ihre Mitglieder außerhalb der Lokführer-Berufsgruppe eigenständige Tarifverträge. Diese waren bislang von der konkurrierenden DGB-Gewerkschaft\\xa0EVG ausgehandelt worden. Die Bahn will nach eigenem Bekunden in den parallel geführten Verhandlungen unterschiedliche Regelungen für ein und dieselbe Berufsgruppe verhindern. Die Eisenbahn- und Verkehrsgewerkschaft (EVG) will ebenfalls voneinander abweichende Tarifverträge vermeiden. Dieses Thema im Programm: v', 'source_url': 'https://www.tagesschau.de/wirtschaft/gdl-bahn-streik-105.html', 'company': '[Deutsche Bahn, GDL]'}\n"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"import json\n",
|
||||||
|
"\n",
|
||||||
|
"# select the news article\n",
|
||||||
|
"data=NewsList[212]\n",
|
||||||
|
"jsonID=data['_id']\n",
|
||||||
|
"\n",
|
||||||
|
"#add the new attribute \n",
|
||||||
|
"data['company'] = '[Deutsche Bahn, GDL]'\n",
|
||||||
|
"\n",
|
||||||
|
"print(data)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 9,
|
||||||
|
"id": "4eef3942",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"'tagesschau_90fb6340-abc5-40bb-85c3-3cfd98aa795a'"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 9,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"jsonID"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 10,
|
||||||
|
"id": "0752a77a",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"<pymongo.results.UpdateResult at 0x1f0048b26d0>"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 10,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
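"# write the modified document back, matched by its _id\n",
|
||||||
|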
"CollNews.update_one(\n",
|
||||||
|
" {\"_id\": data[\"_id\"]},\n",
|
||||||
|
" {\"$set\": data},\n",
|
||||||
|
" upsert=True # Setze upsert=True, um das Dokument einzufügen, falls es noch nicht existiert\n",
|
||||||
|
")\n"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 11,
|
||||||
|
"id": "dae4966a",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": [
|
||||||
|
"#get all documents from collection\n",
|
||||||
|
"cursor=CollNews.find({})\n",
|
||||||
|
"NewsList=list(cursor)"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": 12,
|
||||||
|
"id": "0412f2f8",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [
|
||||||
|
{
|
||||||
|
"data": {
|
||||||
|
"text/plain": [
|
||||||
|
"{'_id': 'tagesschau_90fb6340-abc5-40bb-85c3-3cfd98aa795a',\n",
|
||||||
|
" 'title': 'GDL kündigt neue Streiks an',\n",
|
||||||
|
" 'date': '2023-06-28T16:46:37.324+02:00',\n",
|
||||||
|
" 'text': ' Die Deutsche Bahn soll wieder bestreikt werden. Die Lokführergewerkschaft GDL hat neue Arbeitskämpfe angekündigt, nachdem die Tarifverhandlungen mit der Bahn gescheitert waren. Ab Freitag könnten die Arbeitsniederlegungen beginnen. Die Lokführergewerkschaft GDL hat erneute Streiks bei der Deutschen Bahn angekündigt. Zum genauen Zeitpunkt und Dauer machte GDL-Chef Claus Weselsky noch keine Angaben. Er teilte lediglich mit, dass am Mittwoch und Donnerstag nicht gestreikt werde. Die GDL werde den Streik \"rechtzeitig ankündigen\". Vor der Entscheidung der GDL-Spitzengremien für die Wiederaufnahme von\\xa0Streiks hatte Weselsky von rund 100 Stunden Arbeitskampf gesprochen. Das wären mehr als vier Tage. Die GDL hatte im Herbst bereits sechs Mal bundesweit zu Arbeitsniederlegungen aufgerufen und damit den Bahnverkehr stark eingeschränkt. Die\\xa0GDL warf der Bahn vor der Streik-Entscheidung vor, in der laufenden Tarifrunde auf Zeit zu spielen. Der Konzern habe in der jüngsten Runde am 11. Februar zuvor gemachte Zusagen zurückgenommen. Die Bahn habe die Chance, den Arbeitskampf noch abzuwenden, wenn sie ein Positionspapier der GDL doch noch unterzeichne, so Weselsky. Das bundeseigene Unternehmen hatte zuvor eine Unterschrift unter das Dokument abgelehnt. Damit erfüllte sie ein von der GDL gesetztes Ultimatum nicht. In dem Papier sind Grundpositionen der Gewerkschaft festgeschrieben. Aus Sicht der Bahn enthält es aber nicht den Sachstand, sondern Maximalforderungen der GDL. \"Es gibt keinerlei Veranlassung, der Verhandlungsstand ist eigentlich so, dass wir zu Ergebnissen kommen könnten\", sagte Bahn-Personalvorstand Ulrich Weber. \"Von daher halte ich diese angekündigten Streiks für unverhältnismäßig.\" Da sich die GDL noch nicht auf einen Termin und eine Dauer festgelegt hat, habe er die Hoffnung, den Arbeitskampf in Gesprächen mit der Gewerkschaft noch abwenden zu können. Neben verkürzten Arbeitszeiten und fünf Prozent mehr Geld verlangt die GDL auch für ihre Mitglieder außerhalb der Lokführer-Berufsgruppe eigenständige Tarifverträge. Diese waren bislang von der konkurrierenden DGB-Gewerkschaft\\xa0EVG ausgehandelt worden. Die Bahn will nach eigenem Bekunden in den parallel geführten Verhandlungen unterschiedliche Regelungen für ein und dieselbe Berufsgruppe verhindern. Die Eisenbahn- und Verkehrsgewerkschaft (EVG) will ebenfalls voneinander abweichende Tarifverträge vermeiden. Dieses Thema im Programm: v',\n",
|
||||||
|
" 'source_url': 'https://www.tagesschau.de/wirtschaft/gdl-bahn-streik-105.html',\n",
|
||||||
|
" 'company': '[Deutsche Bahn, GDL]'}"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
"execution_count": 12,
|
||||||
|
"metadata": {},
|
||||||
|
"output_type": "execute_result"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"source": [
|
||||||
|
"NewsList[212]"
|
||||||
|
]
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"cell_type": "code",
|
||||||
|
"execution_count": null,
|
||||||
|
"id": "426cdc6a",
|
||||||
|
"metadata": {},
|
||||||
|
"outputs": [],
|
||||||
|
"source": []
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"metadata": {
|
||||||
|
"kernelspec": {
|
||||||
|
"display_name": "Python 3",
|
||||||
|
"language": "python",
|
||||||
|
"name": "python3"
|
||||||
|
},
|
||||||
|
"language_info": {
|
||||||
|
"codemirror_mode": {
|
||||||
|
"name": "ipython",
|
||||||
|
"version": 3
|
||||||
|
},
|
||||||
|
"file_extension": ".py",
|
||||||
|
"mimetype": "text/x-python",
|
||||||
|
"name": "python",
|
||||||
|
"nbconvert_exporter": "python",
|
||||||
|
"pygments_lexer": "ipython3",
|
||||||
|
"version": "3.8.8"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nbformat": 4,
|
||||||
|
"nbformat_minor": 5
|
||||||
|
}
|
5
Jupyter/mongoDB/configuration.py
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
HOSTNAME="stagingdbtransparenzreg.ioappzs.mongodb.net"
|
||||||
|
DATABASE="transparenzregister"
|
||||||
|
PORT=None
|
||||||
|
USERNAME="db_user"
|
||||||
|
PASSWORD="secret_password"
|
5
Jupyter/mongoDB/configurationFH.py
Normal file
@ -0,0 +1,5 @@
|
|||||||
|
HOSTNAME="172.17.38.210"
|
||||||
|
DATABASE="transparenzregister"
|
||||||
|
PORT=30217
|
||||||
|
USERNAME="root"
|
||||||
|
PASSWORD="secret_password"
|
@ -1,5 +1,13 @@
|
|||||||
# aki_prj23_transparenzregister
|
# aki_prj23_transparenzregister
|
||||||
|
|
||||||
|
[](https://github.com/astral-sh/ruff)
|
||||||
|
[](https://github.com/astral-sh/ruff/actions)
|
||||||
|
[](https://github.com/fhswf/aki_prj23_transparenzregister/actions/workflows/test-action.yaml)
|
||||||
|
[](https://github.com/fhswf/aki_prj23_transparenzregister/actions/workflows/lint-actions.yaml)
|
||||||
|
[](https://github.com/psf/black)
|
||||||
|
|
||||||
## Contributions
|
## Contributions
|
||||||
|
|
||||||
See the [CONTRIBUTING.md](CONTRIBUTING.md) about how code should be formatted and what kind of rules we set ourselves.
|
See the [CONTRIBUTING.md](CONTRIBUTING.md) about how code should be formatted and what kind of rules we set ourselves.
|
||||||
|
|
||||||
|
[](https://github.com/fhswf/aki_prj23_transparenzregister/actions/workflows/bandit-action.yaml)
|
||||||
|
20
documentations/Makefile
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
# Minimal makefile for Sphinx documentation
|
||||||
|
#
|
||||||
|
|
||||||
|
# You can set these variables from the command line, and also
|
||||||
|
# from the environment for the first two.
|
||||||
|
SPHINXOPTS ?=
|
||||||
|
SPHINXBUILD ?= sphinx-build
|
||||||
|
SOURCEDIR = .
|
||||||
|
BUILDDIR = _build
|
||||||
|
|
||||||
|
# Put it first so that "make" without argument is like "make help".
|
||||||
|
help:
|
||||||
|
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
||||||
|
|
||||||
|
.PHONY: help Makefile
|
||||||
|
|
||||||
|
# Catch-all target: route all unknown targets to Sphinx using the new
|
||||||
|
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
|
||||||
|
%: Makefile
|
||||||
|
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
|
88
documentations/conf.py
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
"""Python sphinx documentation build configuration."""
|
||||||
|
# Configuration file for the Sphinx documentation builder.
|
||||||
|
#
|
||||||
|
# For the full list of built-in configuration values, see the documentation:
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from importlib.metadata import metadata
|
||||||
|
from typing import Final
|
||||||
|
|
||||||
|
# -- Project information -----------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
|
||||||
|
|
||||||
|
_DISTRIBUTION_METADATA = metadata("aki-prj23-transparenzregister")
|
||||||
|
|
||||||
|
__author__: Final[str] = _DISTRIBUTION_METADATA["Author"]
|
||||||
|
__email__: Final[str] = _DISTRIBUTION_METADATA["Author-email"]
|
||||||
|
__version__: Final[str] = _DISTRIBUTION_METADATA["Version"]
|
||||||
|
|
||||||
|
project: Final[str] = "transparenzregister"
|
||||||
|
copyright: Final[str] = "2023, AKI PRJ23" # noqa: A001
|
||||||
|
author: Final[str] = __author__
|
||||||
|
version: Final[str] = __version__
|
||||||
|
release: Final[str] = __version__
|
||||||
|
|
||||||
|
|
||||||
|
sys.path.insert(0, os.path.abspath("../src")) # Add the path to your Python package
|
||||||
|
sys.path.insert(0, os.path.abspath("../src/aki_prj23_transparenzregister"))
|
||||||
|
|
||||||
|
|
||||||
|
# -- General configuration ---------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
|
||||||
|
|
||||||
|
extensions: Final[list[str]] = [
|
||||||
|
"sphinx.ext.autodoc",
|
||||||
|
"nbsphinx",
|
||||||
|
"myst_parser",
|
||||||
|
"sphinx.ext.napoleon",
|
||||||
|
"sphinx_copybutton",
|
||||||
|
"sphinx_autodoc_typehints",
|
||||||
|
"sphinx.ext.intersphinx",
|
||||||
|
"sphinx.ext.autosectionlabel",
|
||||||
|
"sphinx.ext.viewcode",
|
||||||
|
"IPython.sphinxext.ipython_console_highlighting",
|
||||||
|
"sphinxcontrib.mermaid",
|
||||||
|
]
|
||||||
|
|
||||||
|
# templates_path : Final[list[str]] = ["_templates"]
|
||||||
|
exclude_patterns: Final[list[str]] = ["_build", "Thumbs.db", ".DS_Store", "templates"]
|
||||||
|
|
||||||
|
root_doc: Final[str] = "index"
|
||||||
|
# master_doc = "index"
|
||||||
|
|
||||||
|
autodoc_default_flags: Final[list[str]] = [
|
||||||
|
"members",
|
||||||
|
"inherited-members",
|
||||||
|
"show-inheritance",
|
||||||
|
]
|
||||||
|
autodoc_class_signature: Final[str] = "separated"
|
||||||
|
autodoc_default_options: Final[dict[str, bool]] = {
|
||||||
|
_: True for _ in autodoc_default_flags
|
||||||
|
}
|
||||||
|
autodoc_typehints: Final[str] = "signature"
|
||||||
|
simplify_optional_unions: Final[bool] = True
|
||||||
|
typehint_defaults: Final[str] = "comma"
|
||||||
|
source_suffix: Final[list[str]] = [".rst", ".md"]
|
||||||
|
mermaid_output_format: Final[str] = "raw"
|
||||||
|
|
||||||
|
# -- Options for HTML output -------------------------------------------------
|
||||||
|
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
|
||||||
|
|
||||||
|
html_theme: Final[str] = "sphinx_rtd_theme"
|
||||||
|
html_static_path: Final[list[str]] = ["_static"]
|
||||||
|
|
||||||
|
napoleon_google_docstring: Final[bool] = True
|
||||||
|
napoleon_numpy_docstring: Final[bool] = False
|
||||||
|
|
||||||
|
|
||||||
|
nbsphinx_execute = "never"
|
||||||
|
|
||||||
|
intersphinx_mapping: Final[dict[str, tuple[str, None]]] = {
|
||||||
|
"python": ("https://docs.python.org/3", None),
|
||||||
|
"pandas": ("https://pandas.pydata.org/docs/", None),
|
||||||
|
"numpy": ("https://numpy.org/doc/stable/", None),
|
||||||
|
"matplotlib": ("https://matplotlib.org/stable/", None),
|
||||||
|
"scikit-learn": ("https://scikit-learn.org/stable/", None),
|
||||||
|
"sphinx": ("https://docs.sympy.org/latest/", None),
|
||||||
|
}
|
58
documentations/index.rst
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
.. Your Package Name documentation master file, created by Sphinx
|
||||||
|
|
||||||
|
Transparenzregister Documentation
|
||||||
|
=================================
|
||||||
|
This is the documentation of the AKI project group on the German Transparenzregister and an analysis thereof.
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
   :glob:
   :maxdepth: 3
|
||||||
|
:caption: Project Management
|
||||||
|
|
||||||
|
project_management/*
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:glob:
|
||||||
|
:maxdepth: 1
|
||||||
|
:caption: Meeting Notes:
|
||||||
|
|
||||||
|
meeting-notes/*
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:glob:
|
||||||
|
:maxdepth: 3
|
||||||
|
:caption: Research
|
||||||
|
|
||||||
|
research/*
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:glob:
|
||||||
|
:maxdepth: 3
|
||||||
|
:caption: Seminararbeiten
|
||||||
|
|
||||||
|
seminararbeiten/*
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:glob:
|
||||||
|
:maxdepth: 1
|
||||||
|
:caption: UI Mock Ups
|
||||||
|
|
||||||
|
mock_up/*
|
||||||
|
|
||||||
|
.. toctree::
|
||||||
|
:glob:
|
||||||
|
:maxdepth: 0
|
||||||
|
:caption: Modules
|
||||||
|
|
||||||
|
modules
|
||||||
|
|
||||||
|
.. automodule:: aki_prj23_transparenzregister
|
||||||
|
:members:
|
||||||
|
:undoc-members:
|
||||||
|
:show-inheritance:
|
||||||
|
:inherited-members:
|
||||||
|
   :member-order: bysource
|
||||||
|
|
||||||
|
Indices and tables
|
||||||
|
==================
|
||||||
|
* :ref:`genindex`
|
||||||
|
* :ref:`modindex`
|
35
documentations/make.bat
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
@ECHO OFF
|
||||||
|
|
||||||
|
pushd %~dp0
|
||||||
|
|
||||||
|
REM Command file for Sphinx documentation
|
||||||
|
|
||||||
|
if "%SPHINXBUILD%" == "" (
|
||||||
|
set SPHINXBUILD=sphinx-build
|
||||||
|
)
|
||||||
|
set SOURCEDIR=.
|
||||||
|
set BUILDDIR=_build
|
||||||
|
|
||||||
|
%SPHINXBUILD% >NUL 2>NUL
|
||||||
|
if errorlevel 9009 (
|
||||||
|
echo.
|
||||||
|
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
||||||
|
echo.installed, then set the SPHINXBUILD environment variable to point
|
||||||
|
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
||||||
|
echo.may add the Sphinx directory to PATH.
|
||||||
|
echo.
|
||||||
|
echo.If you don't have Sphinx installed, grab it from
|
||||||
|
echo.https://www.sphinx-doc.org/
|
||||||
|
exit /b 1
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "" goto help
|
||||||
|
|
||||||
|
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||||
|
goto end
|
||||||
|
|
||||||
|
:help
|
||||||
|
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
|
||||||
|
|
||||||
|
:end
|
||||||
|
popd
|
90
documentations/meeting-notes/Meeting_2023-06-09.md
Normal file
@ -0,0 +1,90 @@
|
|||||||
|
# Weekly *5*: 09.06.2023
|
||||||
|
|
||||||
|
## Participants
|
||||||
|
- Prof. Arinir
|
||||||
|
- Tristan Nolde
|
||||||
|
- Tim Ronneburg
|
||||||
|
- Phillip Horstenkamp
|
||||||
|
- Kim Mesewinkel-Risse
|
||||||
|
- Sascha Zhu
|
||||||
|
- Sebastian Zeleny
|
||||||
|
|
||||||
|
## Topics
|
||||||
|
|
||||||
|
- Stepstone project:
|
||||||
|
  - An initial exchange with Stepstone is desired
|
||||||
|
  - The concern is that it becomes an obstacle
|
||||||
|
  - The decision therefore lies with the project team
|
||||||
|
  - No further information is currently available
|
||||||
|
  - Suggestion by Prof. Arinir: should the topic be brought to the team again, the current state will be presented and the link to the repo will be shared. Beyond that, no resources will be committed.
|
||||||
|
- Presentation of the [previous agreement](https://github.com/orgs/fhswf/projects/17?pane=issue&itemId=29707639) and feedback:
|
||||||
|
  - Does the scope change - no
|
||||||
|
  - NDA - no
|
||||||
|
  - Publication - at most impressions
|
||||||
|
  - What has to be delivered - not directly for Stepstone but via the FH as mediator
|
||||||
|
- Should presentations be made available in advance?
|
||||||
|
  - Upload them to the Git repo a few days beforehand and notify Prof. Arinir
|
||||||
|
- Review of the seminar presentations
|
||||||
|
  - Verflechtungsanalyse:
|
||||||
|
    - Graph theory
|
||||||
|
    - Social network analysis
|
||||||
|
    - Extension beyond graph theory
|
||||||
|
    - Focus on application and added value rather than on the exact mathematical solution
|
||||||
|
    - Feedback:
|
||||||
|
      - The presentation seems very extensive; request to reduce the number of slides
|
||||||
|
      - The formulas behind the analyses are interesting; possibly cover them after all to put the metrics into context
|
||||||
|
  - Visualization:
|
||||||
|
    - Principles
|
||||||
|
    - Advantages
|
||||||
|
    - Libraries for the implementation (NetworkX, PyViz, ...)
|
||||||
|
    - Effect of color choices
|
||||||
|
    - First implementation in a Jupyter notebook
|
||||||
|
    - Feedback:
|
||||||
|
      - An extremely large number of data points will have to be displayed; how do the libraries handle this? Can they filter?
|
||||||
|
      - If filtering is not possible directly on the graph (the rendering), filter early, before the graph is built
|
||||||
|
  - Data storage
|
||||||
|
    - First integration of visualization with data storage
|
||||||
|
    - Presentation of the "data clusters"
|
||||||
|
      - Master data
|
||||||
|
      - Sentiment data
|
||||||
|
      - Social graph
|
||||||
|
      - Time series data
|
||||||
|
    - Relational DB model
|
||||||
|
    - Focus also on querying the data for follow-up work such as visualization and on the added value for the team, less on theory
|
||||||
|
    - Feedback:
|
||||||
|
      - Experience with the library and the rendering has to be gathered in order to highlight the added value of the solution
|
||||||
|
  - Modelling of the financial metrics
|
||||||
|
    - Column names should be descriptive, e.g. "value" instead of "sum"
|
||||||
|
    - Preference for the model with a single entry and several metric columns over a generic solution via an enum
|
||||||
|
  - Text mining
|
||||||
|
    - Focus on sentiment analysis
|
||||||
|
    - Comparison of different solutions and ML models
|
||||||
|
    - Final conclusion on which tool is best suited
|
||||||
|
  - Data extraction
|
||||||
|
    - Focus on web mining/scraping in the context of the Transparenzregister
|
||||||
|
    - Data sources
|
||||||
|
      - API
|
||||||
|
      - Websites (HTML)
|
||||||
|
      - PDF
|
||||||
|
    - Data extraction from these sources
|
||||||
|
    - Orchestration with Airflow
|
||||||
|
  - DevOps
|
||||||
|
    - Dependency management in Python
|
||||||
|
      - plain requirements.txt
|
||||||
|
      - pip-tools
|
||||||
|
      - poetry
|
||||||
|
    - Advantages of linters
|
||||||
|
    - GitHub
|
||||||
|
      - Actions
|
||||||
|
      - Security
|
||||||
|
      - etc.
|
||||||
|
    - Feedback:
|
||||||
|
      - Git is considered state of the art and does not need further explanation
|
||||||
|
|
||||||
|
## Derived action items
|
||||||
|
|
||||||
|
| Action Item | Responsible | Deadline |
|
||||||
|
|-------------|------------------|-----------------|
|
||||||
|
| Upload slides | Project team | before the presentation date |
|
||||||
|
| Agree on the separation between Verflechtungsanalyse and visualization | Tim and Kim | next sync |
|
||||||
|
| Draw up a deployment plan | Project team | after the seminar papers |
|
BIN
documentations/mock_up/Mock-Up_16-06-2023.jpg
Normal file
After Width: | Height: | Size: 3.3 MiB |
BIN
documentations/mock_up/Mock-Up_16-06-2023.pdf
Normal file
BIN
documentations/mock_up/Mock-Up_16-06-2023.rtb
Normal file
@ -3,7 +3,7 @@
|
|||||||
Version 0.1 Erstellt am 07.04.2023
|
Version 0.1 Erstellt am 07.04.2023
|
||||||
|
|
||||||
| Autoren | Matrikelnummer |
|
| Autoren | Matrikelnummer |
|
||||||
|----------|---------|
|
|--------------------|----------------|
|
||||||
| Kim Mesewinkel | 000 |
|
| Kim Mesewinkel | 000 |
|
||||||
| Tristan Nolde | 000 |
|
| Tristan Nolde | 000 |
|
||||||
| Sebastian Zelenie | 000 |
|
| Sebastian Zelenie | 000 |
|
||||||
@ -17,11 +17,11 @@ Version 0.1 Erstellt am 07.04.2023
|
|||||||
## Historie der Dokumentenversion <a name="historie"></a>
|
## Historie der Dokumentenversion <a name="historie"></a>
|
||||||
|
|
||||||
| Version | Datum | Autor | Änderungsgrund / Bemerkung |
|
| Version | Datum | Autor | Änderungsgrund / Bemerkung |
|
||||||
|----------|---------| ---------| ---------|
|
|-----------|------------|---------------|----------------------------------------|
|
||||||
| 0.1 | 07.04.2023 | Tim Ronneburg | Intialaufsetzen des Pflichtenhefts |
|
| 0.1 | 07.04.2023 | Tim Ronneburg | Initiales aufsetzen des Pflichtenhefts |
|
||||||
| 0.2 | 000 |
|
| 0.2 | 000 | | |
|
||||||
| ... | 000 |
|
| ... | 000 | | |
|
||||||
| 1.0 | 000 |
|
| 1.0 | 000 | | |
|
||||||
|
|
||||||
## Inhaltsverzeichnis <a name="inhaltsverzeichnis"></a>
|
## Inhaltsverzeichnis <a name="inhaltsverzeichnis"></a>
|
||||||
[Historie der Dokumentenversion](#historie)
|
[Historie der Dokumentenversion](#historie)
|
||||||
@ -78,7 +78,7 @@ Test
|
|||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Funktionale Anforderungenn <a name="f_anforderung"></a>
|
## Funktionale Anforderungen <a name="f_anforderung"></a>
|
||||||
|
|
||||||
### **Muss Ziele**
|
### **Muss Ziele**
|
||||||
|
|
71
documentations/project_management/timeline.md
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
# Timeline
|
||||||
|
```{mermaid}
|
||||||
|
|
||||||
|
gantt
|
||||||
|
|
||||||
|
title Timeline PG Transparenzregister
|
||||||
|
dateFormat YYYY-MM-DD
|
||||||
|
    section Organization
|
||||||
|
    Getting to know the project team : done, a1, 2023-03-30, 1d
|
||||||
|
    Creating the org chart : done, after a1 , 1d
|
||||||
|
    GitHub : done, 2023-04-06, 7d
|
||||||
|
    Schedule for the summer semester : active , 2023-04-06, 7d
|
||||||
|
|
||||||
|
    section Documentation
|
||||||
|
    Meeting notes: active, 2023-03-30, 10w
|
||||||
|
    Seminar topics: active, 2023-04-13, 8w
|
||||||
|
    Requirements document (Lastenheft): active, 2023-04-06, 5w
|
||||||
|
    Functional specification (Pflichtenheft): milestone, 2023-05-11
|
||||||
|
Reserve: crit, 2023-06-08, 1w
|
||||||
|
|
||||||
|
|
||||||
|
section Meeting
|
||||||
|
Weekly 1 : done, 2023-03-30, 0.5h
|
||||||
|
    Status meeting 1 : done ,2023-03-30 , 1h
|
||||||
|
Weekly 2 : done, 2023-04-06, 2h
|
||||||
|
|
||||||
|
    Status meeting 2 : active, 2023-04-13, 1h
|
||||||
|
Weekly 3 : active, 2023-04-13, 0.5h
|
||||||
|
Weekly 4 : active, 2023-04-20, 2h
|
||||||
|
|
||||||
|
Weekly 5 : active, 2023-04-27, 0.5h
|
||||||
|
    Status meeting 3 : active, 2023-04-27, 1h
|
||||||
|
|
||||||
|
Weekly 6 : active, 2023-05-04, 2h
|
||||||
|
|
||||||
|
Weekly 7 : active, 2023-05-11, 0.5h
|
||||||
|
    Status meeting 4 : active, 2023-05-11, 1h
|
||||||
|
|
||||||
|
Weekly 8 : active, 2023-05-18, 2h
|
||||||
|
Weekly 9 : active, 2023-05-25, 0.9h
|
||||||
|
    Status meeting 5 : active, 2023-05-25, 1h
|
||||||
|
|
||||||
|
Weekly 10 : active, 2023-06-01, 2h
|
||||||
|
Weekly 11 : active, 2023-06-01, 0.9h
|
||||||
|
    Status meeting 6 : active, 2023-06-08, 1h
|
||||||
|
|
||||||
|
    section Research
|
||||||
|
    Company legal forms : active, 2023-04-06, 14d
|
||||||
|
    Key figures : active, 2023-04-10, 14d
|
||||||
|
    Data sources : active, 2023-04-10, 14d
|
||||||
|
    Legal usability: active, 2023-04-06, 18d
|
||||||
|
    Metrics used, data sources, legality: milestone, 2023-04-24
|
||||||
|
Reserve: crit, 2023-04-24, 3d
|
||||||
|
|
||||||
|
    section Definition
|
||||||
|
    Domain task : active, 2023-04-27, 1d
|
||||||
|
    Technical task : active, 2023-04-27, 1d
|
||||||
|
    Functional requirements : active, 2023-04-27, 7d
|
||||||
|
    Qualitative requirements : active, 2023-04-27, 7d
|
||||||
|
    Model: active, 2023-05-04, 7d
|
||||||
|
    Hierarchy: active, 2023-05-04, 7d
|
||||||
|
    Definition of the requirements : milestone, 2023-05-11
|
||||||
|
Reserve: crit, 2023-05-11, 1w
|
||||||
|
|
||||||
|
section Proof of concept
|
||||||
|
Project Proposal : active, 2023-05-18, 10d
|
||||||
|
    Presentation of the project proposal: milestone, 2023-05-28
|
||||||
|
    Implementation of the proposal: active, 2023-05-25, 14d
|
||||||
|
    Presentation of the proof of concept: milestone, 2023-06-08
|
||||||
|
Reserve: crit, 2023-06-08, 1w
|
||||||
|
```
|
@ -1,68 +0,0 @@
|
|||||||
# Aufgabe: Inhaltliche Skizze für die Seminararbeit zur Thematik Datenspeicherung
|
|
||||||
|
|
||||||
# 1. Allgemeine Anforderungen an Datenbank
|
|
||||||
- **Speicherung** von strukturierten Daten, wie Kennzahlen, Stammdaten
|
|
||||||
- **Skalierbarkeit:** Datenbank sollte skalierbar sein, um zukünftige Daten weiterhin zu speichern und weitere Unternehmen hinzuzufügen
|
|
||||||
- **Sicherheit:** Die Datenbank muss Funktionen unterstützen, um die Datenvor unbefugtem Zugriff zu schützen.
|
|
||||||
- **Datensicherung- und Wiederherstellung: ** Die Datenbank muss Funktionen zur Sicherung und Wiederherstellung unterstützen.
|
|
||||||
- **Leistung:** Die Performance der Datenbank ist eher zweitrangig, da die Abfrage nicht hochdynamisch sein muss. Ausserdem werden nicht viele Anfragen erwartet.
|
|
||||||
- **Integration:** Die Datenbank muss sich in ein Python Framework einbinden lassen und mit dem bevorzugten Frontend Daten austauschen können.
|
|
||||||
|
|
||||||
# 2. Datenarten
|
|
||||||
Welche Daten erwarten wir im Projekt? \
|
|
||||||
Cluster, wie z.B. Stammdaten, Stimmungsdaten, Social Graph, Zeitseriendaten/Historien
|
|
||||||
|
|
||||||
> Abstimmung mit den Bereichen Textmining und Datenbeschaffung über verwendete Daten und Formulierung von Anforderungen an Daten.
|
|
||||||
|
|
||||||
## 2.1 strukturierte Daten
|
|
||||||
Was sind strukturierte Daten?
|
|
||||||
|
|
||||||
## 2.2 unstrukturierte Daten
|
|
||||||
Was sind unstrukturierte Daten?
|
|
||||||
|
|
||||||
> Definiere eine Anforderung an die Struktur der Daten.
|
|
||||||
|
|
||||||
# 3. Arten von Datenbanken
|
|
||||||
## 3.1 Relational
|
|
||||||
Was ist eine reltionale Datenbank?
|
|
||||||
Wie werden Daten gespeichert?
|
|
||||||
Beispiel für relationale Datenbank
|
|
||||||
|
|
||||||
## 3.2 Graph
|
|
||||||
Was ist eine Graph Datenbank?
|
|
||||||
Wie werden Daten gespeichert?
|
|
||||||
Beispiel für Graph Datenbank
|
|
||||||
|
|
||||||
## 3.3 Zeitserien
|
|
||||||
Was ist eine Zeitserien Datenbank?
|
|
||||||
Wie werden Daten gespeichert?
|
|
||||||
Beispiel für Zeitserien Datenbank
|
|
||||||
|
|
||||||
> Kurzvorstellung von Datenbanksystemen
|
|
||||||
|
|
||||||
# 4. DBS Transparenzregister
|
|
||||||
## 4.1 relationales Datenbankmodell
|
|
||||||
|
|
||||||
> Modell zur Abbildung der Relationen im Projekt Transparenzregister
|
|
||||||
|
|
||||||
## 4.2 verteilte Datenbank oder ein System
|
|
||||||
Ein DBS: Wenn nur ein Datenbanksystem verwendet wird, muss nur ein System gepflegt und integriert werden.
|
|
||||||
- Vorteil: einfache Verwaltung und schnelle Abfrage von Datenbeziehungen
|
|
||||||
|
|
||||||
verteiltes System: spezialisierte Datenbank für jeden Datenytp, wie z.B. Zeitseriendaten oder Graph Daten
|
|
||||||
|
|
||||||
> Definiere eine Empfehlung/Anforderung für das Projekt Transparenzregister.
|
|
||||||
|
|
||||||
## 4.3 Analyse zur Auswahl eines Datenbanksystems
|
|
||||||
Was sollte bei der Auswahl eines Datenbanksystems beachtet werden?
|
|
||||||
|
|
||||||
> Empfehlungen für DBS-Auswahl
|
|
||||||
|
|
||||||
## 4.4 Anbindung an Front- und Backend
|
|
||||||
Wie kann das DBS an das Front- und Backend angebunden werden?
|
|
||||||
> Jupyter Notebook mit Beispiel
|
|
||||||
|
|
||||||
## 4.5 Abfragen in der Datenbank
|
|
||||||
Wie können Unternehmensdaten abgefragt werden?
|
|
||||||
Wie können Verflechtungen abgefragt werden?
|
|
||||||
> Jupyter Notebook mit Beispiel
|
|
@ -0,0 +1,748 @@
|
|||||||
|

|
||||||
|
|
||||||
|
<div style="page-break-after: always;"></div>
|
||||||
|
|
||||||
|
# Data Storage
|
||||||
|
## Table of contents
|
||||||
|
|
||||||
|
- [Data Storage](#data-storage)
|
||||||
|
  - [Table of contents](#table-of-contents)
|
||||||
|
  - [Motivation: why do we store data?](#motivation-why-do-we-store-data)
|
||||||
|
  - [1. General requirements for the database](#1-general-requirements-for-the-database)
|
||||||
|
  - [2. Types of data](#2-types-of-data)
|
||||||
|
    - [2.1 Which data do we expect in the project?](#21-which-data-do-we-expect-in-the-project)
|
||||||
|
    - [2.2 Structured data](#22-strukturierte-daten)
|
||||||
|
    - [2.3 Unstructured data](#23-unstrukturierte-daten)
|
||||||
|
  - [3. Types of databases](#3-arten-von-datenbanken)
|
||||||
|
    - [3.1 Relational database](#31-relationale-datenbank)
|
||||||
|
      - [3.1.1 Creating tables](#311-anlegen-von-tabellen)
|
||||||
|
      - [3.1.2 SQL - querying relational databases](#312-sql---abfrage-von-relationalen-datenbanken)
|
||||||
|
    - [3.2 Graph database](#32-graphdatenbank)
|
||||||
|
      - [3.2.1 Creating a data set](#321-erstellung-eines-datensatzes)
|
||||||
|
      - [3.2.2 Cypher - querying graph databases](#322-cypher---abfrage-von-graphdatenbanken)
|
||||||
|
    - [3.3 Time series database](#33-zeitseriendatenbank)
|
||||||
|
      - [3.3.1 Creating a data set](#331-erstellung-eines-datensatzes)
|
||||||
|
      - [3.3.2 FluxQuery](#332-fluxquery)
|
||||||
|
    - [3.4 Document database](#34-dokumenten-datenbank-)
|
||||||
|
      - [3.4.1 Creating a collection / storing documents](#341-erstellen-einer-collection--ablegen-von-dokumenten)
|
||||||
|
    - [3.5 Structure of a database](#35-aufbau-einer-datenbank)
|
||||||
|
  - [4. Databases for the Transparenzregister](#4-datenbanken-transparenzregister)
|
||||||
|
    - [4.1 Production DB - relational database model](#41-production-db---relationales-datenbankmodell)
|
||||||
|
    - [4.2 Staging DB](#42-staging-db)
|
||||||
|
    - [4.3 SQL Alchemy](#43-sql-alchemy)
|
||||||
|
  - [5. Proof of Concept](#5-proof-of-concept)
|
||||||
|
    - [5.1 Docker](#51-docker)
|
||||||
|
    - [5.2 PG Admin](#52-pg-admin)
|
||||||
|
    - [5.3 Creating mock data](#53-erstellen-von-mock-daten)
|
||||||
|
    - [5.4 Creating the relational tables](#54-anlegen-der-relationalen-tabellen)
|
||||||
|
    - [5.5 Querying the database](#55-abfragen-der-datenbank)
|
||||||
|
  - [6. Summary](#6-zusammenfassung)
|
||||||
|
  - [Sources](#quellen)
|
||||||
|
|
||||||
|
<div style="page-break-after: always;"></div>
|
||||||
|
|
||||||
|
|
||||||
|
## Motivation: why do we store data?
|
||||||
|
There are several motivations for storing data:
|
||||||
|
- **Collection:** Data is stored to preserve knowledge and information about objects, events or processes.
|
||||||
|
- **Historization:** Storing data in a temporal context creates a history in which patterns, trends and correlations can be recognized. Historical data also supports decision-making.
|
||||||
|
- **Evaluation:** Stored data makes it possible to trace, evaluate and improve systems, products and processes.
|
||||||
|
|
||||||
|
In the Transparenzregister project, data storage is a core component, since the collected information forms the basis for the analyses. \
|
||||||
|
Suitable pipelines extract insights from this data, e.g. to visualize and evaluate interlocks between persons and companies or the economic trend of a company.
|
||||||
|
|
||||||
|
## 1. General requirements for the database
|
||||||
|
- **1.1 Storage/Integrity:** The system used must store data such as company metrics, master data and interlock information. The data must be stored correctly and consistently, i.e. in a valid and contradiction-free state, and transactions should satisfy the ACID properties (a minimal code sketch follows after this requirements list).
|
||||||
|
  - **Atomicity:** A transaction is treated as atomic, i.e. as the smallest indivisible unit: it is either carried out and applied completely (commit) or, in case of an error, undone entirely (rollback).
|
||||||
|
  - **Consistency:** Consistency means that a transaction transfers the database from one valid state into another valid state. If a transaction would violate consistency, it is aborted and its changes are rolled back.
|
||||||
|
  - **Isolation:** Isolation ensures that transactions are executed independently of each other, so that they do not influence each other during execution.
|
||||||
|
  - **Durability:** Durability means that the results of a transaction are stored permanently in the database and survive a system restart or a system failure.
|
||||||
|
- **1.2 Scalability:** The system should be scalable, so that future data can still be stored and further companies can be added. By adding resources, the system can be adapted to growing data volumes and user demands. One speaks of horizontal scaling when the load is distributed across several database servers.
|
||||||
|
- **1.3 Security:** The database must provide mechanisms to protect the data from unauthorized access.
|
||||||
|
  - **Authentication:** Verification of a user's identity, e.g. via username and password. Often two-factor authentication is used to raise the security level.
|
||||||
|
  - **Authorization:** During authorization the authenticated user is granted access rights and resources based on their user role. A user with administrator rights gets access to all system resources, whereas a normal user only gets restricted access.
|
||||||
|
  - **Encryption:** Encryption transforms data into a code that cannot be interpreted, to protect the content from unauthorized access. An algorithm generates a key and encrypts the data with it. To read the data again, it has to be decrypted with that key.
|
||||||
|
- **1.4 Backup and recovery:** The database must support backup and recovery functions. In case of an outage or error, mechanisms must be in place that protect and restore the data.
|
||||||
|
  Most data in a database changes only slowly, some of it quickly. Depending on the use case, a suitable backup strategy has to be chosen, so that only the data that actually changes is backed up.
|
||||||
|
  Every database management system offers its own backup and recovery mechanisms, whose options have to be evaluated once a system has been selected.
|
||||||
|
  - **Full backup:** A full backup is a complete copy of the database including all data, indexes, tables and metadata. It requires a lot of storage space and time, both for creating the backup and for restoring it.
|
||||||
|
  - **Incremental backup:** An incremental backup only stores the changes since the last full or incremental backup. Because of the reduced data volume it is considerably faster and more space-efficient than a full backup. A restore requires the last full backup and all incremental backups since then, which creates a dependency chain: every backup needs its predecessors to be restored.
|
||||||
|
  - **Differential backup:** A differential backup stores all changes since the last full backup, i.e. the longer ago the last full backup is, the larger and slower the backup becomes. A restore requires the last full and the last differential backup.
|
||||||
|
|
||||||
|
<script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
|
||||||
|
<script type="text/x-mathjax-config">
|
||||||
|
MathJax.Hub.Config({ tex2jax: {inlineMath: [['$', '$']]}, messageStyle: "none" });
|
||||||
|
</script>
|
||||||
|
|
||||||
|
**Backup frequency:**
|
||||||
|
The backup frequency is a trade-off between risk, cost and effort. It has to be estimated individually, based on the size of the database and how often the data changes, in order to design a suitable backup strategy. \
|
||||||
|
*Example:*
|
||||||
|
- Given: a database of 500 GB
|
||||||
|
- Requirements
|
||||||
|
  - at least four times the backup capacity --> 2 TB
|
||||||
|
  - duration of a full backup (reproduced by the code sketch after this example): \
|
||||||
|
    USB 2.0: $\frac{500\,GB}{\frac{60\,MB/s}{1024}} = 8533\,s \approx 142\,min \approx 2.37\,h$ \
|
||||||
|
    USB 3.0: $\frac{500\,GB}{\frac{625\,MB/s}{1024}} = 820\,s \approx 13.6\,min \approx 0.23\,h$ \
|
||||||
|
    VDSL 100: $\frac{500\,GB}{\frac{5\,MB/s}{1024}} = 102400\,s \approx 1706\,min \approx 28.4\,h$ \
|
||||||
|
    Fiber: $\frac{500\,GB}{\frac{62.5\,MB/s}{1024}} = 8192\,s \approx 136.5\,min \approx 2.3\,h$
|
||||||
|
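The example numbers can be reproduced with a small helper; the 500 GB size and the transfer rates are the assumptions from the example above.
|
||||||
|

|
||||||
|
```python
|
||||||
|
# full-backup transfer time for the 500 GB example database
|
||||||
|
def backup_hours(size_gb: float, rate_mb_s: float) -> float:
|
||||||
|
    seconds = size_gb * 1024 / rate_mb_s  # GB -> MB, divided by the MB/s rate
|
||||||
|
    return seconds / 3600
|
||||||
|

|
||||||
|
for medium, rate in [("USB 2.0", 60), ("USB 3.0", 625), ("VDSL 100", 5), ("Fiber", 62.5)]:
|
||||||
|
    print(f"{medium}: {backup_hours(500, rate):.2f} h")
|
||||||
|
```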
|
||||||
|
- **1.5 Performance:** The performance requirements for the database derive from several characteristics, which can be combined and depend on the demands on the system. An analysis of the use cases is necessary to specify the requirements.
|
||||||
|
  - **Latency:** The database should process requests efficiently and within an acceptable response time. Typical database applications, such as a web shop, need many individual accesses, each of which runs through a communication protocol. Many small database accesses slow an application down, because it has to wait for the network communication. For the user experience of a web shop, latency is therefore an important characteristic.
|
||||||
|
  - **Throughput:** A metric for the number of transactions per unit of time. Throughput matters when a web shop has to handle a large number of users.
|
||||||
|
  - **Availability:** High availability, i.e. reachability of the database, is ensured through redundancy (keeping multiple copies) and recovery mechanisms, so that the data is continuously available.
|
||||||
|
  - **Maintainability:** An easily maintainable database must provide functions for monitoring, diagnosis, maintenance, backup and recovery. These automated pipelines can negatively affect other properties, e.g. availability, because such processes block the database.
|
||||||
|
- **1.6 Integration:** The database must provide interfaces to make the stored data available to an application or other systems (a hypothetical REST sketch follows after this list).
|
||||||
|
  - **API:** The *Application Programming Interface* is a defined interface that provides methods and functions to access and manage the database.
|
||||||
|
  - **REST:** REpresentational State Transfer describes an interface that uses the HTTP protocol and realizes the communication with the methods GET, POST, PUT and DELETE.
|
||||||
|
  - **SOAP:** The Simple Object Access Protocol is an interface based on XML.
|
||||||
|
  - **ODBC:** Open Database Connectivity is a standardized interface for the exchange between applications and databases.
|
||||||
|
  - **JDBC:** Java Database Connectivity, the Java counterpart to ODBC.
|
||||||
|
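A minimal sketch of the ACID behaviour described in 1.1, using SQLite from the Python standard library purely for illustration: the second insert violates the primary key, so the whole transaction is rolled back and nothing is committed.
|
||||||
|

|
||||||
|
```python
|
||||||
|
import sqlite3
|
||||||
|

|
||||||
|
# in-memory database, used only to demonstrate commit/rollback semantics
|
||||||
|
con = sqlite3.connect(":memory:")
|
||||||
|
con.execute("CREATE TABLE company (id INTEGER PRIMARY KEY, name TEXT NOT NULL)")
|
||||||
|

|
||||||
|
try:
|
||||||
|
    with con:  # opens a transaction: commit on success, rollback on error
|
||||||
|
        con.execute("INSERT INTO company VALUES (1, 'Example AG')")
|
||||||
|
        con.execute("INSERT INTO company VALUES (1, 'Duplicate AG')")  # primary key clash
|
||||||
|
except sqlite3.IntegrityError:
|
||||||
|
    pass  # the transaction was undone as a whole (atomicity)
|
||||||
|

|
||||||
|
print(con.execute("SELECT COUNT(*) FROM company").fetchone()[0])  # -> 0
|
||||||
|
```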
|
||||||
|
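For the integration requirement in 1.6, a hypothetical REST access could look like the following sketch using the requests library; the URL and the response fields are invented for illustration and do not belong to the project.
|
||||||
|

|
||||||
|
```python
|
||||||
|
import requests
|
||||||
|

|
||||||
|
# hypothetical endpoint; URL and payload shape are assumptions for illustration only
|
||||||
|
response = requests.get("https://api.example.org/companies/42", timeout=10)
|
||||||
|
response.raise_for_status()  # turn HTTP errors into exceptions
|
||||||
|
company = response.json()    # e.g. {"id": 42, "name": "Example AG"}
|
||||||
|
print(company["name"])
|
||||||
|
```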
|
||||||
|
## 2. Types of data
|
||||||
|
|
||||||
|
Different types of data are used to describe companies.
|
||||||
|
The following types of data are a general summary and are meant to support the brainstorming for the project-specific data.
|
||||||
|
- **Master data:** Master data contains the basic properties and
|
||||||
|
  information about real objects that are required for periodic processing. A personnel master record, for example, consists of a personnel number, the employee's name, address and bank details. \
|
||||||
|
  Depending on the use case or business process, the content has to be defined, e.g. for company, customer, material or patient master data.
|
||||||
|
|
||||||
|
- **Metadaten:** Mit Metadaten werden weitere Daten beschrieben und vereinfachen das Auffinden und Arbeiten mit diesen. Metadaten beinhalten beispielsweise den Verfasser, das Erstellungs- oder Änderungsdatum, die Dateigröße oder den Ablageort. \
|
||||||
|
Mit Metadaten können Datenbestände einfacher und effizienter verwaltet und abgefragt werden.
|
||||||
|
|
||||||
|
- **Transaktionsdaten:** Transaktionsdaten beschreiben eine Veränderung des Zustands, wie z.B. eine Kapitalbewegung oder eine Ein-/Auslieferung aus einem Lager.
|
||||||
|
|
||||||
|
- **Referenzdaten:** Referenzdaten sind eine Teilmenge von Stammdaten und beschreiben die zulässigen Daten. Diese werden nur selten geändert oder angepasst und gelten als konstant. Beispiele für Referenzdaten sind: Postleitzahlen, Kostenstellen, Währungen oder Finanzhierarchien.
|
||||||
|
|
||||||
|
- **Bestandsdaten:** Bestandsdaten sind dauerhafter Veränderung ausgesetzt, da diese z.B. die Artikelmenge in einem Lager oder das Guthaben auf einem Konto beschreiben. Diese korrelieren mit den Transaktionsdaten.
|
||||||
|
|
||||||
|
Diese Datenarten müssen im Kontext des Projektes betrachtet werden und sollen das Brainstorming unterstützen. \
|
||||||
|
*Stammdaten:* Unternehmensname, Anschrift, Branche \
|
||||||
|
*Metadaten:* Verfasser einer Nachricht - Veröffentlichungsdatum; Prüfungsunternehmen - Prüfdatum \
|
||||||
|
*Transaktionsdaten:* Wer hat wann wo gearbeitet? \
|
||||||
|
*Referenzdaten:* Einheit von Metriken (Umsatz, EBIT usw.) \
|
||||||
|
*Bestandsdaten:* Vorstand, Geschäftsführer, Aufsichtsrat
|
||||||
|
|
||||||
|
### 2.1 Which data do we expect in the project?

From the general data types above, we identified clusters that are needed in the project.
The combination of the data clusters listed below enables a holistic view and assessment of the companies.

- **Company master data:** The master data contains basic information about a company, such as name, address, legal form and industry.

- **Sentiment data:** Sentiment or mood data describes the public perception of the company regarding employee satisfaction, sustainability and environmental friendliness.

> Sentiment data can answer questions such as:
>- What reputation does the company have?
>- How is the company perceived publicly?
>- How strong is customer loyalty?

- **Financial data:** Financial data are metrics and indicators for assessing the economic success of a company, e.g. revenue, EBIT, EBIT margin, balance sheet total, equity ratio, debt ratio, leverage, return on equity, equity turnover.

> Financial data can answer questions such as:
>- How profitably does the company operate?
>- What is the economic trend?
>- How does the company rate on various metrics?

- **Interlink data/social graph:** The connections and relationships to persons or companies are stored in the interlink data. Relationships arise when a person is a managing director, board member, supervisory board member, authorized signatory (Prokurist) or auditor, and when companies e.g. work together, supply one another, or hold shares in another company.

> Interlink data can answer questions such as:
>- Are there strategic partnerships?
>- How are the supply chains structured?
>- What is the quality of the business relationships?
>- Is the company set up to be resilient?
>- Are there links to specific persons?

The mind map shown here is not exhaustive and does not represent the final data scope of the project. It is a snapshot taken before the relational schema was developed and the implementation started.



### 2.2 Structured data

Structured data comes in a defined format. A schema is defined up front to fix fields, data types and their order, and the data is stored accordingly.
This kind of data is used e.g. in relational databases, where each row of a table represents one record. The relationships are defined via the entities.
The example below shows, in a simple form, how the data for the class *Company* is defined. With this schema, the data preparation can be implemented (a dataclass sketch follows after the table below).

```mermaid
---
title: Structured Data
---
classDiagram
    class Company:::styleClass {
        int ID
        string Name
        string Street
        int ZipCode
    }
```

|Advantages|Disadvantages|
|:-----:|:------:|
|easy to use, since the data is organized|the schema restricts how the data can be used|
|tooling is available once the schema is known|limited storage options, since rigid schemas are imposed|
|well suited for automation| |

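The same schema can be expressed directly in Python; the sketch below mirrors the class diagram and assumes nothing beyond its four fields.

```python
# The Company schema above as a Python dataclass (sketch; the field names
# mirror the class diagram, nothing else is assumed).
from dataclasses import dataclass

@dataclass
class Company:
    ID: int
    Name: str
    Street: str
    ZipCode: int

company = Company(ID=1, Name="BASF", Street="Carl-Bosch-Straße 38", ZipCode=67056)
print(company)
```
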
### 2.3 Unstructured data

Unstructured data does not follow a schema; examples are e-mails, text documents, blogs, chats, images, videos or audio files.

- **Text analysis:** Information is extracted from unstructured text, e.g. through analysis and mining. Named entity recognition detects the occurrence of specific words, or the sentiment or topic of an article is determined.

- **Audio/video analysis:** When processing unstructured audio or video files, objects, faces, voices or patterns are recognized to make them usable for voice assistants or autonomous vehicles.

Unstructured data is an important source of information for exploration and analysis tasks. Data sources such as e-mails, RSS feeds and blogs are searched to find specific information or to establish connections between different sources. This provides deep insight into the data, supports decision-making in unclear situations and enables new findings.

|Advantages|Disadvantages|
|:-----:|:------:|
|great potential for gaining insights|considerable processing is required to make the data usable|
|unlimited range of uses, since no schema is imposed|special tools are needed for preparation|
| |expert knowledge of the data and its preparation is required|

## 3. Types of Databases

### 3.1 Relational database

A relational database stores and manages structured data. The data is organized in tables consisting of rows and columns. \
The rows of a table hold the records, i.e. each row represents one record. By logically joining the tables, the relationships between the data can be modeled. \
The most important elements of a relational database are explained below:

**Table:** A table represents an entity or object, such as a company, customer or order. The table consists of columns that store the attributes of the entity. \
Each row is an instance of the object and contains concrete values.

**Table_Person**

|**ID**|**Name**|**Age**|**Salary**|**Height**|
|---|---|---|---|---|
|1|Tim|31|300.00|191.20|
|2|Tom|21|400.00|181.87|
|3|Tam|51|500.00|176.54|

https://www.sqlservercentral.com/articles/creating-markdown-formatted-text-for-results-from-sql-server-tables

**Primary key:** The primary key is a unique identifier for each row of a table and is needed to identify a single row. In the example above, the column *ID* is the primary key.

**Foreign key:** A foreign key references the primary key of another table to establish a relationship between the tables. \
In the example, the column *customer_id* refers to the primary key of the table *Table_Person*.

**Table_Orders**

|**ID**|**Product**|**total**|**customer_id**|
|---|---|---|---|
|1|Paper|12|2|
|2|Book|3|2|
|3|Marker|5|3|

**Relationships:** As described above, foreign keys are used to establish relationships between tables. \
There are several relationship types:

|**Type**|**Description**|
|---|---|
|1:1|Each primary key value refers to exactly one record. **Example:** each person has exactly one order.|
|1:n|The primary key is unique and occurs 0..n times in the referencing table. **Example:** each person can have zero, one or several orders.|
|n:n|Each record of either table can relate to any number of records (or to none) in the other. Usually a third table is created for this type, serving as a mapping or junction table, since otherwise no direct connection can be made.|

https://www.ibm.com/docs/de/control-desk/7.6.1.2?topic=structure-database-relationships

#### 3.1.1 Creating tables

Relational databases are operated via SQL. Below is an example of creating a table with attributes.

```
CREATE TABLE Bildungsstaette (
    ID INT PRIMARY KEY NOT NULL,
    Name VARCHAR(255) NOT NULL,
    Anschrift VARCHAR(255),
    Art VARCHAR(100)
);
```

#### 3.1.2 SQL - querying relational databases

SQL (Structured Query Language) is used for management and querying.
With this syntax, tables can be created and data can be inserted, updated, deleted and queried.

**Show all attributes of a table:**
```
SELECT * FROM table_name;
```

**Show selected attributes of a table:**
```
SELECT column1, column2 FROM table_name;
```

**Filtered view of a table:**
```
SELECT * FROM table_name WHERE condition;
```

**Retrieve data from several tables (join):**
```
SELECT t1.column1, t2.column2
FROM table1 t1
JOIN table2 t2 ON t1.id = t2.id;
```

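Such a join can also be issued from Python; the sketch below uses SQLAlchemy's textual SQL against the illustrative tables from this section, with connection details taken from the docker-compose setup shown later (unquoted table names fold to lowercase in Postgres).

```python
# Sketch: running the join above from Python via SQLAlchemy's text() API;
# table and column names are the illustrative ones from this section.
from sqlalchemy import create_engine, text

engine = create_engine("postgresql://postgres:postgres@localhost:5432/postgres")

query = text("""
    SELECT p.name, o.product, o.total
    FROM table_person p
    JOIN table_orders o ON p.id = o.customer_id
""")

with engine.connect() as conn:
    for name, product, total in conn.execute(query):
        print(name, product, total)
```
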
### 3.2 Graph database

A graph database is based on the graph concept. \
A graph consists of nodes and edges (relationships) that represent the connections between the nodes. \
The strength of a graph database lies in representing complex relationships.

**Nodes:** Each node represents an entity or object. Every node has a unique ID or identifier so it can be addressed. Attributes can also be stored on a node to hold additional information, such as a person's year of birth or place of residence.

**Edges:** Edges connect the nodes and thus represent the relationships between the objects. Edges can be directed or undirected. For a directed relationship, the direction from the source to the target node matters, whereas an undirected edge represents a symmetric relationship. \
*Directed relationship:* a company depends on the report of its auditor. \
*Undirected relationship:* company A works together with company B on a project.

**Labels:** Labels are used to categorize/group the nodes. A node can carry several labels to express membership in different categories (e.g. a company's industry).

#### 3.2.1 Creating a record

1. Create the node: first a node is created that represents the entity.

2. ID: the node needs a unique identifier, which can be generated automatically or set manually.

3. Insert the node: once the two required elements (node and ID) are defined, the node can be inserted.

4. Define relationships/edges: if the node has relationships to other nodes, they can be added.

**Example:**
The following code creates two nodes and the corresponding relationships in neo4j.

```
CREATE (:University {id: 4711, name: 'FH SWF - Iserlohn'}),
       (:University {id: 1234, name: 'FH SWF - Meschede'})
WITH *
MATCH (u1:University {id: 4711}), (u2:University {id: 1234})
CREATE (u1)-[:cooparates_with]->(u2),
       (u2)-[:cooparates_with]->(u1)
```



#### 3.2.2 Cypher - querying graph databases

The query language Cypher is used to query the data.\
Only a few basic commands are shown below.

**Query all nodes**
```
MATCH (n)
RETURN n
```
**Query all edges/relationships**
```
MATCH ()-[r]-()
RETURN r
```

**Query nodes with given properties**
```
MATCH (n:Label)
WHERE n.property = value
RETURN n
```

**Query the relationship between two nodes**
```
MATCH (n1)-[r]->(n2)
WHERE n1.property = value1 AND n2.property = value2
RETURN r
```

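Such queries might be run from Python with the official neo4j driver; in the sketch below the URI and credentials are assumptions for a local instance, and the query reuses the university example from above.

```python
# Sketch: running a Cypher query from Python with the official neo4j driver;
# URI and credentials are assumptions for a local default installation.
from neo4j import GraphDatabase

driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))

with driver.session() as session:
    result = session.run(
        "MATCH (u1:University {id: $a})-[r]->(u2:University {id: $b}) "
        "RETURN type(r) AS rel_type",
        a=4711, b=1234,
    )
    for record in result:
        print(record["rel_type"])

driver.close()
```
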
### 3.3 Time series database

Time series arise wherever a metric is observed over time, e.g. revenue or EBIT.
That is, each measured value has an associated timestamp, and the individual points are grouped into a series so their progression can be analyzed. \
These databases are specialized in storing, managing and querying time series. \
The following explanations refer to InfluxDB.

**Bucket:** A bucket separates data into different stores and is comparable to a database in relational systems.

**Datapoint:** The data points are stored under the bucket. A datapoint is composed of several elements, which are either required or optional:

|**Element**|**Properties**|
|---|---|
|Measurement|data type: string<br>spaces are not allowed<br>max. 64 kB|
|Tags|optional<br>consist of a key/value pair<br>data type: string<br>spaces are not allowed<br>max. 64 kB|
|Fields|at least one field=value pair is required<br>not every field has to be present in every point<br>data types: float, string, integer, boolean|
|Timestamp|optional<br>by default, Influx writes the system time as the timestamp<br>the precision is configurable (default: nanoseconds)|

#### 3.3.1 Creating a record

Time series databases are set up via the Influx CLI.

**Creating a bucket** (shown here in InfluxQL syntax; a 1.x database corresponds to a bucket):
```
CREATE DATABASE finance
```

#### 3.3.2 Flux queries

Data points are queried with Flux, which plays the role that SQL plays for relational databases.

**Fetch all data from a bucket:**
```
from(bucket: "my-bucket")
```

**Set the time range:**
```
range(start: -1h, stop: now())
```

**Filter by conditions:**
```
filter(fn: (r) => r._measurement == "temperature")
```

**Transform data points:**
```
map(fn: (r) => ({r with temperatureF: r.temperature * 2.34 + 123}))
```

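The same pipeline could be issued from Python with the influxdb-client package; in the sketch below, the URL, token, org and bucket name are assumptions for a local InfluxDB 2.x instance.

```python
# Sketch: running the Flux pipeline above from Python; url, token, org and
# bucket name are assumptions for a local InfluxDB 2.x instance.
from influxdb_client import InfluxDBClient

client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")

flux = '''
from(bucket: "my-bucket")
  |> range(start: -1h, stop: now())
  |> filter(fn: (r) => r._measurement == "temperature")
'''

for table in client.query_api().query(flux):
    for record in table.records:
        print(record.get_time(), record.get_value())

client.close()
```
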
### 3.4 Document database <a name="3.4"></a>

A document database is a system developed for storing documents. There are different kinds of documents, e.g. text files (JSON, HTML, XML) or PDFs.
No schema has to be defined for the documents, which makes it possible to store documents with different fields.
Identical or similar documents are stored together in *collections*.
The most important elements of a document database are:

**Database:** A database is a container under which documents are stored. It serves to isolate, i.e. logically separate, data.

**Collection:** Collections are used to group documents with similar properties. Since document databases are schemaless, the collections provide the organization.

**Document:** A document is a single data object and the smallest unit in a document DB. A document can be, for example, a JSON file with its own internal structure.



#### 3.4.1 Creating a collection / storing documents

Below is a code snippet that connects to the database, creates a collection and stores documents.

```python
from pymongo import MongoClient

# connect to the MongoDB instance
client = MongoClient('mongodb://localhost:27017')

# create a client object for the database
db = client['transparenz']

# create the collection
collection = db['Tagesschau_API']

# example documents
doc1 = {
    'title': 'BASF wird verkauft!',
    'content': 'BASF wird an Bayer AG verkauft',
    'date': '2023-06-22'
}

doc2 = {
    'title': 'Bayer Aktie erreicht Rekordniveau',
    'content': 'Aufgrund des Zukaufs von BASF.....',
    'date': '2023-06-23'
}

# insert the documents into the collection
collection.insert_one(doc1)
collection.insert_one(doc2)

# close the connection
client.close()
```

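For completeness, a short usage sketch for reading the documents back out of the same collection:

```python
# Usage sketch: reading documents back from the collection created above.
from pymongo import MongoClient

client = MongoClient('mongodb://localhost:27017')
collection = client['transparenz']['Tagesschau_API']

# all documents whose title mentions BASF (case-insensitive regex)
for doc in collection.find({'title': {'$regex': 'BASF', '$options': 'i'}}):
    print(doc['date'], doc['title'])

client.close()
```
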
### 3.5 Building a database

Before building a relational database, planning steps should be carried out to design a system that meets the requirements. \
The most important steps are:

**Requirements analysis:** identification and definition of the requirements for the database by examining the use case.

**Data model:** analysis of the structures and relationships resulting from the requirements analysis, and selection of the most suitable database model.

**Table design:** based on the identified requirements, the table structure of the database is designed. For each table, the column names, data types and constraints such as primary and foreign keys are defined.

**Creating the tables:** once the table design is coherent and has been discussed, the tables can be created using the previously defined identifiers, data types and constraints.

**Defining relationships:** foreign keys are used to define the relationships between tables. Foreign keys link tables to the primary keys of other, dependent tables.

## 4. Databases for the Transparenzregister

After identifying the data clusters required for the Transparenzregister, research was done on the necessary data sources. \
There are various sources with different interfaces and kinds of access to the data, e.g. APIs or web scraping.

An architecture was defined that sketches the structure of the later software:


Using suitable techniques, data is extracted from the various sources (data extraction) and stored in the staging DB.
Different data extraction pipelines (data loader, sentiment analysis, graph analysis) process the data from the staging DB and place the structured, prepared data in the production DB. \
The frontend can access this structured data in order to visualize it.

### 4.1 Production DB - relational database model

A relational database is planned for the production DB, since it organizes the data and structures it through defined schemas. \
These structures ease maintenance and the integration between back- and frontend.


The central element is the master data table **company**, which has a composite primary key made of the commercial register number (Handelsregisternummer) and the responsible local court (Amtsgericht). \
The register number alone is not unique: it is assigned several times across Germany, but only once per local court.

Next comes the table **finance**, in which the financial data is persisted. It has a 1:n relationship to the company table, since a company can have many financial records while each record is assigned to exactly one company. \
The individual metrics were defined as attributes, which leads to many NULL values per row. The advantage of this notation, however, is that each metric is unambiguous thanks to its column name.

The table **sentiment** stores the mood data for a company, again in a 1:n relationship to the company table. A dedicated enumeration type defines the kind of sentiment data.

The table **district_court** stores the local courts under which the companies are registered. This information is needed to identify a company uniquely by register number and court.

The table **person** stores persons, who can have various relationships to companies. This results in an n:m (many-to-many) relationship, since every person can have several relationships to a company and every company can be linked to a person several times. \
To resolve this relation, a junction table **person_relation** is needed, reducing the n:m relationship to two 1:n relationships. It contains the foreign keys of the related tables to model the relationship (a sketch follows below).

Finally there is the table **company_relation**, which models the connections between companies. An enumeration type was created for it that indicates the kind of relationship (wird_beliefert_von, arbeitet_mit, ist_beteiligt_an, hat_Anteile_an).

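The junction table described above might look as follows in SQLAlchemy; the column names and the exact enum values are assumptions derived from the text, not the final schema.

```python
# Sketch of the person_relation junction table in SQLAlchemy; column names
# and enum values are assumptions, not the final project schema.
import enum
from sqlalchemy import Column, Integer, Enum, ForeignKey, ForeignKeyConstraint
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class RelationType(enum.Enum):
    GESCHAEFTSFUEHRER = "Geschaeftsfuehrer"
    VORSTAND = "Vorstand"
    AUFSICHTSRAT = "Aufsichtsrat"
    PROKURIST = "Prokurist"
    AUDITOR = "Auditor"

class PersonRelation(Base):
    __tablename__ = "person_relation"
    __table_args__ = (
        # composite foreign key onto company's composite primary key
        ForeignKeyConstraint(["company_hr", "company_court"],
                             ["company.hr", "company.court_id"]),
    )

    id = Column(Integer, primary_key=True)
    person_id = Column(Integer, ForeignKey("person.id"), nullable=False)
    company_hr = Column(Integer, nullable=False)
    company_court = Column(Integer, nullable=False)
    relation = Column(Enum(RelationType), nullable=False)
```
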
### 4.2 Staging DB

The staging DB is a document-based database for storing unstructured and semi-structured data. It serves as a buffer or "raw database" for the extraction pipelines. \
Tasks of the staging DB:\
**1. Data preparation:** collecting and storing raw data from the various sources\
**2. Validation:** checking whether the data meets the requirements and, where necessary, detecting errors or inconsistencies\
**3. Test environment:** the raw data in the staging DB can be reused to try out different scenarios and functionalities of the extraction pipelines\
**4. Backup:** if a data source changes during the project (e.g. the structure of, or access to, the Bundesanzeiger), the data remains available; and if the schema of the production DB changes, the new table schema can be rolled out by adapting the data loader

The staging DB receives one collection per source, under which the documents are stored.



### 4.3 SQLAlchemy

SQLAlchemy is a Python library for communicating with relational databases.
This ORM (object-relational mapping) framework maps database tables to Python classes and thereby simplifies creating, reading, updating and deleting data from Python applications.\
Key properties:

- easier development: because database tables are mapped to Python classes, Python code is used throughout
- flexibility: thanks to backend drivers for the different databases, the code does not have to change; if another database is used, only the driver is swapped (platform independence)
- higher productivity: no dedicated skills for SQL programming and maintenance are required

## 5. Proof of Concept

### 5.1 Docker

To put the theoretical considerations presented so far into practice, a Docker setup is used. It provides a relational and a document-based database. \
Jupyter notebooks are used to try out the implementation and the population of the database, serving as a starting point for the upcoming software development.

```yaml
version: "3.8"
services:
  db:
    image: postgres:14.1-alpine
    container_name: postgres
    restart: always
    ports:
      - "5432:5432"
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
    volumes:
      - ./PostgreSQL:/var/lib/postgresql/data

  pgadmin:
    image: dpage/pgadmin4:7.2
    container_name: pgadmin4_container
    restart: always
    ports:
      - "5050:80"
    environment:
      PGADMIN_DEFAULT_EMAIL: admin@fh-swf.de
      PGADMIN_DEFAULT_PASSWORD: admin
    volumes:
      - ./pgadmin:/var/lib/pgadmin

  mongodb:
    image: mongo:7.0.0-rc4
    ports:
      - '27017:27017'
    volumes:
      - ./mongo:/data/db
```

|Entry|Description|
|---|---|
|version|version of docker-compose|
|services|definition of the services to be started|

|Option|Description|
|---|---|
|image|the image to use|
|restart|restart the container if it is stopped|
|environment|environment variables, e.g. username and password|
|ports|mapping of the container port to the port of the host machine|
|volumes|a volume for persisting the container data|

When the docker-compose file is run, folders for data storage are created in this directory. Since at the time of writing it is not yet clear how data will be exchanged within the project, these folders, i.e. the volumes, could simply be swapped between team members.

To start the containers, run the following command:

```
docker-compose -f docker-compose.yml up
```

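Once the containers are up, a quick connectivity check can be done from Python; the sketch below only assumes the ports and credentials from the compose file above.

```python
# Quick connectivity check for the two databases from the compose file;
# ports and credentials are the ones defined above.
from sqlalchemy import create_engine, text
from pymongo import MongoClient

engine = create_engine("postgresql://postgres:postgres@localhost:5432/postgres")
with engine.connect() as conn:
    print(conn.execute(text("SELECT version()")).scalar())

client = MongoClient("mongodb://localhost:27017", serverSelectionTimeoutMS=2000)
print(client.server_info()["version"])
client.close()
```
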
### 5.2 pgAdmin

pgAdmin is a graphical administration tool for Postgres. Once the container is running, you can connect to the web UI at http://localhost:5050/browser/. \
This tool is only used to verify the commits of tables and data.

The login credentials are:
>User: admin@fh-swf.de \
>Password: admin



First the server has to be registered: right-click on "Servers" and select "Register". The configuration is set in the dialog that opens.

|Tab|Parameter|Value|
|---|---|---|
|General|Name|postgres|
|Connection|Host name/address|postgres (see docker-compose)|
|Connection|Username|postgres (see docker-compose)|
|Connection|Password|postgres (see docker-compose)|



### 5.3 Creating mock data

**Company master data:**\
To test the concept and the handling of the chosen databases, data is to be written into the database. For this, Statista was consulted to find the largest German companies and to generate a small set of company master data (01_Stammdaten_Unternehmen_HR.csv). \
The relation to the local courts is invented and was not researched.


**Local courts:**
The local courts were extracted from https://www.gerichtsverzeichnis.de/, though only 12 courts were included (Amtsgerichte.csv).

**Financial data:** For three companies (EON, Telekom, BASF), the financial data for revenue, EBIT and EBITDA was collected from Statista and stored as separate files (BASF_data.csv, Telekom_data.csv, EON_data.csv).

**Persons:** The person table is entirely invented. An online library was used to generate and store 1000 first and last names (Person1000.csv).

**Person-company relations:** This table was generated randomly and only serves further experiments. For this, a Python script was written that creates the relations randomly using several random functions, as sketched below.

**Sentiment:** no mock data available

**Company-company relations:** no mock data available

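The random generation could look roughly like this; the file names match section 5.3, while the output column names and the CSV separator of Person1000.csv are assumptions.

```python
# Sketch of the random person-company relation generation; output column
# names and the separator of Person1000.csv are assumptions.
import random
import pandas as pd

persons = pd.read_csv("Person1000.csv", sep=";")
companies = pd.read_csv("01_Stammdaten_Unternehmen_HR.csv", sep=";")
roles = ["Geschaeftsfuehrer", "Vorstand", "Aufsichtsrat", "Prokurist", "Auditor"]

rows = []
for person_id in range(len(persons)):
    company = companies.sample(1).iloc[0]      # pick a random company
    rows.append({
        "person_id": person_id,
        "company_hr": company["HR"],
        "company_court": company["Amtsgericht"],
        "role": random.choice(roles),          # pick a random relation type
    })

pd.DataFrame(rows).to_csv("person_relation.csv", sep=";", index=False)
```
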
### 5.4 Creating the relational tables

A Jupyter notebook is used to connect to the PostgreSQL database and to create the tables (11_Create_Tables_with_SQL-Alchemy.ipynb). \
The required libraries are imported, and the creation of tables is described via Python objects. \
After the tables have been created, the mock data is written into the database. \
Whether the data has arrived can easily be checked with pgAdmin.



The basic procedure when using SQLAlchemy is:

1. Connect to the database
```python
from sqlalchemy import create_engine
from sqlalchemy.engine import URL

# connection URL for postgres
url = URL.create(
    drivername="postgresql",
    username="postgres",
    password="postgres",
    host="localhost",
    database="postgres")

# connect to the database
engine = create_engine(url)
```
2. Create a class that represents the table.
> It is common and recommended to put the class definitions in a separate file (model.py) so they can be imported and used in other modules as well.
```python
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, Integer, String

Base = declarative_base()

class MyClass(Base):
    __tablename__ = 'company'

    id = Column(Integer, primary_key=True)
    name = Column(String)
    city = Column(String)
```
3. Start a session/connection so data can be read and written
```python
from sqlalchemy.orm import sessionmaker

# start the session
Session = sessionmaker(bind=engine)
session = Session()
```
4. Query data
```python
# fetch all rows of the class/table
data = session.query(MyClass).all()
```
5. Save data: when records are written to the database, the changes must be finalized with the **commit()** function. The following snippet iterates over a dataframe and writes each row to the database.
```python
for i in range(len(df)):
    # build one object per dataframe row
    my_new_data = MyClass(
        name=str(df['Name'].iloc[i]),
        city=str(df['Surname'].iloc[i])
    )
    session.add(my_new_data)

# persist all added objects in one transaction
session.commit()
```

### 5.5 Querying the database

The following code snippet shows how to build a query.

```python
from sqlalchemy import create_engine, Column, Integer, String, ForeignKey, PrimaryKeyConstraint
from sqlalchemy.engine import URL
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base

# connection URL for the PostgreSQL database from the docker-compose setup
url = URL.create(
    drivername="postgresql",
    username="postgres",
    password="postgres",
    host="localhost",
    database="postgres"
)

# create an engine for connecting to the database
engine = create_engine(url)

# create a class that represents a table
Base = declarative_base()

class Company(Base):
    __tablename__ = 'company'

    hr = Column(Integer(), nullable=False, primary_key=True)
    court_id = Column(Integer, ForeignKey("district_court.id"), nullable=False, primary_key=True)
    name = Column(String(100), nullable=False)
    street = Column(String(100), nullable=False)
    zip = Column(Integer(), nullable=False)
    city = Column(String(100), nullable=False)
    sector = Column(String(100), nullable=False)

    __table_args__ = (
        PrimaryKeyConstraint('hr', 'court_id', name='pk_company_hr_court'),
    )

# start the database session
Session = sessionmaker(bind=engine)
session = Session()

# query all columns of the table/class Company
Comps = session.query(Company).all()

# print the columns name, hr and court_id of the table company
for comp in Comps:
    print(comp.name, comp.hr, comp.court_id)
```

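A filtered query against the same table is a small variation of the above; the sketch below uses a company name from the mock data.

```python
# Usage sketch: a filtered query against the Company table defined above.
basf = (
    session.query(Company)
    .filter(Company.name == "BASF")
    .one_or_none()
)
if basf is not None:
    print(basf.name, basf.city, basf.sector)
```
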
<div style="page-break-after: always;"></div>
## 6. Summary

This seminar paper covered the topic of data storage with a focus on the Transparenzregister project. It explained why data is stored and which kinds of data exist.\
For the project, data and its storage are a core component for enabling the required analyses of interlinkages, economic success and public perception.

Data clusters were defined and corresponding sources found, from which suitable extraction pipelines extract the required information. To store this extracted data, a relational model was developed as a concept for the upcoming implementation.

To validate the concept, a proof of concept was carried out to try out suitable tools and to test the model's fitness. \
A Docker setup was used to provide the database environment. With the SQLAlchemy library, the tables were created inside the database.\
The tables were then filled with our own mock data to test the functionality of the database.

Overall, this paper gives a comprehensive overview of the importance of data storage and the different kinds of databases.
A first relational model and a high-level design of the software architecture were developed.
This work answered fundamental questions, built an understanding of data storage in the context of the Transparenzregister project, and supports the further development.

<div style="page-break-after: always;"></div>

## Sources

Klug, Uwe: SQL - Der Einstieg in die deklarative Programmierung, 2nd ed., Dortmund, Springer, 2017\
Steiner, Rene: Grundkurs relationale Datenbanken, 10th ed., Wiesbaden, Springer, 2021\
https://backupchain.de/daten-backup-tipps-3-wie-oft-daten-sichern/ \
https://www.talend.com/de/resources/strukturierte-vs-unstrukturierte-daten/ \
https://www.sqlservercentral.com/articles/creating-markdown-formatted-text-for-results-from-sql-server-tables \
https://www.sqlalchemy.org/ \
https://medium.com/@arthurapp98/using-sqlalchemy-to-create-and-populate-a-postgresql-database-with-excel-data-eb6049d93402

@ -0,0 +1,18 @@
## Action List "Datenspeicherung"

- [x] Create a relational schema for company and financial data that accounts for the years
- [x] Create a docker-compose for postgresql, pgadmin, neo4j
- [x] Write a short guide for handling Docker
- [x] Create a Jupyter notebook for connecting to the database and creating tables
- [x] Research the 10 largest German companies and collect their financial data (revenue, EBIT, EBITDA)
- [x] Create a Jupyter notebook to transfer this data into the database
- [x] Create a Jupyter notebook to query the data
- [x] Create a schema for sentiment data
- [x] Create a schema for interlinkages
- [ ] Generate sample data for sentiment
- [x] Generate sample data for interlinkages
- [ ] Build a prototype GUI in Mercury for simple data queries
- [ ] Use SQLAlchemy to connect to the database, create tables and write data
- [x] Replace the enumeration type in the financial data with individual (uniquely named) columns
- [x] Upload the DB schema to make it available to the other team members
- [ ]
@ -0,0 +1 @@
<mxfile host="Electron" modified="2023-06-09T06:52:32.151Z" agent="5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) draw.io/19.0.3 Chrome/102.0.5005.63 Electron/19.0.3 Safari/537.36" etag="YzJ30O3iCiKXb3qmuW1k" version="19.0.3" type="device"><diagram id="M31xxMy7zny7NdG5GnKM" name="Seite-1">5Vxbc9o4G/41zOxepOMTxr4MadI9tZMt7e52b74RtrDV2BIrywnh13+SLYOFFIITDITeJFjyied93rPEwL3KFx8omKcfSQyzgWPFi4H7fuA4juXa/J8YeaxHbMf165GEorges9YDE7SE8sRmtEQxLJQTGSEZQ3M5KB8QEYxhxJQxQCl5UE+bkSxWBuYggcrdxcAkAhnUTvsbxSyVo7Yfrid+gShJ5aMDZ1RP5KA5Wd64SEFMHlpD7vXAvaKEsPpTvriCmUBPxeXmidnVi1GI2S4XfL/BX6KhxW4W/+GbICK/f/jwy4Ut3/YeZKX8xvJt2WMDQUJJOdefJl/gHlIGFyZZgGkGN78uJwokOWT0kZ/XXCUfKSkyrI8e1mg7nnzLtAW0HcgTgZRwsrrxGgT+QeLQBRP/eUw4JDiG4i72wB0/pIjByRxEYvaBawIfS1meyekZwUwym39Xfoyy7IpkhFb3cuMhDGKPjxeMkjvYmgmcqev7fEZHf7s0N2WiY38QeOc3C6f4CuPfbuH1n39ef7v963/lha2h+xUzSDFMc4g1oIsHlGcAw15AaMyU/26oUtDXOGiHlgEky+oJJEcD6RPI4cDxM/7o8ZTyT4n49IEOrtzB+ArHJU6K7yCl+jkTBnBMKNNnxhTgKDXdFhb8JYsoBTNWzAjNNblwaJnKcpChBPPPGZyJGQE/4nb0Ug7nKI7FxWMKC7SUtsHix3OCMKvwG44Hw/fiTiUjhdQXTS0wqcjQ1iE5pPNjK/92pkfjiiQ5HFcnh+Xr5Bh5Pdkn3TxxCed5DJhBf3Q57QpnI88ICvXcJlGT+VsbSKuxgPKlNi2i16dqh+6Gagea8HyDYrt70GvjN2ief0x/63oqoUe6tXNDVwdl2IRu+/e43oE9LoDBLDJ5XD8K4HTGZxIKYsSxf9bGbBdzJ0d8YNRdDfSPiAE6hRxauixnFMEYYv4ubA9euQMicjYIN1S3Majt0MUJDYiFPQGms/RfFSVLuDAewHIv5lg8F+L+1JpUsc3b9J9bafNS/+mGuv/0jRFoT2LUU54JQ3kuAijpQzfDoWZEmBVFjP5/JWkmLmrIL/kJtjNf1JfJ+eZGP+3tThMuJMQDZh7bgeyxWMVwHJH63vVpPz8xft5xQndjY4/UJNTRSXrQOEEPEz6BKE1BxlBydyyL7KoY2UNHDxwcQyRs9wZTqMF0wQ85Dgm3ywXHClYWmHK1TpAhzxFnl/kDzNiMQk7EDFXpkHVLSVzesSeumIi8aCYMhXVPxN/LymkW80wkY5ceW5qMiLjyV4zJPWCIaw6uFRzDUjyQJ1xRWl/NM65c1NEyMc7fif/9AqMUc63j3+GJG38BFM2mqEoDzWdcIyzoU82rr0353Q3JobioyswLVueYbNm+uCBLBLIKBI7Wd3hXWc4bbkpn4iE7pSJvxeV11hQ1cHEMAbZt0hPL6SvAtjRF+bpB+6NZFS3Oc0I9w3ZsQ6Bn225feOlFKqEOn2FcLhGktR6IujTXKwwp10v+facUlFFaGDTpU9noDo8FS34mR5GurzXp9EeEUb56lNRXnplUJiLLnrIDpsuuc1QUtclxfJALr4unxXy3KOCtqOiK4S/V0ZGBc47OOb+3yoBe1rksiwLiB5BWxdFKpEfQT99WgPJsg3aODNbM7yt+d/QA/pYzi6H7ddaFYQLqAZAJLf0M8B2qdAKU4lhEU5Rbvd1KZm9FCVYceqESNIUhRbS2wfD2JdomUGxJA8YJbFICQllKEsIznuv16EYKsT7nD0LmEufvkLFHmUgIEajihQvE/ml9/iZu9W40lIfvF/LW1cFjO1VplZhqUTY9wqFRaBLKgpQ0gtuMmZQjAzSBW62eLEkIkLaKm8JM6ANU3mP/BZLhMcVnvRu2BGhvlR4XCX38p33QFro4Xl9XHZ2U2Juu+qmI/QSayIHlKaYs0BN4xzN4qTDoy5bZw+dR2WdRG9rxEI5MRe3QH7mgQxvZe2EbuS94jW08vRj7N6JMtiznsi06E8HAq2OmDng0HWWVjEPXsKjBMtSTRn31k3Uu/mbsFZu7zG8zRNpKmxd3f3W7MuypMGh8fz1L+JoXgC1PgebBsVmu+yEzyydlnr/RlsxWUry4JaOndH1Vu43vr1e7r8e/fjkFStujY3NaL3H/GJzu3sI5LU4bSq5jxNm7LIyCOgK3Hf/Y3DaUWX8McnevVp4aux3dZKME4jswRwxwIjOIsm0kL3iS22QzjntA1rvDo7NeX3jzg7De6Sq7U2O9nnRe4iVIxYWttVMnyXvPOzrvd01Cz473r006D8h7cylLE9xfhBZiWfkpxDG+ob/eG7PNfZcdiS2qK+fUb3rtUvpmQVFLcJ6hz9+ctne56QbpX7EgBlIE8Q+wnL675oVqL9jR5XfY1fR6jnVKRWBXAWtk62gd2Eztmm6dm51yXp1uHdlQNYqmLA6ZFWIBQ0HBPtZtvZ7eoaHjdlh675pXnR29X51XHZveel415piy5WlY7qBxe8ejdpf+3VlR+9Wp05Gp7QWaNI62osnZvqJpvSbm3WoVzLf23CkviWmKCyeyJMbVQy2NB/tc/DGbQT8y7miMR+HUsrYJocMGkI2do76hG+41i9gV9Qp7A1qPjH7nUFGSZVO4RDB9ZtmsKoRNlHfeBdoZSs8KVChHBh8zMliqZqvc/pHUwyexovxSlvKtemHtGIpDlNQLzAGu/vCpaiOI8kMOpuXpl3gJ60059d2UC67EsNilIx8id5FYKZh28mnPZ9i9ebVN/uzo5VYk3pk+FxutPENtNzBsJu7Nzbl6BFf1g1iZF6eth56K5MgzpTEGk9bbz8+4eqx3oSpepRXiIY12PrFP7WPJuCJUe9zqbWRELHjnjnT94x7nrlidw0c18nf1Led2GBxSr/QlTepWwdNWLt9X4Aw8ww/r+AY71Z+T05caCf1IYA4RLqrFj6vtjGal+gPBGaSiwY05EFIQGcOoOG9N6pxE2xu1YoMquQd1UfpKqltIi3pf3OnpjhuoAWJo8PGuaVdkb7rj6dX2z0RsU642Mj9VeL+qfFdTzBFHauVSjDzZTTwrDQp3pkDjffSUwDX90lRv27E8PYudkAgBcWH1g487COxt9Me6u7bAUdTTN/To7cYEvrIZxg/XPxNZzbV+bdO9/j8=</diagram></mxfile>
@ -0,0 +1,70 @@
|
|||||||
|
https://geshan.com.np/blog/2021/12/docker-postgres/
|
||||||
|
https://belowthemalt.com/2021/06/09/run-postgresql-and-pgadmin-in-docker-for-local-development-using-docker-compose/
|
||||||
|
https://thibaut-deveraux.medium.com/how-to-install-neo4j-with-docker-compose-36e3ba939af0
|
||||||
|
https://towardsdatascience.com/how-to-run-postgresql-and-pgadmin-using-docker-3a6a8ae918b5
|
||||||
|
|
||||||
|
# Installation Docker Desktop
|
||||||
|
## Starten eines Containers:
|
||||||
|
|
||||||
|
> docker run --name basic-postgres --rm -e POSTGRES_USER=postgres -e POSTGRES_PASSWORD=4y7sV96vA9wv46VR -e PGDATA=/var/lib/postgresql/data/pgdata -v /tmp:/var/lib/postgresql/data -p 5432:5432 -it postgres:14.1-alpine
|
||||||
|
|
||||||
|
Dieser Befehl startet einen Container mit dem Postgres14.1-alpine Image, welches von Dockerhub geladen wird. Der Container läuft unter dem Namen basic-postgres
|
||||||
|
|
||||||
|
| Syntax | Attribut | Beschreibung |
|
||||||
|
| ----------- | ----------- | ----------- |
|
||||||
|
| basic-postgres | --name | Angabe des Containernamens|
|
||||||
|
| | --rm | Bei Beendigung des Containers wird das erstellte Dateisystem entfernt|
|
||||||
|
| |-e| Verwende Umgebungsvariablen |
|
||||||
|
| POSTGRES_USER | | Umgebungsvariable für den anzulegenden Benutzer: postgres|
|
||||||
|
|POSTGRES_PASSWORD| | Umgebungsvariable für das anzulegende Passwort: 4y7sV96vA9wv46VR |
|
||||||
|
| PGDATA | | Umgebungsvariable für den Ort der Datenbank|
|
||||||
|
| | -v | Einzubindendes Volumen: /tmp:/var/lib/postgresql/data |
|
||||||
|
| |-p | Angabe des Containerports und des öffentlich zugänglichen Ports |
|
||||||
|
| | -it | Interactive: der Container bleibt aktiv, damit mit diesem interagiert werden kann |
|
||||||
|
|
||||||
|
Mit einem zweiten Terminalfenster kann man auf die Bash des Containers öffnen und auf die Datenbank zugreifen.
|
||||||
|
|
||||||
|
> docker exec -it basic-postgres /bin/sh
|
||||||
|
|
||||||
|
Die folgenden Befehle starten die Postgres CLI, Ausgabe aller Datenbanken und beendet die CLI.
|
||||||
|
> Psql –username postgres \
|
||||||
|
> \l \
|
||||||
|
Exit
|
||||||
|
|
||||||
|
Der Container kann durch Betätigung von STRG + C beendet werden.
|
||||||
|
|
||||||
|
## Docker Compose
|
||||||
|
Das oben erklärte Vorgehen zum Starten eines Containers, festlegen der Umgebungsvariablen und zusätzliche verlinken zu einer Anwendung wird nun in einer yml-Datei beschrieben, um die Verwaltung und das Erstellen zu vereinfachen.
|
||||||
|
| | | Beschreibung |
|
||||||
|
| ----------- | ----------- | ----------- |
|
||||||
|
|Version | | Version von docker-compose |
|
||||||
|
|Services| |Definition der Services, wobei jeder ein eigenen docker-run Befehl ausführt.|
|
||||||
|
| | image | Angabe des zu verwendenden Images |
|
||||||
|
| | restart | Option um Container erneut zu starten, falls dieser gestoppt wird |
|
||||||
|
| | Environment | Umgebungsvariablen: Username und Passwort |
|
||||||
|
| | Ports | Mapping des Containerports zum Port der Hostmaschine |
|
||||||
|
| | Volumes | Angabe eines Volumes zum Persistieren der Containerdaten, damit nach einem Neustart die Daten wieder verfügbar sind |
|
||||||
|
|
||||||
|
|
||||||
|
Nun kann der Container mittels Docker-Compose gestartet werden.
|
||||||
|
> docker-compose -f /.../docker-compose-postgres.yml up
|
||||||
|
|
||||||
|
## pgAdmin
|
||||||
|
pgAdmin ist ein grafisches Administrationswerkezug für postgreSQL und macht die oben gezeigte Administration komfortabler. \
|
||||||
|
Erreichbar ist das Interface über: http://localhost:5050 \
|
||||||
|
Als Login werden die Daten aus der docker-compose verwendet:
|
||||||
|
>User: admin@fh-swf.de
|
||||||
|
>Passwort: admin
|
||||||
|
|
||||||
|
### Anlegen eines Servers
|
||||||
|
Zuerst muss der Server angelegt werden, dafür einen Rechtsklick auf Server und den Button „Register“ auswählen. \
|
||||||
|
Im geöffneten Dialog muss die Konfiguration festgelegt werden.
|
||||||
|
|
||||||
|
| Reiter | Parameter | Wert |
|
||||||
|
| ----------- | ----------- | ----------- |
|
||||||
|
| General| Name | postgres_docker |
|
||||||
|
| Connection | Host name/address | local_pgdb (siehe docker-compose) |
|
||||||
|
| Connection | Username | postgres (siehe docker-compose) |
|
||||||
|
| Connection | Password | postgres (siehe docker-compose) |
|
||||||
|
|
||||||
|
|
@ -0,0 +1,13 @@
HR;Amtsgericht;Name;Strasse;PLZ;Stadt;Branche
12334;2;Volkswagen;Berliner Ring 2;38440;Wolfsburg;Automobil
64566;2;Mercedes-Benz Group;Mercedesstraße 120;70372;Stuttgart;Automobil
5433;3;Allianz;Reinsburgstraße 19;70178;Stuttgart;Versicherung, Finanzdienstleistung
12435;4;BMW Group;Petuelring 130;80809;München;Automobil
12336;5;Deutsche Telekom;Landgrabenweg 151;53227;Bonn;Telekommunikation, Informationstechnologie
559;6;Deutsche Post DHL Group;Charles-de-Gaulle-Str. 20;53113;Bonn;Logistik
555;7;Bosch Group;Robert-Bosch-Platz 1;70839;Gerlingen-Schillerhöhe;Kraftfahrzeugtechnik, Industrietechnik, Gebrauchsgüter, Energie- und Gebäudetechnik
12384;8;BASF;Carl-Bosch-Straße 38;67056;Ludwigshafen;Chemie
64345;9;E.ON;Arnulfstraße 203;80634;München;Energie
4344;10;Munich Re Group;Königinstr. 107;80802;München;Versicherung
866;11;Siemens;Werner-von-Siemens-Straße 1;80333;München;Automatisierung, Digitalisierung
9875;12;Deutsche Bahn;Potsdamer Platz 2;10785;Berlin;Transport, Logistik
@ -0,0 +1,13 @@
HR;Amtsgericht;Name;Strasse;PLZ;Stadt;Branche
12334;2;Volkswagen;Berliner Ring 2;38440;Wolfsburg;Automobil
64566;2;Mercedes-Benz Group;Mercedesstraße 120;70372;Stuttgart;Automobil
5433;3;Allianz;Reinsburgstraße 19;70178;Stuttgart;Versicherung, Finanzdienstleistung
12334;4;BMW Group;Petuelring 130;80809;München;Automobil
12336;5;Deutsche Telekom;Landgrabenweg 151;53227;Bonn;Telekommunikation, Informationstechnologie
555;6;Deutsche Post DHL Group;Charles-de-Gaulle-Str. 20;53113;Bonn;Logistik
555;7;Bosch Group;Robert-Bosch-Platz 1;70839;Gerlingen-Schillerhöhe;Kraftfahrzeugtechnik, Industrietechnik, Gebrauchsgüter, Energie- und Gebäudetechnik
12384;8;BASF;Carl-Bosch-Straße 38;67056;Ludwigshafen;Chemie
64345;9;E.ON;Arnulfstraße 203;80634;München;Energie
4344;1;Munich Re Group;Königinstr. 107;80802;München;Versicherung
866;1;Siemens;Werner-von-Siemens-Straße 1;80333;München;Automatisierung, Digitalisierung
9875;1;Deutsche Bahn;Potsdamer Platz 2;10785;Berlin;Transport, Logistik
@ -0,0 +1,15 @@
Stadt;Name
Aschaffenburg;Amtsgericht Aschaffenburg
Bamberg;Amtsgericht Bamberg
Bayreuth;Amtsgericht Bayreuth
Duesseldorf;Amtsgericht Duesseldorf
Duisburg;Amtsgericht Duisburg
Duisburg;Amtsgericht Duisburg-Hamborn
Duisburg;Amtsgericht Duisburg-Ruhrort
Oberhausen;Amtsgericht Oberhausen
Wuppertal;Amtsgericht Wuppertal
Berlin;Amtsgericht Mitte
Berlin;Amtsgericht Ost
Berlin;Amtsgericht West
Berlin;Amtsgericht Nord
Berlin;Amtsgericht Sued
@ -0,0 +1,25 @@
Company_HR;Company_Court;Jahr;Umsatz;Ebit;EBITDA
;12384;8;1999;29473;;
;12384;8;2000;35946;;
;12384;8;2001;32500;;
;12384;8;2002;32216;;
;12384;8;2003;33361;;
;12384;8;2004;37537;;
;12384;8;2005;42745;5830;
;12384;8;2006;52610;6750;
;12384;8;2007;57951;7316;
;12384;8;2008;62304;6463;9562
;12384;8;2009;50693;3677;7388
;12384;8;2010;63873;7761;11131
;12384;8;2011;73497;8586;11993
;12384;8;2012;72129;6742;10009
;12384;8;2013;73973;7160;10432
;12384;8;2014;74326;7626;11043
;12384;8;2015;70449;6248;10649
;12384;8;2016;57550;6275;10526
;12384;8;2017;61223;7587;10765
;12384;8;2018;60220;5974;8970
;12384;8;2019;59316;4201;8185
;12384;8;2020;59149;-191;6494
;12384;8;2021;78598;7677;11355
;12384;8;2022;87327;6548;10748
@ -0,0 +1,17 @@
Company_HR;Company_Court;Jahr;Umsatz;Ebit;EBITDA
;64345;9;2007;66912;;
;64345;9;2008;84873;;
;64345;9;2009;79974;;
;64345;9;2010;92863;;
;64345;9;2011;112954;;
;64345;9;2012;132093;7010;
;64345;9;2013;119615;5640;
;64345;9;2014;113095;4700;
;64345;9;2015;42656;3600;
;64345;9;2016;38173;3100;
;64345;9;2017;37965;3100;
;64345;9;2018;30084;2990;4840
;64345;9;2019;41284;3220;5558
;64345;9;2020;60944;3780;6905
;64345;9;2021;77358;4720;7889
;64345;9;2022;115660;5200;8059
@ -0,0 +1,13 @@
Name;Straße;PLZ;Stadt;Branche
Volkswagen;Berliner Ring 2;38440;Wolfsburg;Automobil
Mercedes-Benz Group;Mercedesstraße 120;70372;Stuttgart;Automobil
Allianz;Reinsburgstraße 19;70178;Stuttgart;Versicherung, Finanzdienstleistung
BMW Group;Petuelring 130;80809;München;Automobil
Deutsche Telekom;Landgrabenweg 151;53227;Bonn;Telekommunikation, Informationstechnologie
Deutsche Post DHL Group;Charles-de-Gaulle-Str. 20;53113;Bonn;Logistik
Bosch Group;Robert-Bosch-Platz 1;70839;Gerlingen-Schillerhöhe;Kraftfahrzeugtechnik, Industrietechnik, Gebrauchsgüter, Energie- und Gebäudetechnik
BASF;Carl-Bosch-Straße 38;67056;Ludwigshafen;Chemie
E.ON;Arnulfstraße 203;80634;München;Energie
Munich Re Group;Königinstr. 107;80802;München;Versicherung
Siemens;Werner-von-Siemens-Straße 1;80333;München;Automatisierung, Digitalisierung
Deutsche Bahn;Potsdamer Platz 2;10785;Berlin;Transport, Logistik
@ -0,0 +1,479 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "dbd6eae9",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import ipywidgets as widgets\n",
    "pd.options.plotting.backend = \"plotly\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "8b447b09",
   "metadata": {},
   "outputs": [],
   "source": [
    "# semicolon-separated file; decimal=',' parses the German decimal commas\n",
    "df = pd.read_csv('Telekom_Data_NewOrder.csv', sep=';', decimal=',')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "5fc7b7d2",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "    Metrik       Datum  Summe [Milliarden €]\n",
       "0   Umsatz  01.01.2005                59.600\n",
       "1   Umsatz  01.01.2006                61.300\n",
       "2   Umsatz  01.01.2007                62.500\n",
       "3   Umsatz  01.01.2008                61.700\n",
       "4   Umsatz  01.01.2009                64.600\n",
       "5   Umsatz  01.01.2010                62.420\n",
       "6   Umsatz  01.01.2011                58.650\n",
       "7   Umsatz  01.01.2012                58.170\n",
       "8   Umsatz  01.01.2013                60.130\n",
       "9   Umsatz  01.01.2014                62.660\n",
       "10  Umsatz  01.01.2015                69.230\n",
       "11  Umsatz  01.01.2016                73.100\n",
       "12  Umsatz  01.01.2017                74.950\n",
       "13  Umsatz  01.01.2018                75.660\n",
       "14  Umsatz  01.01.2019                80.530\n",
       "15  Umsatz  01.01.2020                99.950\n",
       "16  Umsatz  01.01.2021               107.610\n",
       "17  Umsatz  01.01.2022               114.200\n",
       "18    EBIT  01.01.2005                 7.600\n",
       "19    EBIT  01.01.2006                 5.300\n",
       "20    EBIT  01.01.2007                 5.300\n",
       "21    EBIT  01.01.2008                 7.000\n",
       "22    EBIT  01.01.2009                 6.000\n",
       "23    EBIT  01.01.2010                 5.510\n",
       "24    EBIT  01.01.2011                 5.560\n",
       "25    EBIT  01.01.2012                -3.960\n",
       "26    EBIT  01.01.2013                 4.930\n",
       "27    EBIT  01.01.2014                 7.250\n",
       "28    EBIT  01.01.2015                 7.030\n",
       "29    EBIT  01.01.2016                 9.160\n",
       "30    EBIT  01.01.2017                 9.380\n",
       "31    EBIT  01.01.2018                 8.000\n",
       "32    EBIT  01.01.2019                 9.460\n",
       "33    EBIT  01.01.2020                12.370\n",
       "34    EBIT  01.01.2021                12.580\n",
       "35    EBIT  01.01.2022                15.410\n",
       "36  EBITDA  01.01.2018                23.333\n",
       "37  EBITDA  01.01.2019                24.731\n",
       "38  EBITDA  01.01.2020                35.017\n",
       "39  EBITDA  01.01.2021                37.330\n",
       "40  EBITDA  01.01.2022                40.208"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d5c6c68d",
   "metadata": {},
   "source": [
    "---------------------------------\n",
    "# Write the company data to PostgreSQL"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "id": "6c09bdca",
   "metadata": {},
   "outputs": [],
   "source": [
    "import psycopg2"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "383fb9a9",
   "metadata": {},
   "source": [
    "### Connect to the database"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "3e1ea224",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Database connected successfully\n"
     ]
    }
   ],
   "source": [
    "conn = psycopg2.connect(\n",
    "    host=\"localhost\",\n",
    "    database=\"transparenz\",\n",
    "    user=\"postgres\",\n",
    "    password=\"postgres\")\n",
    "\n",
    "print(\"Database connected successfully\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "22b9ab1d",
   "metadata": {},
   "source": [
    "## Iterate over the dataframe and write the records to the *finance* table"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "961ac836",
   "metadata": {},
   "outputs": [],
   "source": [
    "cur = conn.cursor()\n",
    "\n",
    "PK_ID = 5  # BASF has primary key 8, which is why the company id is set manually\n",
    "\n",
    "for i in range(len(df)):\n",
    "    # get data from dataframe\n",
    "    kind_of = str(df['Metrik'].iloc[i])\n",
    "    date = str(df['Datum'].iloc[i])\n",
    "    amount = float(df['Summe [Milliarden €]'].iloc[i])\n",
    "\n",
    "    postgres_insert_query = \"\"\"INSERT INTO finance (company_id, kind_of, date, sum) VALUES (%s, %s, %s, %s)\"\"\"\n",
    "    record_to_insert = (PK_ID, kind_of, date, amount)\n",
    "    cur.execute(postgres_insert_query, record_to_insert)\n",
    "    # print(postgres_insert_query, record_to_insert)\n",
    "\n",
    "conn.commit()\n",
    "conn.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "46b5be7c",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
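The notebook above inserts one row per cur.execute() call. psycopg2 can also send all records in one call; a minimal sketch of the same load with executemany and context managers, reusing the table and column names from the notebook (connection parameters are assumed to match the setup above):

import psycopg2

# records as built from the dataframe in the notebook; company_id 5 as above
records = [
    (5, str(row['Metrik']), str(row['Datum']), float(row['Summe [Milliarden €]']))
    for _, row in df.iterrows()
]

# the connection context manager commits on success and rolls back on error
with psycopg2.connect(host='localhost', database='transparenz',
                      user='postgres', password='postgres') as conn:
    with conn.cursor() as cur:
        cur.executemany(
            'INSERT INTO finance (company_id, kind_of, date, sum) VALUES (%s, %s, %s, %s)',
            records,
        )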
@ -0,0 +1,416 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "id": "dbd6eae9",
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import pandas as pd\n",
    "import ipywidgets as widgets\n",
    "pd.options.plotting.backend = \"plotly\""
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "id": "8b447b09",
   "metadata": {},
   "outputs": [],
   "source": [
    "# semicolon-separated file; decimal=',' parses the German decimal commas\n",
    "df = pd.read_csv('EON_Data_NewOrder.csv', sep=';', decimal=',')"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "id": "5fc7b7d2",
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "    Metrik       Datum  Summe [Milliarden €]\n",
       "0   Umsatz  01.01.2007                66.912\n",
       "1   Umsatz  01.01.2008                84.873\n",
       "2   Umsatz  01.01.2009                79.974\n",
       "3   Umsatz  01.01.2010                92.863\n",
       "4   Umsatz  01.01.2011               112.954\n",
       "5   Umsatz  01.01.2012               132.093\n",
       "6   Umsatz  01.01.2013               119.615\n",
       "7   Umsatz  01.01.2014               113.095\n",
       "8   Umsatz  01.01.2015                42.656\n",
       "9   Umsatz  01.01.2016                38.173\n",
       "10  Umsatz  01.01.2017                37.965\n",
       "11  Umsatz  01.01.2018                30.084\n",
       "12  Umsatz  01.01.2019                41.284\n",
       "13  Umsatz  01.01.2020                60.944\n",
       "14  Umsatz  01.01.2021                77.358\n",
       "15  Umsatz  01.01.2022               115.660\n",
       "16    EBIT  01.01.2012                 7.010\n",
       "17    EBIT  01.01.2013                 5.640\n",
       "18    EBIT  01.01.2014                 4.700\n",
       "19    EBIT  01.01.2015                 3.600\n",
       "20    EBIT  01.01.2016                 3.100\n",
       "21    EBIT  01.01.2017                 3.100\n",
       "22    EBIT  01.01.2018                 2.990\n",
       "23    EBIT  01.01.2019                 3.220\n",
       "24    EBIT  01.01.2020                 3.780\n",
       "25    EBIT  01.01.2021                 4.720\n",
       "26    EBIT  01.01.2022                 5.200\n",
       "27  EBITDA  01.01.2018                 4.840\n",
       "28  EBITDA  01.01.2019                 5.558\n",
       "29  EBITDA  01.01.2020                 6.905\n",
       "30  EBITDA  01.01.2021                 7.889\n",
       "31  EBITDA  01.01.2022                 8.059"
      ]
     },
     "execution_count": 3,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "df"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "d5c6c68d",
   "metadata": {},
   "source": [
    "---------------------------------\n",
    "# Write the company data to PostgreSQL"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "id": "6c09bdca",
   "metadata": {},
   "outputs": [],
   "source": [
    "import psycopg2"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "383fb9a9",
   "metadata": {},
   "source": [
    "### Connect to the database"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "id": "3e1ea224",
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Database connected successfully\n"
     ]
    }
   ],
   "source": [
    "conn = psycopg2.connect(\n",
    "    host=\"localhost\",\n",
    "    database=\"transparenz\",\n",
    "    user=\"postgres\",\n",
    "    password=\"postgres\")\n",
    "\n",
    "print(\"Database connected successfully\")"
   ]
  },
  {
   "cell_type": "markdown",
   "id": "22b9ab1d",
   "metadata": {},
   "source": [
    "## Iterate over the dataframe and write the records to the *finance* table"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "id": "961ac836",
   "metadata": {},
   "outputs": [],
   "source": [
    "cur = conn.cursor()\n",
    "\n",
    "PK_ID = 9  # BASF has primary key 8, which is why the company id is set manually\n",
    "\n",
    "for i in range(len(df)):\n",
    "    # get data from dataframe\n",
    "    kind_of = str(df['Metrik'].iloc[i])\n",
    "    date = str(df['Datum'].iloc[i])\n",
    "    amount = float(df['Summe [Milliarden €]'].iloc[i])\n",
    "\n",
    "    postgres_insert_query = \"\"\"INSERT INTO finance (company_id, kind_of, date, sum) VALUES (%s, %s, %s, %s)\"\"\"\n",
    "    record_to_insert = (PK_ID, kind_of, date, amount)\n",
    "    cur.execute(postgres_insert_query, record_to_insert)\n",
    "    # print(postgres_insert_query, record_to_insert)\n",
    "\n",
    "conn.commit()\n",
    "conn.close()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "id": "46b5be7c",
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.8"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 5
}
@ -0,0 +1,10 @@
Stadt;Name
Aschaffenburg;Amtsgericht Aschaffenburg
Bamberg;Amtsgericht Bamberg
Bayreuth;Amtsgericht Bayreuth
Duesseldorf;Amtsgericht Duesseldorf
Duisburg;Amtsgericht Duisburg
Duisburg;Amtsgericht Duisburg-Hamborn
Duisburg;Amtsgericht Duisburg-Ruhrort
Oberhausen;Amtsgericht Oberhausen
Wuppertal;Amtsgericht Wuppertal
@ -0,0 +1,58 @@
Metrik;Datum;Summe [Milliarden €]
Umsatz;01.01.1999;29,473
Umsatz;01.01.2000;35,946
Umsatz;01.01.2001;32,5
Umsatz;01.01.2002;32,216
Umsatz;01.01.2003;33,361
Umsatz;01.01.2004;37,537
Umsatz;01.01.2005;42,745
Umsatz;01.01.2006;52,61
Umsatz;01.01.2007;57,951
Umsatz;01.01.2008;62,304
Umsatz;01.01.2009;50,693
Umsatz;01.01.2010;63,873
Umsatz;01.01.2011;73,497
Umsatz;01.01.2012;72,129
Umsatz;01.01.2013;73,973
Umsatz;01.01.2014;74,326
Umsatz;01.01.2015;70,449
Umsatz;01.01.2016;57,55
Umsatz;01.01.2017;61,223
Umsatz;01.01.2018;60,22
Umsatz;01.01.2019;59,316
Umsatz;01.01.2020;59,149
Umsatz;01.01.2021;78,598
Umsatz;01.01.2022;87,327
EBIT;01.01.2005;5,83
EBIT;01.01.2006;6,75
EBIT;01.01.2007;7,316
EBIT;01.01.2008;6,463
EBIT;01.01.2009;3,677
EBIT;01.01.2010;7,761
EBIT;01.01.2011;8,586
EBIT;01.01.2012;6,742
EBIT;01.01.2013;7,16
EBIT;01.01.2014;7,626
EBIT;01.01.2015;6,248
EBIT;01.01.2016;6,275
EBIT;01.01.2017;7,587
EBIT;01.01.2018;5,974
EBIT;01.01.2019;4,201
EBIT;01.01.2020;-0,191
EBIT;01.01.2021;7,677
EBIT;01.01.2022;6,548
EBITDA;01.01.2008;9,562
EBITDA;01.01.2009;7,388
EBITDA;01.01.2010;11,131
EBITDA;01.01.2011;11,993
EBITDA;01.01.2012;10,009
EBITDA;01.01.2013;10,432
EBITDA;01.01.2014;11,043
EBITDA;01.01.2015;10,649
EBITDA;01.01.2016;10,526
EBITDA;01.01.2017;10,765
EBITDA;01.01.2018;8,97
EBITDA;01.01.2019;8,185
EBITDA;01.01.2020;6,494
EBITDA;01.01.2021;11,355
EBITDA;01.01.2022;10,748
@ -0,0 +1,33 @@
Metrik;Datum;Summe [Milliarden €]
Umsatz;01.01.2007;66,912
Umsatz;01.01.2008;84,873
Umsatz;01.01.2009;79,974
Umsatz;01.01.2010;92,863
Umsatz;01.01.2011;112,954
Umsatz;01.01.2012;132,093
Umsatz;01.01.2013;119,615
Umsatz;01.01.2014;113,095
Umsatz;01.01.2015;42,656
Umsatz;01.01.2016;38,173
Umsatz;01.01.2017;37,965
Umsatz;01.01.2018;30,084
Umsatz;01.01.2019;41,284
Umsatz;01.01.2020;60,944
Umsatz;01.01.2021;77,358
Umsatz;01.01.2022;115,66
EBIT;01.01.2012;7,01
EBIT;01.01.2013;5,64
EBIT;01.01.2014;4,7
EBIT;01.01.2015;3,6
EBIT;01.01.2016;3,1
EBIT;01.01.2017;3,1
EBIT;01.01.2018;2,99
EBIT;01.01.2019;3,22
EBIT;01.01.2020;3,78
EBIT;01.01.2021;4,72
EBIT;01.01.2022;5,2
EBITDA;01.01.2018;4,84
EBITDA;01.01.2019;5,558
EBITDA;01.01.2020;6,905
EBITDA;01.01.2021;7,889
EBITDA;01.01.2022;8,059
@ -0,0 +1,42 @@
Metrik;Datum;Summe [Milliarden €]
Umsatz;01.01.2005;59,6
Umsatz;01.01.2006;61,3
Umsatz;01.01.2007;62,5
Umsatz;01.01.2008;61,7
Umsatz;01.01.2009;64,6
Umsatz;01.01.2010;62,42
Umsatz;01.01.2011;58,65
Umsatz;01.01.2012;58,17
Umsatz;01.01.2013;60,13
Umsatz;01.01.2014;62,66
Umsatz;01.01.2015;69,23
Umsatz;01.01.2016;73,1
Umsatz;01.01.2017;74,95
Umsatz;01.01.2018;75,66
Umsatz;01.01.2019;80,53
Umsatz;01.01.2020;99,95
Umsatz;01.01.2021;107,61
Umsatz;01.01.2022;114,2
EBIT;01.01.2005;7,6
EBIT;01.01.2006;5,3
EBIT;01.01.2007;5,3
EBIT;01.01.2008;7
EBIT;01.01.2009;6
EBIT;01.01.2010;5,51
EBIT;01.01.2011;5,56
EBIT;01.01.2012;-3,96
EBIT;01.01.2013;4,93
EBIT;01.01.2014;7,25
EBIT;01.01.2015;7,03
EBIT;01.01.2016;9,16
EBIT;01.01.2017;9,38
EBIT;01.01.2018;8
EBIT;01.01.2019;9,46
EBIT;01.01.2020;12,37
EBIT;01.01.2021;12,58
EBIT;01.01.2022;15,41
EBITDA;01.01.2018;23,333
EBITDA;01.01.2019;24,731
EBITDA;01.01.2020;35,017
EBITDA;01.01.2021;37,33
EBITDA;01.01.2022;40,208
@ -0,0 +1,20 @@
Mohammed;Klein
Myriam;Koch
Dorothe;Zerusedemeiner
Emine;Puviplau
Galina;Tosewede
Hans-Walter;Mädidostein
Ludmilla;Krause
Jessica;Lesibedemeiner
Franz;Lowufohein
Krzysztof;Gaselatemüller
Gerolf;Navusedeson
Sibylla;Sutedihein
Nina;Golebede
Alicja;Revibodomeiner
Meryem;Kadeduhein
Janina;Zimmermann
Hendrik;Krüger
Oskar;Podadi
Maria-Luise;Nelaflodeson
Nadine;Niwogatemeiner
@ -0,0 +1,19 @@
Company_HR;Company_Court;Jahr;Umsatz;Ebit;EBITDA
;12336;5;2005;59600;7600;
;12336;5;2006;61300;5300;
;12336;5;2007;62500;5300;
;12336;5;2008;61700;7000;
;12336;5;2009;64600;6000;
;12336;5;2010;62420;5510;
;12336;5;2011;58650;5560;
;12336;5;2012;58170;-3960;
;12336;5;2013;60130;4930;
;12336;5;2014;62660;7250;
;12336;5;2015;69230;7030;
;12336;5;2016;73100;9160;
;12336;5;2017;74950;9380;
;12336;5;2018;75660;8000;23333
;12336;5;2019;80530;9460;24731
;12336;5;2020;99950;12370;35017
;12336;5;2021;107610;12580;37330
;12336;5;2022;114200;15410;40208
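The wide table above stores one year per row, while the notebooks earlier in this diff work with the long Metrik/Datum/Summe layout. A sketch of the conversion with pandas.melt, assuming a cleaned copy in which the header names line up with the data columns (the rows above carry one field more than the header), saved as a hypothetical wide.csv:

import pandas as pd

wide = pd.read_csv('wide.csv', sep=';')  # hypothetical, cleaned file

# fold the Umsatz/Ebit/EBITDA columns into one metric column with
# one value per (year, metric) pair; drop years without a value
long = wide.melt(
    id_vars=['Company_HR', 'Company_Court', 'Jahr'],
    value_vars=['Umsatz', 'Ebit', 'EBITDA'],
    var_name='Metrik',
    value_name='Summe',
).dropna(subset=['Summe'])
print(long.head())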
2001
documentations/seminararbeiten/Datenspeicherung/Jupyter/edges.csv
Normal file
@ -0,0 +1,34 @@
version: "3.8"
services:
  db:
    image: postgres:14.1-alpine
    container_name: postgres
    restart: always
    ports:
      - "5432:5432"
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
    volumes:
      #- db:/var/lib/postgresql/data
      - ./PostgreSQL:/var/lib/postgresql/data
  pgadmin:
    image: dpage/pgadmin4:7.2
    container_name: pgadmin4_container
    restart: always
    ports:
      - "5050:80"
    environment:
      PGADMIN_DEFAULT_EMAIL: admin@fh-swf.de
      PGADMIN_DEFAULT_PASSWORD: admin
    volumes:
      # - pgadmin:/var/lib/pgadmin
      - ./pgadmin:/var/lib/pgadmin

  mongodb:
    image: mongo:7.0.0-rc4
    ports:
      - '27017:27017'
    volumes:
      # - dbdata6:/data/db
      - ./mongo:/data/db
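With the compose file above running (docker compose up), both stores are reachable on localhost. A minimal connection sketch using the credentials defined there; pymongo as the MongoDB client is an assumption, and so is the default database name 'postgres' (the compose file sets no POSTGRES_DB, so the Postgres image defaults the database to the user name):

import psycopg2
from pymongo import MongoClient

# PostgreSQL: user, password and port as defined in the compose file
pg = psycopg2.connect(host='localhost', port=5432,
                      user='postgres', password='postgres',
                      database='postgres')  # default db name, assumption

# MongoDB: the compose file configures no authentication
mongo = MongoClient('mongodb://localhost:27017/')

print('PostgreSQL closed:', pg.closed)  # 0 while the connection is open
print('MongoDB version:', mongo.server_info()['version'])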
BIN documentations/seminararbeiten/Datenspeicherung/images/Front.PNG Normal file (39 KiB)
BIN documentations/seminararbeiten/Datenspeicherung/images/Graph.PNG Normal file (19 KiB)
BIN documentations/seminararbeiten/Datenspeicherung/images/HLD.PNG Normal file (16 KiB)
@ -40,8 +40,3 @@ Kennzahlen:
Best Practice:
- [Science Direct](https://www.sciencedirect.com/science/article/pii/S2666389920301896)
- [Toptal](https://www.toptal.com/designers/data-visualization/data-visualization-best-practices)
@ -0,0 +1,457 @@
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Visualizing a network\n",
    "\n",
    "In this example a graph is created with networkx and then visualized with pyvis. The graph is based on sample data. First options for the size, color and shape of the nodes and for mouse-over texts are shown.\n",
    "\n",
    "The code is based on the documentation of the two libraries:\n",
    "- [Networkx documentation](https://networkx.org/documentation/stable/)\n",
    "- [Pyvis documentation](https://pyvis.readthedocs.io/en/latest/index.html)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Installing the libraries\n",
    "\n",
    "Networkx is a Python library for creating and analyzing networks. Pyvis is a Python library for the interactive visualization of network graphs. Both can be installed with `pip`."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Requirement already satisfied: networkx in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (2.6.3)\n",
      "Requirement already satisfied: pyvis in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (0.3.2)\n",
      "Requirement already satisfied: jinja2>=2.9.6 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from pyvis) (2.11.3)\n",
      "Requirement already satisfied: networkx>=1.11 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from pyvis) (2.6.3)\n",
      "Requirement already satisfied: ipython>=5.3.0 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from pyvis) (7.29.0)\n",
      "Requirement already satisfied: jsonpickle>=1.4.1 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from pyvis) (3.0.1)\n",
      "Requirement already satisfied: backcall in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (0.2.0)\n",
      "Requirement already satisfied: pexpect>4.3 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (4.8.0)\n",
      "Requirement already satisfied: jedi>=0.16 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (0.18.0)\n",
      "Requirement already satisfied: traitlets>=4.2 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (5.1.0)\n",
      "Requirement already satisfied: decorator in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (5.1.0)\n",
      "Requirement already satisfied: prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (3.0.20)\n",
      "Requirement already satisfied: matplotlib-inline in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (0.1.2)\n",
      "Requirement already satisfied: appnope in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (0.1.2)\n",
      "Requirement already satisfied: pygments in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (2.10.0)\n",
      "Requirement already satisfied: pickleshare in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (0.7.5)\n",
      "Requirement already satisfied: setuptools>=18.5 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from ipython>=5.3.0->pyvis) (58.0.4)\n",
      "Requirement already satisfied: parso<0.9.0,>=0.8.0 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from jedi>=0.16->ipython>=5.3.0->pyvis) (0.8.2)\n",
      "Requirement already satisfied: MarkupSafe>=0.23 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from jinja2>=2.9.6->pyvis) (1.1.1)\n",
      "Requirement already satisfied: ptyprocess>=0.5 in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from pexpect>4.3->ipython>=5.3.0->pyvis) (0.7.0)\n",
      "Requirement already satisfied: wcwidth in /Users/kim/opt/anaconda3/lib/python3.9/site-packages (from prompt-toolkit!=3.0.0,!=3.0.1,<3.1.0,>=2.0.0->ipython>=5.3.0->pyvis) (0.2.5)\n"
     ]
    }
   ],
   "source": [
    "# install networkx and pyvis using pip\n",
    "!pip install networkx\n",
    "!pip install pyvis"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Building a network with Networkx"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "[0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0]"
      ]
     },
     "execution_count": 2,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import networkx as nx\n",
    "\n",
    "# create graph and use MultiGraph for nodes with multiple edges\n",
    "G = nx.MultiGraph()\n",
    "\n",
    "# create list of nodes with attributes as a dictionary\n",
    "nodes = [(1, {'label': 'Firma 1', 'branche': 'Branche 1', 'land': 'Land 1'}),\n",
    "         (2, {'label': 'Firma 2', 'branche': 'Branche 1', 'land': 'Land 2'}),\n",
    "         (3, {'label': 'Firma 3', 'branche': 'Branche 1', 'land': 'Land 3'}),\n",
    "         (4, {'label': 'Firma 4', 'branche': 'Branche 2', 'land': 'Land 4'}),\n",
    "         (5, {'label': 'Firma 5', 'branche': 'Branche 2', 'land': 'Land 1'}),\n",
    "         (6, {'label': 'Firma 6', 'branche': 'Branche 2', 'land': 'Land 3'}),\n",
    "         (7, {'label': 'Firma 7', 'branche': 'Branche 3', 'land': 'Land 3'}),\n",
    "         (8, {'label': 'Firma 8', 'branche': 'Branche 3', 'land': 'Land 2'}),\n",
    "         (9, {'label': 'Firma 9', 'branche': 'Branche 4', 'land': 'Land 1'}),\n",
    "         (10, {'label': 'Firma 10', 'branche': 'Branche 4', 'land': 'Land 4'}),\n",
    "         ]\n",
    "\n",
    "# create list of edges with attributes as a dictionary\n",
    "edges = [\n",
    "    (1, 2, {'label': 'beziehung1'}),\n",
    "    (5, 2, {'label': 'beziehung2'}),\n",
    "    (1, 3, {'label': 'beziehung3'}),\n",
    "    (2, 4, {'label': 'beziehung3'}),\n",
    "    (2, 6, {'label': 'beziehung4'}),\n",
    "    (2, 5, {'label': 'beziehung4'}),\n",
    "    (8, 10, {'label': 'beziehung4'}),\n",
    "    (9, 10, {'label': 'beziehung3'}),\n",
    "    (3, 7, {'label': 'beziehung2'}),\n",
    "    (6, 8, {'label': 'beziehung1'}),\n",
    "    (6, 9, {'label': 'beziehung1'}),\n",
    "    (1, 6, {'label': 'beziehung2'})\n",
    "    ]\n",
    "\n",
    "# add nodes to the graph\n",
    "G.add_nodes_from(nodes)\n",
    "\n",
    "# add edges to the graph, to hide arrow heads of the edges use option arrows = 'false'\n",
    "G.add_edges_from(edges, arrows='false')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Storing information for the mouse-over\n",
    "\n",
    "Requirement: when hovering over individual nodes, further information should become visible.\n",
    "\n",
    "Current implementation: 'title' is set as a string for each node, built from the company name and the number of connections.\n",
    "\n",
    "Extensions/open questions: further master-data information is possible, provided it is available."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [],
   "source": [
    "for node in G.nodes:\n",
    "    G.nodes[node]['title'] = G.nodes[node]['label'] + '\\n' + 'Anzahl Verbindungen: ' + str(G.degree[node])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Changing the size of the nodes\n",
    "\n",
    "Requirement: change the size depending on certain attributes.\n",
    "\n",
    "Current implementation: the size is set based on the number of edges.\n",
    "\n",
    "Extensions/open questions: further attributes such as EBIT or Umsatz should be possible."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### First test for determining the connections and their number"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "[2, 3, 6]\n",
      "3\n"
     ]
    }
   ],
   "source": [
    "# get all nodes connected to node 1\n",
    "print(list(G.adj[1]))\n",
    "\n",
    "# get number of nodes connected to node 1\n",
    "print(G.degree[1])"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Scaling the size"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 5,
   "metadata": {},
   "outputs": [],
   "source": [
    "# scaling the size of the nodes by 5*degree\n",
    "scale = 5\n",
    "\n",
    "# getting all nodes and their number of connections\n",
    "d = dict(G.degree)\n",
    "\n",
    "# updating dict\n",
    "d.update((x, scale * (y + 1)) for x, y in d.items())\n",
    "\n",
    "# setting size attribute according to created dictionary\n",
    "nx.set_node_attributes(G, d, 'size')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Visualization with Pyvis\n",
    "\n",
    "When creating the network, `neighborhood_highlight=True` already enables highlighting of neighbouring nodes when a node is clicked."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
    "from pyvis.network import Network\n",
    "\n",
    "# create network, 'directed = true' allows multiple edges between nodes\n",
    "nt = Network('1000px', '1000px', neighborhood_highlight=True, notebook=True, cdn_resources='in_line', directed=True)\n",
    "\n",
    "# populates the nodes and edges data structures\n",
    "nt.from_nx(G)"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### First tests for changing the shape and color of a single node\n",
    "\n",
    "Change shape of one node:\n",
    "`nt.nodes[1]['shape'] = 'square'`\n",
    "\n",
    "Change color of one node:\n",
    "`nt.nodes[1]['color'] = 'red'`"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Changing the color of all nodes\n",
    "\n",
    "Requirement: change the color based on the attributes \"branche\" or \"land\".\n",
    "\n",
    "Current implementation: a function that sets the color of the nodes based on the selected attribute (type).\n",
    "\n",
    "Extensions/open questions: the possible sectors and countries have hard-coded colors; can this be done more generically? How can further attributes be integrated?"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [],
   "source": [
    "# define new function that sets the color of the nodes\n",
    "def color_type(net, type):\n",
    "    '''color_type sets the color of a network depending on an attribute of the nodes\n",
    "    net: network\n",
    "    type: 'branche' or 'land' '''\n",
    "\n",
    "    colormap = {'Branche 1': '#87CEEB',\n",
    "                'Branche 2': '#0f4c81',\n",
    "                'Branche 3': '#B2FFFF',\n",
    "                'Branche 4': '#191970',\n",
    "                'Land 1': '#F8D568',\n",
    "                'Land 2': '#F58025',\n",
    "                'Land 3': '#CC5500',\n",
    "                'Land 4': '#C0362C'}\n",
    "    for node in net.nodes:\n",
    "        node['color'] = colormap[node[type]]\n",
    "    return net\n",
    "\n",
    "# set color based on attribute\n",
    "nt = color_type(nt, 'branche')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Changing the color of all edges\n",
    "\n",
    "Normally the edges take over the colors of their nodes. With the 'color' option the same color can be set for all edges."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 8,
   "metadata": {},
   "outputs": [],
   "source": [
    "# set all edge colors\n",
    "nt.options.edges.color = 'grey'"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Saving the network as HTML\n",
    "\n",
    "The layout and spring forces of the network can be set via the 'physics options'."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 9,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Netzwerk_Verflechtungsanalyse.html\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "\n",
       "        <iframe\n",
       "            width=\"1000px\"\n",
       "            height=\"1000px\"\n",
       "            src=\"Netzwerk_Verflechtungsanalyse.html\"\n",
       "            frameborder=\"0\"\n",
       "            allowfullscreen\n",
       "        ></iframe>\n",
       "        "
      ],
      "text/plain": [
       "<IPython.lib.display.IFrame at 0x10b82b940>"
      ]
     },
     "execution_count": 9,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "# activate physics options to try out different solver\n",
    "# nt.show_buttons(filter_=['physics'])\n",
    "\n",
    "# set physics options\n",
    "nt.barnes_hut(gravity=-8000, central_gravity=0.3, spring_length=200, spring_strength=0.1, damping=0.09, overlap=0)\n",
    "\n",
    "# create html and save in same folder\n",
    "nt.show('Netzwerk_Verflechtungsanalyse.html')"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### Creating a minimal network"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 10,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Netzwerk.html\n"
     ]
    },
    {
     "data": {
      "text/html": [
       "\n",
       "        <iframe\n",
       "            width=\"1000px\"\n",
       "            height=\"1000px\"\n",
       "            src=\"Netzwerk.html\"\n",
       "            frameborder=\"0\"\n",
       "            allowfullscreen\n",
       "        ></iframe>\n",
       "        "
      ],
      "text/plain": [
       "<IPython.lib.display.IFrame at 0x10bedbf70>"
      ]
     },
     "execution_count": 10,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "import networkx as nx\n",
    "from pyvis.network import Network\n",
    "\n",
    "sn = nx.Graph()\n",
    "sn_nodes = [1, 2, 3, 4, 5, 6, 7]\n",
    "sn_edges = [(1, 4), (2, 4), (3, 4), (4, 5), (5, 6), (5, 7)]\n",
    "\n",
    "sn.add_nodes_from(sn_nodes, color='#00509b')\n",
    "sn.add_edges_from(sn_edges)\n",
    "\n",
    "net = Network('1000px', '1000px', notebook=True, cdn_resources='in_line')\n",
    "\n",
    "net.from_nx(sn)\n",
    "net.show('Netzwerk.html')"
   ]
  }
 ],
 "metadata": {
  "interpreter": {
   "hash": "aee8b7b246df8f9039afb4144a1f6fd8d2ca17a180786b69acc140d282b71a49"
  },
  "kernelspec": {
   "display_name": "Python 3.10.1 64-bit",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.10.1"
  },
  "orig_nbformat": 4
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
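The notebook above scales node sizes by degree; the open question about using EBIT or Umsatz instead can be answered with the same set_node_attributes pattern. A sketch under the assumption that each node carries a numeric 'umsatz' attribute (not part of the sample data above):

import networkx as nx

G = nx.Graph()
# hypothetical revenue figures attached to the company nodes
G.add_nodes_from([
    (1, {'label': 'Firma 1', 'umsatz': 114.2}),
    (2, {'label': 'Firma 2', 'umsatz': 60.9}),
    (3, {'label': 'Firma 3', 'umsatz': 37.9}),
])
G.add_edges_from([(1, 2), (2, 3)])

# map the numeric attribute to a node size, with a floor so that
# small companies remain visible in the rendered network
sizes = {n: max(10, data['umsatz'] / 2) for n, data in G.nodes(data=True)}
nx.set_node_attributes(G, sizes, 'size')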
@ -0,0 +1 @@