diff --git a/pyrate/.dockerignore b/pyrate/.dockerignore
new file mode 100644
index 0000000..83b4753
--- /dev/null
+++ b/pyrate/.dockerignore
@@ -0,0 +1,164 @@
+### TortoiseGit template
+# Project-level settings
+/.tgitconfig
+
+### JupyterNotebooks template
+# gitignore template for Jupyter Notebooks
+# website: http://jupyter.org/
+
+.ipynb_checkpoints
+*/.ipynb_checkpoints/*
+
+# IPython
+profile_default/
+ipython_config.py
+
+# Remove previous ipynb_checkpoints
+# git rm -r .ipynb_checkpoints/
+
+### Python template
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+.gitignore
+.gitlab-ci.yml
+Experiments.ipynb
+.gitlab
+.dockerignore
+*.dockerfile
+.git
diff --git a/pyrate/.gitignore b/pyrate/.gitignore
new file mode 100644
index 0000000..c2d9b00
--- /dev/null
+++ b/pyrate/.gitignore
@@ -0,0 +1,140 @@
+.idea
+.vscode
+.test-artifacts.junit.xml
+.pth
+/stda-env/
+
+# For mutation testing
+.hammett-db
+.mutmut-cache
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+coverage.json
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
diff --git a/pyrate/.gitlab-ci.yml b/pyrate/.gitlab-ci.yml
new file mode 100644
index 0000000..5c7298d
--- /dev/null
+++ b/pyrate/.gitlab-ci.yml
@@ -0,0 +1,196 @@
+# ~~~~~~~~~~~~~~~~~~~~~ Base image
+image: ubuntu:22.04
+
+# ~~~~~~~~~~~~~~~~~~~~~ Caches & Environment variables
+variables:
+ PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
+
+cache:
+ # Share cache among commits on the same branch
+ key: ${CI_COMMIT_REF_SLUG}
+ paths:
+ - .cache/pip
+ - .cache/apt
+ # cache these such that subsequent invocations test at least the previous examples
+ - .hypothesis/examples
+ # mypy works incrementally
+ - .mypy_cache
+
+# ~~~~~~~~~~~~~~~~~~~~~ Config
+stages:
+ - Static Code Analysis
+ - Automatic Testing
+ - Manual Testing
+ - Documentation
+ - Deploy
+
+default:
+ timeout: 15 minutes
+
+# ~~~~~~~~~~~~~~~~~~~~~ Static Code Analysis
+Pylint:
+ image: python:3.10
+ stage: Static Code Analysis
+ needs: []
+ before_script:
+ - pip install "pylint~=2.13.4"
+ script:
+ - pylint -j 0 pyrate tests scripts | awk '!seen[$0]++'
+
+Flake8 Linter:
+ image: python:3.10
+ stage: Static Code Analysis
+ needs: []
+ before_script:
+ - pip install hacking # This also installs flake8, mccabe and others
+ script:
+ - flake8 | awk '!seen[$0]++'
+
+Mypy Static Type Checker:
+ stage: Static Code Analysis
+ needs: []
+ before_script:
+ # (This section is partly copied from the Pytest job)
+
+ # Setup APT cache based on
+ # https://gitlab.com/gitlab-org/gitlab-runner/issues/991#note_126864314
+ - rm -f /etc/apt/apt.conf.d/docker-clean
+ - mkdir -p .cache/apt && mkdir /var/cache/apt/archives && mount --bind .cache/apt /var/cache/apt/archives/
+
+ # Install Pip & additional requirements that cannot be fulfilled with pip
+ - apt-get update -qq
+ - apt-get install -qqy python3-pip g++ python3-dev python3-gdal libgdal-dev
+
+ # Print version information and install Pyrate
+ - python3 --version
+ - pip3 install .
+ script:
+ - mypy pyrate tests scripts
+
+Black Linter:
+ image: python:3.10
+ stage: Static Code Analysis
+ needs: []
+ before_script:
+ - pip install black
+ script:
+ - black --check --diff .
+ # Black is allowed to fail since we do not strictly enforce its advice
+ allow_failure: true
+
+# ~~~~~~~~~~~~~~~~~~~~~ Automatic Testing
+Pytest:
+ stage: Automatic Testing
+ needs: []
+ timeout: 3 hours
+ # ~~~~~~~~~~~~~~~~~~~~~ Setup & Installation
+ before_script:
+ # Setup APT cache based on
+ # https://gitlab.com/gitlab-org/gitlab-runner/issues/991#note_126864314
+ - rm -f /etc/apt/apt.conf.d/docker-clean
+ - mkdir -p .cache/apt && mkdir /var/cache/apt/archives && mount --bind .cache/apt /var/cache/apt/archives/
+
+ # Install Pip & additional requirements that cannot be fulfilled with pip
+ - apt-get update -qq
+ - apt-get install software-properties-common -qqy
+ - add-apt-repository ppa:antiprism/ppa -y
+ - apt-get install -qqy python3-pip g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite antiprism
+
+ # Print version information and install Pyrate
+ - python3 --version
+ - pip3 install .
+ script:
+ # Run the tests and collect coverage
+ - pytest --junitxml=.test-artifacts.junit.xml
+
+ # Convert the coverage report separately (instead of via --cov-report xml:coverage.xml) to correct the source paths
+ # This is for Gitlab to automatically parse it
+ - python3 -m coverage xml
+
+ # Make sure to crash at less than 100% statement coverage
+ - python3 -m coverage json
+ - echo Checking for 100.00% statement coverage ...
+ - >
+ cat coverage.json | python3 -c $'import json, sys; miss = bool(json.load(sys.stdin)["totals"]["missing_lines"])\nif miss: sys.exit("\033[91m\033[01mERROR: Statement Coverage is <100%.\033[0m");'
+ coverage: '/^TOTAL\s+\d+\s+\d+\s+\d+\s+\d+\s+(\d+.\d+\%)/'
+ artifacts:
+ reports:
+ junit: .test-artifacts.junit.xml
+ cobertura: coverage.xml
+
+Mutation Testing:
+ extends: Pytest
+ stage: Manual Testing
+ needs: [Pytest, Mypy Static Type Checker] # Always make sure that the normal tests run fine!
+ timeout: 3 days # This can take very long, since the test suite might get run many times
+ when: manual
+ script:
+ - pip3 install mutmut
+ - mutmut run --no-progress # Update less regularly
+ - mutmut results
+ - mutmut html
+ artifacts:
+ reports: {}
+ paths:
+ - html/
+ expire_in: 500 days
+
+# ~~~~~~~~~~~~~~~~~~~~~ Documentation
+
+Build Documentation:
+ stage: Documentation
+ needs: []
+ retry: # Needed due to random "std::bad_alloc" occurrences
+ max: 2
+ when: script_failure
+ before_script:
+ # Installation as in the "Pytest" job
+ # Setup APT cache based on
+ # https://gitlab.com/gitlab-org/gitlab-runner/issues/991#note_126864314
+ - rm -f /etc/apt/apt.conf.d/docker-clean
+ - mkdir -p .cache/apt && mkdir /var/cache/apt/archives && mount --bind .cache/apt /var/cache/apt/archives/
+
+ # Setup pip
+ - apt-get update -qq
+ - apt-get install -qqy python3-pip
+
+ # Install additional requirements that cannot be fulfilled with pip
+ - apt-get install -qqy g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite
+
+ # Print version information and install Pyrate
+ - python3 --version
+ - pip3 install .[docs]
+
+ # Install additional requirements specifically for the documentation
+ - apt-get -qqy install graphviz
+ script:
+ - cd doc
+ - make html # only build HTML as it supports all features, is fast and required for the Gitlab pages website
+ artifacts:
+ paths:
+ - doc/build/html/
+
+# ~~~~~~~~~~~~~~~~~~~~~ Deploy
+
+# This job is separate from "Build Documentation" job since this one shall only run on the master branch while the other
+# should run as part of all pipelines
+pages:
+ image: alpine:latest
+ stage: Deploy
+ needs: [Build Documentation]
+ only:
+ - master
+ script:
+ - mkdir public
+ - cp -R doc/build/html/* public/
+ artifacts:
+ paths:
+ - public
+
+# This job triggers the pipeline of the ros-nodes repository on changes of the master branch
+Trigger Downstream Pipelines:
+ # needs: [] is omitted on purpose: although this job does not really require other jobs, it shall only run at the end if all others succeeded
+ stage: Deploy
+ only:
+ - master
+ trigger: informatik/ros-nodes
diff --git a/pyrate/CHANGELOG.md b/pyrate/CHANGELOG.md
new file mode 100644
index 0000000..a95bf7f
--- /dev/null
+++ b/pyrate/CHANGELOG.md
@@ -0,0 +1,36 @@
+Version 22.04
+=============
+
+For this release, the following major features were implemented:
+ - `pyrate.sense`: A foundation for vision based obstacle detection has been merged, including a horizon line detector and base classes for simple image regions.
+ - `pyrate.plan`: The classes based on *Shapely* have been prepared for a version bump to shapely 2.0 in the future.
+ - `scripts`: A script has been added to quickly create test databases from GeoJSON.
+
+Additionally, bugfixes and enhancements to the CI pipeline have been implemented.
+As an example, reporting on missing statement coverage is now explicit in the respective job.
+
+The [Wiki](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/wikis) has now been moved into a Sailing Team wide repository.
+Here, information from all teams can be gathered in a single Wiki to increase the exchange of information and improve coordination.
+
+Version 21.10
+=============
+
+For this release, the following major features were implemented:
+ - `pyrate.common`: Strategies for generating test cases for hypothesis are now available within Pyrate and can be
+ used by other projects too.
+ - `pyrate.plan`: The database can now handle additional types of chart objects (points and line strings).
+
+In addition, there were many bug fixes and QA improvements: We now reach 100% statement coverage and also collect
+branch coverage. The latter consistently lies in the 95-99% range, and we also enforce at least 90% in every contribution.
+
+While not strictly part of Pyrate, it is worth mentioning that the [Wiki](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/wikis)
+grew substantially. In particular, large articles on the hardware of our boats and the components were written.
+Most pieces of equipment should now be straightforward to set up just by following the instructions there.
+
+
+Version 21.04
+=============
+
+Versioning of *Pyrate* starts with this version as the current version covers all intended major areas of application.
+A lot of early quirks have now been ironed out, and we hope that changes from now on will be less disruptive.
+Prior to this, there were no labeled versions but simply an ongoing development effort.
diff --git a/pyrate/README.md b/pyrate/README.md
new file mode 100644
index 0000000..e306324
--- /dev/null
+++ b/pyrate/README.md
@@ -0,0 +1,69 @@
+# Pyrate ⛵🛥️🗺
+
+[](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/commits/master)
+[](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/commits/master)
+[](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/commits/master)
+
+[](http://informatik.pages.sailingteam.hg.tu-darmstadt.de/pyrate/)
+[](https://devguide.python.org/#status-of-python-branches)
+
+This project offers algorithms targeted towards autonomous surface vehicles, implemented in the Python programming language (3.10+).
+The functionality included in the Pyrate Python package enables the perception and processing of environmental features, planning of strategies and trajectories, as well as taking action.
+Pyrate is therefore divided into the distinct subpackages *sense*, *plan* and *act*, as well as the *common* package for additional features such as file I/O and mathematical abstractions.
+
+The sense subpackage includes algorithms for computer vision, single and multi target state estimation, data smoothing and more.
+In the plan subpackage, the perceived state of the agent and its environment is processed to compute trajectories for long- and short-term navigation as well as strategic decisions.
+This also includes gradient-based and gradient-free optimization methods.
+Finally, the act subpackage contains state controllers to carry out planned behavior.
+
+
+
+This project aims at providing the algorithmic backend for the ROS driven on-board systems and simulations.
+Models of the robot's mechanics, electronics and its environment shall be developed within their own respective repository.
+Furthermore, technical specifications, maps and so on go into a separate project repository as well.
+This ensures a clean separation of distinct requirements for quality assurance and responsibilities that distinguish these projects.
+
+## Features
+
+These are the currently supported features:
+
+- Sense:
+ - Filters:
+ - Kalman filters for linear and non-linear state estimation
+ - A gaussian mixture PHD filter for multi target tracking
+ - Smoothers:
+ - Rauch-Tung-Striebel based smoothing of time series data.
+ - Extended and Unscented Kalman filter extension to the RTS smoothing approach for non-linear models.
+ - Vision:
+ - Base classes for simple image regions, e.g. lines and boxes.
+ - Horizon line detection as basis for later obstacle localization.
+- Plan:
+ - Geometric primitives for both cartesian and polar coordinate systems
+ - Locations, polygons and routes
+ - Transformations, unary and binary geometric operations based on [shapely](https://shapely.readthedocs.io/en/latest/project.html)
+ - Graphs for use in navigation
+ - Base classes and common abstractions like (de)serialization and pruning
+ - Generation of graphs covering the entire globe
+- Act
+ - Controllers
+ - PID & LQR implementations
+ - Optional anti-windup
+- Common
+ - Chart IO
+ - Discovery and loading of obstacles, e.g. landmasses, from S-57 chart files
+ - Writing and querying of a [spatialite database](https://www.gaia-gis.it/fossil/libspatialite) containing obstacles/polygons
+ - Raster dataset IO
+ - Math helpers
+- Other:
+ - [Hypothesis](https://hypothesis.readthedocs.io/en/latest/) driven testing environment (including generators for simple testing of geometric calculations)
+ - Documentation generated by [Sphinx](https://www.sphinx-doc.org/en/master/)
+ - Continuous integration (CI) pipeline with linting, type checking, auto formatting, testing and documentation generation
+
+**Documentation**: For a complete overview over the project, how to set things up and contribute,
+please visit [our documentation](http://informatik.pages.sailingteam.hg.tu-darmstadt.de/pyrate/).
+It can also be reached via the "External Wiki" link in the left sidebar of the project overview.
+The CI pipeline ensures that its content is always up to date with the current `master` branch.
+
+**New Features**: Upcoming or required features and existing problems can be found, added and discussed in the
+[Gitlab issues](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/boards)
+and [merge requests](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/merge_requests).
diff --git a/pyrate/doc/Makefile b/pyrate/doc/Makefile
new file mode 100644
index 0000000..8b6275a
--- /dev/null
+++ b/pyrate/doc/Makefile
@@ -0,0 +1,20 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line, and also
+# from the environment for the first two.
+SPHINXOPTS ?= -W --keep-going
+SPHINXBUILD ?= sphinx-build
+SOURCEDIR = source
+BUILDDIR = build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+ @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile
+ @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/pyrate/doc/make.bat b/pyrate/doc/make.bat
new file mode 100644
index 0000000..9534b01
--- /dev/null
+++ b/pyrate/doc/make.bat
@@ -0,0 +1,35 @@
+@ECHO OFF
+
+pushd %~dp0
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
+ set SPHINXBUILD=sphinx-build
+)
+set SOURCEDIR=source
+set BUILDDIR=build
+
+if "%1" == "" goto help
+
+%SPHINXBUILD% >NUL 2>NUL
+if errorlevel 9009 (
+ echo.
+ echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
+ echo.installed, then set the SPHINXBUILD environment variable to point
+ echo.to the full path of the 'sphinx-build' executable. Alternatively you
+ echo.may add the Sphinx directory to PATH.
+ echo.
+ echo.If you don't have Sphinx installed, grab it from
+ echo.http://sphinx-doc.org/
+ exit /b 1
+)
+
+%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+goto end
+
+:help
+%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
+
+:end
+popd
diff --git a/pyrate/doc/source/act/act.rst b/pyrate/doc/source/act/act.rst
new file mode 100644
index 0000000..3e43e00
--- /dev/null
+++ b/pyrate/doc/source/act/act.rst
@@ -0,0 +1,10 @@
+Act
+---
+
+.. automodule:: pyrate.act
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Subpackages:
+
+ control/control
diff --git a/pyrate/doc/source/act/control/anti_windup_lqr.rst b/pyrate/doc/source/act/control/anti_windup_lqr.rst
new file mode 100644
index 0000000..ba0d7b3
--- /dev/null
+++ b/pyrate/doc/source/act/control/anti_windup_lqr.rst
@@ -0,0 +1,8 @@
+Anti windup LQR
+---------------
+
+.. automodule:: pyrate.act.control.anti_windup_lqr
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff --git a/pyrate/doc/source/act/control/anti_windup_pid.rst b/pyrate/doc/source/act/control/anti_windup_pid.rst
new file mode 100644
index 0000000..752974c
--- /dev/null
+++ b/pyrate/doc/source/act/control/anti_windup_pid.rst
@@ -0,0 +1,7 @@
+Anti windup PID
+---------------
+
+.. automodule:: pyrate.act.control.anti_windup_pid
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/act/control/control.rst b/pyrate/doc/source/act/control/control.rst
new file mode 100644
index 0000000..4692b05
--- /dev/null
+++ b/pyrate/doc/source/act/control/control.rst
@@ -0,0 +1,13 @@
+Control
+-------
+
+.. automodule:: pyrate.act.control
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ pid
+ anti_windup_pid
+ lqr
+ anti_windup_lqr
diff --git a/pyrate/doc/source/act/control/lqr.rst b/pyrate/doc/source/act/control/lqr.rst
new file mode 100644
index 0000000..82e4163
--- /dev/null
+++ b/pyrate/doc/source/act/control/lqr.rst
@@ -0,0 +1,8 @@
+LQR
+---
+
+.. automodule:: pyrate.act.control.lqr
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
diff --git a/pyrate/doc/source/act/control/pid.rst b/pyrate/doc/source/act/control/pid.rst
new file mode 100644
index 0000000..8e9be20
--- /dev/null
+++ b/pyrate/doc/source/act/control/pid.rst
@@ -0,0 +1,7 @@
+PID
+---
+
+.. automodule:: pyrate.act.control.pid
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/common/charts/charts.rst b/pyrate/doc/source/common/charts/charts.rst
new file mode 100644
index 0000000..821b006
--- /dev/null
+++ b/pyrate/doc/source/common/charts/charts.rst
@@ -0,0 +1,11 @@
+Charts
+------
+
+The charts package enables users to read and write charts based on different file formats and databases.
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ database
+ s57_files
diff --git a/pyrate/doc/source/common/charts/database.rst b/pyrate/doc/source/common/charts/database.rst
new file mode 100644
index 0000000..2148c06
--- /dev/null
+++ b/pyrate/doc/source/common/charts/database.rst
@@ -0,0 +1,7 @@
+Database
+--------
+
+.. automodule:: pyrate.common.charts.db
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/common/charts/s57_files.rst b/pyrate/doc/source/common/charts/s57_files.rst
new file mode 100644
index 0000000..424e9e2
--- /dev/null
+++ b/pyrate/doc/source/common/charts/s57_files.rst
@@ -0,0 +1,7 @@
+Raw Files
+---------
+
+.. automodule:: pyrate.common.charts.s57_files
+ :members:
+ :undoc-members:
+ :show-inheritance:
\ No newline at end of file
diff --git a/pyrate/doc/source/common/common.rst b/pyrate/doc/source/common/common.rst
new file mode 100644
index 0000000..727f587
--- /dev/null
+++ b/pyrate/doc/source/common/common.rst
@@ -0,0 +1,13 @@
+Common
+------
+
+.. automodule:: pyrate.common
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Subpackages:
+
+ charts/charts
+ math/math
+ raster_datasets/raster_datasets
+ testing/testing
diff --git a/pyrate/doc/source/common/math/gaussian.rst b/pyrate/doc/source/common/math/gaussian.rst
new file mode 100644
index 0000000..1a4a4de
--- /dev/null
+++ b/pyrate/doc/source/common/math/gaussian.rst
@@ -0,0 +1,7 @@
+Gaussian
+--------
+
+.. automodule:: pyrate.common.math.gaussian
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/common/math/math.rst b/pyrate/doc/source/common/math/math.rst
new file mode 100644
index 0000000..4c7739b
--- /dev/null
+++ b/pyrate/doc/source/common/math/math.rst
@@ -0,0 +1,10 @@
+Math
+----
+
+The math package contains useful abstractions for common mathematical objects.
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ gaussian
diff --git a/pyrate/doc/source/common/raster_datasets/geo_datasets.rst b/pyrate/doc/source/common/raster_datasets/geo_datasets.rst
new file mode 100644
index 0000000..72809af
--- /dev/null
+++ b/pyrate/doc/source/common/raster_datasets/geo_datasets.rst
@@ -0,0 +1,7 @@
+Geographical Datasets
+---------------------
+
+.. automodule:: pyrate.common.raster_datasets.geo_datasets
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/common/raster_datasets/plot_global_bathymetry_depth.png b/pyrate/doc/source/common/raster_datasets/plot_global_bathymetry_depth.png
new file mode 100644
index 0000000..530f24a
Binary files /dev/null and b/pyrate/doc/source/common/raster_datasets/plot_global_bathymetry_depth.png differ
diff --git a/pyrate/doc/source/common/raster_datasets/plot_global_bathymetry_fraction_navigable.png b/pyrate/doc/source/common/raster_datasets/plot_global_bathymetry_fraction_navigable.png
new file mode 100644
index 0000000..54a5357
Binary files /dev/null and b/pyrate/doc/source/common/raster_datasets/plot_global_bathymetry_fraction_navigable.png differ
diff --git a/pyrate/doc/source/common/raster_datasets/raster_datasets.rst b/pyrate/doc/source/common/raster_datasets/raster_datasets.rst
new file mode 100644
index 0000000..654f9f7
--- /dev/null
+++ b/pyrate/doc/source/common/raster_datasets/raster_datasets.rst
@@ -0,0 +1,12 @@
+Raster Datasets
+---------------
+
+.. automodule:: pyrate.common.raster_datasets
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ geo_datasets
+ transformer_base
+ transformers_concrete
diff --git a/pyrate/doc/source/common/raster_datasets/transformer_base.rst b/pyrate/doc/source/common/raster_datasets/transformer_base.rst
new file mode 100644
index 0000000..6354fc8
--- /dev/null
+++ b/pyrate/doc/source/common/raster_datasets/transformer_base.rst
@@ -0,0 +1,7 @@
+Transformer Base Classes
+------------------------
+
+.. automodule:: pyrate.common.raster_datasets.transformer_base
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/common/raster_datasets/transformers_concrete.rst b/pyrate/doc/source/common/raster_datasets/transformers_concrete.rst
new file mode 100644
index 0000000..b6ffdc4
--- /dev/null
+++ b/pyrate/doc/source/common/raster_datasets/transformers_concrete.rst
@@ -0,0 +1,7 @@
+Concrete Transformers
+---------------------
+
+.. automodule:: pyrate.common.raster_datasets.transformers_concrete
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/common/testing/dynamic_system.rst b/pyrate/doc/source/common/testing/dynamic_system.rst
new file mode 100644
index 0000000..7b31649
--- /dev/null
+++ b/pyrate/doc/source/common/testing/dynamic_system.rst
@@ -0,0 +1,7 @@
+Dynamic System
+--------------
+
+.. automodule:: pyrate.common.testing.strategies.dynamic_system
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/common/testing/geometry.rst b/pyrate/doc/source/common/testing/geometry.rst
new file mode 100644
index 0000000..feb7a06
--- /dev/null
+++ b/pyrate/doc/source/common/testing/geometry.rst
@@ -0,0 +1,7 @@
+Geometry
+--------
+
+.. automodule:: pyrate.common.testing.strategies.geometry
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/common/testing/strategies.rst b/pyrate/doc/source/common/testing/strategies.rst
new file mode 100644
index 0000000..cf06f98
--- /dev/null
+++ b/pyrate/doc/source/common/testing/strategies.rst
@@ -0,0 +1,11 @@
+Strategies
+----------
+
+.. automodule:: pyrate.common.testing.strategies
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ dynamic_system
+ geometry
diff --git a/pyrate/doc/source/common/testing/testing.rst b/pyrate/doc/source/common/testing/testing.rst
new file mode 100644
index 0000000..4944ded
--- /dev/null
+++ b/pyrate/doc/source/common/testing/testing.rst
@@ -0,0 +1,13 @@
+Testing
+-------
+
+.. automodule:: pyrate.common.testing
+ :members:
+ :undoc-members:
+ :show-inheritance:
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ strategies
diff --git a/pyrate/doc/source/conf.py b/pyrate/doc/source/conf.py
new file mode 100644
index 0000000..c206bb3
--- /dev/null
+++ b/pyrate/doc/source/conf.py
@@ -0,0 +1,122 @@
+# Configuration file for the Sphinx documentation builder.
+#
+# This file only contains a selection of the most common options. For a full
+# list see the documentation:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+
+# -- Path setup --------------------------------------------------------------
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+from os.path import abspath
+from os.path import dirname
+from os.path import join
+import sys
+
+sys.path.insert(0, abspath(join(dirname(__file__), "../../"))) # for scripts/
+
+import pyrate # noqa: E402
+
+# -- Project information -----------------------------------------------------
+
+project = "Pyrate"
+copyright = pyrate.__author__
+author = pyrate.__author__
+
+# The version info for the project, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = pyrate.__version__.split("-")[0]
+# The full version, including alpha/beta/rc tags
+release = pyrate.__version__
+
+# -- General configuration ---------------------------------------------------
+
+primary_domain = "py"
+
+# If this is True, todo and todolist produce output, else they produce nothing.
+# The default is False.
+todo_include_todos = True
+
+language = "en"
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ "sphinx_markdown_builder",
+ "sphinx.ext.autodoc",
+ "sphinx.ext.intersphinx",
+ "sphinx.ext.mathjax",
+ "sphinx.ext.viewcode",
+ "sphinx.ext.napoleon",
+ "sphinx.ext.autosummary",
+ "sphinx.ext.doctest",
+ "sphinx.ext.inheritance_diagram",
+ "sphinx_rtd_theme",
+ "sphinxcontrib.programoutput",
+]
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+# This pattern also affects html_static_path and html_extra_path.
+exclude_patterns = []
+
+source_suffix = [".rst"]
+
+intersphinx_mapping = {
+ "python": ("https://docs.python.org/3", None),
+ "shapely": ("https://shapely.readthedocs.io/en/stable", None),
+ "numpy": ("https://numpy.org/doc/stable", None),
+ "scipy": ("https://docs.scipy.org/doc/scipy", None),
+ "matplotlib": ("https://matplotlib.org/stable", None),
+ "pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
+ "h5py": ("https://docs.h5py.org/en/stable", None),
+ "tables": ("https://www.pytables.org", None),
+ "pyproj": ("https://pyproj4.github.io/pyproj/stable", None),
+ "rasterio": ("https://rasterio.readthedocs.io/en/stable", None),
+ "geopy": ("https://geopy.readthedocs.io/en/stable", None),
+ "cartopy": ("https://scitools.org.uk/cartopy/docs/latest", None),
+ "pytest": ("https://docs.pytest.org/en/stable", None),
+ "pytest-cov": ("https://pytest-cov.readthedocs.io/en/stable", None),
+ "hypothesis": ("https://hypothesis.readthedocs.io/en/latest", None),
+}
+
+nitpicky = False
+
+# -- Options for HTML output -------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes or:
+# - https://sphinx-themes.org/
+# - https://www.writethedocs.org/guide/tools/sphinx-themes/
+html_theme = "sphinx_rtd_theme"
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = [] # '_static'
+
+html_favicon = "../../resources/logo.svg"
+html_logo = "../../resources/logo.svg"
+
+html_sidebars = {"**": ["globaltoc.html", "relations.html", "sourcelink.html", "searchbox.html"]}
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_engine = "pdflatex"
+
+latex_elements = {
+ # The paper size ('letterpaper' or 'a4paper').
+ "papersize": "a4paper",
+ # The font size ('10pt', '11pt' or '12pt').
+ # 'pointsize': '10pt',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [("index", "pyrate.tex", f"{project} Documentation", author, "manual")]
diff --git a/pyrate/doc/source/contribution.rst b/pyrate/doc/source/contribution.rst
new file mode 100644
index 0000000..6e145d3
--- /dev/null
+++ b/pyrate/doc/source/contribution.rst
@@ -0,0 +1,12 @@
+Contribution
+============
+
+A long-term goal of this project is to provide a stable foundation of algorithms and data structures to enable a reliable and autonomous operation of a surface vehicle.
+To achieve this goal, every addition or change to the software needs to be reviewed by the team and follow a set of rules as defined by our CI pipeline.
+This obviously includes writing tests and documentation as needed.
+
+To contribute to the project, feel free to create a new branch with a meaningful name and start your work from there.
+If you feel that your work is ready to be merged into the master branch of the project, it is time to open a merge request.
+
+If you are new to the Python programming language, you can find a good overview of Pythonic idioms `here `_.
+We use `Google-style <https://google.github.io/styleguide/pyguide.html>`_ docstrings within the code and `Sphinx <https://www.sphinx-doc.org/>`_ to generate the documentation you are currently reading.
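+
+For reference, a docstring in this style looks roughly like the following minimal sketch (the function itself is a made-up example and not part of Pyrate):
+
+.. code-block:: python
+
+    def clamp(value: float, lower: float, upper: float) -> float:
+        """Restrict a value to the closed interval [lower, upper].
+
+        Args:
+            value: The value to restrict.
+            lower: The smallest allowed value.
+            upper: The largest allowed value.
+
+        Returns:
+            The value, limited to the given interval.
+        """
+        return max(lower, min(value, upper))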
diff --git a/pyrate/doc/source/index.rst b/pyrate/doc/source/index.rst
new file mode 100644
index 0000000..555a7e9
--- /dev/null
+++ b/pyrate/doc/source/index.rst
@@ -0,0 +1,30 @@
+Welcome to Pyrate!
+==================
+
+This project offers algorithms targeted towards autonomous surface vehicles, implemented in the Python programming language (3.10+).
+The functionality included in the Pyrate Python package enables the perception and processing of environmental features, planning of strategies and trajectories, as well as taking action.
+Pyrate is therefore divided into the distinct subpackages *sense*, *plan* and *act*, as well as the *common* package for additional features such as file I/O and mathematical abstractions.
+
+The sense subpackage includes algorithms for computer vision, single and multi target state estimation, data smoothing and more.
+In the plan subpackage, the perceived state of the agent and its environment is processed to compute trajectories for long- and short-term navigation as well as strategic decisions.
+This also includes gradient-based and gradient-free optimization methods.
+Finally, the act subpackage contains state controllers to carry out planned behavior.
+
+.. image:: ../../resources/project_structure.png
+
+This project aims at providing the algorithmic backend for the ROS driven on-board systems and simulations.
+Models of the robot's mechanics, electronics and its environment shall be developed within their own respective repository.
+Furthermore, technical specifications, maps and so on go into a separate project repository as well.
+This ensures a clean separation of distinct requirements for quality assurance and responsibilities that distinguish these projects.
+
+.. toctree::
+ :hidden:
+
+ installation
+ sense/sense
+ plan/plan
+ act/act
+ common/common
+ scripts/scripts
+ contribution
+ quality_assurance
diff --git a/pyrate/doc/source/installation.rst b/pyrate/doc/source/installation.rst
new file mode 100644
index 0000000..a1f24f8
--- /dev/null
+++ b/pyrate/doc/source/installation.rst
@@ -0,0 +1,93 @@
+Installation
+============
+
+This section leads you through the process of setting up your machine to be able to contribute and use the Pyrate package.
+
+
+Creating a virtualenv
+---------------------
+
+.. note:: This step is not necessary but recommended
+
+To encapsulate Pyrate, its specific dependencies and the rest of the STDA Python workspace, it can be a good decision to first set up a *virtualenv*.
+A *virtualenv* behaves in a similar way to the Python installation your distribution probably already ships with, but keeps all the installed packages in a separate space.
+This way you do not run into incompatibilities if you participate in multiple projects at the same time.
+A new *virtualenv* named *stda-env* is created with the following command within your current working directory.
+
+.. code-block:: bash
+
+ python -m venv stda-env
+
+To actually use this newly created environment, you need to activate it.
+To do this, switch into the project directory and execute :code:`source PathToEnvironment/stda-env/bin/activate`.
+This causes your current shell to switch from the system wide Python installation to the *virtualenv*.
+For ease of use, you can append one of the following lines to your `~/.bashrc`.
+
+.. code-block:: bash
+
+ # Always activate the environment when a new shell is created
+ source PathToEnvironment/stda-env/bin/activate
+
+ # Create a new command that activates the environment by hand
+ alias activate-stda="source PathToEnvironment/stda-env/bin/activate"
+
+To return from the virtualenv to using the system's Python interpreter, simply use the command :code:`deactivate`.
+
+
+Setting up Pyrate
+-----------------
+
+To install Pyrate and its dependencies to your Linux-based Python 3.10+ environment, simply do the following.
+Afterwards, Pyrate's functionality will be available to you either globally or, if you followed the instructions above, with your *virtualenv* activated.
+
+.. code-block:: bash
+
+ # Dependencies that are not installed via pip
+ sudo add-apt-repository ppa:antiprism/ppa
+ sudo apt install python3-pip g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite antiprism
+ pip install wheel
+
+ # Clone the repository and change into the created directory
+ git clone git@gitlab.sailingteam.hg.tu-darmstadt.de:informatik/pyrate.git
+ cd pyrate
+ # To install, choose either option A (static) or B (editable)
+ # A: Install a static version of Pyrate
+ pip install .
+
+ # B: If you want to contribute to Pyrate, you need to install in editable mode
+ pip install -e .
+
+You can check that everything worked by executing :code:`python3 -c "import pyrate"`.
+If no :code:`ImportError` or similar pops up, the installation has succeeded.
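+
+The same check can also be performed from a Python shell; printing the version string (``pyrate.__version__``, which is also read by ``doc/source/conf.py``) additionally tells you which state of the package is installed:
+
+.. code-block:: python
+
+    import pyrate
+
+    # the package exposes its version string, which the Sphinx configuration also uses
+    print(pyrate.__version__)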
+
+
+Building the docs
+-----------------
+
+The documentation you are currently reading can easily be built for the locally installed branch using `Sphinx <https://www.sphinx-doc.org/>`__.
+The most recent version from the ``master`` branch is also available `online <http://informatik.pages.sailingteam.hg.tu-darmstadt.de/pyrate/>`__.
+
+Three formats are currently supported.
+After you have built any of them with the below instructions, open these files to view the documentation:
+
+- HTML (multiple linked pages): open ``doc/build/html/index.html`` in a web browser
+- PDF (single document): open ``doc/build/latex/pyrate.pdf`` in a PDF viewer
+- Markdown (multiple linked pages, limited functionality): open ``doc/build/markdown/index.md``
+
+.. code-block:: bash
+
+ # install the extra Python dependencies
+ pip install .[docs]
+
+ # install the `dot` program to render inheritance diagrams (else they will appear as gibberish text)
+ sudo apt install graphviz
+
+ # change into the documentation directory
+ cd doc
+
+ # compile the docs into one or more of the below formats
+ make html
+ make latexpdf # requires pdflatex
+ make markdown
+
+On Windows, `make.bat` can be used in place of `make` (untested, possibly requires installing additional dependencies).
diff --git a/pyrate/doc/source/plan/geometry/geometry.rst b/pyrate/doc/source/plan/geometry/geometry.rst
new file mode 100644
index 0000000..092e27c
--- /dev/null
+++ b/pyrate/doc/source/plan/geometry/geometry.rst
@@ -0,0 +1,215 @@
+Geometry
+========
+
+The geometry package provides a foundation for planning methods by implementing
+several commonly used geometric objects, e.g. locations, polygons, and routes.
+Each of them comes in a polar-coordinate (i.e. latitude & longitude) and a
+cartesian-coordinate (i.e. local x- & y-axes on a tangent plane) variant.
+
+The cartesian ones are based on `Shapely <https://shapely.readthedocs.io/>`_
+and the polar ones try to mimic their interface and functionality.
+All coordinates are referenced to the widely used
+`World Geodetic System (WGS84) <https://en.wikipedia.org/wiki/World_Geodetic_System>`__.
+In this inheritance diagram, :class:`~shapely.BaseGeometry` as well as classes inheriting directly from it
+are provided by *Shapely*.
+It shows that all geometric objects of *Pyrate* inherit from :class:`~pyrate.plan.geometry.geospatial.Geospatial`:
+
+.. inheritance-diagram::
+ pyrate.plan.geometry.location.CartesianLocation
+ pyrate.plan.geometry.location.PolarLocation
+ pyrate.plan.geometry.polygon.CartesianPolygon
+ pyrate.plan.geometry.polygon.PolarPolygon
+ pyrate.plan.geometry.route.CartesianRoute
+ pyrate.plan.geometry.route.PolarRoute
+ :parts: 1
+ :top-classes: pyrate.plan.geometry.geospatial.Geospatial
+
+See :ref:`geometry-plotting` on how to easily plot geometries like points, polygons and routes.
+See :ref:`design-decisions-local-projections` on how the implementation of the projections
+between local and global coordinate systems has developed.
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ geospatial
+ location
+ polygon
+ route
+ helpers
+
+
+.. _geometry-plotting:
+
+Geometry Plotting
+-----------------
+
+There are many possibilities to visualize geometries with Python. For simplicity, we chose to not provide
+direct visualization methods, but support `GeoJSON <https://geojson.org/>`_. This format can be read very
+easily by many programs, including the website `geojson.io <https://geojson.io/>`_. You can simply
+copy-paste it into there or use the convenient command-line tool `geojsonio <https://github.com/mapbox/geojsonio-cli>`_.
+However, when objects become very large, other tools like `QGIS Desktop <https://www.qgis.org/>`_ may be more appropriate.
+The code below gives an example of how the
+*GeoJSON* representation can be obtained. After that, a few interesting references are given.
+Also, see :meth:`~pyrate.plan.geometry.geospatial.Geospatial.to_geo_json`.
+
+.. code-block:: python
+
+ from geojson import dumps, Feature
+ from pyrate.plan.geometry import PolarPolygon
+
+ # create a geometry object
+ some_geometry = PolarPolygon(...)
+
+ # then simply dump it to standard out
+ print(some_geometry.to_geo_json())
+
+ # or more general
+ print(dumps(Feature(geometry=some_geometry)))
+
+.. code-block:: bash
+
+ echo '{"type": "Point", "coordinates": [30, 10]}' | geojsonio
+ geojsonio some_geometry.json
+ # see https://github.com/mapbox/geojsonio-cli#examples for more examples
+
+This works for
+
+- :class:`~pyrate.plan.geometry.location.PolarLocation`,
+- :class:`~pyrate.plan.geometry.location.CartesianLocation`,
+- :class:`~pyrate.plan.geometry.polygon.PolarPolygon`,
+- :class:`~pyrate.plan.geometry.polygon.CartesianPolygon`,
+- :class:`~pyrate.plan.geometry.route.PolarRoute`,
+- :class:`~pyrate.plan.geometry.route.CartesianRoute`,
+- and any object that provides a ``__geo_interface__`` attribute/property.
+
+Further References
+~~~~~~~~~~~~~~~~~~
+
+- The original `Gitlab issue #54 `_ that collected initial ideas
+- `Interaktive Visualisierung von Geodaten in Jupyter Notebooks (Lightning Talk, FOSSGIS 2017) `_
+- Examples in the *Folium* library: `Quickstart - GeoJSON/TopoJSON Overlays `_
+
+
+.. _design-decisions-local-projections:
+
+Design decisions on the local projections
+-----------------------------------------
+This section documents our arguments for and against `Universal Transverse Mercator (UTM) <https://en.wikipedia.org/wiki/Universal_Transverse_Mercator_coordinate_system>`_ vs
+`local tangent plane coordinates <https://en.wikipedia.org/wiki/Local_tangent_plane_coordinates>`_ based on freely chosen reference points,
+as means of `horizontal position representation `_.
+A third approach would be to provide both.
+This discussion was copied and adapted from `issue #40 `_, where these arguments were initially collected.
+
+Overview of the arguments
+~~~~~~~~~~~~~~~~~~~~~~~~~
+Firstly, the three approaches are presented with the arguments for using them.
+
+Pro UTM
+.......
+1. A worldwide standard for navigation
+2. Data easy to import/export to other teams/projects (can be important e.g. for the WRSC competition). However, WGS84 coordinates will probably suffice.
+3. UTM locations can be pre-computed while arbitrary projections constantly change. Example from DB (pseudo-SQL): ``SELECT obstacle WHERE obstacle.zone IN {boat_zone, boat_zone + 1, boat_zone - 1, ...}``. Compared to *local* where the PolarLocation is transformed into local coordinates, distance computed and then decided whether to use or drop.
+4. UTM errors are guaranteed to be +-1m per 1km within a single zone, see for reference e.g. `here `_.
+5. UTM makes tiling the map easy. This might help to choose which obstacles to include while planning. However, a single UTM zone is also quite large.
+6. Slicing can be done once, offline.
+
+Pro local
+.........
+1. Better precision around boat position/obstacles close to the boat. If we also use the Transverse Mercator projection like UTM, we might even get better resolution. However, this might come at some increased computational cost since it cannot be easily done offline/beforehand.
+2. No tiling needed, select obstacles that are within a range of the boat, and clip the non-relevant parts (already implemented in the *spatialite* database with polar coordinates)
+3. No special cases due to UTM zones not being entirely uniform
+4. Could, in theory, allow for different projections for different needs (preserve the visual shape, preserve the area, etc.), though it might be too complicated and not worth the effort
+5. Works exactly the same, no matter where on the globe something is
+
+Pro for both and therefore neutral
+..................................
+1. Tested and documented packages for UTM (`utm <https://pypi.org/project/utm/>`_) and for arbitrary local transformations exist (`pyproj <https://pyproj4.github.io/pyproj/>`_)
+2. Slicing Polygons provided by shapely (either ``island.intersect(Point(x, y).buffer(radius))`` or ``island.intersect(Polygon([(0, 0), (max, 0), (max, max), (0, max)]))``)
+3. Both approaches would provide sufficiently precise approximations of the earth surface for our needs
+
+About implementing both
+.......................
+1. Would have the best of both worlds
+2. How would this complicate the implementation? (Too much, and it would spark discussions and incompatibilities.)
+
+Decision
+~~~~~~~~
+In the end, the main argument against UTM zones was the handling of the cases near zone borders and that there are some irregularities in the UTM zones that might complicate things.
+However, using local projections was feared to have a huge performance impact on embedded computers, so we performed a benchmark of a basic implementation.
+The results of the benchmark scenario below confirmed that using local projections is feasible on our embedded computers.
+Thus, the local transformation approach was selected.
+
+.. _benchmarking-db-and-local-projections:
+
+Benchmarking results of the custom local transformation approach
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+The performance was initially tested on a *Raspberry Pi 4B* with 2GB RAM and a *SanDisk Extreme 64GB (Class 3)*.
+A *Raspberry Pi* was chosen as it will likely be the actual computer being used in many challenges.
+The OS was *Raspberry Pi OS (32-bit)* with version *May 2020* and the variant "with desktop and recommended software".
+The overall performance was concluded to be very acceptable.
+
+The benchmarking was performed with the chart database from the `data repository `__
+on `commit 0abe9269026de87b7265f664d10a0b9599314313 `__.
+It contained the entirety of North America as was available from the (US) NOAA.
+The benchmark script (and *Pyrate* code) was from `commit 0ae4c33e361369321b10d677067deeb07ed27493 `__.
+See :ref:`script-benchmark_db_and_projections` for details on what is actually tested.
+
+The following tests were carried out on an Intel(R) Core(TM) i5-6300U with a SATA SSD and plenty of RAM.
+
+Results with realistic parameters: radius 100km
+...............................................
+
+.. code-block:: bash
+
+ user@ubuntu:~/sailing/pyrate $ python scripts/benchmark_db_and_projections.py ../data/charts/noaa_vector/all_combined_simplified_25m.sqlite --iterations 10 --radius 100
+ Information on the setting:
+ number of rows/polygons in database: 648828
+ sum of vertices of all rows/polygons of in database: 13727653
+ extracted number of polygons: 6266
+ extracted total number of vertices: 120179
+
+ Executed "query_database" 10 times:
+ average: 2.977373 seconds
+ std dev: 0.042802 seconds
+ variance: 0.001832 seconds
+
+ Executed "project_to_cartesian_and_back" 10 times:
+ average: 1.465923 seconds
+ std dev: 0.033850 seconds
+ variance: 0.001146 seconds
+
+Results with stress testing parameters: radius 999km
+....................................................
+
+.. code-block:: bash
+
+ user@ubuntu:~/sailing/pyrate $ python scripts/benchmark_db_and_projections.py ../data/charts/noaa_vector/all_combined_simplified_25m.sqlite --iterations 10 --radius 999
+ Information on the setting:
+ number of rows/polygons in database: 648828
+ sum of vertices of all rows/polygons of in database: 13727653
+ extracted number of polygons: 90539
+ extracted total number of vertices: 2131078
+
+ Executed "query_database" 10 times:
+ average: 34.120787 seconds
+ std dev: 0.499919 seconds
+ variance: 0.249919 seconds
+
+ Executed "project_to_cartesian_and_back" 10 times:
+ average: 23.383787 seconds
+ std dev: 0.224816 seconds
+ variance: 0.050542 seconds
+
+Notes and conclusions
+.....................
+
+Comparing the results with radius 100km and 999km, we can see that ``_project_to_cartesian_and_back()`` scales roughly linearly, as expected: about 12 μs/vertex (100km) vs. 11 μs/vertex (999km).
+The ``_query_database()`` benchmark scales even better (sub-linear in the number of vertices): about 24 μs/vertex (100km) vs. 16 μs/vertex (999km).
+Also note that having a lot of polygons outside of the relevant area seems to be unproblematic.
+Here, the spatial index really shines, as ``_query_database()`` took *a lot* longer before its introduction.
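+
+The per-vertex figures follow directly from the average runtimes and the extracted vertex counts reported above:
+
+.. code-block:: python
+
+    # per-vertex cost = average runtime / number of extracted vertices
+    for label, seconds, vertices in [
+        ("project, 100 km", 1.465923, 120_179),
+        ("project, 999 km", 23.383787, 2_131_078),
+        ("query, 100 km", 2.977373, 120_179),
+        ("query, 999 km", 34.120787, 2_131_078),
+    ]:
+        print(f"{label}: {seconds / vertices * 1e6:.1f} µs/vertex")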
+
+About 66% of the time when projecting is spent reassembling the polygon after it was converted, so that's probably something we can improve if we eventually need to.
+Also, one could reduce the fidelity of the features by using stronger simplification or reduce the query radius.
+
+Memory seems to not be a problem either. No precise measurements were made though.
diff --git a/pyrate/doc/source/plan/geometry/geospatial.rst b/pyrate/doc/source/plan/geometry/geospatial.rst
new file mode 100644
index 0000000..012ce48
--- /dev/null
+++ b/pyrate/doc/source/plan/geometry/geospatial.rst
@@ -0,0 +1,7 @@
+Geospatial
+----------
+
+.. automodule:: pyrate.plan.geometry.geospatial
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/plan/geometry/helpers.rst b/pyrate/doc/source/plan/geometry/helpers.rst
new file mode 100644
index 0000000..f0eafe0
--- /dev/null
+++ b/pyrate/doc/source/plan/geometry/helpers.rst
@@ -0,0 +1,7 @@
+Geometry Helpers
+----------------
+
+.. automodule:: pyrate.plan.geometry.helpers
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/plan/geometry/location.rst b/pyrate/doc/source/plan/geometry/location.rst
new file mode 100644
index 0000000..a822787
--- /dev/null
+++ b/pyrate/doc/source/plan/geometry/location.rst
@@ -0,0 +1,7 @@
+Location
+--------
+
+.. automodule:: pyrate.plan.geometry.location
+ :members:
+ :undoc-members:
+ :show-inheritance:
\ No newline at end of file
diff --git a/pyrate/doc/source/plan/geometry/polygon.rst b/pyrate/doc/source/plan/geometry/polygon.rst
new file mode 100644
index 0000000..eafd6b2
--- /dev/null
+++ b/pyrate/doc/source/plan/geometry/polygon.rst
@@ -0,0 +1,7 @@
+Polygon
+-------
+
+.. automodule:: pyrate.plan.geometry.polygon
+ :members:
+ :undoc-members:
+ :show-inheritance:
\ No newline at end of file
diff --git a/pyrate/doc/source/plan/geometry/route.rst b/pyrate/doc/source/plan/geometry/route.rst
new file mode 100644
index 0000000..9e7303f
--- /dev/null
+++ b/pyrate/doc/source/plan/geometry/route.rst
@@ -0,0 +1,7 @@
+Route
+-----
+
+.. automodule:: pyrate.plan.geometry.route
+ :members:
+ :undoc-members:
+ :show-inheritance:
\ No newline at end of file
diff --git a/pyrate/doc/source/plan/graph/generate.rst b/pyrate/doc/source/plan/graph/generate.rst
new file mode 100644
index 0000000..ea1ef09
--- /dev/null
+++ b/pyrate/doc/source/plan/graph/generate.rst
@@ -0,0 +1,7 @@
+Earth Graph Generation
+----------------------
+
+.. automodule:: pyrate.plan.graph.generate
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/plan/graph/geo_graph.rst b/pyrate/doc/source/plan/graph/geo_graph.rst
new file mode 100644
index 0000000..753ab91
--- /dev/null
+++ b/pyrate/doc/source/plan/graph/geo_graph.rst
@@ -0,0 +1,7 @@
+Geo-referenced Graph Implementation
+-----------------------------------
+
+.. automodule:: pyrate.plan.graph.geo_graph
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/plan/graph/graph.rst b/pyrate/doc/source/plan/graph/graph.rst
new file mode 100644
index 0000000..a2491b5
--- /dev/null
+++ b/pyrate/doc/source/plan/graph/graph.rst
@@ -0,0 +1,7 @@
+Graph Implementation
+--------------------
+
+.. automodule:: pyrate.plan.graph.graph
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/plan/graph/graph_overview.rst b/pyrate/doc/source/plan/graph/graph_overview.rst
new file mode 100644
index 0000000..4ebee6d
--- /dev/null
+++ b/pyrate/doc/source/plan/graph/graph_overview.rst
@@ -0,0 +1,16 @@
+Graph
+=====
+
+.. automodule:: pyrate.plan.graph
+
+.. inheritance-diagram:: pyrate.plan.graph.graph.NavigationGraph pyrate.plan.graph.geo_graph.GeoNavigationGraph
+ :parts: 1
+ :top-classes: pyrate.plan.graph.graph.NavigationGraph
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ graph
+ geo_graph
+ generate
diff --git a/pyrate/doc/source/plan/graph/vertices_area_of_influence.png b/pyrate/doc/source/plan/graph/vertices_area_of_influence.png
new file mode 100644
index 0000000..31140d3
Binary files /dev/null and b/pyrate/doc/source/plan/graph/vertices_area_of_influence.png differ
diff --git a/pyrate/doc/source/plan/graph/vertices_distribution_mercator.png b/pyrate/doc/source/plan/graph/vertices_distribution_mercator.png
new file mode 100644
index 0000000..79c1998
Binary files /dev/null and b/pyrate/doc/source/plan/graph/vertices_distribution_mercator.png differ
diff --git a/pyrate/doc/source/plan/plan.rst b/pyrate/doc/source/plan/plan.rst
new file mode 100644
index 0000000..3029be3
--- /dev/null
+++ b/pyrate/doc/source/plan/plan.rst
@@ -0,0 +1,11 @@
+Plan
+----
+
+.. automodule:: pyrate.plan
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Subpackages:
+
+ geometry/geometry
+ graph/graph_overview
diff --git a/pyrate/doc/source/quality_assurance.rst b/pyrate/doc/source/quality_assurance.rst
new file mode 100644
index 0000000..24eb54e
--- /dev/null
+++ b/pyrate/doc/source/quality_assurance.rst
@@ -0,0 +1,62 @@
+Quality Assurance
+=================
+
+This section shows you how to test the software locally on your machine and ensure that your contributions follow our common coding standards.
+Note that with every contribution pushed to the Gitlab server, the routines that are described here are automatically executed for you.
+The results are then visible in `the repository's CI/CD section `__ and in the specific merge request view.
+`The Software section of the Wiki `__ also holds further information on our `styleguide `__, `documentation `__ and `testing `__.
+
+Coding Style
+------------
+
+A common style when collaborating on a big project is crucial to keep everything maintainable and easy to understand in the long run.
+To make sure you are following the rules, we employ a number of programs that help us to analyse the source automatically.
+Since Python is an interpreted language, we do not have a compiler that ensures that everything we write will make sense at runtime.
+Linting and enforcing coding conventions therefore play a very important role in keeping our software reliable.
+To get reports on the code you have written, you can use the following commands in the package's root directory.
+
+
+.. code-block:: bash
+
+ flake8
+ mypy pyrate tests scripts
+ pylint -j 0 pyrate tests scripts
+
+
+Testing
+-------
+
+Tests can be run by simply executing :code:`pytest` within the repository's root directory.
+This will also result in coverage statistics being printed, which are a good indicator whether your tests are covering all the lines of your code.
+Again, since Python is not compiled, this can be very useful.
+Nevertheless, make sure that your tests ideally go beyond mere execution of code and assert its correctness.
+Besides statement coverage, we also collect branch coverage (see `here `__).
+We require that all contributions achieve 100% statement coverage and 90% branch coverage.
+
+Two parts of the library are special when it comes to testing:
+Both the tests around :code:`pyrate.common.charts.SpatialiteDatabase` and around :code:`pyrate.common.charts.S57ChartHandler` are only exercised if the required dependencies are installed or if running on a CI server.
+If the dependencies cannot be found, the tests will be marked as skipped.
+This allows for easier development, as fewer dependencies are actually required for running the tests, while the CI server will still check these modules for you.
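+
+For illustration, this is roughly how such a dependency-gated test can skip itself instead of failing
+(a minimal sketch of the mechanism, not the exact code used in our test suite):
+
+.. code-block:: python
+
+ import pytest
+
+ def test_spatialite_roundtrip() -> None:
+     # Resolve the optional dependency; pytest marks the test as skipped if it is missing.
+     # "sqlite3" only stands in for the real optional dependencies here.
+     sqlite3 = pytest.importorskip("sqlite3")
+     assert sqlite3.sqlite_version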
+
+
+Hypothesis Testing and Speed
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Many tests use the `Hypothesis property based testing framework `__.
+Everyone is encouraged to do so too, and there are already some handy example generators in ``tests/strategies/``.
+The settings (along with some more general setup) are declared in ``tests/__init__.py``.
+
+However, a few tests take a long time to run, for example those that use the very slow Cartesian route example generation.
+Therefore, it might be necessary to reduce the number of generated examples, as is done in ``tests/plan/geometry/primitives/test_polygons.py`` using the ``@settings()`` decorator.
+Timings for (all) individual tests can be obtained by running pytest with ``--durations=0`` (see `the pytest docs `__).
+You may want to temporarily add this argument to the ``addopts`` option in the section ``[tool.pytest.ini_options]`` in ``pyproject.toml``.
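+
+For illustration, limiting a slow property-based test could look like this (only a sketch; the actual
+tests use the project's own example generators from ``tests/strategies/`` and tuned limits):
+
+.. code-block:: python
+
+ from hypothesis import given, settings, strategies as st
+
+ @settings(max_examples=25, deadline=None)  # fewer generated examples, no per-example time limit
+ @given(st.floats(min_value=-90.0, max_value=90.0, allow_nan=False))
+ def test_latitude_is_in_valid_range(latitude: float) -> None:
+     assert -90.0 <= latitude <= 90.0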
+
+
+Downstream CI Pipeline Triggers
+-------------------------------
+
+Other projects like *ros-nodes* depend on *Pyrate*: they are *downstream* projects, as changes in *Pyrate* flow down the "stream of dependencies" to them.
+To ensure that changes here in the upstream *Pyrate* project do not break such downstream projects (or just to remind us to fix stuff over there too),
+the pipeline of this repository triggers those of the downstream projects.
+This is configured in a special ``Deploy``-stage job called ``Trigger Downstream Pipelines`` at the bottom of the ``.gitlab-ci.yml`` file (in this upstream project!).
+The capabilities are documented in `the official GitLab docs on "Multi-project pipelines" `__.
diff --git a/pyrate/doc/source/scripts/reference.rst b/pyrate/doc/source/scripts/reference.rst
new file mode 100644
index 0000000..75f8877
--- /dev/null
+++ b/pyrate/doc/source/scripts/reference.rst
@@ -0,0 +1,52 @@
+.. _scripts-reference:
+
+API Reference
+-------------
+
+
+``s57_charts_to_db.py``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: scripts.s57_charts_to_db
+ :members:
+ :undoc-members:
+
+
+``benchmark_db_and_projections.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: scripts.benchmark_db_and_projections
+ :members:
+ :undoc-members:
+
+
+``create_earth_graph.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: scripts.create_earth_graph
+ :members:
+ :undoc-members:
+
+
+``earth_graph_frequency_statistics.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: scripts.earth_graph_frequency_statistics
+ :members:
+ :undoc-members:
+
+
+``visualize_earth_graph.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: scripts.visualize_earth_graph
+ :members:
+ :undoc-members:
+
+
+``benchmark_graph_neighbor_search.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: scripts.benchmark_graph_neighbor_search
+ :members:
+ :undoc-members:
diff --git a/pyrate/doc/source/scripts/scripts.rst b/pyrate/doc/source/scripts/scripts.rst
new file mode 100644
index 0000000..cdfb6ff
--- /dev/null
+++ b/pyrate/doc/source/scripts/scripts.rst
@@ -0,0 +1,81 @@
+Scripts
+=======
+
+*Pyrate* contains a few scripts in the directory ``pyrate/scripts/``.
+They are mainly meant for applying the algorithms to real-world data and also serve as examples for the library code.
+
+Requirements
+------------
+
+Most script documentation assumes the typical Sailing Team directory layout as described in
+`the installation guide `_.
+To execute these programs, you need the datasets that you want to work with (if any).
+Therefore, you will probably want to download the data repository as described
+`here `_,
+if you haven't already.
+
+
+Usage
+-----
+
+This section just lists the parameters of the scripts. See :ref:`scripts-reference` for more complete explanations.
+
+
+.. _script-s57_charts_to_db:
+
+``s57_charts_to_db.py``
+~~~~~~~~~~~~~~~~~~~~~~~
+
+.. command-output:: ../../scripts/s57_charts_to_db.py --help
+
+
+.. _script-benchmark_db_and_projections:
+
+``benchmark_db_and_projections.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. command-output:: ../../scripts/benchmark_db_and_projections.py --help
+
+
+.. _script-create_earth_graph:
+
+``create_earth_graph.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. command-output:: ../../scripts/create_earth_graph.py --help
+
+
+.. _script-earth_graph_frequency_statistics:
+
+``earth_graph_frequency_statistics.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. command-output:: ../../scripts/earth_graph_frequency_statistics.py --help
+
+
+.. _script-visualize_earth_graph:
+
+``visualize_earth_graph.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. command-output:: ../../scripts/visualize_earth_graph.py --help
+
+
+.. _script-benchmark_graph_neighbor_search:
+
+``benchmark_graph_neighbor_search.py``
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. command-output:: ../../scripts/benchmark_graph_neighbor_search.py --help
+
+
+Reference
+---------
+
+The section above only lists the usage of the scripts.
+The complete API reference can be found below:
+
+.. toctree::
+ :maxdepth: 2
+
+ reference
diff --git a/pyrate/doc/source/sense/filters/extended.rst b/pyrate/doc/source/sense/filters/extended.rst
new file mode 100644
index 0000000..2ca3620
--- /dev/null
+++ b/pyrate/doc/source/sense/filters/extended.rst
@@ -0,0 +1,7 @@
+Extended Kalman Filter
+----------------------
+
+.. automodule:: pyrate.sense.filters.extended
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/sense/filters/extended_gmphd.rst b/pyrate/doc/source/sense/filters/extended_gmphd.rst
new file mode 100644
index 0000000..71f0d64
--- /dev/null
+++ b/pyrate/doc/source/sense/filters/extended_gmphd.rst
@@ -0,0 +1,7 @@
+Extended Gaussian Mixture PHD Filter
+------------------------------------
+
+.. automodule:: pyrate.sense.filters.extended_gmphd
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/sense/filters/filters.rst b/pyrate/doc/source/sense/filters/filters.rst
new file mode 100644
index 0000000..2913efa
--- /dev/null
+++ b/pyrate/doc/source/sense/filters/filters.rst
@@ -0,0 +1,16 @@
+Filters
+-------
+
+The filters package provides single- and multi-target tracking capabilities based on Bayesian methods.
+A prime example of such a filter is the so-called Kalman filter and its derivatives for nonlinear estimation.
+Additionally, the Gaussian mixture probability hypothesis density (PHD) filter is provided.
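+
+To illustrate the underlying idea, a single linear Kalman predict/update cycle can be written down in a
+few lines of NumPy (this only sketches the mathematics and is not the interface of the modules listed below):
+
+.. code-block:: python
+
+ import numpy as np
+
+ A = np.array([[1.0, 1.0], [0.0, 1.0]])  # state transition (position, velocity)
+ H = np.array([[1.0, 0.0]])              # we only measure the position
+ Q = 0.01 * np.eye(2)                    # process noise covariance
+ R = np.array([[0.5]])                   # measurement noise covariance
+
+ x, P = np.array([[0.0], [1.0]]), np.eye(2)  # initial state estimate and covariance
+
+ # Predict one step ahead
+ x, P = A @ x, A @ P @ A.T + Q
+
+ # Update with a new measurement z
+ z = np.array([[1.2]])
+ S = H @ P @ H.T + R             # innovation covariance
+ K = P @ H.T @ np.linalg.inv(S)  # Kalman gain
+ x = x + K @ (z - H @ x)
+ P = (np.eye(2) - K @ H) @ P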
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ kalman
+ extended
+ unscented
+ gmphd
+ extended_gmphd
diff --git a/pyrate/doc/source/sense/filters/gmphd.rst b/pyrate/doc/source/sense/filters/gmphd.rst
new file mode 100644
index 0000000..919e4df
--- /dev/null
+++ b/pyrate/doc/source/sense/filters/gmphd.rst
@@ -0,0 +1,7 @@
+Gaussian Mixture PHD Filter
+---------------------------
+
+.. automodule:: pyrate.sense.filters.gmphd
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/sense/filters/kalman.rst b/pyrate/doc/source/sense/filters/kalman.rst
new file mode 100644
index 0000000..5e50b39
--- /dev/null
+++ b/pyrate/doc/source/sense/filters/kalman.rst
@@ -0,0 +1,7 @@
+Kalman Filter
+-------------
+
+.. automodule:: pyrate.sense.filters.kalman
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/sense/filters/unscented.rst b/pyrate/doc/source/sense/filters/unscented.rst
new file mode 100644
index 0000000..f2d864a
--- /dev/null
+++ b/pyrate/doc/source/sense/filters/unscented.rst
@@ -0,0 +1,7 @@
+Unscented Kalman Filter
+-----------------------
+
+.. automodule:: pyrate.sense.filters.unscented
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/sense/sense.rst b/pyrate/doc/source/sense/sense.rst
new file mode 100644
index 0000000..38cf062
--- /dev/null
+++ b/pyrate/doc/source/sense/sense.rst
@@ -0,0 +1,9 @@
+Sense
+-----
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Subpackages:
+
+ filters/filters
+ smoothers/smoothers
diff --git a/pyrate/doc/source/sense/smoothers/extended.rst b/pyrate/doc/source/sense/smoothers/extended.rst
new file mode 100644
index 0000000..adb8413
--- /dev/null
+++ b/pyrate/doc/source/sense/smoothers/extended.rst
@@ -0,0 +1,7 @@
+Extended RTS Smoother
+---------------------
+
+.. automodule:: pyrate.sense.smoothers.extended
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/sense/smoothers/rts.rst b/pyrate/doc/source/sense/smoothers/rts.rst
new file mode 100644
index 0000000..b23f80a
--- /dev/null
+++ b/pyrate/doc/source/sense/smoothers/rts.rst
@@ -0,0 +1,7 @@
+RTS Smoother
+------------
+
+.. automodule:: pyrate.sense.smoothers.rts
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/doc/source/sense/smoothers/smoothers.rst b/pyrate/doc/source/sense/smoothers/smoothers.rst
new file mode 100644
index 0000000..561c19a
--- /dev/null
+++ b/pyrate/doc/source/sense/smoothers/smoothers.rst
@@ -0,0 +1,13 @@
+Smoothers
+---------
+
+Smoothing is the state estimation problem in which not only previous measurements but also future observations contribute to a single estimate.
+A popular example is the so-called Rauch-Tung-Striebel (RTS) smoother, which is based on the Kalman filter and its derivatives for nonlinear estimation.
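+
+To illustrate the principle, a single backward RTS correction step can be sketched in NumPy as follows
+(this only shows the mathematics and is not the interface of the modules listed below):
+
+.. code-block:: python
+
+ import numpy as np
+
+ A = np.array([[1.0, 1.0], [0.0, 1.0]])  # state transition
+ Q = 0.01 * np.eye(2)                    # process noise covariance
+
+ # Filtered estimate at time k and the resulting prediction for time k + 1
+ x_filt, P_filt = np.array([[0.0], [1.0]]), np.eye(2)
+ x_pred, P_pred = A @ x_filt, A @ P_filt @ A.T + Q
+
+ # Already smoothed estimate at time k + 1 (it incorporates the later measurements)
+ x_next, P_next = np.array([[1.1], [0.9]]), 0.8 * np.eye(2)
+
+ G = P_filt @ A.T @ np.linalg.inv(P_pred)         # smoother gain
+ x_smooth = x_filt + G @ (x_next - x_pred)        # smoothed state at time k
+ P_smooth = P_filt + G @ (P_next - P_pred) @ G.T  # smoothed covariance at time k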
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Modules:
+
+ rts
+ extended
+ unscented
diff --git a/pyrate/doc/source/sense/smoothers/unscented.rst b/pyrate/doc/source/sense/smoothers/unscented.rst
new file mode 100644
index 0000000..c4ab302
--- /dev/null
+++ b/pyrate/doc/source/sense/smoothers/unscented.rst
@@ -0,0 +1,7 @@
+Unscented RTS Smoother
+----------------------
+
+.. automodule:: pyrate.sense.smoothers.unscented
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/pyrate/experiments.py b/pyrate/experiments.py
new file mode 100644
index 0000000..61859ce
--- /dev/null
+++ b/pyrate/experiments.py
@@ -0,0 +1,93 @@
+from typing import Dict, List, Optional, Tuple
+
+import numpy as np
+
+from shapely.geometry import Polygon, Point
+
+from pyrate.plan.geometry.polygon import CartesianPolygon
+from pyrate.plan.nearplanner.cost_functions import *
+from pyrate.plan.nearplanner.evaluated_timing_frame import EvaluatedTimingFrame
+from pyrate.plan.nearplanner.holders import EstimationParameters
+from pyrate.plan.nearplanner.holders import OptimizationParameters
+from pyrate.plan.nearplanner.obstacle import Obstacle
+from pyrate.plan.nearplanner.optimizer import Optimizer
+
+estimation_param = EstimationParameters()
+optimization_param = OptimizationParameters(estimation_param)
+
+optimization_param.verbose = False
+
+
+def create_context(
+ position: Point,
+ goal: Point,
+ wind: Tuple[float, float],
+ obstacles: Dict[str, Polygon],
+ costfunction_obstacles_width: float = 40,
+ costfunction_obstacles_scale: float = 0.02,
+) -> Optimizer:
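+ """Create an :class:`Optimizer` populated with the given position, goal, wind and obstacles."""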
+ fct = CostFunctionExp(scale=costfunction_obstacles_scale, safety_dist=costfunction_obstacles_width)
+
+ cartesian_polys: Dict[str, CartesianPolygon] = {
+ i: CartesianPolygon.from_shapely(k) for i, k in obstacles.items()
+ }
+
+ obstacle_dict: Dict[str, Obstacle] = {
+ i: Obstacle(poly, np.array([0, 0]), cost_function=fct) for i, poly in cartesian_polys.items()
+ }
+
+ context = Optimizer(wind_information=(0, 0), obstacles={}, position=Point(0, 0))
+
+ context.wind_speed, context.wind_angle = wind[0], np.deg2rad(wind[1])
+
+ context.position = position
+ context.goal = goal
+
+ context.on_reset_obstacles(obstacle_dict)
+
+ return context
+
+
+# from south to north -> wind (25 m/s, 0)
+def generate_route(
+ position: Point,
+ goal: Point,
+ wind: Tuple[float, float],
+ obstacles: Dict[str, Polygon],
+ costfunction_obstacles_width: float = 40,
+ costfunction_obstacles_scale: float = 0.02,
+) -> Tuple[Optional[EvaluatedTimingFrame], Optional[List[EvaluatedTimingFrame]]]:
+ """Function that generates a route in the first of the tuple return value.
+ Second value contains a list of timing frames created during optimization.
+ """
+
+ context = create_context(
+ position=position,
+ goal=goal,
+ wind=wind,
+ obstacles=obstacles,
+ costfunction_obstacles_width=costfunction_obstacles_width,
+ costfunction_obstacles_scale=costfunction_obstacles_scale,
+ )
+
+ return context.optimize(goal=goal, optimization_parameters=optimization_param)
+
+
+# poly = Polygon([(-2, 1), (-0.1, 3), (3, 3), (2,1)])
+# poly = Polygon([(-80, 10), (-50, 30), (30, 30), (20,20), (0,20)])
+
+# poly_ = Polygon([(1000,1000),(1010,1010),(1000,1020)])
+# poly2 = Polygon([(-50, 70), (50, 70), (50, 80), (-50,80)])
+
+# ob_set = {"0": poly2, "1": poly_, "2": poly}
+# ob_set_2 = {"0": poly_}
+# print("START")
+
+# route = [Point(0,0), Point(0,1), Point(0,2), Point(2,2), Point(2,1), Point(-2,2), Point(0,10)]
+# _ = np.array([shapely_point_to_ndarray(p) for p in route])
+# print(_)
+# frame = TimingFrame(CartesianRoute.from_numpy(_))
+# print(frame)
+# frame2 = frame.remove_single_cycles()
+
+# print(frame2)
+# print("_"*10)
+# print(frame.remove_single_cycles())
diff --git a/pyrate/pyproject.toml b/pyrate/pyproject.toml
new file mode 100644
index 0000000..2fc999f
--- /dev/null
+++ b/pyrate/pyproject.toml
@@ -0,0 +1,549 @@
+# see MR !31 for why this only uses the legacy version
+[build-system]
+requires = ["setuptools>=40.8.0", "wheel"]
+build-backend = "setuptools.build_meta:__legacy__"
+
+
+[tool.black]
+line-length = 110
+target-version = [
+ "py310",
+]
+
+
+[tool.pytest.ini_options]
+addopts = "-v --color=yes --cov=pyrate --doctest-modules"
+junit_family = "xunit2"
+testpaths = [
+ # for the doctests:
+ "pyrate",
+ # for the actual tests:
+ "tests"
+]
+doctest_optionflags = [
+ "IGNORE_EXCEPTION_DETAIL",
+ "DONT_ACCEPT_TRUE_FOR_1"
+]
+filterwarnings = [
+ "error",
+ "error::DeprecationWarning",
+ "error::PendingDeprecationWarning",
+ "ignore:The height, width, and precision:rasterio.errors.RasterioDeprecationWarning", # See https://github.com/rasterio/rasterio/issues/2466
+]
+
+
+[tool.coverage.run]
+concurrency = ["multiprocessing"]
+branch = true
+
+[tool.coverage.report]
+fail_under = 95.00
+precision = 2
+show_missing = true
+exclude_lines = [
+ # Regexes for lines to exclude from consideration
+
+ # Have to re-enable the standard pragma
+ "pragma: no cover",
+
+ # Don't complain about missing debug-only code:
+ "def __repr__",
+
+ # Don't complain if tests don't hit defensive assertion code:
+ "raise AssertionError",
+ "raise NotImplementedError",
+
+ # Don't complain if non-runnable code isn't run:
+ "if __name__ == .__main__.:",
+
+ # It's okay to not cover unimplemented comparison methods
+ "return NotImplemented"
+]
+
+
+[tool.pylint.master]
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code
+extension-pkg-whitelist=["cv2"]
+
+# Add files or directories to the blacklist. They should be base names, not
+# paths.
+ignore=["CVS"]
+
+# Add files or directories matching the regex patterns to the blacklist. The
+# regex matches against base names, not paths.
+ignore-patterns=[]
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook=
+
+# Use multiple processes to speed up Pylint.
+jobs=0
+
+# List of plugins (as comma separated values of python modules names) to load,
+# usually to register additional checkers.
+load-plugins=[]
+
+# Pickle collected data for later comparisons.
+persistent="yes"
+
+# Specify a configuration file.
+#rcfile=
+
+# When enabled, pylint would attempt to guess common misconfiguration and emit
+# user-friendly hints instead of false-positive error messages
+suggestion-mode="yes"
+
+# Allow loading of arbitrary C extensions. Extensions are imported into the
+# active Python interpreter and may run arbitrary code.
+unsafe-load-any-extension="no"
+
+
+[tool.pylint.messages_control]
+
+# Only show warnings with the listed confidence levels. Leave empty to show
+# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
+confidence=[]
+
+# Disable the message, report, category or checker with the given id(s). You
+# can either give multiple identifiers separated by comma (,) or put this
+# option multiple times (only on the command line, not in the configuration
+# file where it should appear only once).You can also use "--disable=all" to
+# disable everything first and then reenable specific checks. For example, if
+# you want to run only the similarities checker, you can use "--disable=all
+# --enable=similarities". If you want to run only the classes checker, but have
+# no Warning level messages displayed, use"--disable=all --enable=classes
+# --disable=W"
+disable=[
+ "locally-disabled",
+ "locally-enabled",
+ "bad-continuation", # conflicts with flake8's rule W504 that we adopted over W503
+ "no-value-for-parameter", # the last one does not work with hypothesis, cf. https://github.com/HypothesisWorks/hypothesis/issues/1654
+ "import-error", # to lint code without the need to install all dependencies
+ "too-many-instance-attributes",
+ "duplicate-code", # too many false positives
+]
+
+# Enable the message, report, category or checker with the given id(s). You can
+# either give multiple identifier separated by comma (,) or put this option
+# multiple time (only on the command line, not in the configuration file where
+# it should appear only once). See also the "--disable" option for examples.
+enable= [
+ "c-extension-no-member",
+ "useless-suppression",
+]
+
+[tool.pylint.reports]
+
+# Python expression which should return a note less than 10 (10 is the highest
+# note). You have access to the variables errors warning, statement which
+# respectively contain the number of errors / warnings messages and the total
+# number of statements analyzed. This is used by the global evaluation report
+# (RP0004).
+evaluation="10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)"
+
+# Template used to display messages. This is a python new-style format string
+# used to format the message information. See doc for all details
+#msg-template=
+
+# Set the output format. Available formats are text, parseable, colorized, json
+# and msvs (visual studio).You can also give a reporter class, eg
+# mypackage.mymodule.MyReporterClass.
+output-format="colorized"
+
+# Tells whether to display a full report or only the messages
+reports="no"
+
+# Activate the evaluation score.
+score="yes"
+
+
+[tool.pylint.refactoring]
+
+# Maximum number of nested blocks for function / method body
+max-nested-blocks=5
+
+# Complete name of functions that never returns. When checking for
+# inconsistent-return-statements if a never returning function is called then
+# it will be considered as an explicit return statement and no message will be
+# printed.
+never-returning-functions=["optparse.Values", "sys.exit", "exit"]
+
+
+[tool.pylint.variables]
+
+# List of additional names supposed to be defined in builtins. Remember that
+# you should avoid to define new builtins when possible.
+additional-builtins=[]
+
+# Tells whether unused global variables should be treated as a violation.
+allow-global-unused-variables="yes"
+
+# List of strings which can identify a callback function by name. A callback
+# name must start or end with one of those strings.
+callbacks=[]
+
+# A regular expression matching the name of dummy variables (i.e. expectedly
+# not used).
+dummy-variables-rgx="_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_"
+
+# Argument names that match this expression will be ignored. Default to name
+# with leading underscore
+ignored-argument-names="_.*|^ignored_|^unused_"
+
+# Tells whether we should check for unused import in __init__ files.
+init-import="no"
+
+# List of qualified module names which can have objects that can redefine
+# builtins.
+redefining-builtins-modules=[]
+
+
+[tool.pylint.logging]
+
+# Logging modules to check that the string format arguments are in logging
+# function parameter format
+logging-modules=["logging"]
+
+
+[tool.pylint.miscellaneous]
+
+# List of note tags to take in consideration, separated by a comma.
+notes=[
+ "FIXME",
+ "TODO"
+]
+
+[tool.pylint.format]
+
+# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
+expected-line-ending-format=""
+
+# Regexp for a line that is allowed to be longer than the limit.
+ignore-long-lines="^\\s*(# )??$"
+
+# Number of spaces of indent required inside a hanging or continued line.
+indent-after-paren=4
+
+# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
+# tab).
+indent-string=' '
+
+# Maximum number of characters on a single line.
+max-line-length=110
+
+# Maximum number of lines in a module
+max-module-lines=1000
+
+# List of optional constructs for which whitespace checking is disabled. `dict-
+# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
+# `trailing-comma` allows a space between comma and closing bracket: (a, ).
+# `empty-line` allows space-only lines.
+no-space-check=[
+ "trailing-comma",
+ "dict-separator"
+]
+
+# Allow the body of a class to be on the same line as the declaration if body
+# contains single statement.
+single-line-class-stmt="no"
+
+# Allow the body of an if to be on the same line as the test if there is no
+# else.
+single-line-if-stmt="no"
+
+
+[tool.pylint.basic]
+
+# Naming style matching correct argument names
+argument-naming-style="snake_case"
+
+# Regular expression matching correct argument names. Overrides argument-
+# naming-style
+#argument-rgx=
+
+# Naming style matching correct attribute names
+attr-naming-style="snake_case"
+
+# Regular expression matching correct attribute names. Overrides attr-naming-
+# style
+#attr-rgx=
+
+# Bad variable names which should always be refused, separated by a comma
+bad-names=[
+ "foo",
+ "bar",
+ "baz"
+]
+
+# Naming style matching correct class attribute names
+class-attribute-naming-style="any"
+
+# Regular expression matching correct class attribute names. Overrides class-
+# attribute-naming-style
+#class-attribute-rgx=
+
+# Naming style matching correct class names
+class-naming-style="PascalCase"
+
+# Regular expression matching correct class names. Overrides class-naming-style
+#class-rgx=
+
+# Naming style matching correct constant names
+const-naming-style="UPPER_CASE"
+
+# Regular expression matching correct constant names. Overrides const-naming-
+# style
+#const-rgx=
+
+# Minimum line length for functions/classes that require docstrings, shorter
+# ones are exempt.
+docstring-min-length=-1
+
+# Naming style matching correct function names
+function-naming-style="snake_case"
+
+# Regular expression matching correct function names. Overrides function-
+# naming-style
+#function-rgx=
+
+# Good variable names which should always be accepted, separated by a comma
+good-names=[
+ "i",
+ "j",
+ "k",
+ "ex",
+ "Run",
+ "_",
+ "up",
+ "x",
+ "y",
+ "z"
+]
+
+# Include a hint for the correct naming format with invalid-name
+include-naming-hint="yes"
+
+# Naming style matching correct inline iteration names
+inlinevar-naming-style="any"
+
+# Regular expression matching correct inline iteration names. Overrides
+# inlinevar-naming-style
+#inlinevar-rgx=
+
+# Naming style matching correct method names
+method-naming-style="snake_case"
+
+# Regular expression matching correct method names. Overrides method-naming-
+# style
+#method-rgx=
+
+# Naming style matching correct module names
+module-naming-style="snake_case"
+
+# Regular expression matching correct module names. Overrides module-naming-
+# style
+#module-rgx=
+
+# Colon-delimited sets of names that determine each other's naming style when
+# the name regexes allow several styles.
+name-group=[]
+
+# Regular expression which should only match function or class names that do
+# not require a docstring.
+no-docstring-rgx="^_"
+
+# List of decorators that produce properties, such as abc.abstractproperty. Add
+# to this list to register other decorators that produce valid properties.
+property-classes=["abc.abstractproperty"]
+
+# Naming style matching correct variable names
+variable-naming-style="snake_case"
+
+# Regular expression matching correct variable names. Overrides variable-
+# naming-style
+#variable-rgx=
+
+
+[tool.pylint.similarities]
+
+# Ignore comments when computing similarities.
+ignore-comments="yes"
+
+# Ignore docstrings when computing similarities.
+ignore-docstrings="yes"
+
+# Ignore imports when computing similarities.
+ignore-imports="no"
+
+# Minimum lines number of a similarity.
+min-similarity-lines=4
+
+
+[tool.pylint.typecheck]
+
+# List of decorators that produce context managers, such as
+# contextlib.contextmanager. Add to this list to register other decorators that
+# produce valid context managers.
+contextmanager-decorators=["contextlib.contextmanager"]
+
+# List of members which are set dynamically and missed by pylint inference
+# system, and so shouldn't trigger E1101 when accessed. Python regular
+# expressions are accepted.
+generated-members=[]
+
+# Tells whether missing members accessed in mixin class should be ignored. A
+# mixin class is detected if its name ends with "mixin" (case insensitive).
+ignore-mixin-members="yes"
+
+# This flag controls whether pylint should warn about no-member and similar
+# checks whenever an opaque object is returned when inferring. The inference
+# can return multiple potential results while evaluating a Python object, but
+# some branches might not be evaluated, which results in partial inference. In
+# that case, it might be useful to still emit no-member and other checks for
+# the rest of the inferred objects.
+ignore-on-opaque-inference="yes"
+
+# List of class names for which member attributes should not be checked (useful
+# for classes with dynamically set attributes). This supports the use of
+# qualified names.
+ignored-classes=["optparse.Values","thread._local","_thread._local"]
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis. It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=["cv2"]
+
+# Show a hint with possible names when a member name was not found. The aspect
+# of finding the hint is based on edit distance.
+missing-member-hint="yes"
+
+# The minimum edit distance a name should have in order to be considered a
+# similar match for a missing member name.
+missing-member-hint-distance=1
+
+# The total number of similar names that should be taken in consideration when
+# showing a hint for a missing member.
+missing-member-max-choices=1
+
+
+[tool.pylint.spelling]
+
+# Limits count of emitted suggestions for spelling mistakes
+max-spelling-suggestions=4
+
+# Spelling dictionary name. Available dictionaries: none. To make it working
+# install python-enchant package.
+spelling-dict=""
+
+# List of comma separated words that should not be checked.
+spelling-ignore-words=[]
+
+# A path to a file that contains private dictionary; one word per line.
+spelling-private-dict-file=""
+
+# Tells whether to store unknown words to indicated private dictionary in
+# --spelling-private-dict-file option instead of raising a message.
+spelling-store-unknown-words="no"
+
+
+[tool.pylint.design]
+
+# Maximum number of arguments for function / method
+max-args=5
+
+# Maximum number of attributes for a class (see R0902).
+max-attributes=7
+
+# Maximum number of boolean expressions in a if statement
+max-bool-expr=5
+
+# Maximum number of branch for function / method body
+max-branches=12
+
+# Maximum number of locals for function / method body
+max-locals=15
+
+# Maximum number of parents for a class (see R0901).
+max-parents=7
+
+# Maximum number of public methods for a class (see R0904).
+max-public-methods=20
+
+# Maximum number of return / yield for function / method body
+max-returns=6
+
+# Maximum number of statements in function / method body
+max-statements=50
+
+# Minimum number of public methods for a class (see R0903).
+min-public-methods=2
+
+
+[tool.pylint.classes]
+
+# List of method names used to declare (i.e. assign) instance attributes.
+defining-attr-methods= [
+ "__init__",
+ "__new__",
+ "setUp"
+]
+
+# List of member names, which should be excluded from the protected access
+# warning.
+exclude-protected= [
+ "_asdict",
+ "_fields",
+ "_replace",
+ "_source",
+ "_make"
+]
+
+# List of valid names for the first argument in a class method.
+valid-classmethod-first-arg="cls"
+
+# List of valid names for the first argument in a metaclass class method.
+valid-metaclass-classmethod-first-arg="mcs"
+
+
+[tool.pylint.imports]
+
+# Allow wildcard imports from modules that define __all__.
+allow-wildcard-with-all="no"
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks="no"
+
+# Deprecated modules which should not be used, separated by a comma
+deprecated-modules=[]
+
+# Create a graph of external dependencies in the given file (report RP0402 must
+# not be disabled)
+ext-import-graph=""
+
+# Create a graph of every (i.e. internal and external) dependencies in the
+# given file (report RP0402 must not be disabled)
+import-graph=""
+
+# Create a graph of internal dependencies in the given file (report RP0402 must
+# not be disabled)
+int-import-graph=""
+
+# Force import order to recognize a module as part of the standard
+# compatibility libraries.
+known-standard-library=[]
+
+# Force import order to recognize a module as part of a third party library.
+known-third-party=[]
+
+
+[tool.pylint.exception]
+
+# Exceptions that will emit a warning when being caught. Defaults to
+# "Exception"
+overgeneral-exceptions="Exception"
diff --git a/pyrate/pyrate.data_generator.dockerfile b/pyrate/pyrate.data_generator.dockerfile
new file mode 100644
index 0000000..f912eef
--- /dev/null
+++ b/pyrate/pyrate.data_generator.dockerfile
@@ -0,0 +1,27 @@
+FROM ubuntu:22.04
+
+
+# Resolve APT dependencies
+RUN apt-get update -qq
+RUN apt-get install apt-utils software-properties-common -qq
+RUN add-apt-repository ppa:antiprism/ppa -y
+RUN apt-get install python3-pip g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite antiprism git -qq
+
+# Install additional requirements
+RUN pip install git+https://github.com/SciTools/cartopy.git
+
+# Copy and install Pyrate
+WORKDIR /pyrate
+COPY . .
+RUN pip3 install .
+
+WORKDIR /pyrate/experiments
+WORKDIR /pyrate/data
+WORKDIR /pyrate
+
+ARG save_frequency=50
+ARG seed_start=0
+ARG continues=0
+
+CMD python3 -m route_generator
diff --git a/pyrate/pyrate.dockerfile b/pyrate/pyrate.dockerfile
new file mode 100644
index 0000000..a5bc2fb
--- /dev/null
+++ b/pyrate/pyrate.dockerfile
@@ -0,0 +1,32 @@
+FROM ubuntu:22.04
+
+# Resolve APT dependencies
+RUN apt-get update -qq
+RUN apt-get install apt-utils software-properties-common -qq
+RUN add-apt-repository ppa:antiprism/ppa -y
+RUN apt-get install python3-pip g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite antiprism git -qq
+RUN apt install graphviz -yqq
+
+# Install additional requirements
+RUN pip3 install jupyter tensorflow tensorboard keras-tuner black[jupyter] mapply humanize jupyter-resource-usage
+RUN pip3 install tensorflow-addons
+RUN pip install jupyter_contrib_nbextensions
+RUN jupyter contrib nbextension install --sys-prefix
+RUN jupyter nbextension enable scratchpad/main --sys-prefix
+RUN pip install git+https://github.com/SciTools/cartopy.git
+RUN apt install vim nano -qqy
+
+# Copy and install Pyrate
+WORKDIR /pyrate
+COPY setup.py .
+COPY setup.cfg .
+COPY pyrate/__init__.py pyrate/
+COPY README.md .
+RUN pip3 install .
+COPY . .
+
+WORKDIR /pyrate
+
+CMD python3 -m route_generator
+
+ARG TOKEN=72336c3d3c88e6f060bf3a27b8ea74007c0f0c14a747d55a
+CMD jupyter notebook --ip 0.0.0.0 --port 8888 --no-browser --allow-root --NotebookApp.token=${TOKEN}
diff --git a/pyrate/pyrate/__init__.py b/pyrate/pyrate/__init__.py
new file mode 100644
index 0000000..cde1afa
--- /dev/null
+++ b/pyrate/pyrate/__init__.py
@@ -0,0 +1,4 @@
+"""The Pyrate package for autonomous surface vehicles."""
+
+__version__ = "22.04"
+__author__ = "Sailing Team Darmstadt e. V. members and affiliates"
diff --git a/pyrate/pyrate/act/__init__.py b/pyrate/pyrate/act/__init__.py
new file mode 100644
index 0000000..8619014
--- /dev/null
+++ b/pyrate/pyrate/act/__init__.py
@@ -0,0 +1,6 @@
+"""The act package provides tools to use the employed actuators of the robot to execute planned actions.
+Usually, this includes the computation of required motor positions to minimize
+the error between desired and actual states.
+
+The ``control`` package implements classes for controlling actuators such that deviations
+between desired and measured states are driven towards zero."""
diff --git a/pyrate/pyrate/act/control/__init__.py b/pyrate/pyrate/act/control/__init__.py
new file mode 100644
index 0000000..847e592
--- /dev/null
+++ b/pyrate/pyrate/act/control/__init__.py
@@ -0,0 +1,9 @@
+"""This package provides controllers that compute motor inputs, e.g.. angles or
+voltages, such that a desired state can be reached and held."""
+
+from .anti_windup_lqr import AntiWindupLqr
+from .anti_windup_pid import AntiWindupPid
+from .lqr import Lqr
+from .pid import Pid
+
+__all__ = ["AntiWindupLqr", "AntiWindupPid", "Lqr", "Pid"]
diff --git a/pyrate/pyrate/act/control/anti_windup_lqr.py b/pyrate/pyrate/act/control/anti_windup_lqr.py
new file mode 100644
index 0000000..a070034
--- /dev/null
+++ b/pyrate/pyrate/act/control/anti_windup_lqr.py
@@ -0,0 +1,119 @@
+"""This module implements the Linear Quadratic Regulator with integral part and anti-windup."""
+
+# Mathematics
+from numpy import clip
+from numpy import hstack
+from numpy import ndarray
+from numpy import vstack
+from numpy import zeros
+
+# LQR control
+from .lqr import Lqr
+
+
+class AntiWindupLqr(Lqr):
+
+ """The anti-windup LQR controller, including an integration state for zero stationary error.
+
+ This controller resembles the LQR with added clipping on the control signal to a user-set
+ maximum value. Furthermore, the integral of the error over time is pruned (anti windup).
+
+ Examples:
+ First, import some helper functions from numpy.
+
+ >>> from numpy import array
+ >>> from numpy import eye
+ >>> from numpy import vstack
+
+ We then set up the Lqr controller with some control constants.
+
+ >>> controller = AntiWindupLqr(
+ ... array([[0, 1], [0, 0]]),
+ ... array([0, 1])[:, None],
+ ... array([1, 0])[None, :],
+ ... eye(3),
+ ... array([[1.0]]),
+ ... array([1.0]),
+ ... 0.5,
+ ... )
+
+ We then specify an initial and desired state.
+
+ >>> initial = vstack([1.0, 0.0])
+ >>> desired = vstack([0.0])
+
+ Finally, we retrieve a control signal from the Lqr based on the values we just set.
+
+ >>> signal = controller.control(desired, initial)
+
+ Args:
+ A: System matrix (continuous time) ``(n, n)``
+ B: Input matrix ``(n, 1)``
+ C: Output matrix ``(1, n)``
+ Q: State cost matrix (pos. semi definite, symmetric) ``(n+1, n+1)``
+ R: Control cost matrix (pos. definite, symmetric) ``(1, n)``
+ max_control: Limit of control signal
+ dt: Time between measurements
+ keep_trace: Whether to store a trace of control signals, states, etc.
+ """
+
+ # In this context, we reproduce a common LQR notation
+ # pylint: disable=invalid-name, too-many-arguments
+
+ def __init__(
+ self,
+ A: ndarray,
+ B: ndarray,
+ C: ndarray,
+ Q: ndarray,
+ R: ndarray,
+ max_control: ndarray,
+ dt: float,
+ keep_trace: bool = False,
+ ) -> None: # noqa: E741
+ # Controller specification for augmented state
+ n = A.shape[0] + 1
+ A_i = zeros((n, n))
+ A_i[1:, 1:] = A
+ A_i[0, 1:] = -C
+ B_i = vstack([zeros((1, 1)), B])
+ C_i = hstack([zeros((1, 1)), C])
+
+ # Setup internal LQR controller and own attributes
+ super().__init__(A_i, B_i, C_i, Q, R, dt, keep_trace, calculate_feed_forward=False)
+ self.V *= (self.C * self.K).sum()
+ self.max_control = max_control
+ self.summed_error = 0.0
+
+ def control(self, desired: ndarray, state: ndarray) -> ndarray:
+ """Compute the control signal based on LQR controller.
+
+ Args:
+ desired: The desired output
+ state: The current state
+
+ Returns:
+ The control signal
+ """
+
+ # Prepend summed error to state vector
+ state_i = vstack([self.summed_error, state])
+
+ # Compute errors
+ error = desired - self.C @ state_i
+ self.summed_error += self.dt * error
+
+ # Get the basic PID control signal and clip to specified boundary
+ lqr_signal = super().control(desired, state_i)
+ control_signal: ndarray = clip(lqr_signal, -abs(self.max_control), abs(self.max_control))
+
+ # Prune integral part, i.e. apply anti wind up
+ self.summed_error += (lqr_signal - control_signal) / self.K[0, 0]
+
+ return control_signal
+
+ def reset(self) -> None:
+ """Resets the filter's memory, i.e. set the error integral to zero and empty the process trace."""
+
+ super().reset()
+ self.summed_error = 0.0
diff --git a/pyrate/pyrate/act/control/anti_windup_pid.py b/pyrate/pyrate/act/control/anti_windup_pid.py
new file mode 100644
index 0000000..27ec4aa
--- /dev/null
+++ b/pyrate/pyrate/act/control/anti_windup_pid.py
@@ -0,0 +1,87 @@
+"""This module implements the PID (proportional integral derivative) controller."""
+
+# Mathematics
+from numpy import clip
+from numpy import ndarray
+
+# PID controller
+from .pid import Pid
+
+
+class AntiWindupPid(Pid):
+
+ """The PID controller with anti-windup, i.e. a limited control output and integral part.
+
+ This controller resembles the PID with added clipping on the control signal to a user-set
+ maximum value. Furthermore, the integral of the error over time is pruned (anti windup).
+
+ Examples:
+ First, import some helper functions from numpy.
+
+ >>> from numpy import array
+
+ We then set up the Pid controller with some control constants.
+
+ >>> controller = AntiWindupPid(
+ ... array([0.5]),
+ ... array([0.1]),
+ ... array([0.0]),
+ ... 5.0,
+ ... 0.1,
+ ... )
+
+ We then specify an initial and desired state as well as the current state derivative.
+
+ >>> initial = array([5.0])
+ >>> desired = array([0.0])
+ >>> derivative = array([0.0])
+
+ Finally, we retrieve a control signal from the Pid based on the values we just set.
+
+ >>> signal = controller.control(desired, initial, derivative)
+
+ Args:
+ P: Proportional control constant ``(n,)``
+ I: Integral control constant ``(n,)``
+ D: Derivative control constant ``(n,)``
+ max_control: Limit of control signal
+ dt: Time between measurements
+ keep_trace: Whether to store a trace of control signals, states, etc.
+ """
+
+ # In this context, we reproduce a common PID notation
+ # pylint: disable=too-many-arguments
+
+ def __init__(
+ self,
+ P: ndarray,
+ I: ndarray, # noqa: E741
+ D: ndarray,
+ max_control: float,
+ dt: float,
+ keep_trace: bool = False,
+ ) -> None:
+ # Setup internal PID controller and own attributes
+ super().__init__(P, I, D, dt, keep_trace)
+ self.max_control = max_control
+
+ def control(self, desired: ndarray, state: ndarray, state_derivative: ndarray) -> ndarray:
+ """Compute the control signal based on proportional, integral and derivative terms.
+
+ Args:
+ desired: The desired state
+ state: The current state
+ state_derivative: The current state derivative
+
+ Returns:
+ The control signal
+ """
+
+ # Get the basic PID control signal and clip to specified boundary
+ pid_signal = super().control(desired, state, state_derivative)
+ control_signal: ndarray = clip(pid_signal, -abs(self.max_control), abs(self.max_control))
+
+ # Prune integral part, i.e. apply anti wind up
+ self.summed_error -= (pid_signal - control_signal) / self.I
+
+ return control_signal
diff --git a/pyrate/pyrate/act/control/lqr.py b/pyrate/pyrate/act/control/lqr.py
new file mode 100644
index 0000000..7991f2c
--- /dev/null
+++ b/pyrate/pyrate/act/control/lqr.py
@@ -0,0 +1,147 @@
+"""This module implements the Linear Quadratic Regulator."""
+
+# Mathematics
+from numpy.linalg import inv
+from numpy import ndarray
+from numpy import ones
+from scipy.linalg import solve
+from scipy.linalg import solve_continuous_are
+
+# Data modelling
+from pandas import concat
+from pandas import DataFrame
+
+
+class Lqr:
+ """The LQR controller.
+
+ The linear-quadratic-regulator (LQR) is a feedback controller that solves linear-quadratic
+ problems at minimum cost. Such problems are defined by linear differential equations and
+ quadratic cost functions.
+
+ Examples:
+ First, import some helper functions from numpy.
+
+ >>> from numpy import array
+ >>> from numpy import eye
+ >>> from numpy import vstack
+
+ We then set up the Lqr controller with some control constants.
+
+ >>> controller = Lqr(
+ ... array([[0, 1], [0, 0]]),
+ ... array([0, 1])[:, None],
+ ... array([1, 0])[None, :],
+ ... eye(2),
+ ... array([[1.0]]),
+ ... 0.5,
+ ... )
+
+ We then specify an initial and desired state.
+
+ >>> initial = vstack([0.0, 0.0])
+ >>> desired = vstack([0.0])
+
+ Finally, we retrieve a control signal from the Lqr based on the values we just set.
+
+ >>> signal = controller.control(desired, initial)
+
+ Args:
+ A: System matrix (continuous time) ``(n, n)``
+ B: Input matrix ``(n, 1)``
+ C: Output matrix ``(1, n)``
+ Q: State cost matrix (pos. semi definite, symmetric) ``(n, n)``
+ R: Control cost matrix (pos. definite, symmetric) ``(1, n)``
+ dt: Time between measurements
+ keep_trace: Whether to store a trace of control signals, states, etc.
+ calculate_feed_forward: Whether to compute a feed forward part
+
+ References:
+ - https://en.wikipedia.org/wiki/Linear%E2%80%93quadratic_regulator
+ """
+
+ # In this context, we reproduce a common LQR notation
+ # pylint: disable=invalid-name, too-many-arguments
+
+ def __init__(
+ self,
+ A: ndarray,
+ B: ndarray,
+ C: ndarray,
+ Q: ndarray,
+ R: ndarray,
+ dt: float,
+ keep_trace: bool = False,
+ calculate_feed_forward: bool = True,
+ ) -> None: # noqa: E741
+ # Dimensionality checks
+ assert len(A.shape) == 2 and A.shape[0] == A.shape[1], "Matrix A is not square!"
+ assert B.shape[0] == A.shape[0], "Wrong shape for input matrix B!"
+ assert C.shape[1] == A.shape[0], "Wrong shape for output matrix C!"
+
+ # Controller specification
+ self.A = A
+ self.B = B
+ self.C = C
+ self.Q = Q
+ self.R = R
+ self.dt = dt
+
+ # Compute controller gain
+ # For reference, see here: https://en.wikipedia.org/wiki/Linear%E2%80%93quadratic_regulator
+ self.P = solve_continuous_are(self.A, self.B, self.Q, self.R)
+ self.K = solve(self.R, self.B.T @ self.P)
+
+ # Calculate static feed forward
+ if calculate_feed_forward:
+ self.V = inv(-self.C @ inv(self.A - self.B @ self.K) @ self.B)
+ else:
+ self.V = ones([1, 1])
+
+ # Objects for process tracing
+ self.keep_trace = keep_trace
+ self.process = DataFrame(
+ columns=[
+ "desired",
+ "state",
+ "error",
+ "control_signal",
+ ]
+ )
+
+ def control(self, desired: ndarray, state: ndarray) -> ndarray:
+ """Compute the control signal based on LQR controller.
+
+ Args:
+ desired: The desired output
+ state: The current state
+
+ Returns:
+ The control signal
+ """
+
+ # Compute errors
+ error = desired - self.C @ state
+
+ # Compute feedback and feed forward values
+ control_signal: ndarray = -self.K @ state + self.V @ desired
+
+ # Append control step to process trace
+ if self.keep_trace:
+ new = DataFrame(
+ {
+ "desired": (desired.copy(),),
+ "state": (state.copy(),),
+ "error": (error.copy(),),
+ "control_signal": (control_signal.copy(),),
+ }
+ )
+ self.process = concat([self.process, new], ignore_index=True)
+
+ # Return result
+ return control_signal
+
+ def reset(self) -> None:
+ """Resets the filter's memory, i.e. set the error integral to zero and empty the process trace."""
+
+ self.process = self.process[0:0]
diff --git a/pyrate/pyrate/act/control/pid.py b/pyrate/pyrate/act/control/pid.py
new file mode 100644
index 0000000..7a3caa4
--- /dev/null
+++ b/pyrate/pyrate/act/control/pid.py
@@ -0,0 +1,133 @@
+"""This module implements the PID (proportional integral derivative) controller."""
+
+# Mathematics
+from numpy import dot
+from numpy import ndarray
+from numpy import zeros_like
+
+# Data modelling
+from pandas import concat
+from pandas import DataFrame
+
+
+class Pid:
+
+ """The PID controller.
+
+ The proportional-integral-derivative (PID) controller is an industry-standard feedback control loop.
+ This controller responds proportionally to the error, i.e. the deviation from the desired state,
+ its time derivative and integral.
+
+ Examples:
+ First, import some helper functions from numpy.
+
+ >>> from numpy import array
+
+ We then set up the Pid controller with some control constants.
+
+ >>> controller = Pid(
+ ... array([0.5]),
+ ... array([0.0]),
+ ... array([0.0]),
+ ... 0.1,
+ ... )
+
+ We then specify an initial and desired state as well as the current state derivative.
+
+ >>> initial = array([5.0])
+ >>> desired = array([0.0])
+ >>> derivative = array([0.0])
+
+ Finally, we retrieve a control signal from the Pid based on the values we just set.
+
+ >>> signal = controller.control(desired, initial, derivative)
+
+ Args:
+ P: Proportional control constant ``(n,)``
+ I: Integral control constant ``(n,)``
+ D: Derivative control constant ``(n,)``
+ dt: Time between measurements
+ keep_trace: Whether to store a trace of control signals, states, etc.
+
+ References:
+ - https://en.wikipedia.org/wiki/PID_controller
+ """
+
+ # In this context, we reproduce a common PID notation
+ # pylint: disable=invalid-name, too-many-arguments
+
+ def __init__(self, P: ndarray, I: ndarray, D: ndarray, dt: float, keep_trace: bool = False): # noqa: E741
+ # Controller specification
+ self.P = P
+ self.I = I # noqa: E741
+ self.D = D
+ self.dt = dt
+
+ # Error summation field
+ self.summed_error = zeros_like(P).transpose()
+
+ # Objects for process tracing
+ self.keep_trace = keep_trace
+ self.process = DataFrame(
+ columns=[
+ "desired",
+ "state",
+ "state_derivative",
+ "error",
+ "summed_error",
+ "proportional",
+ "integral",
+ "derivative",
+ "control_signal",
+ ]
+ )
+
+ def control(self, desired: ndarray, state: ndarray, state_derivative: ndarray) -> ndarray:
+ """Compute the control signal based on proportional, integral and derivative terms.
+
+ Args:
+ desired: The desired state
+ state: The current state
+ state_derivative: The current state's derivative
+
+ Returns:
+ The control signal
+ """
+
+ # Compute errors
+ error = desired - state
+ self.summed_error += self.dt * error
+
+ # Compute PID values
+ proportional = dot(self.P, error)
+ integral = dot(self.I, self.summed_error)
+ derivative = dot(self.D, state_derivative)
+
+ # Compute control signal
+ control_signal: ndarray = proportional + integral - derivative
+
+ # Append control step to process trace
+ if self.keep_trace:
+ new = DataFrame(
+ {
+ "desired": (desired.copy(),),
+ "state": (state.copy(),),
+ "state_derivative": (state_derivative.copy(),),
+ "error": (error.copy(),),
+ "summed_error": (self.summed_error.copy(),),
+ "proportional": (proportional.copy(),),
+ "integral": (integral.copy(),),
+ "derivative": (derivative.copy(),),
+ "control_signal": (control_signal.copy(),),
+ },
+ )
+ self.process = concat([self.process, new], ignore_index=True)
+
+ # Return result
+ return control_signal
+
+ def reset(self) -> None:
+ """Resets the filter's memory, i.e. set the error integral to zero and empty the process trace."""
+
+ self.summed_error = zeros_like(self.P).transpose()
+ self.process = self.process[0:0]
diff --git a/pyrate/pyrate/common/__init__.py b/pyrate/pyrate/common/__init__.py
new file mode 100644
index 0000000..8c7532a
--- /dev/null
+++ b/pyrate/pyrate/common/__init__.py
@@ -0,0 +1 @@
+"""Contains generic helper functionality like file IO, mathematics and testing helpers."""
diff --git a/pyrate/pyrate/common/charts/__init__.py b/pyrate/pyrate/common/charts/__init__.py
new file mode 100644
index 0000000..1741b3c
--- /dev/null
+++ b/pyrate/pyrate/common/charts/__init__.py
@@ -0,0 +1,7 @@
+"""Enables handling of nautical charts and storage of obstacles in a spatial database."""
+
+from .db import SpatialiteDatabase
+from .s57_files import ChartFileHandler
+from .s57_files import S57ChartHandler
+
+__all__ = ["SpatialiteDatabase", "ChartFileHandler", "S57ChartHandler"]
diff --git a/pyrate/pyrate/common/charts/db.py b/pyrate/pyrate/common/charts/db.py
new file mode 100644
index 0000000..60be773
--- /dev/null
+++ b/pyrate/pyrate/common/charts/db.py
@@ -0,0 +1,681 @@
+"""This module adds support for a Spatialite database (SQLite DB with extra modules).
+
+This module requires the *libsqlite3-mod-spatialite* system dependency.
+The database allows for storage of elements and their efficient retrieval via spatial indexing.
+
+References:
+ - `General information `__
+ - `The website of Spatialite `__
+ - `The website of Spatialite and friends `__
+ - `Cookbook, Chapter "Spatial Indexing support"
+ `__
+ - `Cookbook, Chapter "Creating a well designed DB"
+ `__
+ - `SQL functions reference list `__
+"""
+
+# Python standard
+from contextlib import closing
+from contextlib import contextmanager
+from math import degrees
+import random
+import string
+from warnings import warn
+
+# Database interface
+import sqlite3
+from sqlite3 import Connection
+
+# Typing
+from typing import cast
+from typing import Generator
+from typing import Iterable
+from typing import Iterator
+from typing import Optional
+
+# Shapely for internal abstraction
+from shapely.geometry import LineString
+from shapely.geometry import Point
+from shapely.geometry import Polygon
+import shapely.wkb
+
+# Planning primitives
+from pyrate.plan.geometry import Direction
+from pyrate.plan.geometry import LocationType
+from pyrate.plan.geometry import PolarGeometry
+from pyrate.plan.geometry import PolarLocation
+from pyrate.plan.geometry import PolarPolygon
+from pyrate.plan.geometry import PolarRoute
+
+# Geospatial helpers
+from pyrate.plan.geometry.helpers import difference_latitude
+from pyrate.plan.geometry.helpers import difference_longitude
+from pyrate.plan.geometry.helpers import meters2rad
+
+# Import this to enable GDAL/libgeos exceptions if it has not already happened
+from . import s57_files as tmp_import
+
+del tmp_import
+
+
+class SpatialiteDatabase:
+
+ """Allows for IO with the *Spatialite* SQLite database containing obstacles.
+
+ Reading of entries from the database is implemented using generators, i.e. the elements are retrieved
+ one by one as they are consumed by the caller. While this allows for the processing of large amounts of
+ data in constant memory, it also keeps the cursor to the database open until all elements have been
+ consumed. To consume all of the rows at once simply wrap it into the list constructor like this:
+ ``all_as_list = list(database.read_all_obstacles())``. Note that only the parsing to Pyrate primitives is
+ done lazily, while the actual database reading happens eagerly.
+
+ Internally, a spatial index is used for fast *retrieval* of obstacles given geometric constraints.
+ For example, this makes queries for all obstacles in a given bounding box take time roughly
+ proportional to the size of the result set, and not to the total size of the database.
+ Some real-world benchmarks can be obtained from the script :ref:`script-benchmark_db_and_projections`
+ and are discussed in :ref:`design-decisions-local-projections`.
+ See ``SpatialiteDatabase._CREATE_TABLES_SQL_STATEMENT`` for details on the structure of the
+ database. The longitude is always the first/the X component of the two-dimensional geometries.
+
+ *QGIS* can natively open the created databases for visual inspection. It's very efficient too.
+
+ A single polygon in the database might get split into multiple ones in a query due to clipping. A unique
+ index is maintained on a best-effort basis by adding subsequent numbers to the other slices of the same polygon.
+ This assumes that indices are somewhat uniformly distributed and not sequential numbers.
+
+ Examples:
+ First, let us create some polygons to be stored (PolarPoint and PolarRoute would also work):
+
+ >>> from pyrate.plan.geometry import PolarLocation, PolarPolygon, LocationType
+ >>> locations = [PolarLocation(50, 50), PolarLocation(50, 51), PolarLocation(51, 51)]
+ >>> polygon_1 = PolarPolygon(locations=locations, name="A Polygon, YaY!")
+ >>> polygon_2 = PolarPolygon(locations=locations, name="Some Name", identifier=42,
+ ... location_type=LocationType.LAND)
+ >>> polygons = [polygon_1, polygon_2]
+ >>> polygons #doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
+ [PolarPolygon(locations=[...], name="A Polygon, YaY!"),
+ PolarPolygon(locations=[...], location_type=LocationType.LAND, name="Some Name", identifier=42)]
+
+ Then, you can simply store and then retrieve some polygons.
+ Note, that you have to call :meth:`SpatialiteDatabase.close` after using it or use it as a context
+ manager, as shown here.
+
+ >>> from pyrate.common.charts import SpatialiteDatabase
+ >>> with SpatialiteDatabase(":memory:") as database:
+ ... print(len(database))
+ ... database.write_geometries(polygons)
+ ... # We need to wrap it into a call to `list()` to evaluate the generator returned by
+ ... # `read_geometries_around` while the database is still open
+ ... read = list(database.read_geometries_around(locations[0], radius=200_000)) # 200 km
+ ... assert len(database) == len(read)
+ ... print(len(database))
+ 0
+ 2
+ >>> # The database does not guarantee an order of the result set
+ >>> sort_by_name = lambda geometry: geometry.name
+ >>> read = list(sorted(read, key=sort_by_name))
+ >>> read #doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
+ [PolarPolygon(locations=[...], name="A Polygon, YaY!", identifier=1),
+ PolarPolygon(locations=[...], location_type=LocationType.LAND, name="Some Name", identifier=42)]
+ >>> read == polygons
+ False
+ >>> # This is due to the first polygon now being given a new unused identifier
+ >>> read[0].identifier
+ 1
+ >>> # So we reset it here for the comparison to succeed
+ >>> read[0].identifier = None
+ >>> # Now, they should be almost equal (except for floating-point approximations)
+ >>> polygon_1.equals_almost_congruent(read[0])
+ True
+ >>> polygon_2.equals_almost_congruent(read[1])
+ True
+
+ A full example application can be found in the script :ref:`script-benchmark_db_and_projections`.
+
+ Possible extensions:
+ - Allow for retrieving from arbitrary PolarPolygon/bounding box. See ``_read_obstacles_clipped``.
+ - The method :meth:`~read_obstacles_around` could be easily extended/complemented to support ellipses.
+ However, this would only be useful if that ellipse could also be rotated, and adding that appears to be tricky.
+
+ Note:
+ Use with ``storage_path=":memory:"`` (see example above) to open an ephemeral database that resides in
+ RAM. This works when only a single database user (like a :class:`~SpatialiteDatabase` instance) will
+ access it.
+ Otherwise, passing ``"file::memory:?cache=shared"`` as a file will allow the same database to be
+ accessed by multiple different users within the same process. Both are useful for (unit-)testing too.
+ When passing extra options like ``"file::memory:?cache=shared"``,
+ you will also have to pass ``uri=True`` to :class:`~SpatialiteDatabase`
+ such that the parameters do not get mistaken for being part of an actual file name:
+ >>> with SpatialiteDatabase("file::memory:?cache=shared", uri=True) as database:
+ ... print(len(database))
+ 0
+
+ Args:
+ storage_path: the path where to look for the database file, usually ending with ``.sqlite``
+ issue_create_statement: tries to create the table(s) and indices if they do not exist yet;
+ this can safely be left enabled, as existing tables make this a no-op
+ kwargs: additional parameters to be passed to the database creation, see :class:`sqlite3.Connection`
+
+ Raises:
+ IOError: When the database file cannot be accessed
+ RuntimeError: If the Spatialite extension (*libsqlite3-mod-spatialite*) cannot be loaded
+ """
+
+ #: The Spatial Reference System Identifier used for storing objects
+ #: this is the widely used WGS 84; see: https://spatialreference.org/ref/epsg/4326/
+ _SRID: int = 4326
+
+ def __init__(self, storage_path: str, issue_create_statement: bool = True, **kwargs) -> None:
+ # This may raise an IOError:
+ self._connection: Connection = sqlite3.connect(storage_path, **kwargs)
+
+ try:
+ # load the spatialite module
+ self._connection.load_extension("mod_spatialite.so")
+ except sqlite3.OperationalError as error: # pragma: no cover
+ raise RuntimeError(
+ "Cannot load the spatialite extension. Is it installed (see installation instructions)? "
+ f"Error was: {error}"
+ ) from error
+
+ if issue_create_statement:
+ self.create_tables()
+
+ def create_tables(self, table_name: str = "Obstacles") -> None:
+ """This creates the table(s) and indices in the database (if they do not exist yet).
+
+ See the module documentation of :mod:`pyrate.common.charts.db` for more information.
+
+ Args:
+ table_name: The name of the table to initialize
+ """
+ # Check if the table "Obstacles" is present
+ check = f"SELECT COUNT(1) FROM SQLITE_MASTER WHERE name = '{table_name}'"
+ with closing(self._connection.execute(check)) as cursor:
+ count = cast(int, cursor.fetchone()[0]) # This needs to be cast as the result is untyped
+
+ if count == 0:
+ # It is not present, so we initialize the database here
+
+ statement = f"""
+ CREATE TABLE IF NOT EXISTS '{table_name}' (
+ id INTEGER PRIMARY KEY NOT NULL,
+ location_type TINYINT unsigned NOT NULL DEFAULT 0,
+ name VARCHAR CHARACTER DEFAULT NULL,
+ CHECK (location_type <= {LocationType.max_value()})
+ );
+ CREATE INDEX IF NOT EXISTS by_location_type ON '{table_name}' (location_type);
+
+ SELECT InitSpatialMetaDataFull();
+ SELECT AddGeometryColumn(
+ '{table_name}',
+ 'geometry',
+ {SpatialiteDatabase._SRID},
+ 'GEOMETRY', -- just as fast as e.g. POLYGON but more flexible
+ 'XY',
+ TRUE -- set to NOT NULL
+ );
+ SELECT CreateSpatialIndex(
+ '{table_name}',
+ 'geometry'
+ );
+
+ -- can only be done after column is added; but is not supported by SQLite
+ -- ALTER TABLE '{table_name}' ADD CHECK (IsValid(geometry));
+ """
+
+ with self._connection: # auto-commits at the end
+ with self.disable_synchronization():
+ self._connection.executescript(statement).close()
+
+ @contextmanager
+ def disable_synchronization(self) -> Iterator[None]:
+ """Temporarily disables file system synchronization for consistency to increase write performance.
+
+ To quote the `documentation of SQLite <https://www.sqlite.org/pragma.html#pragma_synchronous>`__:
+
+ "With synchronous OFF (0), SQLite continues without syncing as soon as it has handed data off to
+ the operating system. If the application running SQLite crashes, the data will be safe, but the
+ database might become corrupted if the operating system crashes or the computer loses power before
+ that data has been written to the disk surface. On the other hand, commits can be orders of
+ magnitude faster with synchronous OFF."
+
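+ A minimal usage sketch (``many_polygons`` is assumed to exist; writes become faster but less crash-safe):
+
+ .. code-block:: python
+
+ with database.disable_synchronization():
+ database.write_geometries(many_polygons)
+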
+ """
+ self._connection.execute("PRAGMA synchronous=OFF").close()
+ yield
+ self._connection.execute("PRAGMA synchronous=ON").close()
+
+ def copy_contents_to_database(self, file_path: str, update: bool = False, **kwargs) -> None:
+ """Dumps the content of this obstacle database to a new one.
+
+ This can be useful in cases where one wants to first create an extremely fast in-memory database and
+ later copy it to a file on disk.
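+
+ A minimal sketch of that workflow (``polygons`` and the target file name are assumptions):
+
+ .. code-block:: python
+
+ with SpatialiteDatabase(":memory:") as fast_database:
+ fast_database.write_geometries(polygons)
+ fast_database.copy_contents_to_database("charts_on_disk.sqlite")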
+
+ Args:
+ file_path: the path of the other database
+ update: if set to ``True``, update/replace on conflicting identifiers; else raise an error in that
+ case
+ kwargs: additional parameters to be passed to the database creation, see attribute ``kwargs`` of
+ :class:`~SpatialiteDatabase`
+
+ Raises:
+ sqlite3.IntegrityError: If a duplicate key should have been inserted and ``update`` was set to
+ ``False``
+ """
+ # init the other database
+ with SpatialiteDatabase(file_path, **kwargs):
+ pass
+
+ database_name = _random_name()
+
+ command = "REPLACE" if update else "INSERT"
+ statements = f"""
+ ATTACH '{file_path}' AS {database_name};
+ {command} INTO {database_name}.Obstacles SELECT * FROM main.Obstacles;
+ DETACH {database_name};
+ """
+
+ with self._connection: # auto-commits at the end
+ self._connection.executescript(statements).close()
+
+ def simplify_contents(self, simplify_tolerance: float) -> None:
+ """Simplifies all geometries within the database. Always runs :meth:`~vacuum` afterwards.
+
+ Args:
+ simplify_tolerance: the tolerance within which all new points shall lie with respect to the old
+ ones, in meters; non-negative. Set to zero to disable simplification.
+
+ Further ideas:
+ - Keep topology between objects, not just within them, e.g. see
+ `this blog post `__.
+ """
+ assert simplify_tolerance >= 0, "tolerance must be non-negative"
+
+ if simplify_tolerance > 0:
+ tolerance_degrees = degrees(meters2rad(simplify_tolerance))
+ statement = (
+ f"UPDATE Obstacles SET geometry = ST_SimplifyPreserveTopology(geometry, {tolerance_degrees})"
+ )
+
+ with self._connection: # auto-commits at the end
+ self._connection.execute(statement).close()
+
+ self.vacuum()
+
+ def vacuum(self) -> None:
+ """Defragments the database. This is useful after deleting or shrinking many entries."""
+ with self._connection: # auto-commits at the end
+ self._connection.execute("VACUUM").close()
+
+ def write_geometry(
+ self, geometry: PolarGeometry, update: bool = False, raise_on_failure: bool = True
+ ) -> None:
+ """Alias for ``write_obstacles([obstacle], update)``. See :meth:`~write_obstacles`.
+
+ Args:
+ geometry: the geometry to place into the database, identified by
+ its :attr:`~pyrate.plan.geometry.Geospatial.identifier`
+ update: see :meth:`~write_geometries`
+ raise_on_failure: see :meth:`~write_geometries`
+ """
+
+ return self.write_geometries([geometry], update=update, raise_on_failure=raise_on_failure)
+
+ def write_geometries(
+ self, geometries: Iterable[PolarGeometry], update: bool = False, raise_on_failure: bool = True
+ ) -> None:
+ """Writes geometries into the database.
+
+ All geometries are only identified by their identifier as a primary key.
+ If ``update is True``, any existing geometries with the same IDs will be updated/replaced.
+ If ``update is False``, an exception is raised if duplicate keys are to be inserted.
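+
+ For example (a minimal sketch, assuming ``database`` and ``polygon`` already exist):
+
+ .. code-block:: python
+
+ database.write_geometries([polygon])  # raises sqlite3.IntegrityError for duplicate identifiers
+ database.write_geometries([polygon], update=True)  # replaces the already stored geometry instead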
+
+ Args:
+ geometries:
+ The geometries to place into the database, identified by their
+ :attr:`~pyrate.plan.geometry.Geospatial.identifier`.
+ update:
+ If set to ``True``, update/replace on conflicting identifiers;
+ else raise an error in that case.
+ If set to ``True``, no guarantees about inserts can be made (see :class:`ValueError` below).
+ raise_on_failure:
+ If set to ``False`` suppress the :class:`ValueError` below and instead print a warning.
+
+ Raises:
+ sqlite3.IntegrityError: If a duplicate key should have been inserted and ``update`` was set to
+ ``False``
+ sqlite3.IntegrityError: If a value was not within the constraints; should never happen if all
+ :class:`~pyrate.plan.geometry.polygon.PolarPolygon` were created properly
+ ValueError:
+ If the provided geometries are not valid according to *spatialite* and could not be repaired.
+ However, any valid geometries will have been inserted by then.
+ Also, this can only be checked if ``update`` is set to ``False``;
+ otherwise, incomplete inserts will simply go unnoticed.
+ Only very basic cleanup is attempted.
+ Suppressed if ``raise_on_failure is False``.
+ """
+
+ count_before = self.count_geometries()
+
+ # Build query
+ command = "REPLACE" if update else "INSERT"
+ statement = f"""
+ WITH _insert_temp(id,location_type,name,geometry)
+ AS (VALUES (?,?,?,SanitizeGeometry(GeomFromWKB(?,{SpatialiteDatabase._SRID}))))
+
+ {command} INTO Obstacles
+ SELECT * FROM _insert_temp WHERE IsValid(geometry)
+ """
+
+ # Convert data
+ data = [(g.identifier, g.location_type, g.name, to_wkb(g)) for g in geometries]
+
+ # Execute statement
+ with self._connection: # auto-commits at the end
+ self._connection.executemany(statement, data).close()
+
+ # TODO(Felix.Divo):
+ # We want to notify the user if the insert was incomplete, i.e. if a geometry was invalid
+ # (1) `cursor.rowcount` from executemany() does not work since it returns -1
+ # (2) `cursor.lastrowid` contains only the last ID, so we cannot use that
+ # (3) Appending `RETURNING id` causes exceptions when used with executemany():
+ # "sqlite3.ProgrammingError: executemany() can only execute DML statements"
+ # (4) So we do the stupid thing: Count before and after. Does not work with ``update=True``!
+ # This could also cause problems with concurrency.
+ # (5) One could also repair by Buffer(), though that might not be what is desired,
+ # cf. https://shapely.readthedocs.io/en/stable/manual.html#object.buffer.
+
+ # Make sure that all rows were valid
+ missing = count_before + len(data) - self.count_geometries()
+ if not update and missing > 0:
+ message = f"{missing} of the {len(data)} geometries were invalid"
+ if raise_on_failure:
+ raise ValueError(message)
+ warn(message)
+
+ def read_all(
+ self, only_location_type: Optional[LocationType] = None
+ ) -> Generator[PolarGeometry, None, None]:
+ """Read all stored geometries, optionally filtered by a type.
+
+ Args:
+ only_location_type: get only geometries of that type, if not set to ``None``
+
+ Yields:
+ The geometries as read from the database.
+ """
+ yield from self._read_geometries(
+ geometry_column="geometry", # no transformation, just select the column
+ only_location_type=only_location_type,
+ )
+
+ def read_geometries_around(
+ self,
+ around: PolarLocation,
+ radius: float = 10_000.0,
+ only_location_type: Optional[LocationType] = None,
+ ) -> Generator[PolarGeometry, None, None]:
+ """Reads and clips geometries in a given radius around some location.
+
+ The geometries are clipped to the circle given by the location ``around`` and ``radius``. This means
+ that any parts stretching beyond the circle are not returned and the geometry approximately follows
+ the outer circle in such cases. If the ellipse is deformed at (very) high/low latitudes, the clipping
+ area is selected such that at least all geometries in the defined area are included, and possibly some
+ more.
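+
+ A minimal usage sketch (assuming an open ``database`` and a :class:`~pyrate.plan.geometry.PolarLocation` ``here``):
+
+ .. code-block:: python
+
+ nearby = list(database.read_geometries_around(here, radius=50_000))  # everything within about 50 km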
+
+ Note:
+ This method internally uses an ellipse as the clipping area even for circular clipping areas in
+ order to account for some distortions of polar coordinates at high latitudes. Also, keep in mind
+ that clipping/selecting for inclusion in the result set is not a perfect operation, as the
+ clipping area is internally discretized to a geometry. It thus has corners, and is not smooth
+ like an ellipse in the mathematical sense.
+
+ Args:
+ around: The point around which obstacles are to be extracted; assumed to be in degrees
+ radius: The radius around *around* to retrieve items from in meters; default: 10 000 m.
+ The behaviour is unspecified if it is very large, like more than 1 000 000 m.
+ It must be at least zero.
+ only_location_type: Get only obstacles of that type, if not set to ``None``
+
+ Yields:
+ The geometries as read from the database.
+ """
+
+ # Safety assertions on given radius
+ assert radius >= 0.0, "the radius must be non-negative"
+ assert (
+ radius <= 1_000_000
+ ), "radius > 1_000_000; see docs; this is not a fundamental restriction but it is untested"
+
+ # The distance in polar coordinates is the same whether going west or east
+ east_west, _ = around.translate(Direction.East, radius)
+ longitudal_radius = difference_longitude(around.longitude, east_west.longitude)
+
+ # The above might be different when going north or south
+ # To disambiguate, we take the larger of the two distances as the radius
+ north, _ = around.translate(Direction.North, radius)
+ south, _ = around.translate(Direction.South, radius)
+
+ latitudal_radius = max(
+ difference_latitude(around.latitude, north.latitude),
+ difference_latitude(around.latitude, south.latitude),
+ )
+
+ # Place a corner of the discretized ellipse every 24 degrees,
+ # i.e. turn it into a polygon with 15 corners
+ every_degrees = 24
+
+ # MakeEllipse(...) takes the two radii in lat/long direction in degrees and returns a line string
+ clipping_area = (
+ f"MakePolygon(MakeEllipse({around.longitude}, {around.latitude}, "
+ f"{longitudal_radius}, {latitudal_radius}, {SpatialiteDatabase._SRID}, {every_degrees}))"
+ )
+
+ yield from self._read_geometries_clipped(clipping_area, only_location_type)
+
+ def _read_geometries_clipped(
+ self, clipping_area: str, only_location_type: Optional[LocationType]
+ ) -> Generator[PolarGeometry, None, None]:
+ """Internal helper for querying for clipped geometries.
+
+ Args:
+ clipping_area: The area to clip to
+ only_location_type: The type of the read location
+
+ Yields:
+ The geometries clipped to the given area.
+ """
+ yield from self._read_geometries(
+ geometry_column=f"Intersection(geometry, {clipping_area})",
+ only_location_type=only_location_type,
+ )
+
+ def _read_geometries(
+ self, geometry_column: str, only_location_type: Optional[LocationType]
+ ) -> Generator[PolarGeometry, None, None]:
+ """Factors out the common parts of assembling SQL statements for the ``read_*`` methods.
+
+ Args:
+ geometry_column:
+ A SQL "column name" that can be used in a SELECT clause and which returns a Spatialite
+ geometry. Examples are ``"geometry"`` to simply return the geometries unmodified or something
+ like ``"Reverse(geometry) as ignored_column_name"`` to perform some modification.
+ The name does not matter.
+ only_location_type: Get only obstacles of that type, if not set to ``None``
+
+ Yields:
+ The geometries as read from the database.
+ """
+ if only_location_type is None:
+ additional_where_constraint = ""
+ else:
+ additional_where_constraint = f"AND location_type = {only_location_type.value}"
+
+ # `IsValid(wrapped_geometry)` excludes empty geometries which can sometimes occur
+ yield from self._read_from_sql(
+ f"""
+ WITH temptable AS (
+ SELECT id, location_type, name, ({geometry_column}) as wrapped_geometry
+ FROM Obstacles
+ WHERE wrapped_geometry IS NOT NULL AND IsValid(wrapped_geometry)
+ {additional_where_constraint}
+ )
+ SELECT id, location_type, name, AsBinary(wrapped_geometry) as geometry
+ FROM temptable
+ """
+ )
+
+ def _read_from_sql(self, sql_statement: str) -> Generator[PolarGeometry, None, None]: # noqa: C901
+ """Reads geometries for a given complete SQL query.
+
+ Supports reading these geometry types and maps them to instances of
+ :attr:`pyrate.plan.geometry.PolarGeometry`:
+
+ - ``Point``
+ - ``LineString`` and ``MultiLineString``
+ - ``Polygon`` and ``MultiPolygon``
+
+ Args:
+ sql_statement: The SQL statement to query with and read geometries from
+
+ Yields:
+ The geometries as read from the database.
+ """
+ with closing(self._connection.execute(sql_statement)) as cursor:
+ # This should be theoretically parallelizable, but was not required as of now
+ # keep in mind that `cursor.fetchall()` returns a list, not a generator
+ for (identifier, location_type, name, binary_geometry) in cursor.fetchall():
+ parsed_geometry = shapely.wkb.loads(binary_geometry)
+ geometry_type = parsed_geometry.type
+
+ # The database contains only the geometry types Point, LineString and Polygon.
+ # However, depending on the performed operation, some entries might be cut into
+ # MultiLineString or MultiPolygon, so we need to be able to decode them too.
+ # MultiPoint can currently not occur.
+
+ def to_polygon(
+ polygon: Polygon, unique_identifier: int, name=name, location_type=location_type
+ ) -> PolarPolygon:
+ locations = [PolarLocation(y, x) for (x, y) in polygon.exterior.coords]
+ return PolarPolygon(locations, LocationType(location_type), name, unique_identifier)
+
+ def to_route(
+ line_string: LineString, unique_identifier: int, name=name, location_type=location_type
+ ) -> PolarRoute:
+ locations = [PolarLocation(y, x) for (x, y) in line_string.coords]
+ return PolarRoute(locations, LocationType(location_type), name, unique_identifier)
+
+ if geometry_type == "Point":
+ point = cast(Point, parsed_geometry)
+ yield PolarLocation(
+ latitude=point.y,
+ longitude=point.x,
+ location_type=LocationType(location_type),
+ name=name,
+ identifier=identifier,
+ )
+
+ elif geometry_type == "LineString":
+ yield to_route(cast(LineString, parsed_geometry), identifier)
+
+ elif geometry_type == "MultiLineString":
+ for index, route in enumerate(parsed_geometry.geoms):
+ # Make identifier unique by best effort
+ yield to_route(cast(LineString, route), unique_identifier=identifier + index)
+
+ elif geometry_type == "Polygon":
+ yield to_polygon(cast(Polygon, parsed_geometry), identifier)
+
+ elif geometry_type == "MultiPolygon":
+ for index, polygon in enumerate(parsed_geometry.geoms):
+ # Make identifier unique by best effort
+ yield to_polygon(cast(Polygon, polygon), unique_identifier=identifier + index)
+
+ else: # pragma: no cover
+ # This should never happen in a well-formatted database
+ raise RuntimeError(f'illegal geometry type "{geometry_type}" returned')
+
+ def clear(self) -> None:
+ """Deletes all obstacles from the database, but does not touch the table structure or indices."""
+
+ with self._connection: # auto-commits at the end
+ self._connection.execute("DELETE FROM Obstacles").close()
+
+ def count_geometries(self) -> int:
+ """Counts all obstacles in the database."""
+
+ with closing(self._connection.execute("SELECT COUNT(*) FROM Obstacles")) as cursor:
+ result = cursor.fetchone()
+
+ return cast(int, result[0]) # needs to be cast as the result is untyped
+
+ def count_vertices(self) -> int:
+ """Counts all vertices of all obstacles in the database."""
+
+ statement = "SELECT SUM(ST_NPoints(geometry)) FROM Obstacles"
+ with closing(self._connection.execute(statement)) as cursor:
+ result = cursor.fetchone()
+
+ count = cast(Optional[int], result[0]) # needs to be cast as the result is untyped
+ if count is None:
+ # this can happen if the database is empty since `SUM` will return `NULL` in that case
+ return 0
+
+ return count
+
+ def __len__(self) -> int:
+ return self.count_geometries()
+
+ def close(self) -> None:
+ """Closes the connection to the database and releases all associated resources.
+
+ Although not clearly documented in the standard library, :meth:`sqlite3.Connection.close`, and thus
+ this method, can apparently be called multiple times safely.
+ """
+
+ # The `_connection` attribute is unset if `__init__` raised an exception, but
+ # `close()` might still be called, e.g. when the database was used as a
+ # context manager or from `__del__`
+ if hasattr(self, "_connection"):
+ self._connection.close() # pragma: no cover
+
+ def __enter__(self) -> "SpatialiteDatabase":
+ return self
+
+ def __exit__(self, exc_type, exc_value, traceback) -> None:
+ self.close()
+
+ # Makes sure the connection is closed when this object ceases to exist
+ def __del__(self) -> None:
+ self.close()
+
+
+def _random_name() -> str:
+ """Returns a probably unique random name consisting only of latin letters."""
+ return "".join(random.choices(string.ascii_letters, k=32))
+
+
+def to_wkb(geometry: PolarGeometry) -> bytes:
+ """Converts the given geometries into well-known binary (WKB) bytes.
+
+ Args:
+ geometry: The polar geometry to be converted
+
+ Returns:
+ The WKB representation of the geometry
+
+ Raises:
+ NotImplementedError:
+ If the geometry type cannot be converted to bytes.
+ This will never occur when the type signature is obeyed.
+ """
+
+ if isinstance(geometry, PolarLocation):
+ return cast(bytes, shapely.wkb.dumps(Point(geometry.longitude, geometry.latitude)))
+ if isinstance(geometry, PolarRoute):
+ return cast(bytes, shapely.wkb.dumps(LineString(geometry.to_numpy())))
+ if isinstance(geometry, PolarPolygon):
+ return cast(bytes, shapely.wkb.dumps(Polygon(geometry.to_numpy())))
+
+ # Can never occur if the type signature was obeyed but better be explicit here
+ raise NotImplementedError(f"unknown geometry type: {type(PolarLocation).__name__}")
diff --git a/pyrate/pyrate/common/charts/s57_files.py b/pyrate/pyrate/common/charts/s57_files.py
new file mode 100644
index 0000000..64e39cf
--- /dev/null
+++ b/pyrate/pyrate/common/charts/s57_files.py
@@ -0,0 +1,319 @@
+"""Allows to find and read nautical charts. Currently, this only supports IHO S-57 charts.
+
+Examples:
+ This shows how to recursively read all obstacles/relevant chart objects from a given directory:
+
+ >>> from pyrate.common.charts import ChartFileHandler, S57ChartHandler
+ >>> path_to_charts = "stda/data/charts/noaa_vector/data"
+ >>> # Nothing about this is specific to the `S57ChartHandler`, so cast it to `ChartFileHandler`
+ >>> handler: ChartFileHandler = S57ChartHandler()
+ >>> polygons = [ #doctest: +SKIP
+ ... geometry
+ ... for chart_file in handler.find_chart_files(path_to_charts)
+ ... for geometry in handler.read_chart_file(chart_file)
+ ... ]
+
+Ideas:
+ - Maybe use `Fiona `__ as an alternative?
+
+Resources:
+ - Documentation on the S-57 file format and the relevant parts of GDAL:
+ - https://gdal.org/python/osgeo.ogr-module.html
+ - https://gdal.org/drivers/vector/s57.html
+ - https://www.teledynecaris.com/s-57/frames/S57catalog.htm (the entire object catalogue!)
+ - https://gdal.org/api/python_gotchas.html (!)
+ - Examples and Cookbooks:
+ - https://pcjericks.github.io/py-gdalogr-cookbook/vector_layers.html
+ - and more general: https://pcjericks.github.io/py-gdalogr-cookbook/index.html
+ - https://lists.osgeo.org/pipermail/gdal-dev/2008-April/016767.html
+ - Helpers:
+ - The program QGIS is very helpful because it can open S-57 files visually.
+"""
+
+# Python standard
+from abc import ABC
+from abc import abstractmethod
+from hashlib import sha1
+import os
+import os.path
+from pathlib import Path
+import sys
+from warnings import catch_warnings
+from warnings import simplefilter
+from warnings import warn
+
+# Typing
+from typing import Generator
+from typing import Mapping
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+# Planning primitives
+from pyrate.plan.geometry import LocationType
+from pyrate.plan.geometry import PolarLocation
+from pyrate.plan.geometry import PolarPolygon
+
+# Allow osgeo to be missing
+# Set to True if the osgeo is available, or False if not
+_OSGEO_PRESENT: bool
+try:
+ # This emits warnings (at least on Python 3.8)
+ with catch_warnings():
+ simplefilter("ignore", DeprecationWarning, lineno=8)
+ from osgeo import gdal
+ from osgeo import ogr
+except ImportError as _error: # pragma: no cover
+ _OSGEO_PRESENT = False
+ warn(
+ "Could not import package osgeo. Please install it as described in the README. "
+ f"Error was: {_error}"
+ )
+ del _error
+else:
+ _OSGEO_PRESENT = True
+ ogr.UseExceptions()
+
+
+#: Currently there are only locations and polygons, see :meth:`S57ChartHandler._create_obstacle`
+PolarChartGeometry = Union[PolarLocation, PolarPolygon]
+
+
+class ChartFileHandler(ABC):
+ """This is a generic class for handling chart files, that defines a common interface."""
+
+ @staticmethod
+ @abstractmethod
+ def find_chart_files(search_path: Union[str, "os.PathLike[str]"]) -> Generator[Path, None, None]:
+ """Recursively find all files that can be handled by this handler.
+
+ Args:
+ search_path: The path to search in recursively. Follows symlinks.
+
+ Yields:
+ Path: The path of each found chart file
+ """
+
+ @abstractmethod
+ def read_chart_file(
+ self, path: Union[str, "os.PathLike[str]"]
+ ) -> Generator[PolarChartGeometry, None, None]:
+ """Reads a chart file and converts the relevant layers/features into ChartObstacles.
+
+ Args:
+ path: The path to a chart file of the right format
+ """
+
+
+class S57ChartHandler(ChartFileHandler):
+ """Reads IHO S-57 chart files. The returned geometries are *not* checked for validity.
+
+ These chart objects are extracted from the source files:
+
+ - Landmasses (from S-57 object type ``LNDARE``)
+ - Depth values (from S-57 object type ``DEPARE``, via attribute ``DRVAL2``, assumed to be in meters)
+ - Buoys (from S-57 object type ``BOY*``, e.g. ``BOYCAR``)
+ - Possibly more in the future
+
+ The identifiers of the created objects are derived deterministically from the chart name and the
+ identifiers already contained in the chart, and they are supposed to be unique across all charts. They
+ are created by first assembling a globally unique identifier string from the chart file name and the
+ ``LNAM`` field. Then, that string is hashed and truncated to form a 63-bit identifier.
+
+ The names of the objects are created like this:
+ ``{chart file name}#{chart-unique alphanumeric identifier} ({human-readable type}): "{common name}"``.
+
+ All objects are associated with the applicable :class:`pyrate.plan.geometry.LocationType`.
+
+ Raises:
+ ImportError: If the :mod:`osgeo` package is missing
+ """
+
+ def __init__(self):
+ if not _OSGEO_PRESENT: # pragma: no cover
+ raise ImportError('the "osgeo" package must be installed for this handler to function')
+
+ #: This maps layer names to the corresponding parameters for S57ChartHandler._create_obstacle(...)
+ #: These are not all objects but merely the ones which are trivial to map.
+ _SIMPLE_MAPPINGS: Mapping[str, Tuple[LocationType, str]] = {
+ "LNDARE": (LocationType.LAND, "Landmass"),
+ "BOYCAR": (LocationType.OBSTRUCTION, "Buoy (BOYCAR)"),
+ "BOYINB": (LocationType.OBSTRUCTION, "Buoy (BOYINB)"),
+ "BOYISD": (LocationType.OBSTRUCTION, "Buoy (BOYISD)"),
+ "BOYLAT": (LocationType.OBSTRUCTION, "Buoy (BOYLAT)"),
+ "BOYSAW": (LocationType.OBSTRUCTION, "Buoy (BOYSAW)"),
+ "BOYSPP": (LocationType.OBSTRUCTION, "Buoy (BOYSPP)"),
+ # TODO(Felix): Should be included later on; See #19
+ # "OBSTRN": (LocationType.OBSTRUCTION, "Obstruction"),
+ # "OFSPLF": (LocationType.OBSTRUCTION, "Platform"),
+ # "OSPARE": (LocationType.OBSTRUCTION, "Production Area/Wind farm"),
+ # "PILPNT": (LocationType.OBSTRUCTION, "Post"),
+ # "MIPARE": (LocationType.OBSTRUCTION, "Military Exercise Area"),
+ # "DMPGRD": (LocationType.OBSTRUCTION, "Dumping Ground"),
+ # TODO(Felix): maybe later add anchorage and water sport; See #19
+ }
+
+ @staticmethod
+ def find_chart_files(search_path: Union[str, "os.PathLike[str]"]) -> Generator[Path, None, None]:
+ for root, _, files in os.walk(str(search_path), followlinks=True):
+ for file in files:
+ if file.endswith(".000"):
+ # assume it is an IHO S-57 file
+ yield Path(root) / file
+ # else: ignore the file
+
+ def read_chart_file(
+ self, path: Union[str, "os.PathLike[str]"]
+ ) -> Generator[PolarChartGeometry, None, None]:
+ """Reads a chart file and converts the relevant layers/features into ChartObstacles.
+
+ Args:
+ path: The path to the S-57 chart file (e.g. ``something.000``)
+
+ Yields:
+ All relevant obstacles, with globally unique and deterministic names
+
+ Raises:
+ FileNotFoundError: If the database file(s) is/are missing
+ IOError: If the database file(s) cannot be opened for another reason
+ """
+ file_path = str(path)
+
+ if not os.path.exists(file_path):
+ raise FileNotFoundError(f"cannot open dataset: {file_path}")
+
+ # open database
+ dataset = ogr.Open(file_path, gdal.GA_ReadOnly)
+ if not dataset:
+ raise IOError(f"cannot open dataset (invalid file): {file_path}")
+
+ file_name = os.path.splitext(os.path.basename(file_path))[0]
+ file_name_bytes = file_name.encode()
+
+ # read contents
+ for i in range(int(dataset.GetLayerCount())):
+ layer = dataset.GetLayerByIndex(i)
+ for geometry, feature_id in S57ChartHandler._convert_layer_to_obstacles(layer):
+
+ # prepend the name of the file to make it unique and ease lookup of objects in the source
+ # this is also required because the LNAM field is not guaranteed to be unique across files
+ geometry.name = f"{file_name}#{geometry.name}"
+
+ # hash a combination of file name and feature identifier as that together is globally unique
+ hashed_id = sha1(file_name_bytes + feature_id.encode()).digest()
+ # truncate to 64 bit and create an int from it
+ identifier = int.from_bytes(hashed_id[-8:], sys.byteorder, signed=True)
+ # cut off the most-significant bit to arrive at 63 bits
+ geometry.identifier = identifier & 0x7F_FF_FF_FF_FF_FF_FF_FF
+
+ yield geometry
+
+ @staticmethod
+ def _convert_layer_to_obstacles(
+ layer: ogr.Layer,
+ ) -> Generator[Tuple[PolarChartGeometry, str], None, None]:
+ """Converts the relevant obstacles of a layer into :attr:`s57_files.PolarChartGeometry`.
+
+ Args:
+ layer: The layer to search in
+
+ Yields:
+ For each relevant feature in the layer: a geometry and its ``LNAM`` feature identifier
+ """
+
+ layer_name = layer.GetName()
+
+ # we first do the more complicated stuff and then convert using S57ChartHandler.SIMPLE_MAPPINGS
+
+ if layer_name == "DEPARE": # "depth area"
+ for feature in layer:
+ # Warning: we assume these depths are given in meters, which could be wrong in some cases but
+ # worked in our tests
+ depth_max = feature["DRVAL2"]
+
+ if depth_max <= 5:
+ yield from S57ChartHandler._create_obstacle(
+ feature, "Depth <= 5m", LocationType.SHALLOW_WATER
+ )
+ elif depth_max <= 10:
+ yield from S57ChartHandler._create_obstacle(
+ feature, "Depth <= 10m", LocationType.SHALLOW_WATER
+ )
+ elif depth_max <= 20:
+ yield from S57ChartHandler._create_obstacle(
+ feature, "Depth <= 20m", LocationType.SHALLOW_WATER
+ )
+ elif depth_max <= 50:
+ yield from S57ChartHandler._create_obstacle(
+ feature, "Depth <= 50m", LocationType.SHALLOW_WATER
+ )
+
+ else:
+ if layer_name in S57ChartHandler._SIMPLE_MAPPINGS:
+ location_type, human_readable_type = S57ChartHandler._SIMPLE_MAPPINGS[layer_name]
+ for feature in layer:
+ yield from S57ChartHandler._create_obstacle(feature, human_readable_type, location_type)
+
+ @staticmethod
+ def _create_obstacle(
+ feature: ogr.Feature,
+ human_readable_type: str,
+ location_type: LocationType,
+ ) -> Generator[Tuple[PolarChartGeometry, str], None, None]:
+ """Creates a point or area obstacle from a given feature.
+
+ Args:
+ feature: The feature to transform
+ human_readable_type: A human-readable string describing what this is, like ``"landmass"``
+ location_type: The location type to be used
+
+ Yields:
+ (1) A location or polygon that represents an obstacle
+ (2) The feature's ``LNAM`` identifier; unique within the chart file, but not globally
+ """
+
+ # This ID is guaranteed to be unique within the chart file and composed of AGEN, FIDN, and FIDS
+ feature_id: str = feature["LNAM"]
+ assert feature_id is not None, "the LNAM field is mandatory for all objects"
+
+ # Remark: feature.IsFieldSetAndNotNull("OBJNAM") seems to work but logs tons of errors to stderr
+ # It is not mandatory for all types of chart objects
+ object_name: Optional[str]
+ try:
+ object_name = feature["OBJNAM"] # might be None
+ except (ValueError, KeyError):
+ object_name = None
+
+ if object_name is None:
+ object_name = "---"
+ else:
+ # Replace broken unicode text (surrogates)
+ object_name = object_name.encode("utf-8", "replace").decode("utf-8")
+
+ # Construct the obstacle's name
+ name = f'{feature_id} ({human_readable_type}): "{object_name}"'
+
+ # Extract the geometries (as the feature may or may not contain a geometry collection)
+ geometry = feature.GetGeometryRef()
+ geometry_type = geometry.GetGeometryType()
+
+ if geometry_type == ogr.wkbPoint:
+ point = PolarLocation(
+ latitude=geometry.GetY(), longitude=geometry.GetX(), name=name, location_type=location_type
+ )
+ yield point, feature_id
+
+ elif geometry_type == ogr.wkbLineString:
+ # Ignore this feature as there are currently no features being extracted that are
+ # LineStrings and relevant for navigation
+ # TODO(Someone): One should verify that this is okay; See #125
+ warn(f"Ignoring LineString geometry in chart: {name}")
+
+ elif geometry_type == ogr.wkbPolygon:
+ # TODO(Felix): We throw away the inner rings (i.e. the holes); See #106
+ outer_ring = geometry.GetGeometryRef(0)
+ points = [PolarLocation(latitude=lat, longitude=lon) for lon, lat in outer_ring.GetPoints()]
+ yield PolarPolygon(points, name=name, location_type=location_type), feature_id
+
+ else:
+ # Apparently, no other geometries appear in charts
+ raise NotImplementedError(f"Cannot handle geometry type {ogr.GeometryTypeToName(geometry_type)}")
diff --git a/pyrate/pyrate/common/math/__init__.py b/pyrate/pyrate/common/math/__init__.py
new file mode 100644
index 0000000..2a13c64
--- /dev/null
+++ b/pyrate/pyrate/common/math/__init__.py
@@ -0,0 +1,5 @@
+"""Provides mathematical classes that are useful throughout Pyrate's codebase."""
+
+from .gaussian import Gaussian
+
+__all__ = ["Gaussian"]
diff --git a/pyrate/pyrate/common/math/gaussian.py b/pyrate/pyrate/common/math/gaussian.py
new file mode 100644
index 0000000..df09512
--- /dev/null
+++ b/pyrate/pyrate/common/math/gaussian.py
@@ -0,0 +1,132 @@
+"""This module includes an abstraction of gaussian distributions."""
+
+# Typing
+from typing import cast
+
+# Mathematics
+from numpy import ndarray
+from scipy.stats import multivariate_normal
+
+
+class Gaussian:
+
+ """A weighted multivariate gaussian distribution.
+
+ Examples:
+ A Gaussian can simply be created from a mean vector and a covariance matrix (and an optional weight):
+
+ >>> from numpy import array
+ >>> from numpy import vstack
+ >>> mean = vstack([0.0, 0.0])
+ >>> covariance = array([[1.0, 0.0], [0.0, 1.0]])
+ >>> N = Gaussian(mean, covariance, weight=1.0)
+ >>> N(vstack([0.0, 0.0])) # doctest: +ELLIPSIS
+ 0.159...
+
+ Two Gaussians are equal if and only if all attributes are equal:
+
+ >>> N == N
+ True
+ >>> other_covariance = array([[99.0, 0.0], [0.0, 99.0]])
+ >>> other_N = Gaussian(mean, other_covariance, weight=1.0)
+ >>> other_N(vstack([10.0, 10.0])) # doctest: +ELLIPSIS
+ 0.000585...
+ >>> N == other_N
+ False
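+
+ The filter-notation shorthands simply alias the attributes:
+
+ >>> N.x is N.mean and N.P is N.covariance and N.w == N.weight
+ True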
+
+ Args:
+ mean: The mean of the distribution as column vector, of dimension ``(n, 1)``
+ covariance: The covariance matrix of the distribution, of dimension ``(n, n)``
+ weight: The weight of the distribution, e.g. within a mixture model
+
+ References:
+ - https://en.wikipedia.org/wiki/Multivariate_normal_distribution
+ """
+
+ def __init__(self, mean: ndarray, covariance: ndarray, weight: float = 1.0) -> None:
+ # Sanity checks on given parameters
+ assert len(mean.shape) == 2 and mean.shape[1] == 1, "Mean needs to be a column vector!"
+ assert len(covariance.shape) == 2, "Covariance needs to be a 2D matrix!"
+ assert covariance.shape[0] == covariance.shape[1], "Covariance needs to be a square matrix!"
+ assert covariance.shape[0] == mean.shape[0], "Dimensions of mean and covariance don't fit!"
+
+ # Assign values
+ self.mean = mean
+ self.covariance = covariance
+ self.weight = weight
+
+ # ######################################
+ # Properties following a common filter notation
+ # pylint: disable=invalid-name
+ @property
+ def x(self) -> ndarray:
+ """A shorthand for the distribution's mean.
+
+ Returns:
+ The mean, of dimension ``(n, 1)``
+ """
+
+ return self.mean
+
+ @x.setter
+ def x(self, value: ndarray) -> None:
+ self.mean = value
+
+ @property
+ def P(self) -> ndarray:
+ """A shorthand for the distribution's covariance matrix.
+
+ Returns:
+ The covariance, of dimension ``(n, n)``
+ """
+
+ return self.covariance
+
+ @P.setter
+ def P(self, value: ndarray) -> None:
+ self.covariance = value
+
+ @property
+ def w(self) -> float:
+ """A shorthand for the distribution's weight.
+
+ Returns:
+ The weight of this distribution
+ """
+
+ return self.weight
+
+ @w.setter
+ def w(self, value: float):
+ self.weight = value
+
+ def __call__(self, value: ndarray) -> float:
+ """Evaluate the gaussian at the given location.
+
+ Args:
+ value: Where to evaluate the gaussian, of dimension ``(n, 1)``
+
+ Returns:
+ The probability density at the given location
+ """
+
+ # Compute weighted probability density function
+ distribution = multivariate_normal(mean=self.mean.T[0], cov=self.covariance)
+
+ return self.weight * cast(float, distribution.pdf(value.T[0]))
+
+ def __eq__(self, other) -> bool:
+ """Checks if two multivariate normal distributions are equal.
+
+ Args:
+ other: The distribution to compare with
+
+ Returns:
+ Whether the two distributions are the same
+ """
+
+ return (
+ cast(bool, (self.mean == other.mean).all())
+ and cast(bool, (self.covariance == other.covariance).all())
+ and self.weight == other.weight
+ )
diff --git a/pyrate/pyrate/common/raster_datasets/__init__.py b/pyrate/pyrate/common/raster_datasets/__init__.py
new file mode 100644
index 0000000..2c3af5f
--- /dev/null
+++ b/pyrate/pyrate/common/raster_datasets/__init__.py
@@ -0,0 +1,39 @@
+"""The module contains methods to access raster data sets (as opposed to vector data sets).
+
+The :class:`~pyrate.common.raster_datasets.geo_datasets.DataSetAccess` allows reading data arrays from
+raster datasets using query windows. It also computes such windows for a given point and radius.
+However, client code will often want to use some transformed properties of these datasets.
+To that end, a concrete :class:`~pyrate.common.raster_datasets.transformer_base.BaseTransformer` can be used,
+either implemented in the :mod:`~pyrate.common.raster_datasets.transformers_concrete` module
+or in some client code.
+Transformers query some data source for given nodes and radii of influence and then return
+transformed property vectors for the query nodes.
+By the way: Instances of :class:`~pyrate.plan.graph.geo_graph.GeoNavigationGraph`
+directly accept instances of :class:`~pyrate.common.raster_datasets.transformer_base.BaseTransformer`
+to generate properties for nodes in the graph.
+
+.. inheritance-diagram::
+ pyrate.common.raster_datasets.transformers_concrete.ConstantTransformer
+ pyrate.common.raster_datasets.transformers_concrete.BathymetricTransformer
+ :parts: 2
+ :top-classes: pyrate.common.raster_datasets.transformer_base.BaseTransformer
+
+You might want to set some options for the underlying *rasterio*/*GDAL* drivers, like
+`GDAL_CACHEMAX `_
+("If its value is small (less than 100000), it is assumed to be measured in megabytes, otherwise in bytes."):
+
+.. code-block:: python
+
+ with rasterio.Env(GDAL_CACHEMAX=1024):
+ with DataSetAccess(...) as data_set:
+ pass # do cool stuff
+
+"""
+
+from .geo_datasets import DataSetAccess
+from .transformer_base import BaseDatasetTransformer
+from .transformer_base import BaseTransformer
+
+# Don't directly expose transformers_concrete here to keep it simple
+
+__all__ = ["BaseTransformer", "BaseDatasetTransformer", "DataSetAccess"]
diff --git a/pyrate/pyrate/common/raster_datasets/geo_datasets.py b/pyrate/pyrate/common/raster_datasets/geo_datasets.py
new file mode 100644
index 0000000..a88891f
--- /dev/null
+++ b/pyrate/pyrate/common/raster_datasets/geo_datasets.py
@@ -0,0 +1,415 @@
+"""
+This module provides an abstraction over geographical data sets which can be used to efficiently retrieve
+properties for many nodes on a possibly irregular grid.
+
+The implementation is currently single threaded but should be straightforward to parallelize.
+Many commonly used datasets require a couple of gigabytes of memory, so make sure you do not open too many at
+once.
+"""
+
+# Standard library
+import math
+
+# Typing
+from typing import Any
+from typing import cast
+from typing import ContextManager
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+# Scientific
+from numpy import allclose
+from numpy import clip
+from numpy import hstack
+from numpy import linspace
+from numpy import meshgrid
+from numpy import ndarray
+
+# Raster data set library
+import rasterio
+import rasterio.coords
+import rasterio.io
+import rasterio.windows
+from rasterio.windows import Window
+
+# Geographic helpers
+from pyrate.plan.geometry.geospatial import MAXIMUM_EARTH_CIRCUMFERENCE
+from pyrate.plan.geometry.helpers import meters2rad
+
+
+class DataSetAccess(ContextManager["DataSetAccess"]):
+ """Represents a global raster geo dataset that can be efficiently queried for a set of nodes.
+
+ See :meth:`~.get_bounding_windows_around` for why two bounding boxes/windows are supported by some
+ methods.
+
+ Notes:
+ The type of the data that is read depends on the dataset that is used.
+
+ Warning:
+ This class shall only be used as a context manager (using the `with`-syntax) in order to initialize
+ and clean up any resources that are required and possibly several gigabytes large. The behaviour of
+ this class is undefined after the context was left, as the internal data array is deleted to free up
+ memory. Also, the data is only guaranteed to be available once the context was entered.
+
+ Warning:
+ There are many subtle pitfalls with processing geographical datasets. They are only alleviated by
+ software abstractions like *GDAL*, *rasterio* and also *Pyrate* to some degree and are often badly
+ documented. For instance: All raster datasets define a grid (well, the *raster*). The creator(s) of
+ the dataset define whether the data at each entry refers to the center of the cell or the grid line of
+ the raster [1]. However, in a lot of software handling these datasets (like *GDAL* and *rasterio*),
+ it is not always clear what interpretation is used. *rasterio* seems to always assume grid-centered
+ values, and so does Pyrate. This could lead to small discrepancies between the data extracted from the
+ dataset and its real structure. On large-scale queries spanning many cells, this will not be a
+ problem.
+
+ While **many** libraries like *rasterio* exist for querying raster datasets at grid-like points or even
+ irregular points, none seems to allow for getting all raw data points of some datasets around some point
+ with some radius [2]. This was, however, required since we wanted to calculate statistics like "number of
+ points below sea level" for all data points around a given center point and radius. If we interpolated the
+ dataset to the center point, we would only get some local information and not cover the entire area within
+ the radius. The following libraries were investigated but did not provide the needed functionality:
+
+ - `GDALRasterBand tool from GDAL `_
+
+ - `grdtrack tool from GMT `_
+ or `its Python version `_
+
+ - `geonum's topodata module `_
+
+ Args:
+ dataset: A :mod:`rasterio` raster dataset to read from or path to the file to open.
+ It must cover the entire planet with ``(0, 0)`` degrees being in the center.
+ raster_band_index: the index of the band (the "layer") of the raster dataset (*GDAL*/*rasterio*
+ terminology)
+
+ Attributes:
+ dataset: the underlying :mod:`rasterio` dataset; read-only
+ raster_band_index: the index of the band (the "layer") of the raster dataset (*GDAL*/*rasterio*
+ terminology); read-only
+
+ References:
+ - [1] This concept is also called grid- vs cell-registration. See also
+ `this Earth Science post `__.
+ - [2] Some `kind answers `__
+ on the *gdal-dev* mailing list that did not help.
+ """
+
+ #: The bounding box that any dataset wrapped by this class must match
+ _DATASET_BOUNDING_BOX = rasterio.coords.BoundingBox(left=-180.0, bottom=-90.0, right=180.0, top=90.0)
+
+ def __init__(self, dataset: Union[str, rasterio.DatasetReader], raster_band_index: int = 1) -> None:
+ self.dataset = rasterio.open(dataset) if isinstance(dataset, str) else dataset
+
+ self.raster_band_index = raster_band_index
+
+ self._data_array: ndarray
+ self._dataset_window = Window.from_slices(
+ rows=(0, self.dataset.height),
+ cols=(0, self.dataset.width),
+ )
+
+ assert allclose(DataSetAccess._DATASET_BOUNDING_BOX, self.dataset.bounds, atol=1e-12), (
+ "the dataset needs to cover the entire planet with (0, 0) degrees being in the center but was "
+ + repr(self.dataset.bounds)
+ )
+
+ def get_bounding_windows_around(
+ self, center_latitude: float, center_longitude: float, radius: float
+ ) -> Tuple[Window, Optional[Window]]:
+ """Computes a bounding boxes/windows around the given center containing all points within the radius.
+
+ This method will return one bounding box per coordinate pair for *most* locations on earth.
+ However, near longitude +180°/-180° the dataset wraps around at the sides, and as such, the queried
+ area may have to be covered by two separate windows into the dataset. Due to the internal design of
+ numpy, such a "wrapping" query cannot be expressed in a single slice [1] [2]. Thus, this method
+ might return one or two windows.
+ Correspondingly, :meth:`~.lat_lon_meshgrid_for` and :meth:`~.data_for` take one or two windows each.
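+
+ A minimal sketch of the intended call pattern (``data_set`` is an opened instance; the coordinates are assumptions):
+
+ .. code-block:: python
+
+ window, extra = data_set.get_bounding_windows_around(math.radians(52.5), math.radians(179.9), 50_000)
+ data = data_set.data_for(window, extra)  # a single array, even if the area wrapped around +/-180°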
+
+ Args:
+ center_latitude: The latitude of the center of the box, in radians
+ center_longitude: The longitude of the center of the box, in radians
+ radius: The radius around the center that the box should include. Strictly positive, in meters.
+
+ Returns:
+ One or two integer bounding windows as a tuple; might be slightly overestimated (by design)
+
+ References:
+ - [1] Some `answers `_
+ on the *gdal-dev* mailing list that did not help
+
+ - [2] Numpy docs on
+ `Basic Slicing and Indexing
+ `_
+ """
+
+ # pylint: disable=too-many-locals
+
+ assert radius > 0.0, "radius must be strictly positive"
+
+ center_longitude = math.degrees(center_longitude)
+ center_latitude = math.degrees(center_latitude)
+
+ delta_lat = math.degrees(meters2rad(radius)) # uniform across the globe
+ assert delta_lat > 0.0, "is the input in radians?"
+
+ # Better slightly overestimate it by using the earth circumference at the equator
+ # That is about 1% larger than mean circumference
+ earth_circumference_at_lat = math.cos(math.radians(center_latitude)) * MAXIMUM_EARTH_CIRCUMFERENCE
+ delta_lon = radius / earth_circumference_at_lat * 360.0
+ assert delta_lon > 0.0, "is the input in radians?"
+
+ # Top & bottom are simple: just clip the latitude at the poles
+ # Left & right are a bit trickier since that possibly requires the creation of two slices
+
+ # These four coordinates determine the primary window
+ left = center_longitude - delta_lon
+ bottom = max(center_latitude - delta_lat, -90.0)
+ right = center_longitude + delta_lon
+ top = min(center_latitude + delta_lat, +90.0)
+
+ # `additional_window` determines the extra query if wrapping near longitude (+/-) 180° occurs
+ # `window` is geographically more west-ward than `additional_window`, if the latter exists
+ # Keep in mind though, that the common border might lie on +/- 180° longitude and thus on the
+ # border of the dataset/data array
+ window: Window
+ additional_window: Optional[Window]
+
+ # Handle the horizontal overflow of the window
+ # This also handles the case where 2*radius is larger than the width of the dataset
+ if left < -180.0: # Overflow on the left
+ overshoot = clip(-(left + 180), 0.0, 360.0)
+ left_wrapped = +180 - overshoot
+ # It might be the case that it also overflows on the right if the overall radius was so large
+ # that the window(s) would wrap around the world more than once. This can especially happen
+ # at high latitudes, where horizontally wrapping around the globe can happen at arbitrarily small
+ # radii/window sizes near the poles. We thus clip it to (in sum) only cover the world once.
+ right = clip(right, -180.0, left_wrapped)
+
+ # If the bounds overflow on the left, make the wrapped (i.e. the non-clipped) one the primary
+ # window, as it is geographically more west-ward
+ window = self.dataset.window(left_wrapped, bottom, +180.0, top)
+ additional_window = self.dataset.window(
+ -180.0, bottom, right, top
+ ) # Possibly a window with zero width
+
+ elif right > +180.0: # Overflow on the right
+ overshoot = clip(right - 180, 0.0, 360.0)
+ right_wrapped = -180 + overshoot
+ # See the previous case "Overflow on the left" for an analogous explanation
+ left = clip(left, right_wrapped, +180.0)
+
+ # If the bounds overflow on the right, make the clipped (i.e. the non-wrapped) one the primary
+ # window, as it is geographically more west-ward
+ window = self.dataset.window(left, bottom, +180.0, top)
+
+ # `right_wrapped == -180` similar to above cannot occur here since then we must have landed in the
+ # `left < -180.0` branch instead
+ assert right_wrapped > -180, "The window would extend zero meters from east to west"
+
+ additional_window = self.dataset.window(-180.0, bottom, right_wrapped, top)
+
+ else: # No overflow at the bounds occurred, so we only need one `window`
+ window = self.dataset.window(left, bottom, right, top)
+ additional_window = None
+
+ # Jointly round the window(s) to integers and return the result
+ return self._round_windows_ceil(window, additional_window)
+
+ def _round_windows_ceil(
+ self, window_1: Window, window_2: Optional[Window]
+ ) -> Tuple[Window, Optional[Window]]:
+ """Rounds one or two windows to integer types and avoids an overlap to be created.
+
+ Always rounds to the larger windows if non-integer bounds are given. This guarantees that at least all
+ points initially given as the window(s) are also included in the resulting windows.
+
+ The actual rounding is done in :func:`rasterio.windows.window_index`.
+
+ Args:
+ window_1: The left window
+ window_2: An optional right window (geographically touches the eastern/right side of
+ ``window_1``). Keep in mind though, that the common border might lie on +/- 180°
+ longitude and thus on the border of the dataset/data array.
+
+ Returns:
+ One or two windows, rounded to :class:`int` values.
+ Due to rounding, this method may return only a single window even if two were initially provided.
+ """
+ (_, (_, w1_old_right)) = window_1.toranges()
+
+ # Round the first window
+ # The actual rounding is done in :func:`rasterio.windows.window_index`
+ window_1 = Window.from_slices(*cast(Tuple[slice, slice], rasterio.windows.window_index(window_1)))
+ # The rounding may move it beyond the bounds of the dataset, so clip it at the array borders
+ window_1 = window_1.intersection(self._dataset_window)
+
+ if window_2 is not None:
+ # Adjust `window_2` in the correct direction for a possibly created overlap
+ # Afterward, round it too
+
+ # Unpack the bounds that we will work with
+ ((w1_top, w1_bottom), (_, w1_right)) = window_1.toranges()
+ (_, (left, right)) = window_2.toranges()
+
+ # Correct for the horizontal change that was induced by enlarging the `window_1`
+ # This will make `window_2` smaller if their common boundary was not already on a cell border
+ left += w1_right - w1_old_right
+
+ # Round away from the existing `window_1`, i.e. to the right/geographically west-ward
+ left = int(math.ceil(left))
+ right = int(math.ceil(right))
+
+ # There is a 1-cell overlap between the windows that was created by rounding, i.e.
+ # ``right == w1_left``. Therefore, we cut one index off.
+ right -= 1
+
+ # The case ``left == w1_right`` cannot occur since the second window is always guaranteed to be to
+ # the right of the first. We still check that though:
+ assert (
+ left - w1_right
+ ) % self.dataset.width == 0, "this can never happen if the second window is truly to the right"
+
+ # Make sure that the extra window is non-empty and if not, just discard it
+ if right - left <= 0:
+ window_2 = None
+
+ else:
+ # We simply adopt the top and bottom bounds as they are the same in both windows
+ window_2 = Window.from_slices(rows=(w1_top, w1_bottom), cols=(left, right))
+
+ # May become obsolete if https://github.com/mapbox/rasterio/pull/2090 gets accepted
+ def to_int(win: Window) -> Window:
+ ((float_top, float_bottom), (float_left, float_right)) = win.toranges()
+ return Window.from_slices(
+ rows=(int(float_top), int(float_bottom)), cols=(int(float_left), int(float_right))
+ )
+
+ return to_int(window_1), None if window_2 is None else to_int(window_2)
+
+ def _lat_lon_meshgrid_single(self, window: Window, radians: bool) -> Tuple[ndarray, ndarray]:
+ """Creates a meshgrid with all coordinates the data set has entries for in the given window.
+
+ Args:
+ window: as returned by :meth:`~get_bounding_windows_around`
+ radians: if ``True`` return in radians, else in degrees
+
+ Returns:
+ A latitude, longitude meshgrid matching the data returned by :meth:`~_data_single`
+ """
+
+ # These values are in degrees
+ longitude_left, latitude_up = self.dataset.xy(window.row_off, window.col_off)
+ longitude_right, latitude_down = self.dataset.xy(
+ window.row_off + window.height, window.col_off + window.width
+ )
+
+ if radians:
+ longitude_left = math.radians(longitude_left)
+ latitude_up = math.radians(latitude_up)
+ longitude_right = math.radians(longitude_right)
+ latitude_down = math.radians(latitude_down)
+
+ coords_lat = linspace(latitude_up, latitude_down, window.height)
+ coords_lon = linspace(longitude_left, longitude_right, window.width)
+
+ coords_lat, coords_lon = meshgrid(coords_lat, coords_lon, indexing="ij")
+ assert coords_lat.shape == coords_lon.shape
+
+ return coords_lat, coords_lon
+
+ def lat_lon_meshgrid_for(
+ self,
+ window: Window,
+ additional_window: Optional[Window],
+ radians: bool,
+ ) -> Tuple[ndarray, ndarray]:
+ """Creates a meshgrid with all coordinates the data set has entries for in the given windows.
+
+ Args:
+ window: as returned by :meth:`~get_bounding_windows_around`
+ additional_window: as returned by :meth:`~get_bounding_windows_around`
+ radians: if ``True`` return in radians, else in degrees
+
+ Returns:
+ A single latitude, longitude meshgrid matching the data returned by :meth:`~data_for`
+ """
+
+ coords_lat, coords_lon = self._lat_lon_meshgrid_single(window, radians)
+
+ # append additional window (only if required)
+ if additional_window is not None:
+ coords_lat_additional, coords_lon_additional = self._lat_lon_meshgrid_single(
+ additional_window, radians
+ )
+ coords_lat = hstack((coords_lat, coords_lat_additional))
+ coords_lon = hstack((coords_lon, coords_lon_additional))
+
+ return coords_lat, coords_lon
+
+ def _data_single(self, window: Window) -> ndarray:
+ """Get all data points within the given window.
+
+ Notes:
+ The type of the data that is read depends on the dataset that is used.
+ See ``self.dataset.dtypes``.
+
+ Warnings:
+ Never modify the data returned by this method directly! It is only a view into the raw data.
+
+ Args:
+ window: A window as returned by :meth:`~get_bounding_windows_around`
+
+ Returns:
+ The 2D data array matching the coordinates returned by :meth:`~_lat_lon_meshgrid_single`
+ """
+ assert hasattr(self, "_data_array"), "DataSetAccess must be used as a context manager and be open"
+
+ # One could also read the data like this:
+ # self.dataset.read(self.raster_band_index, window=window)
+ # however, this does not map the file into memory and is thus about 10x slower than directly using a
+ # numpy array
+
+ # keep in mind that the slice will create a view into the raw data, and not a copy
+ # this is intentional to make the data access fast
+ data: ndarray = self._data_array[window.toslices()]
+ assert data.shape == (window.height, window.width)
+ return data
+
+ def data_for(self, window: Window, additional_window: Optional[Window]) -> ndarray:
+ """Get all data points within the given windows as a single array.
+
+ Notes:
+ The type of the data that is read depends on the dataset that is used.
+ See ``self.dataset.dtypes``.
+
+ Warnings:
+ Never modify the data returned by this method directly! It is only a view into the raw data.
+
+ Args:
+ window: as returned by :meth:`~get_bounding_windows_around`
+ additional_window: as returned by :meth:`~get_bounding_windows_around`
+
+ Returns:
+ The single 2D data array matching the coordinates returned by :meth:`~lat_lon_meshgrid_for`
+ """
+
+ result = self._data_single(window)
+
+ # append additional window (only if required)
+ if additional_window is not None:
+ additional_result = self._data_single(additional_window)
+ result = hstack((result, additional_result))
+
+ return result
+
+ def __enter__(self) -> "DataSetAccess":
+ self.dataset.__enter__()
+ self._data_array = self.dataset.read(self.raster_band_index)
+ self._data_array.flags.writeable = False # make this read-only to prevent accidents
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb) -> Any:
+ del self._data_array
+ return self.dataset.__exit__(exc_type, exc_val, exc_tb)
diff --git a/pyrate/pyrate/common/raster_datasets/transformer_base.py b/pyrate/pyrate/common/raster_datasets/transformer_base.py
new file mode 100644
index 0000000..849b9d1
--- /dev/null
+++ b/pyrate/pyrate/common/raster_datasets/transformer_base.py
@@ -0,0 +1,147 @@
+"""
+This module provides the base classes for the transformers, which extract property vectors for given query
+points from (usually) a geographical dataset.
+"""
+
+# Standard library
+from abc import ABC
+from abc import abstractmethod
+from itertools import repeat
+
+# Typing
+from typing import Any
+from typing import cast
+from typing import ContextManager
+from typing import Iterable
+from typing import Sequence
+from typing import Tuple
+from typing import Union
+
+# Scientific
+from numpy import array
+from numpy import empty
+from numpy import ndarray
+from numpy.typing import DTypeLike
+from pandas import DataFrame
+
+# Progress bar
+from tqdm import tqdm
+
+# Typing helpers
+from .geo_datasets import DataSetAccess
+
+
+class BaseTransformer(ContextManager["BaseTransformer"], ABC):
+ """This class allows to query some data source for a property at each node.
+
+ Subclasses will usually override :meth:`_get_transformed_at` in order to return the data vector for some
+ specific node with given latitude and longitude. Note that the result of calling
+ :meth:`~get_transformed_at_nodes` is a :class:`pandas.DataFrame`, in order to allow a single transformer
+ to return multiple values for each vector if this simplifies or speeds up calculations.
+
+ Querying all nodes can optionally print a progress bar to the command line.
+
+ Warning:
+ This class (and any subclasses) shall only be used as a context manager. See :class:`~DataSetAccess`
+ for the reasons for this.
+
+ Args:
+ structured_dtype: For each column in the query result, a tuple consisting of a human-readable name and
+ the (numpy) data type of the property. This follows the syntax of NumPy's
+ `"Structured Datatypes" `_
+ .
+
+ See Also:
+ BaseDatasetTransformer
+ """
+
+ def __init__(self, structured_dtype: Sequence[Tuple[str, DTypeLike]]) -> None:
+ super().__init__()
+ self.structured_dtype = cast(DTypeLike, structured_dtype)
+
+ @abstractmethod
+ def _get_transformed_at(self, latitude: float, longitude: float, radius: float) -> Tuple:
+ """Get the property at some specific node, given by its geographical location.
+
+ Args:
+ latitude: The geographical location of the node, in radians
+ longitude: The geographical location of the node, in radians
+ radius: The radius of the area that this node shall represent, in meters
+
+ Returns:
+ A single property vector for each single node.
+ """
+
+ def get_transformed_at_nodes(
+ self,
+ latitudes: ndarray,
+ longitudes: ndarray,
+ radius: Union[float, ndarray],
+ show_progress: bool = False,
+ ) -> DataFrame:
+ """Computes the property for each individual node. Prints a progress bar by default.
+
+ Args:
+ latitudes: latitude values of all nodes, in radians
+ longitudes: longitude values of all nodes, in radians
+ radius: the radius around each node that it should represent, in meters; may be an array of shape
+ `(num_nodes, )` or a single scalar if the radius is uniform
+ show_progress: whether to print a nice and simple progress bar
+
+ Returns:
+ A :class:`pandas.DataFrame` of all values generated for each (latitude, longitude) node, with shape
+ `(number of nodes, number of properties per node)`
+ """
+
+ assert latitudes.shape == longitudes.shape
+
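+ # a scalar radius is broadcast to all nodes; an array is expected to provide one radius per node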
+ radii = repeat(radius) if isinstance(radius, float) else cast(Iterable[float], radius)
+
+ if len(latitudes) > 0:
+ result = [
+ self._get_transformed_at(latitude, longitude, rad)
+ for latitude, longitude, rad in tqdm(
+ zip(latitudes, longitudes, radii),
+ unit=" nodes",
+ unit_scale=True,
+ colour="white",
+ total=len(latitudes),
+ disable=not show_progress,
+ )
+ ]
+ else:
+ result = empty((0, len(self.structured_dtype))) # type: ignore
+
+ assert len(result) == latitudes.shape[0]
+
+ # this also ensures that all property vectors have the same length
+ structured_array = array(result, dtype=self.structured_dtype)
+ return DataFrame.from_records(structured_array)
+
+ def __enter__(self) -> "BaseTransformer":
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb) -> Any:
+ return False
+
+
+class BaseDatasetTransformer(BaseTransformer, ABC):
+ """A specialized dataset transformer which extracts properties from a geographical dataset.
+
+ Args:
+ structured_dtype: see constructor argument ``structured_dtype`` in :class:`BaseTransformer`
+ dataset: A dataset to read from.
+ It is automatically managed when this class is used as a context manager.
+ """
+
+ def __init__(self, structured_dtype: Sequence[Tuple[str, DTypeLike]], dataset: DataSetAccess) -> None:
+ super().__init__(structured_dtype)
+ self.dataset = dataset
+
+ def __enter__(self) -> "BaseDatasetTransformer":
+ self.dataset.__enter__()
+ return self
+
+ def __exit__(self, exc_type, exc_val, exc_tb) -> Any:
+ super().__exit__(exc_type, exc_val, exc_tb)
+ return self.dataset.__exit__(exc_type, exc_val, exc_tb)
diff --git a/pyrate/pyrate/common/raster_datasets/transformers_concrete.py b/pyrate/pyrate/common/raster_datasets/transformers_concrete.py
new file mode 100644
index 0000000..49600f5
--- /dev/null
+++ b/pyrate/pyrate/common/raster_datasets/transformers_concrete.py
@@ -0,0 +1,163 @@
+"""
+This module exposes specific property transformers.
+
+See the `data repository `_ for details on
+the actual data sets referenced and used here.
+"""
+
+# Standard library
+from enum import auto
+from enum import Enum
+
+# Typing
+from typing import Any
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Tuple
+
+# Scientific
+import numpy
+from numpy import clip
+from numpy import count_nonzero
+from numpy import extract
+from numpy import float32
+from numpy.typing import DTypeLike
+
+# Helpers and own typing
+from ...plan.geometry.helpers import haversine_numpy
+from .geo_datasets import DataSetAccess
+from .transformer_base import BaseDatasetTransformer
+from .transformer_base import BaseTransformer
+
+
+class ConstantTransformer(BaseTransformer):
+
+ """A very simple transformer class to fill a property with a constant value.
+
+ Args:
+ value: The constant value to be added to each node
+ dtype: The numpy data type of the resulting property vector field
+ name: The name of the property. If set to ``None``, a reasonable default name will be used.
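+
+ Example:
+ A minimal usage sketch; the coordinate and radius values below are made up purely for illustration:
+
+ >>> from numpy import array, float64
+ >>> with ConstantTransformer(42.0, float64) as transformer:
+ ...     frame = transformer.get_transformed_at_nodes(
+ ...         array([0.1, 0.2]), array([0.3, 0.4]), radius=100.0
+ ...     )
+ >>> len(frame)
+ 2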
+ """
+
+ # pylint: disable=too-few-public-methods
+
+ # idea: could make this class explicitly generic in the type signatures
+ def __init__(self, value: Any, dtype: DTypeLike, name: Optional[str] = None) -> None:
+ name = f"constant value of {value}" if name is None else name
+ super().__init__([(name, dtype)])
+ self.value = value
+
+ def _get_transformed_at(self, latitude: float, longitude: float, radius: float) -> Tuple[Any]:
+ return (self.value,) # return a tuple
+
+
+class BathymetricTransformer(BaseDatasetTransformer):
+
+ """Extracts values from a given bathymetric datasets, i.e. depth information.
+
+ The datatype for all modes is ``np.float32``. Raises a :class:`ValueError` if no data is found for any
+ given query ``(latitude, longitude, radius)``.
+
+ Args:
+ dataset: the data set to be used
+ modes: a sequence of modes of the values to be extracted; see :class:`~BathymetricTransformer.Modes`
+ """
+
+ # pylint: disable=too-few-public-methods
+
+ #: The depth/elevation below which a point is considered navigable, in meters;
+ #: positive values are above sea level, negative ones below
+ NAVIGABLE_BELOW: float = -5.0
+
+ class Modes(Enum):
+ """The different modes how depth values can be extracted."""
+
+ AVERAGE_DEPTH = auto()
+ """The average depth, weighted with more emphasis on the area near the query point, in meters.
+
+ This is a visualisation of the mode when applied to the `Earth 2014
+ `_
+ topographic/bathymetric dataset:
+
+ .. image:: plot_global_bathymetry_depth.png
+ :alt: AVERAGE_DEPTH mode when applied to the Earth 2014 topographic/bathymetric dataset
+ """
+
+ FRACTION_NAVIGABLE = auto()
+ """The fraction of data points at which a boat can navigate, as a scalar in :math:`[0, 1]`.
+
+ See :attr:`BathymetricTransformer.NAVIGABLE_BELOW` for what is considered navigable.
+ Only the topographic/bathymetric height/depth value is used for determining navigability as an
+ approximation, and not actual water coverage.
+ To not count the Netherlands as navigable, the value is set to a value a little bit below zero,
+ where zero means sea level.
+
+ This is a visualisation of the mode when applied to the `Earth 2014
+ `_
+ topographic/bathymetric dataset:
+
+ .. image:: plot_global_bathymetry_fraction_navigable.png
+ :alt: FRACTION_NAVIGABLE mode when applied to the Earth 2014 topographic/bathymetric dataset
+ """
+
+ @property
+ def column_name(self) -> str:
+ """Returns the name that is used for the dataframe column."""
+ return f"bathymetric data ({self.name})"
+
+ def __init__(self, dataset: DataSetAccess, modes: Sequence[Modes] = tuple(Modes)) -> None:
+ structured_dtype = [(mode.column_name, float32) for mode in modes]
+ super().__init__(structured_dtype, dataset)
+ self.modes = modes
+
+ def _get_transformed_at(self, latitude: float, longitude: float, radius: float) -> Tuple:
+ # pylint: disable=too-many-locals
+ assert radius > 0, "the radius must be positive"
+
+ windows = self.dataset.get_bounding_windows_around(
+ center_latitude=latitude, center_longitude=longitude, radius=radius
+ )
+ lats, lons = self.dataset.lat_lon_meshgrid_for(*windows, radians=True)
+ assert lats.shape == lons.shape
+
+ # the depth data is negative for points below sea level and positive for landmasses
+ depth = self.dataset.data_for(*windows) # as int16 !
+ assert depth.shape == lats.shape
+
+ # get the distances to all points
+ distances = haversine_numpy(lats, lons, latitude, longitude)
+
+ result: List[float] = [] # will be transformed to float32 later
+
+ for mode in self.modes: # respect the ordering
+
+ if mode is BathymetricTransformer.Modes.AVERAGE_DEPTH:
+ # we simply use the inverse distance as a weight for the weighted arithmetic mean
+ weights = clip(1.0 - (distances / radius), 0.0, 1.0)
+ try:
+ average = numpy.average(depth.astype(float32), weights=weights)
+ except ZeroDivisionError as error:
+ raise ValueError(
+ f"no points in radius {radius} m around (lat, lon) = {(latitude, longitude)} rad"
+ ) from error
+ result.append(average)
+
+ elif mode is BathymetricTransformer.Modes.FRACTION_NAVIGABLE:
+ # we use the distance to cut off unwanted entries
+ depth_within_radius = extract(distances <= radius, depth) # also flattens
+ if len(depth_within_radius) == 0:
+ raise ValueError(
+ f"no points in radius {radius} m around (lat, lon) = {(latitude, longitude)} radians"
+ )
+ number_of_navigable = count_nonzero(
+ depth_within_radius <= BathymetricTransformer.NAVIGABLE_BELOW
+ )
+ fraction = number_of_navigable / len(depth_within_radius)
+ result.append(fraction)
+
+ else: # pragma: no branch
+ raise ValueError(f"invalid mode {mode}") # pragma: no cover
+
+ return tuple(result)
diff --git a/pyrate/pyrate/common/testing/__init__.py b/pyrate/pyrate/common/testing/__init__.py
new file mode 100644
index 0000000..e8c52d1
--- /dev/null
+++ b/pyrate/pyrate/common/testing/__init__.py
@@ -0,0 +1,39 @@
+"""This module contains helpers for writing and running tests.
+
+In particular, it contains flags for detecting specific scenarios (like the test running on a CI platform)
+and a variety of custom strategies for *Hypothesis*.
+"""
+
+# Standard library
+from os import environ
+
+# Spatialite for environment setup
+from pyrate.common.charts import SpatialiteDatabase
+
+
+def env(name: str) -> bool:
+ """Checks if an environment variable exists and its value in lower case is ``{yes, true, t, 1}``.
+
+ Args:
+ name: The name of the environment variable to check
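+
+ Example:
+ A small illustration; the variable name ``PYRATE_EXAMPLE_FLAG`` is only made up for this example:
+
+ >>> from os import environ
+ >>> environ["PYRATE_EXAMPLE_FLAG"] = "Yes"
+ >>> env("PYRATE_EXAMPLE_FLAG")
+ True
+ >>> del environ["PYRATE_EXAMPLE_FLAG"]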
+ """
+
+ return environ.get(name, "").lower() in ("yes", "true", "t", "1")
+
+
+#: Set to ``True`` if running on a CI server (best effort)
+IS_CI: bool = env("CI") or env("CONTINUOUS_INTEGRATION")
+
+#: Whether to intensify tests at the expense of more time, e.g. with more example data for hypothesis
+IS_EXTENDED_TESTING: bool = env("EXTENDED_TESTING")
+
+
+#: True iff the Spatialite SQLite extension is installed & can be used
+SPATIALITE_AVAILABLE: bool
+try:
+ with SpatialiteDatabase(":memory:"):
+ pass # make sure it is properly closed
+except RuntimeError: # pragma: no cover
+ SPATIALITE_AVAILABLE = False
+else:
+ SPATIALITE_AVAILABLE = True
diff --git a/pyrate/pyrate/common/testing/strategies/__init__.py b/pyrate/pyrate/common/testing/strategies/__init__.py
new file mode 100644
index 0000000..07aa60b
--- /dev/null
+++ b/pyrate/pyrate/common/testing/strategies/__init__.py
@@ -0,0 +1,18 @@
+"""This module provides testing helpers like hypothesis strategies.
+
+Some typecasts of custom strategies using :func:`hypothesis.strategies.composite` are actually wrong but
+required to paint over some Mypy shortcomings
+(see `hypothesis#2748 `_).
+"""
+
+# Typing
+from typing import Callable
+from typing import TypeVar
+
+# Hypothesis typing
+from hypothesis.strategies import SearchStrategy
+
+
+#: The type of the draw parameter to :func:`hypothesis.strategies.composite` strategies
+T = TypeVar("T")
+DrawType = Callable[[SearchStrategy[T]], T]
diff --git a/pyrate/pyrate/common/testing/strategies/dynamic_system.py b/pyrate/pyrate/common/testing/strategies/dynamic_system.py
new file mode 100644
index 0000000..f95361c
--- /dev/null
+++ b/pyrate/pyrate/common/testing/strategies/dynamic_system.py
@@ -0,0 +1,141 @@
+"""Contains helpers based on hypothesis test data generators."""
+
+# Typing
+from typing import cast
+from typing import Tuple
+
+# Hypothesis testing
+from hypothesis.extra.numpy import arrays
+from hypothesis.strategies import composite
+from hypothesis.strategies import floats
+from hypothesis.strategies import lists
+from hypothesis.strategies import SearchStrategy
+
+# Mathematics
+from numpy import eye
+from numpy import float64
+
+# Gaussian representation
+from pyrate.common.math import Gaussian
+
+# Own typing
+from . import DrawType # pylint: disable=unused-import
+
+
+# In this context, we reproduce a common filter notation
+# pylint: disable=invalid-name, too-many-locals, unused-argument
+
+
+@composite
+def linear_model(
+ draw: DrawType, state_dim: int = 2, input_dim: int = 1, sensor_dim: int = 2
+) -> SearchStrategy[Tuple]:
+ """Generate a linear state space model.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ state_dim: Number of state variables
+ input_dim: Number of input variables
+ sensor_dim: Number of measurement variables
+ """
+
+ # Strategies
+ float_strategy = floats(0, 5)
+
+ # Transition model
+ F = draw(arrays(float64, (state_dim, state_dim), elements=float_strategy))
+
+ # Input model
+ B = draw(arrays(float64, (state_dim, input_dim), elements=float_strategy))
+
+ # Measurement model
+ H = draw(arrays(float64, (sensor_dim, state_dim), elements=float_strategy))
+
+ # Symmetric, positive definite process noise
+ q = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
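+ # q @ q.T is positive semi-definite by construction; adding the identity makes Q strictly positive definite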
+ Q = q @ q.T + eye(state_dim)
+
+ # Symmetric, positive definite sensor noise
+ r = draw(arrays(float64, (sensor_dim, 1), elements=float_strategy))
+ R = r @ r.T + eye(sensor_dim)
+
+ # Initial belief
+ x = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
+ p = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
+ P = p @ p.T + eye(state_dim)
+
+ estimate = Gaussian(x, P)
+
+ # Measurements and inputs
+ measurements = draw(
+ lists(arrays(float64, (sensor_dim, 1), elements=float_strategy), min_size=2, max_size=4)
+ )
+ inputs = draw(
+ lists(
+ arrays(float64, (input_dim, 1), elements=float_strategy),
+ min_size=len(measurements),
+ max_size=len(measurements),
+ )
+ )
+
+ # Return model
+ result = estimate, F, B, H, Q, R, measurements, inputs
+ return cast(SearchStrategy[Tuple], result)
+
+
+@composite
+def nonlinear_model(draw: DrawType, state_dim: int = 2, sensor_dim: int = 2) -> SearchStrategy[Tuple]:
+ """Generate a nonlinear state space model.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ state_dim: Number of state variables
+ sensor_dim: Number of measurement variables
+ """
+
+ # Strategies
+ float_strategy = floats(0, 5)
+
+ # Transition model
+ F = draw(arrays(float64, (state_dim, state_dim), elements=float_strategy))
+
+ def f(x):
+ return F @ x
+
+ # Jacobi of f about state
+ def Jf(x):
+ return F
+
+ # Measurement model
+ H = draw(arrays(float64, (sensor_dim, state_dim), elements=float_strategy))
+
+ def h(x):
+ return H @ x
+
+ # Jacobi of h about state
+ def Jh(state):
+ return H
+
+ # Symmetric, positive definite process noise
+ q = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
+ Q = q @ q.T + eye(state_dim)
+
+ # Symmetric, positive definite sensor noise
+ r = draw(arrays(float64, (sensor_dim, 1), elements=float_strategy))
+ R = r @ r.T + eye(sensor_dim)
+
+ # Initial belief
+ x = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
+ p = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
+ P = p @ p.T + eye(state_dim)
+
+ estimate = Gaussian(x, P)
+
+ # Measurements
+ measurements = draw(
+ lists(arrays(float64, (sensor_dim, 1), elements=float_strategy), min_size=2, max_size=4)
+ )
+
+ # Return model
+ result = estimate, f, F, Jf, h, H, Jh, Q, R, measurements
+ return cast(SearchStrategy[Tuple], result)
diff --git a/pyrate/pyrate/common/testing/strategies/geometry.py b/pyrate/pyrate/common/testing/strategies/geometry.py
new file mode 100644
index 0000000..603f91e
--- /dev/null
+++ b/pyrate/pyrate/common/testing/strategies/geometry.py
@@ -0,0 +1,448 @@
+"""Contains helpers like hypothesis test data generators."""
+
+# Typing
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Optional
+from typing import Union
+
+# Hypothesis testing
+from hypothesis import assume
+import hypothesis.extra.numpy as numpy_st
+import hypothesis.strategies as st
+from hypothesis.strategies import SearchStrategy
+
+# Scientific stack
+import numpy
+from scipy.spatial import Voronoi
+
+# Planning primitives
+from shapely.geometry import box
+from shapely.geometry import MultiLineString
+from shapely.geometry import Polygon
+from shapely.ops import polygonize
+from shapely.ops import unary_union
+
+# Geospatial objects
+from pyrate.plan.geometry import CartesianGeometry
+from pyrate.plan.geometry import CartesianLocation
+from pyrate.plan.geometry import CartesianPolygon
+from pyrate.plan.geometry import CartesianRoute
+from pyrate.plan.geometry import Geospatial
+from pyrate.plan.geometry import LocationType
+from pyrate.plan.geometry import PolarGeometry
+from pyrate.plan.geometry import PolarLocation
+from pyrate.plan.geometry import PolarPolygon
+from pyrate.plan.geometry import PolarRoute
+
+# Own typing
+from . import DrawType # pylint: disable=unused-import
+
+
+@st.composite
+def geo_bearings(draw: DrawType) -> SearchStrategy[float]:
+ """Returns a direction/bearing/azimuth/yaw for navigation in degrees in :math:`[0, 360)`.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ """
+ bearing = draw(st.floats(min_value=0.0, max_value=360.0, exclude_max=True))
+ return cast(SearchStrategy[float], bearing)
+
+
+@st.composite
+def geospatial_identifiers(draw: DrawType) -> SearchStrategy[Optional[int]]:
+ """Returns identifiers for subclasses of :class:`pyrate.plan.geometry.Geospatial`.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ """
+ identifier = draw(st.one_of(st.none(), st.integers(min_value=0, max_value=(2**63) - 1)))
+ return cast(SearchStrategy[Optional[int]], identifier)
+
+
+@st.composite
+def location_types(draw: DrawType) -> SearchStrategy[LocationType]:
+ """Returns location types.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ """
+ location_type = draw(st.sampled_from(LocationType))
+ return cast(SearchStrategy[LocationType], location_type)
+
+
+@st.composite
+def geospatial_attributes(draw: DrawType) -> SearchStrategy[Dict[str, Any]]:
+ """Returns the common attributes for subclasses of :class:`pyrate.plan.geometry.Geospatial`.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ """
+ attributes = {
+ "location_type": draw(location_types()),
+ "name": draw(st.text()),
+ "identifier": draw(geospatial_identifiers()),
+ }
+ return cast(SearchStrategy[Dict[str, Any]], attributes)
+
+
+@st.composite
+def polar_locations(draw: DrawType) -> SearchStrategy[PolarLocation]:
+ """Returns a polar location.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ """
+ location = PolarLocation(
+ draw(st.floats(min_value=-90.0, max_value=+90.0)),
+ draw(st.floats(min_value=-180.0, max_value=+180.0, exclude_max=True)),
+ **draw(geospatial_attributes()),
+ )
+ return cast(SearchStrategy[PolarLocation], location)
+
+
+@st.composite
+def cartesian_locations(
+ draw: DrawType,
+ origin: SearchStrategy[Union[PolarLocation, None]] = st.one_of(st.none(), polar_locations()),
+) -> SearchStrategy[CartesianLocation]:
+ """Returns a cartesian location.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ origin: an optional strategy for specifying origins, defaults to providing both ``None`` and real
+ locations
+ """
+ location = CartesianLocation(
+ draw(st.floats(min_value=-10_000.0, max_value=+10_000.0)),
+ draw(st.floats(min_value=-10_000.0, max_value=+10_000.0)),
+ origin=draw(origin),
+ **draw(geospatial_attributes()),
+ )
+ return cast(SearchStrategy[CartesianLocation], location)
+
+
+@st.composite
+def polar_objects(
+ draw: DrawType, stable: bool = False, non_repeating: bool = False
+) -> SearchStrategy[PolarGeometry]:
+ """Returns polar geometries.
+
+ The concrete type is sampled randomly from all three polar geometries.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ stable: see :func:`~polar_routes_stable`
+ non_repeating: if ``True``, the strategy will not produce routes with duplicate locations.
+ Ignored if ``stable`` is given.
+ """
+ possible_sources = st.one_of(
+ [
+ polar_locations(),
+ polar_routes_stable() if stable else polar_routes(non_repeating=non_repeating),
+ polar_polygons(),
+ ]
+ )
+ geospatial: PolarGeometry = draw(possible_sources)
+ return cast(SearchStrategy[PolarGeometry], geospatial)
+
+
+@st.composite
+def cartesian_objects(draw: DrawType) -> SearchStrategy[CartesianGeometry]:
+ """Returns cartesian geometries.
+
+ The concrete type is sampled randomly from all three cartesian geometries.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ """
+ possible_sources = st.one_of(
+ [
+ cartesian_locations(),
+ cartesian_routes(),
+ cartesian_polygons(),
+ ]
+ )
+ geospatial: CartesianGeometry = draw(possible_sources)
+ return cast(SearchStrategy[CartesianGeometry], geospatial)
+
+
+@st.composite
+def geospatial_objects(draw: DrawType, stable: bool = False) -> SearchStrategy[Geospatial]:
+ """Returns instances of the abstract class :class:`pyrate.plan.geometry.Geospatial`.
+
+ The concrete type is sampled randomly from all six cartesian and polar geometries.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ stable: see :func:`~polar_routes_stable`
+ """
+ geospatial: Geospatial = draw(st.one_of([polar_objects(stable=stable), cartesian_objects()]))
+ return cast(SearchStrategy[Geospatial], geospatial)
+
+
+@st.composite
+def cartesian_polygons( # pylint: disable=too-many-arguments,too-many-locals
+ draw: DrawType,
+ min_vertices: int = 5,
+ max_vertices: int = 15,
+ scale: float = 100_000,
+ center_x: float = 0.0,
+ center_y: float = 0.0,
+ origin: SearchStrategy[Union[PolarLocation, None]] = st.one_of(st.none(), polar_locations()),
+) -> SearchStrategy[CartesianPolygon]:
+ """Returns non-empty valid cartesian polygons around the origin of the coordinate system.
+
+ Inspired `by testing code from the spatialpandas
+ `_
+ library.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ min_vertices: the minimum number of locations that shall form this polygon; needs to be at least ``5``
+ such that the generation algorithm works reliably
+ max_vertices: the maximum number of locations that shall form this polygon; needs to be at least
+ ``min_vertices``; this may not be very large as this will make example generation
+ extremely slow
+ scale: the maximum that a single coordinate value may be away from the center (in meters)
+ center_x: the east-west center (in meters)
+ center_y: the north-south center (in meters)
+ origin: an optional strategy for specifying origins, defaults to providing both ``None`` and real
+ locations
+
+ Raises:
+ ValueError: if polygon generation fails
+ """
+
+ assert min_vertices >= 5, "min_vertices needs to be at least 5"
+ assert max_vertices >= min_vertices, "max_vertices needs to be at least min_vertices"
+ assert scale >= 0.0, "scale must be non-negative"
+
+ count = draw(st.integers(min_value=min_vertices, max_value=max_vertices))
+
+ # very often, this only takes a single try
+ # it is highly unlikely that it will take more than 50
+ tries = 50
+ for _ in range(tries): # pragma: no branch
+
+ # create points in [-0.5, +0.5]
+ points = numpy.random.rand(count, 2) - 0.5
+ # scale them to the desired size
+ points *= scale * 2
+
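+ # build a Voronoi diagram of the random points, polygonize its finite ridges and merge the cells;
+ # clipping the union to the bounding box then yields an irregular but valid polygon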
+ voronoi = Voronoi(points)
+ multi_line_string = MultiLineString(
+ [voronoi.vertices[s] for s in voronoi.ridge_vertices if all(numpy.array(s) >= 0)]
+ )
+
+ poly = unary_union(list(polygonize(multi_line_string)))
+ poly = poly.intersection(box(-scale, -scale, scale, scale))
+
+ if ( # pragma: no branch
+ isinstance(poly, Polygon) and not poly.is_empty and poly.is_simple and poly.is_valid
+ ):
+ coordinates = numpy.array(poly.exterior.coords)
+ # move them to the desired center
+ coordinates[:, 0] += center_x
+ coordinates[:, 1] += center_y
+
+ polygon = CartesianPolygon.from_numpy(
+ coordinates, origin=draw(origin), **draw(geospatial_attributes())
+ )
+ return cast(SearchStrategy[CartesianPolygon], polygon)
+
+ # This should practically never occur (the probability is very, very low)
+ raise ValueError("Failed to construct polygon") # pragma: no cover
+
+
+@st.composite
+def polar_polygons(
+ draw: DrawType,
+ min_vertices: int = 5,
+ max_vertices: int = 15,
+ scale: float = 100_000,
+ center: Optional[PolarLocation] = None,
+) -> SearchStrategy[PolarPolygon]:
+ """Returns non-empty valid polar polygons.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ min_vertices: the minimum number of locations that shall form this polygon; needs to be at least ``5``
+ such that the generation algorithm works reliably
+ max_vertices: the maximum number of locations that shall form this polygon; needs to be at least
+ ``min_vertices``; this may not be very large as this will make example generation
+ extremely slow
+ scale: the maximum that a single coordinate value may be away from the center (in meters)
+ center: the center of the polygon or ``None`` to randomly select one
+
+ Raises:
+ ValueError: if polygon generation fails
+ """
+ cartesian = draw(
+ cartesian_polygons(
+ min_vertices=min_vertices,
+ max_vertices=max_vertices,
+ scale=scale,
+ origin=polar_locations() if center is None else st.just(center), # type: ignore
+ )
+ )
+ return cast(SearchStrategy[PolarPolygon], cartesian.to_polar())
+
+
+@st.composite
+def cartesian_routes( # pylint: disable=too-many-arguments
+ draw: DrawType,
+ min_vertices: int = 2,
+ max_vertices: int = 10,
+ scale: float = 100_000,
+ center_x: float = 0.0,
+ center_y: float = 0.0,
+ origin: SearchStrategy[Union[PolarLocation, None]] = st.one_of(st.none(), polar_locations()),
+ non_repeating: bool = True,
+) -> SearchStrategy[CartesianRoute]:
+ """Returns a cartesian route.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ min_vertices: the minimum number of locations that shall form this route, must be ``2`` or greater
+ max_vertices: the maximum number of locations that shall form this route
+ scale: the maximum that a single coordinate value may be away from the center (in meters); strictly
+ positive
+ center_x: the east-west center (in meters)
+ center_y: the north-south center (in meters)
+ origin: an optional strategy for specifying origins, defaults to providing both ``None`` and real
+ locations
+ non_repeating: if ``True``, the route will not contain any duplicate locations
+ """
+
+ assert min_vertices >= 2, "min_vertices may not be less than 2"
+ assert (
+ max_vertices is None or max_vertices >= min_vertices
+ ), "max_vertices may not be less than min_vertices"
+ assert scale > 0.0, "scale must be strictly positive"
+
+ # define the actual values in the coordinate arrays
+ elements_x = st.floats(min_value=center_x - scale, max_value=center_x + scale)
+ elements_y = st.floats(min_value=center_y - scale, max_value=center_y + scale)
+
+ # define the number of coordinates; we must draw directly and not pass the strategy such that x and y have
+ # the same number of elements
+ length = draw(st.integers(min_value=min_vertices, max_value=max_vertices))
+
+ # create the actual coordinates and ensure that all are different from each other
+ coordinates_x = draw(numpy_st.arrays(dtype="float64", shape=length, elements=elements_x, unique=True))
+ coordinates_y = draw(numpy_st.arrays(dtype="float64", shape=length, elements=elements_y, unique=True))
+ coordinates = numpy.vstack((coordinates_x, coordinates_y)).T
+
+ # make sure that the route has non-zero length
+ if numpy.abs(numpy.diff(coordinates, axis=1)).sum() < 1: # one meter in the typical interpretation
+ coordinates[0, 0] += 1 # add an arbitrary value # pragma: no cover
+
+ assume(not non_repeating or (numpy.abs(numpy.diff(coordinates, axis=1)) > 1).all()) # Difficult to handle
+
+ # create the route with the other parameters of geospatial objects
+ route = CartesianRoute.from_numpy(coordinates, origin=draw(origin), **draw(geospatial_attributes()))
+ return cast(SearchStrategy[CartesianRoute], route)
+
+
+@st.composite
+def polar_routes(
+ draw: DrawType,
+ min_vertices: int = 2,
+ max_vertices: int = 10,
+ non_repeating: bool = True,
+) -> SearchStrategy[PolarRoute]:
+ """Returns a polar route.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ min_vertices: the minimum number of locations that shall form this route, must be ``2`` or greater
+ max_vertices: the maximum number of locations that shall form this route or ``None`` to let
+ *hypothesis* decide
+ non_repeating: if ``True``, the route will not contain any duplicate locations
+ """
+
+ assert min_vertices >= 2, "min_vertices may not be less than 2"
+ assert (
+ max_vertices is None or max_vertices >= min_vertices
+ ), "max_vertices may not be less than min_vertices"
+
+ # define the actual values in the coordinate arrays
+ elements_latitude = st.floats(min_value=-90, max_value=90)
+ elements_longitude = st.floats(min_value=-180, max_value=180, exclude_max=True)
+
+ # define the number of coordinates; we must draw directly and not pass the strategy such that x and y have
+ # the same number of elements
+ length = draw(st.integers(min_value=min_vertices, max_value=max_vertices))
+
+ # create the actual coordinates
+ coordinates_latitude = draw(
+ numpy_st.arrays(dtype="float64", shape=length, elements=elements_latitude, unique=True)
+ )
+ coordinates_longitude = draw(
+ numpy_st.arrays(dtype="float64", shape=length, elements=elements_longitude, unique=True)
+ )
+ coordinates = numpy.vstack((coordinates_latitude, coordinates_longitude)).T
+
+ # make sure that the route has non-zero length
+ # there is no single correct value for the threshold near the poles, but 1e-4 appears to work fine
+ if numpy.abs(numpy.diff(coordinates, axis=1)).sum() < 1e-4:
+ coordinates[0, 0] += 1e-4 # add an arbitrary value # pragma: no cover
+
+ assume(
+ not non_repeating or (numpy.abs(numpy.diff(coordinates, axis=1)) > 1e-4).all()
+ ) # Difficult to handle
+
+ # create the route with the other parameters of geospatial objects
+ try:
+ route = PolarRoute.from_numpy(coordinates, **draw(geospatial_attributes()))
+ except ValueError:
+ # This can still happen if the duplicate entries check above does not catch it
+ assume(False) # pragma: no cover
+
+ # Make sure we only generate routes that can be projected
+ try:
+ route.to_cartesian(route.locations[0])
+ except AssertionError:
+ assume(False) # pragma: no cover
+
+ return cast(SearchStrategy[PolarRoute], route)
+
+
+@st.composite
+def polar_routes_stable(
+ draw: DrawType,
+ min_vertices: int = 5, # polar_polygons() requires at least 5
+ max_vertices: int = 10,
+) -> SearchStrategy[PolarRoute]:
+ """Returns a polar route where the vertices are not too far apart.
+
+ It is therefore numerically more stable when projecting to a cartesian plane.
+
+ Args:
+ draw: see :func:`hypothesis.strategies.composite`
+ min_vertices: the minimum number of locations that shall form this route, must be ``5`` or greater
+ max_vertices: the maximum number of locations that shall form this route or ``None`` to let
+ *hypothesis* decide
+ """
+
+ assert min_vertices >= 5, "min_vertices may not be less than 5"
+ assert (
+ max_vertices is None or max_vertices >= min_vertices
+ ), "max_vertices may not be less than min_vertices"
+
+ # We create a polygon since that is known to cause fewer numerical issues when projecting
+ # since we generate them in a way that guarantees similar locality of the vertices
+ polygon: PolarPolygon = draw(polar_polygons(min_vertices=min_vertices, max_vertices=max_vertices))
+
+ return cast(
+ SearchStrategy[PolarRoute],
+ PolarRoute.from_numpy(
+ polygon.to_numpy(),
+ location_type=polygon.location_type,
+ name=polygon.name,
+ identifier=polygon.identifier,
+ ),
+ )
diff --git a/pyrate/pyrate/plan/__init__.py b/pyrate/pyrate/plan/__init__.py
new file mode 100644
index 0000000..c024389
--- /dev/null
+++ b/pyrate/pyrate/plan/__init__.py
@@ -0,0 +1,11 @@
+"""The plan package provides tools to plan actions that can aid a roboter in reaching its goals.
+One important goal is reaching certain locations despite physically constrained
+movement abilities of a sailing vessel.
+Thus, a big part of this package deals with navigation strategies.
+
+In the ``geometry`` package, general geometrical objects and transformations are
+provided in cartesian (local 2D world) and spherical (latitude/longitude) coordinates.
+
+The ``graph`` module provides navigation tools where the world is modeled as a graph.
+This includes generating a graph, assigning properties to nodes of the graph and
+finding good paths on the graph."""
diff --git a/pyrate/pyrate/plan/geometry/__init__.py b/pyrate/pyrate/plan/geometry/__init__.py
new file mode 100644
index 0000000..f1cfeb9
--- /dev/null
+++ b/pyrate/pyrate/plan/geometry/__init__.py
@@ -0,0 +1,49 @@
+"""This package provides geometric abstractions for action planning algorithms.
+
+Warning:
+ This module docstring is not included in the *Sphinx* documentation.
+"""
+
+import typing # Will be removed from the namespace after being used below
+
+
+from .geospatial import Direction
+from .geospatial import Geospatial
+from .geospatial import LocationType
+
+from .location import CartesianLocation
+from .location import PolarLocation
+
+from .polygon import CartesianPolygon
+from .polygon import PolarPolygon
+
+from .route import CartesianRoute
+from .route import PolarRoute
+
+# provide useful aliases
+
+#: Any of :class:`pyrate.plan.geometry.PolarLocation`, :class:`pyrate.plan.geometry.PolarRoute` and
+#: :class:`pyrate.plan.geometry.PolarPolygon`.
+PolarGeometry = typing.Union[PolarLocation, PolarRoute, PolarPolygon]
+
+#: Any of :class:`pyrate.plan.geometry.CartesianLocation`, :class:`pyrate.plan.geometry.CartesianRoute` and
+#: :class:`pyrate.plan.geometry.CartesianPolygon`.
+CartesianGeometry = typing.Union[CartesianLocation, CartesianRoute, CartesianPolygon]
+
+del typing
+
+# don't expose .helpers here as it will rarely be used directly
+
+__all__ = [
+ "CartesianLocation",
+ "CartesianPolygon",
+ "CartesianRoute",
+ "CartesianGeometry",
+ "Direction",
+ "Geospatial",
+ "LocationType",
+ "PolarLocation",
+ "PolarPolygon",
+ "PolarRoute",
+ "PolarGeometry",
+]
diff --git a/pyrate/pyrate/plan/geometry/geospatial.py b/pyrate/pyrate/plan/geometry/geospatial.py
new file mode 100644
index 0000000..581985e
--- /dev/null
+++ b/pyrate/pyrate/plan/geometry/geospatial.py
@@ -0,0 +1,221 @@
+"""This module contains common base classes for the geospatial objects like polygons, routes and points."""
+
+# Standard library
+from abc import ABC
+from abc import abstractmethod
+from enum import Enum
+from enum import IntEnum
+from math import pi
+
+# Typing
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Optional
+from typing import Union
+
+# Geospatial
+from geojson import dumps
+from geojson import Feature
+
+
+#: The mean earth radius in meters (taken from
+#: `Earth radius (Wikipedia) `__).
+MEAN_EARTH_RADIUS = 6371_008.8
+
+#: The mean earth circumference in meters (derived from :attr:`~MEAN_EARTH_RADIUS`).
+MEAN_EARTH_CIRCUMFERENCE = MEAN_EARTH_RADIUS * 2.0 * pi
+
+#: The maximal earth circumference in meters (i.e. at the equator; taken from
+#: `Earth's circumference (Wikipedia) `__).
+MAXIMUM_EARTH_CIRCUMFERENCE = 40_075_017.0
+
+
+class LocationType(IntEnum):
+
+ """Represents what type a location is of.
+
+ Notes:
+ The values are set to fixed values such that they can be serialized.
+ New members may therefore only be added below, with strictly ascending numbers.
+ """
+
+ #: An object of unknown type.
+ UNKNOWN = 0
+
+ #: An abstract thing that is used for testing purposes and may not have a correspondence in the real
+ #: world. Ships should usually avoid such obstacles too, as they could represent things like
+ #: `icebergs `__.
+ TESTING = 1
+
+ #: A generic obstruction like a buoy, oil rig or special area extracted from a nautical chart.
+ OBSTRUCTION = 2
+
+ #: A land mass like an island or continent.
+ LAND = 3
+
+ #: A water area that, depending on the context, might not be considered sufficiently deep for navigation.
+ SHALLOW_WATER = 4
+
+ #: Some object representing special weather conditions, like strong winds or just precipitation.
+ WEATHER = 5
+
+ #: An object representing other vessels 🚢.
+ #: It might have been detected via
+ #: `AIS `__.
+ SHIP = 6
+
+ @classmethod
+ def max_value(cls) -> int:
+ """Get the maximum value of all members of this enum."""
+
+ return max(cls)
+
+
+class Direction(float, Enum):
+
+ """A simple collection of named "compass" bearings 🧭 in degrees for self-documenting code."""
+
+ # pylint: disable=invalid-name
+ North = 0.0
+ East = 90.0
+ South = 180.0
+ West = 270.0
+
+
+class Geospatial(ABC):
+
+ """The common abstract base class for both polar and cartesian geospatial objects.
+
+ See :meth:`~Geospatial.to_geo_json` for hints on how this class can be used for visualizing geometries.
+
+ Args:
+ location_type: The type of this object
+ name: An optional name of this object
+ identifier: An optional unique identifier for this object, in :math:`[0, 2^{63})`, i.e. the non-negative range of a signed 64-bit integer
+ """
+
+ def __init__(self, location_type: LocationType, name: Optional[str], identifier: Optional[int]) -> None:
+ self.location_type = location_type
+ self.name = name
+ self.identifier = identifier
+
+ super().__init__()
+
+ @property
+ def identifier(self) -> Optional[int]:
+ """The numerical identifier of this object.
+
+ Must be `None` or in :math:`[0, 2^{63})`, i.e. the non-negative range of a signed 64-bit integer.
+ """
+
+ return self._identifier
+
+ @identifier.setter
+ def identifier(self, value: Optional[int]) -> None:
+ assert value is None or 0 <= value < 2**63, "Identifiers must be in [0, 2**63) or None"
+
+ self._identifier = value
+
+ def to_geo_json(self, indent: Optional[Union[int, str]] = None, **kwargs) -> str:
+ """Returns the GeoJSON representation of the geometry embedded into a feature.
+
+ Args:
+ indent: the number of levels to indent or ``None`` for compactness (see :func:`json.dumps`)
+ kwargs: much like indent, any keyword argument that can be passed to :func:`json.dumps`,
+ like ``allow_nan``, ``sort_keys``, and more
+
+ Returns:
+ The GeoJSON representation as a string
+
+ Examples:
+ See also: :ref:`geometry-plotting`.
+
+ GeoJSON is a widely used format that can be interpreted by a variety of GIS programs (geo
+ information systems). Among them are for example the very simple website
+ `geojson.io `__.
+ However, sometimes the geometries are too large to be handled by the web browser.
+ Then there are other programs available, like the free open-source tool
+ `QGIS (Desktop) `__. It is even available in the usual Ubuntu
+ repositories, so just run ``[sudo] apt install qgis``. Later, you can simply copy-paste the
+ GeoJSON representation into the tool.
+
+ The GeoJSON representation can be obtained like this (using a
+ :class:`~pyrate.plan.geometry.location.PolarLocation` just as an example):
+
+ >>> from pyrate.plan.geometry import PolarLocation
+ >>> team_room = PolarLocation(latitude=49.878091, longitude=8.654052)
+ >>> print(team_room.to_geo_json(indent=4))
+ {
+ "type": "Feature",
+ "geometry": {
+ "type": "Point",
+ "coordinates": [
+ 8.654052,
+ 49.878091
+ ]
+ },
+ "properties": {}
+ }
+
+ See also:
+ - `GeoJSON on Wikipedia `__
+ - `geojson.io `__
+ - `QGIS (Desktop) `__
+ """
+
+ # this relies on the inheriting instance to provide __geo_interface__ property/attribute
+ return cast(str, dumps(Feature(geometry=self), indent=indent, **kwargs))
+
+ @property
+ @abstractmethod
+ def __geo_interface__(self) -> Dict[str, Any]:
+ raise NotImplementedError()
+
+ @abstractmethod
+ def __eq__(self, other: Any) -> bool:
+ return (
+ isinstance(other, Geospatial)
+ and self.location_type == other.location_type
+ and self.name == other.name
+ and self.identifier == other.identifier
+ )
+
+ @property
+ def _repr_extras(self) -> str:
+ """Create a string representation of the three extra attributes for use in :meth:`~__repr__`.
+
+ Examples:
+ The output is suited to be directly included before the final closing bracket of a typical
+ implementation of ``__repr__()``:
+
+ >>> from pyrate.plan.geometry import PolarLocation
+ >>> PolarLocation(0, 0)._repr_extras
+ ''
+ >>> PolarLocation(0, 0, location_type=LocationType.UNKNOWN, name=None)._repr_extras
+ ''
+ >>> PolarLocation(0, 0, name="")._repr_extras
+ ', name=""'
+ >>> PolarLocation(0, 0, location_type=LocationType.SHIP, identifier=12)._repr_extras
+ ', location_type=LocationType.SHIP, identifier=12'
+
+ The class :class:`pyrate.plan.geometry.location.PolarLocation` was only chosen as an example.
+
+ Returns:
+ The extra arguments in the syntax of keyword arguments, as is common for :meth:`~__repr__`.
+ """
+
+ result = ""
+
+ if self.location_type != LocationType.UNKNOWN:
+ result += f", location_type=LocationType.{self.location_type.name}"
+ if self.name is not None:
+ result += f', name="{self.name}"'
+ if self.identifier is not None:
+ result += f", identifier={self.identifier}"
+
+ return result
+
+ @abstractmethod
+ def __repr__(self) -> str:
+ raise NotImplementedError()
diff --git a/pyrate/pyrate/plan/geometry/helpers.py b/pyrate/pyrate/plan/geometry/helpers.py
new file mode 100644
index 0000000..eec696d
--- /dev/null
+++ b/pyrate/pyrate/plan/geometry/helpers.py
@@ -0,0 +1,729 @@
+"""Contains helpers for dealing with distances and normalization of spherical coordinates and compass
+directions. Also allows for translating (collections of) points in polar coordinates.
+
+Maybe we should use `geopandas `__.
+
+References:
+ - Introduction on `Wikipedia `__
+ - Simple discussion on `StackOverflow `__
+ - Charles F. F. Karney (2013): Algorithms for geodesics.
+ `Paper as PDF `__.
+ - `Walter Bislin's Blog `__
+"""
+
+# Python standard library
+from math import atan2
+from math import pi
+from math import tau
+
+# Typing
+from typing import cast
+from typing import Tuple
+from typing import TypeVar
+from typing import Union
+from warnings import warn
+
+# Scientific
+import numpy
+from numpy import absolute
+from numpy import arccos
+from numpy import arcsin
+from numpy import arctan2
+from numpy import array
+from numpy import choose
+from numpy import clip
+from numpy import cos
+from numpy import full
+from numpy import hypot
+from numpy import isfinite
+from numpy import isscalar
+from numpy import ndarray
+from numpy import sin
+from numpy import sqrt
+from numpy import square
+
+# Geospatial
+from pyproj import Geod
+
+# Own constants
+from .geospatial import MEAN_EARTH_CIRCUMFERENCE
+from .geospatial import MEAN_EARTH_RADIUS
+
+# Constants -------------------------------------------------------------------
+
+#: A scalar or a numpy array
+ScalarOrArray = TypeVar("ScalarOrArray", float, ndarray)
+
+#: The pyproj WGS84 object used as the basis for all polar representations and coordinate projections
+WGS84_PYPROJ_GEOD = Geod("+ellps=WGS84 +units=m")
+
+
+# Normalize -------------------------------------------------------------------
+
+
+def _normalize_circular_range(value: ScalarOrArray, minimum: float, maximum: float) -> ScalarOrArray:
+ """Normalizes the value to reside in :math:`[minimum, maximum[` by wrapping around.
+
+ Used by the other normalization functions in this package.
+
+ Args:
+ value: the value to be normalized
+ minimum: the minimum of the desired bounds
+ maximum: the maximum of the desired bounds, assumed to be strictly larger than *minimum*
+
+ Returns:
+ The normalized value
+ """
+
+ # general approach: remove offset -> normalize with span -> add offset
+ span = maximum - minimum
+
+ # the second `% span` is required due to floating point issues: `-1e-15 % 360` -> `360.0`,
+ # but not less than `360.0` as required
+ return ((value - minimum) % span) % span + minimum
+
+
+def normalize_latitude(value: ScalarOrArray) -> ScalarOrArray:
+ """Normalizes a latitudal value to the usual bounds by wrapping around.
+
+ Note:
+ This is already done automatically by
+ :attr:`pyrate.plan.geometry.location.PolarLocation.latitude`.
+
+ Examples:
+ >>> normalize_latitude(20.0)
+ 20.0
+ >>> normalize_latitude(-90.0)
+ -90.0
+ >>> normalize_latitude(90.0)
+ 90.0
+
+ It is also possible to wrap over the pole coordinates.
+
+ >>> normalize_latitude(91.0)
+ 89.0
+ >>> normalize_latitude(185.0)
+ -5.0
+
+ Take care: this will also normalize rubbish values.
+
+ >>> normalize_latitude(3229764.25)
+ -24.25
+
+ Args:
+ value: the raw latitudinal value in degrees
+
+ Returns:
+ the normalized value in :math:`[-90, +90]` degrees
+ """
+
+ # touch_point_*: the latitudes would meet at this point if values outside [-90, +90] were allowed
+ # pole_*: the actual bounds of the latitude values; they describe the south and north poles
+ touch_point_min, touch_point_max = -180.0, +180.0
+ pole_down, pole_up = -90.0, +90.0
+
+ # map into [-180.0, +180.0] by modulo exactly as with the longitude
+ value = _normalize_circular_range(value, touch_point_min, touch_point_max)
+
+ # map into [-90.0, +90.0] by mirroring, since `100°` would be `180° - 100° = 80°` and not
+ # `100° mod 90° = 10°` (as an example)
+ try:
+ if value > pole_up:
+ return touch_point_max - value
+ if value < pole_down:
+ return touch_point_min - value
+ return value
+
+ except ValueError:
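+ # `value` is an array here: the comparisons above yield boolean arrays, so mirror element-wise via choose()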
+ clipped_below = choose(value < pole_down, (value, touch_point_min - value))
+ clipped_above = choose(value > pole_up, (clipped_below, touch_point_max - value))
+ return cast(ScalarOrArray, clipped_above)
+
+
+def normalize_longitude(value: ScalarOrArray) -> ScalarOrArray:
+ """Normalizes a longitudal value to the usual bounds by wrapping.
+
+ Note:
+ This is already done automatically by
+ :attr:`pyrate.plan.geometry.location.PolarLocation.longitude`.
+
+ Examples:
+
+ >>> normalize_longitude(136.0)
+ 136.0
+ >>> normalize_longitude(-86.0)
+ -86.0
+ >>> normalize_longitude(-180.0)
+ -180.0
+
+ You can also get rid of redundant values, e.g. at 180.0°,
+ as well as wrap around the boundaries.
+
+ >>> normalize_longitude(+180.0)
+ -180.0
+ >>> normalize_longitude(185.0)
+ -175.0
+
+ Take care: this will also normalize rubbish values.
+
+ >>> normalize_longitude(3229764.25)
+ -155.75
+
+ Args:
+ value: the raw longitudinal value in degrees
+
+ Returns:
+ the normalized value in :math:`[-180, +180[` degrees
+ """
+
+ return _normalize_circular_range(value, -180.0, +180.0)
+
+
+def normalize_direction(value: ScalarOrArray) -> ScalarOrArray:
+ """Normalizes a direction (azimuth/yaw) value to the usual 360° compass values.
+
+ Examples:
+
+ >>> normalize_direction(45.0)
+ 45.0
+ >>> normalize_direction(250.0)
+ 250.0
+ >>> normalize_direction(-6.0)
+ 354.0
+ >>> normalize_direction(360.0)
+ 0.0
+ >>> normalize_direction(450.0)
+ 90.0
+
+ Take care: this will also normalize rubbish values.
+
+ >>> normalize_direction(3229764.25)
+ 204.25
+
+ Args:
+ value: the raw value in degrees
+
+ Returns:
+ the normalized value in :math:`[0, 360[` degrees
+ """
+
+ return _normalize_circular_range(value, 0.0, 360.0)
+
+
+# Difference ------------------------------------------------------------------
+
+
+def _difference_circular_range(
+ value_a: ScalarOrArray, value_b: ScalarOrArray, minimum: float, maximum: float
+) -> ScalarOrArray:
+ """Calculates differences on a circular number line, where minimum and maximum meet.
+
+ The values do not need to be normalized.
+
+ If the difference between ``value_a`` and ``value_b`` is not finite (i.e. ``NaN``, ``+Inf`` or ``-Inf``) a
+ warning is printed and ``NaN`` is returned. Both other values are assumed to be finite.
+
+ Args:
+ value_a: the first value
+ value_b: the second value
+ minimum: the minimum of the desired bounds
+ maximum: the maximum of the desired bounds, assumed to be strictly larger than ``minimum``
+
+ Returns:
+ the difference in :math:`[0, (maximum - minimum)/2]`
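+
+ Examples:
+ A small sketch of the wrap-around behaviour on a full 360° circle:
+
+ >>> _difference_circular_range(350.0, 10.0, 0.0, 360.0)
+ 20.0
+ >>> _difference_circular_range(10.0, 350.0, 0.0, 360.0)
+ 20.0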
+ """
+
+ raw_difference = value_a - value_b
+
+ if not isfinite(raw_difference).all():
+ warn(
+ "_difference_circular_range(): "
+ f"difference between {value_a} and {value_b} was not a valid number: {raw_difference}",
+ UserWarning,
+ )
+
+ span = maximum - minimum
+ difference: ScalarOrArray = raw_difference % span
+
+ # take the smaller one of the two possible distances, i.e. the smaller path around the circular range
+ try:
+ # Try the case where we have floats, not arrays
+ if difference > span / 2.0:
+ return span - difference
+ return difference
+
+ except ValueError:
+ return cast(ScalarOrArray, choose(difference > span / 2.0, (difference, span - difference)))
+
+
+def difference_latitude(value_a: ScalarOrArray, value_b: ScalarOrArray) -> ScalarOrArray:
+ """Calculates the difference between two latitudal values.
+
+ The values do not need to be normalized.
+
+ If the difference between ``value_a`` and ``value_b`` is not finite (i.e. ``NaN``, ``+Inf`` or ``-Inf``) a
+ warning is printed and ``NaN`` is returned.
+
+ Examples:
+
+ >>> difference_latitude(-45.0, +50.0)
+ 95.0
+ >>> difference_latitude(-90.0, -90.0)
+ 0.0
+ >>> difference_latitude(-90.0, +90.0) # the maximum distance
+ 180.0
+ >>> difference_latitude(-90.0, +190.0)
+ 80.0
+
+ Take care: this will also calculate distances for rubbish values.
+
+ >>> difference_latitude(95324.0, 3224.25)
+ 60.25
+
+ Args:
+ value_a: the first latitude in degrees
+ value_b: the second latitude in degrees
+
+ Returns:
+ The difference between the two values in degrees in :math:`[0, 180]`
+ """
+
+ # normalization is required because the distance between +80° and +100° shall be 0° and not 20°
+ value_a = normalize_latitude(value_a)
+ value_b = normalize_latitude(value_b)
+
+ # mathematically, there is no need to calculate modulo `span` like in _difference_circular_range(), since
+ # both values are already guaranteed to be in [-90.0, +90.0] and their absolute difference already gives
+ # us what we need
+ difference: ScalarOrArray = numpy.abs(value_a - value_b)
+
+ if not isfinite(difference).all():
+ warn(
+ "difference_latitude(): "
+ f"difference between {value_a} and {value_b} was not a valid number: {difference}",
+ UserWarning,
+ )
+
+ return difference
+
+
+def difference_longitude(value_a: ScalarOrArray, value_b: ScalarOrArray) -> ScalarOrArray:
+ """Calculates the difference between two longitudal values.
+
+ The values do not need to be normalized.
+
+ If the difference between ``value_a`` and ``value_b`` is not finite (i.e. ``NaN``, ``+Inf`` or ``-Inf``) a
+ warning is printed and ``NaN`` is returned.
+
+ Examples:
+
+ >>> difference_longitude(-145.0, +150.0)
+ 65.0
+ >>> difference_longitude(-90.0, -90.0)
+ 0.0
+ >>> difference_longitude(-90.0, +90.0) # the maximum distance
+ 180.0
+ >>> difference_longitude(-180.0, +190.0)
+ 10.0
+
+ Take care: this will also calculate distances for rubbish values.
+
+ >>> difference_longitude(95324.0, 3224.25)
+ 60.25
+
+ Args:
+ value_a: the first longitude in degrees
+ value_b: the second longitude in degrees
+
+ Returns:
+ The difference between the two values in degrees in :math:`[0, 180]`
+ """
+
+ return _difference_circular_range(value_a, value_b, -180.0, +180.0)
+
+
+def difference_direction(value_a: ScalarOrArray, value_b: ScalarOrArray) -> ScalarOrArray:
+ """Calculates the difference between two directional (azimuthal/yaw) values.
+
+ The values do not need to be normalized.
+
+ If the difference between ``value_a`` and ``value_b`` is not finite (i.e. ``NaN``, ``+Inf`` or ``-Inf``) a
+ warning is printed and ``NaN`` is returned.
+
+ Examples:
+
+ >>> difference_direction(145.0, 165.0)
+ 20.0
+ >>> difference_direction(42.0, 42.0)
+ 0.0
+ >>> difference_direction(350.0, 334.5)
+ 15.5
+ >>> difference_direction(270.0, 90.0) # the maximum distance
+ 180.0
+ >>> difference_direction(365.0, 1.0)
+ 4.0
+ >>> difference_direction(370.0, -20.0)
+ 30.0
+
+ Take care: this will also calculate distances for rubbish values.
+
+ >>> difference_direction(95324.0, 3224.25)
+ 60.25
+
+ Args:
+ value_a: the first direction in degrees
+ value_b: the second direction in degrees
+
+ Returns:
+ The difference between the two values in degrees in :math:`[0, 180]`
+ """
+
+ return _difference_circular_range(value_a, value_b, 0.0, +360.0)
+
+
+# Translation -----------------------------------------------------------------
+
+
+def translate_floats(
+ longitude: float, latitude: float, direction: float, distance: float
+) -> Tuple[Tuple[float, float], float]:
+ """Simply a convenience method for calling :func:`~.translate_numpy` with a single point.
+
+ Args:
+ longitude: the original longitude in degrees
+ latitude: the original latitude in degrees
+ direction: the direction to translate into in degrees
+ distance: the distance to translate by in meters
+
+ Returns:
+ a pair ``(longitude, latitude)`` with the new coordinates and the back azimuth
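+
+ Example:
+ A minimal sketch; the exact resulting values depend on the WGS84 ellipsoid:
+
+ >>> (lon, lat), back_azimuth = translate_floats(8.654, 49.878, direction=0.0, distance=1000.0)
+ >>> lat > 49.878 # moving north increases the latitude
+ True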
+ """
+ # just use the numpy variant as it would be converted to an array in pyproj internally anyhow
+ coordinates_array = array([[longitude, latitude]])
+ result, back = translate_numpy(coordinates_array, direction, distance)
+ new_coordinates = (result[0, 0], result[0, 1])
+
+ return new_coordinates, back[0]
+
+
+def translate_numpy(
+ coordinates: ndarray,
+ direction: Union[float, ndarray],
+ distance: Union[float, ndarray],
+) -> Tuple[ndarray, ndarray]:
+ """Translates the given point(s) by a given distance and direction/azimuth.
+
+ Everything is assumed to be in degrees.
+ Furthermore, this method returns the back azimuth as documented below.
+
+ Under the hood uses :meth:`pyproj.Geod.fwd`, which computes the *forward transformation* or
+ *forward azimuth*. This walks the given distance on the great circle arc given by the direction/
+ azimuth. It uses the direction to define the initial azimuth, as the real azimuth will probably change
+ along the great circle path (unless going exactly north/south or east/west).
+ See also `this website `__, sections "Bearing"
+ and "Midpoint".
+
+ Note:
+ See the underlying *geographiclib* library, *geod_direct()*, for details on the
+ behaviour at the poles and other special cases. It's rather strange. Also keep in mind that this
+ method suffers from numerical issues like pretty much anything involving floating point
+ computations.
+
+ Note:
+ This is already provided in an object-oriented fashion by
+ - :meth:`pyrate.plan.geometry.location.PolarLocation.translate`
+ - :meth:`pyrate.plan.geometry.polygon.PolarPolygon.translate`
+ - :meth:`pyrate.plan.geometry.route.PolarRoute.translate`
+
+ Args:
+ coordinates: the coordinates as a numpy array with dimensions ``(number of points, 2)``,
+ where the first component describes the longitude and the second one the latitude
+ direction: The direction/azimuth to head to in degrees in :math:`[0, 360]` (0° is north, 90° is east).
+ If it is a scalar, a single value is assumed for all points.
+ If it is an array, it must be of shape ``(number of points, )``.
+ distance: The distance to translate by in meters; should not be very close to zero if the
+ backwards azimuth shall be used due to numerical stability.
+ If it is a scalar, a single value is assumed for all points.
+ If it is an array, it must be of shape ``(number of points, )``.
+
+ Returns:
+ (1) The new coordinates in the same format as the input
+ (2) The backwards azimuth in :math:`[0, 360)`, i.e. the direction which could be used to travel
+ from the translated location back to the original one by translating with that direction and
+ the same ``distance``.
+ """
+
+ # Convert from [0, 360[ to [-180, +180]
+ if isscalar(direction):
+ direction = cast(float, direction) # The cast is needed until isscalar() narrows the type correctly
+ if direction > 180:
+ direction -= 360
+ azimuth = full((coordinates.shape[0],), direction)
+ else:
+ # The cast is needed until isscalar() narrows the type correctly
+ azimuth = cast(ndarray, direction).copy()
+ azimuth[azimuth > 180] -= 360
+
+ # Make sure that dist is an array
+ dist = full((coordinates.shape[0],), distance) if isscalar(distance) else distance
+
+ # If any input to fwd() is an array/sequence, then all must be
+ coordinates[:, 0], coordinates[:, 1], back_azimuth = WGS84_PYPROJ_GEOD.fwd(
+ lons=coordinates[:, 0],
+ lats=coordinates[:, 1],
+ az=azimuth,
+ dist=dist,
+ radians=False,
+ )
+
+ # the back azimuth is in [-180, +180], so we need to convert it to [0, 360)
+ # see the underlying *geographiclib* library, `geod_direct()`:
+ # https://geographiclib.sourceforge.io/1.49/C/geodesic_8h.html#a676f59f07987ddd3dd4109fcfeccdb9d
+ back_azimuth[back_azimuth < 0] += 360
+ back_azimuth[back_azimuth == 360.0] = 0.0
+
+ return coordinates, back_azimuth
+
+
+# Distance --------------------------------------------------------------------
+
+
+def fast_distance_geo(
+ latitudes: ScalarOrArray, longitudes: ScalarOrArray, center_latitude: float, center_longitude: float
+) -> ScalarOrArray:
+ """Approximates the great circle distance of all points to the center.
+
+ Warnings:
+ All coordinates are assumed to be within about 250 km of the center to provide reasonable accuracy.
+ Then, it was determined experimentally that the error compared to the great-circle distance was always
+ below 5%.
+ This was done by setting ``@hypothesis.settings(max_examples=50000)`` on the test case
+ ``TestDistanceCalculation.test_fast_distance_geo`` and observing that it did not fail.
+
+ Depending on the latitude **of the center**, either the *equirectangular approximation*
+ or the *polar coordinate flat-earth formula* is used. Both assume a spherical world and then flatten it
+ onto a plane.
+
+ Args:
+ latitudes: the latitude values, in radians in range :math:`[-\\frac{π}{2}, +\\frac{π}{2}]`
+ longitudes: the longitude values, in radians in range :math:`[-π, +π]`
+ center_latitude: the latitude of the center, in radians in range
+ :math:`[-\\frac{π}{2}, +\\frac{π}{2}]`
+ center_longitude: the longitude of the center, in radians in range :math:`[-π, +π]`
+
+ See Also:
+ :func:`~haversine_numpy`: about three times slower but more precise
+
+ References:
+ - Based on
+ `Movable Type Scripts: Calculate distance, bearing and more between Latitude/Longitude points
+ `__
+ (as of Dec. 2020), Section "Equirectangular approximation".
+ In that source: ``phi = latitude``, ``lambda = longitude``, ``theta = co-latitude`` and
+ ``R = (mean) earth radius``.
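+
+    Example:
+        A rough sketch of the expected magnitude (inputs are in radians, the result is in meters)::
+
+            from math import radians
+
+            distance = fast_distance_geo(
+                latitudes=radians(50.1), longitudes=radians(8.7),
+                center_latitude=radians(50.0), center_longitude=radians(8.6),
+            )
+            # distance is roughly 13 km, within a few percent of haversine_numpy() for the same inputs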
+ """
+ delta_lambda = _difference_circular_range(longitudes, center_longitude, -pi, +pi) # type: ignore
+
+ # The border value of about 80.0° latitude (1.3963 rad) was determined by eye-balling from some Tissot's indicatrices
+ if abs(center_latitude) > 1.3962634015954636:
+ # move all locations to the northern hemisphere first if required
+ if center_latitude < 0:
+ center_latitude = -center_latitude
+ latitudes = -latitudes
+ del longitudes, center_longitude # they are now wrong
+
+ # use the "polar coordinate flat-earth formula"
+ theta_1 = (pi / 2) - latitudes
+ theta_2 = (pi / 2) - center_latitude
+ summed = square(theta_1) + square(theta_2) - 2 * theta_1 * theta_2 * cos(delta_lambda) # type: ignore
+ summed = clip(summed, 0.0, None) # for numerical stability as above sum may be slightly negative
+ return cast(ScalarOrArray, sqrt(summed) * MEAN_EARTH_RADIUS)
+
+ # use the "equirectangular approximation"
+ d_lat = _difference_circular_range(latitudes, center_latitude, -pi / 2, +pi / 2) # type: ignore
+ d_lon = delta_lambda * cos(center_latitude)
+ dist_radians = hypot(d_lat, d_lon) # type: ignore
+ return cast(ScalarOrArray, dist_radians * MEAN_EARTH_RADIUS)
+
+
+def haversine_numpy(
+ latitudes: ScalarOrArray, longitudes: ScalarOrArray, center_latitude: float, center_longitude: float
+) -> ScalarOrArray:
+ """Calculate the great circle distance between each point to the center in meters.
+
+ Note:
+ "The min() function protects against possible roundoff errors that could
+ sabotage computation of the arcsine if the two points are very nearly
+ antipodal (that is, on opposite sides of the Earth). Under these conditions,
+ the Haversine Formula is ill-conditioned (see the discussion below), but
+ the error, perhaps as large as 2 km [...], is in the context of a
+ distance near 20,000 km [...]."
+ (Source: `Movable Type Scripts: GIS FAQ Q5.1: Great circle distance between 2 points
+ `__)
+
+ Args:
+ latitudes: the latitude values, in radians in range :math:`[-\\frac{π}{2}, +\\frac{π}{2}]`
+ longitudes: the longitude values, in radians in range :math:`[-π, +π]`
+ center_latitude: the latitude of the center, in radians in range
+ :math:`[-\\frac{π}{2}, +\\frac{π}{2}]`
+ center_longitude: the longitude of the center, in radians in range :math:`[-π, +π]`
+
+ See Also:
+ :func:`~fast_distance_geo`: an approximation that is about three times faster
+
+ Returns:
+ The great circle distance between each point and the center, in meters.
+
+ References:
+ - `Wikipedia: Haversine formula `__
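+
+    Example:
+        A rough sketch of the expected magnitude (inputs are in radians, the result is in meters)::
+
+            from math import radians
+
+            distance = haversine_numpy(radians(50.1), radians(8.7), radians(50.0), radians(8.6))
+            # distance is roughly 13.2 km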
+ """
+ d_lat = latitudes - center_latitude
+ d_lon = longitudes - center_longitude
+ summed = sin(d_lat / 2) ** 2 + cos(latitudes) * cos(center_latitude) * sin(d_lon / 2) ** 2
+ # the intermediate result c is the great circle distance in radians
+ d_rad = 2 * arcsin(numpy.minimum(sqrt(summed), 1.0))
+ # the great circle distance will be in the same units as MEAN_EARTH_RADIUS
+ return cast(ScalarOrArray, d_rad * MEAN_EARTH_RADIUS)
+
+
+# Conversion between meters and radians ---------------------------------------
+
+
+def meters2rad(meters: ScalarOrArray) -> ScalarOrArray:
+ """Meters to radians (latitude or longitude) at the equator."""
+ return (meters / MEAN_EARTH_CIRCUMFERENCE) * (2.0 * pi)
+
+
+def rad2meters(rad: ScalarOrArray) -> ScalarOrArray:
+ """Radians (latitude or longitude) at the equator to meters."""
+ return (rad / (2.0 * pi)) * MEAN_EARTH_CIRCUMFERENCE
+
+
+# Cartesian to Spherical ------------------------------------------------------
+
+
+def cartesian_to_spherical(xyz: ndarray) -> Tuple[ndarray, ndarray]:
+ """Converts cartesian coordinates on a unit sphere to spherical coordinates.
+
+ Args:
+ xyz: The cartesian coordinates, expected as an array where each line contains three coordinates for
+ a point.
+
+ Returns:
+ The coordinates as latitude and longitude in radians,
+ such that :math:`-\\frac{π}{2} ≤ φ ≤ +\\frac{π}{2}` is the latitude and :math:`-π ≤ θ < +π` is the
+ longitude.
+
+ Raises:
+ :class:`AssertionError`: if not all points lie on the unit sphere, as then the altitude would be
+ relevant, but it is not considered by this conversion
+
+ References:
+ - `Movable Type Scripts: Vector-based geodesy
+ `__
+ - `The relevant Wikipedia article
+ `__.
+ Note: In these formulas, mathematicians' coordinates are used, where :math:`0 ≤ φ ≤ π` is the
+ latitude coming down from the pole and :math:`0 ≤ θ ≤ 2π` is the longitude,
+ with the prime meridian being at :math:`π`.
+ We convert these to the usual coordinate conventions of the geographic community within this method.
+ - The `nvector library `__ provides a possible alternative
+ implementation (see section "Example 3: 'ECEF-vector to geodetic latitude'").
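+
+    Example:
+        A minimal sketch with two points on the equator of the unit sphere::
+
+            points = numpy.array([[1.0, 0.0, 0.0], [0.0, 1.0, 0.0]])
+            latitudes, longitudes = cartesian_to_spherical(points)
+            # both latitudes are (very close to) 0.0,
+            # the longitudes are (very close to) 0.0 and pi/2, respectively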
+ """
+ # elevation / r:
+ elevation = sqrt(xyz[:, 0] ** 2 + xyz[:, 1] ** 2 + xyz[:, 2] ** 2)
+ assert not numpy.any(absolute(elevation - 1.0) > 1e-9), "not all points lie on the unit sphere"
+
+ # the floating point representation of the cartesian coordinates might place points slightly off the
+ # unit sphere; this value moves the borders of the clipping slightly inwards
+ # in other words: it makes the clipped values lie *strictly* within the bounds, and never
+ # with equality
+ move_in = 1e-14 # empirically worked well
+
+ # latitude / phi:
+ # we know that the elevation is very close to 1, so we do not need to divide by it
+ latitudes = arccos(xyz[:, 2])
+ latitudes = clip(latitudes, move_in, pi - move_in) # clip at the poles
+ latitudes -= pi / 2 # convert from mathematical to geographic convention
+
+ # longitude / theta
+ longitudes = arctan2(xyz[:, 1], xyz[:, 0])
+ # we also clip here although wrapping using modulo 2*pi would be more appropriate
+ # however, this introduced new numerical problems which are avoided by clipping
+ # This also guarantees that each longitude is strictly less than 180°
+ longitudes = clip(longitudes, -pi + move_in, +pi - move_in)
+
+ return latitudes, longitudes
+
+
+# Mean computation on angles and coordinates ----------------------------------
+
+
+def mean_coordinate(latitudes: ndarray, longitudes: ndarray) -> Tuple[float, float]:
+ """Computes a reasonable mean coordinate if possible.
+
+ Args:
+ latitudes: The array of latitude values to compute the mean of, in degrees. Will be flattened.
+ longitudes: The array of longitude values to compute the mean of, in degrees. Will be flattened.
+ Must be of the same length as ``latitudes``.
+
+ Returns:
+ The mean coordinate of the given ones, in degrees as ``(latitude, longitude)``.
+
+ Raises:
+ ValueError: If no meaningful mean (of the longitudes) can be computed. See :func:`~mean_angle`.
+
+ See Also:
+ - :func:`~mean_angle`
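+
+    Example:
+        A small sketch; the inputs are in degrees, as is the result::
+
+            latitudes = numpy.array([10.0, 20.0])
+            longitudes = numpy.array([-10.0, +10.0])
+            mean_coordinate(latitudes, longitudes)
+            # returns approximately (15.0, 0.0)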
+ """
+ assert len(latitudes) == len(longitudes), "Both coordinate arrays must have the same length"
+
+ # In case of the latitude values, the "ambiguous" case of antipodal angles/points can be solved by
+ # observing that only latitude values between -90° and +90° are allowed. Therefore, +/- 0° is a reasonable
+ # result in this case.
+ try:
+ latitude = mean_angle(numpy.radians(latitudes))
+ except ValueError:
+ latitude = 0.0
+
+ # In the case of longitudes, simply let the ValueError raise as there is nothing we can do here
+ longitude = mean_angle(numpy.radians(longitudes))
+
+ return numpy.degrees(latitude), numpy.degrees(longitude)
+
+
+def mean_angle(radians: ndarray, tolerance: float = 1e-6) -> float:
+ """Computes a reasonable mean value if possible.
+
+ Args:
+ radians: The array of angles to compute the mean of, in radians. Will be flattened.
+ tolerance: If both components of the cartesian intermediate representation are less than this value,
+ a ``ValueError`` with a descriptive error message will be raised.
+
+ Returns:
+ The mean angle of the given ones
+
+ References:
+ - `Mean of circular quantities (section Mean of angles) on Wikipedia
+ `
+
+ Raises:
+ ValueError: If no meaningful mean can be computed. This is the case when two antipodal angles are
+ given or the sum of multiple ones is "antipodal".
+
+ See Also:
+ - :func:`~mean_coordinate`
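+
+    Example:
+        A small sketch; angles wrap around correctly, and antipodal inputs are rejected::
+
+            mean_angle(numpy.radians(numpy.array([350.0, 20.0])))  # approximately radians(5.0), the circular mean
+            mean_angle(numpy.array([0.0, numpy.pi]))               # raises a ValueError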
+ """
+
+ x: float = sin(radians).sum()
+ y: float = cos(radians).sum()
+
+ if abs(x) < tolerance and abs(y) < tolerance:
+ raise ValueError(
+ "The mean angle of nearly antipodal is ambiguous. "
+ "If this arises while computing mean points on polygons and routes, "
+ "the geometry likely is just so large that many approximations will not work anymore. "
+ "Consider splitting them up into smaller ones."
+ )
+
+ return atan2(x, y) % tau
diff --git a/pyrate/pyrate/plan/geometry/location.py b/pyrate/pyrate/plan/geometry/location.py
new file mode 100644
index 0000000..8cdd317
--- /dev/null
+++ b/pyrate/pyrate/plan/geometry/location.py
@@ -0,0 +1,491 @@
+"""This module implements abstractions for timestamped geospatial locations in WGS84 and local coordinates.
+
+Two locations are ``==`` if and only if they are equal according to ``equals_exact()``.
+"""
+
+# Standard library
+from copy import deepcopy
+from math import cos
+from math import radians
+from math import sin
+
+# Typing
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import Optional
+from typing import Tuple
+
+# Mathematics
+from geopy.distance import GeodesicDistance
+from geopy.distance import GreatCircleDistance
+from numpy import array
+from numpy import ndarray
+from pyproj import Proj
+from shapely.affinity import translate
+from shapely.geometry import Point
+
+# Basis
+from .geospatial import Geospatial
+from .geospatial import LocationType
+
+# Helpers
+from .helpers import normalize_latitude
+from .helpers import normalize_longitude
+from .helpers import translate_floats
+
+
+class PolarLocation(Geospatial):
+
+ """A geospatial location representing a spatial object on earth.
+
+ See `here `__ for a nice collection of formulas and
+ explanations on geographic transformations and calculations. This is the *Rome* for geographic calculation
+ questions on *Stack Overflow*: All roads seem to eventually lead here.
+
+ Examples:
+ First import some packages
+
+ >>> from math import isclose
+ >>> from pyrate.plan.geometry import PolarLocation, Direction
+
+ Then create two example coordinates to work with:
+
+ >>> team_room = PolarLocation(latitude=49.878091, longitude=8.654052)
+ >>> frankfurt = PolarLocation(latitude=50.113709, longitude=8.656561)
+
+ Translate the team room 27 km north, which is towards *Frankfurt*:
+
+ >>> team_room, direction_going_back = team_room.translate(direction=Direction.North, distance=27_000)
+ >>> assert isclose(direction_going_back, Direction.South)
+
+ The variable ``team_room`` now represents a location in/near *Frankfurt*,
+ only a couple hundred meters away from the location ``frankfurt``:
+
+ >>> print(team_room.distance(frankfurt)) # doctest: +ELLIPSIS
+ 812.512...
+
+ Coordinates can also be projected onto a local tangent plane and back.
+ The ``origin`` defines the point where the plane touches the sphere.
+
+ >>> frankfurt == frankfurt.to_cartesian(origin=frankfurt).to_polar()
+ True
+
+ Args:
+ latitude: The latitude in degrees (will be normalized)
+ longitude: The longitude in degrees (will be normalized)
+ location_type: The type of this location
+ name: An optional name of this location
+ identifier: An optional unique identifier for this object, in :math:`[0, 2**63)`, i.e. 64 signed bits
+ """
+
+ def __init__( # pylint: disable=too-many-arguments
+ self,
+ latitude: float,
+ longitude: float,
+ location_type: LocationType = LocationType.UNKNOWN,
+ name: Optional[str] = None,
+ identifier: Optional[int] = None,
+ ) -> None:
+ # Type hints
+ self._latitude: float
+ self._longitude: float
+ self._projection: Optional[Proj]
+
+ # Attributes setup
+ self.latitude = latitude
+ self.longitude = longitude
+ # self._projection = None # already set by the property accesses before
+
+ super().__init__(location_type=location_type, name=name, identifier=identifier)
+
+ @property
+ def latitude(self) -> float:
+ """The latitude of this location in degrees in :math:`[-90, +90]`.
+
+ The value is always disambiguated/normalized.
+ """
+
+ return self._latitude
+
+ @latitude.setter
+ def latitude(self, latitude: float) -> None:
+ self._projection = None
+ self._latitude = normalize_latitude(latitude)
+
+ @property
+ def longitude(self) -> float:
+ """The longitude of this location degrees in in :math:`[-180, +180)`.
+
+ The value is always disambiguated/normalized.
+ """
+
+ return self._longitude
+
+ @longitude.setter
+ def longitude(self, longitude: float) -> None:
+ self._projection = None
+ self._longitude = normalize_longitude(longitude)
+
+ @property
+ def projection(self) -> Proj:
+ """Derive a :class:`pyproj.Proj` instance for projecting points.
+
+ This instance is cached for performance reasons, since its creation is relatively time consuming. The
+ cache is appropriately invalidated when setting a new :attr:`~latitude` or :attr:`~longitude`.
+ """
+
+ if self._projection is None:
+ self._projection = Proj(
+ proj="tmerc",
+ ellps="WGS84",
+ units="m",
+ lon_0=self.longitude,
+ lat_0=self.latitude,
+ )
+
+ return self._projection
+
+ def to_cartesian(self, origin: "PolarLocation") -> "CartesianLocation":
+ """Projects this point to a cartesian representation according to the given global reference.
+
+ Args:
+ origin: The reference by which to project onto the local tangent plane
+
+ Returns:
+ The cartesian representation of this point with the given reference point being set
+ """
+
+ # convert to cartesian
+ east, north = origin.projection(self.longitude, self.latitude)
+
+ return CartesianLocation(
+ east,
+ north,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ origin=origin,
+ )
+
+ def translate(self, direction: float, distance: float) -> Tuple["PolarLocation", float]:
+ """Translates this location and returns the new location and back-azimuth.
+
+ See :func:`pyrate.plan.geometry.helpers.translate_floats` for details.
+ """
+
+ back_azimuth: float # this is required for mypy, don't know why that is
+ (longitude, latitude), back_azimuth = translate_floats(
+ self.longitude, self.latitude, direction, distance
+ )
+
+ new_location = PolarLocation(
+ longitude=longitude,
+ latitude=latitude,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ )
+
+ return new_location, back_azimuth
+
+ def distance(self, other: "PolarLocation", approximate: bool = False) -> float:
+ """Calculate the horizontal geodesic distance to another location in meters, assumes degrees.
+
+ This assumes an ellipsoidal earth and converges for any pair of points on earth.
+ It is accurate to round-off and uses *geographiclib* (https://pypi.org/project/geographiclib/)
+ via *geopy* (https://pypi.org/project/geopy/).
+
+ The faster *great-circle distance* can also be used by setting *approximate=True*.
+ It assumes only a spherical earth and is guaranteed to give a result for any pair of points.
+ It is wrong by up to 0.5% and based on *geopy*. It is advised to use the exact solution unless you
+ know what you are doing.
+
+ See also:
+ - https://en.wikipedia.org/wiki/Geodesics_on_an_ellipsoid
+ - https://en.wikipedia.org/wiki/Great-circle_distance
+ - https://en.wikipedia.org/wiki/Geographical_distance
+
+ Args:
+ other: The location to measure the distance to in degrees
+ approximate: Whether to use a faster approximation or not (default: ``False``)
+
+ Returns:
+ The distance to the other point in meters
+ """
+
+ # input as latitude, longitude
+ this = (self.latitude, self.longitude)
+ that = (other.latitude, other.longitude)
+
+ if approximate:
+ distance = GreatCircleDistance(this, that).meters
+ else:
+ distance = GeodesicDistance(this, that).meters
+
+ # Geopy is not typed as of now
+ return cast(float, distance)
+
+ @property
+ def __geo_interface__(self) -> Dict[str, Any]:
+ return {"type": "Point", "coordinates": (self.longitude, self.latitude)}
+
+ def __eq__(self, other: Any) -> bool:
+ return self.equals_exact(other, tolerance=0.0)
+
+ def equals(self, other: Any) -> bool:
+ """Determines whether the given ``other`` object exactly equals this one.
+
+ This function mimics :meth:`shapely.geometry.base.BaseGeometry.equals`:
+ "Refers to point-set equality (or topological equality), and is equivalent to
+ ``self.within(other) and self.contains(other)``."
+
+ Args:
+ other: The object to compare to
+
+ Returns:
+ Whether this and the other object are exactly equal
+ """
+ # The above docstring is also copied by PolarPolygon and PolarRoute
+
+ return self.equals_exact(other, 0.0)
+
+ def equals_exact(self, other: Any, tolerance: float) -> bool:
+ """Determines whether the given ``other`` object equals this one.
+
+ This function mimics :meth:`shapely.geometry.base.BaseGeometry.equals_exact`:
+ "Refers to coordinate equality, which requires coordinates to be equal
+ and in the same order for all components of a geometry."
+
+ Args:
+ other: The object to compare to
+ tolerance: The absolute deviation in meters that is tolerated on the latitude and longitude values
+
+ Returns:
+ Whether this and the ``other`` object are (nearly) equal
+ """
+ # The above docstring is also copied by PolarPolygon and PolarRoute
+
+ return (
+ isinstance(other, PolarLocation)
+ and Geospatial.__eq__(self, other)
+ and self.distance(other) <= tolerance
+ )
+
+ def __repr__(self) -> str:
+ # we leave out self._projection due to performance reasons and because it is redundant
+ return f"PolarLocation(latitude={self.latitude}, longitude={self.longitude}{self._repr_extras})"
+
+
+class CartesianLocation(Geospatial, Point):
+ """A point in the cartesian plane based on local coordinates with an optional global reference.
+
+ Examples:
+ You can simply create a cartesian location like this, where coordinates are in meters:
+
+ >>> location_a = CartesianLocation(east=10, north=-20)
+ >>> location_b = CartesianLocation(east=-30, north=0)
+ >>> distance = location_a.distance(location_b)
+ >>> distance # doctest: +ELLIPSIS
+ 44.721...
+
+ Keep in mind that locations (like all other cartesian geometries) are immutable due to the underlying
+ Shapely library:
+
+ >>> location_a.x = 5.0
+ Traceback (most recent call last):
+ ...
+ AttributeError: can't set attribute
+
+ The attributes ``east`` and ``north`` are provided as aliases for ``x`` and ``y``:
+
+ >>> assert location_a.x == location_a.east
+ >>> assert location_a.y == location_a.north
+
+ You can also project them to a polar coordinate.
+ To do this, one must only provide a reference point ``origin``, either when constructing the location
+ or when calling :meth:`~to_polar`:
+
+ >>> reference = PolarLocation(latitude=50, longitude=30)
+ >>> location_a.origin = reference
+ >>> location_b.origin = reference
+ >>> location_a.to_polar().distance(location_b.to_polar()) # doctest: +ELLIPSIS
+ 44.721...
+
+ As any :class:`~CartesianLocation` also inherits from :class:`shapely.geometry.Point`,
+ we can also use :mod:`shapely` methods
+ (see `the Shapely docs `__).
+ For example, we can inflate the point using ``buffer()``.
+ Mind though, that this will return a :mod:`shapely` geometry and not a :mod:`pyrate.plan.geometry`
+ object.
+
+ >>> buffered = location_a.buffer(10)
+ >>> buffered.geometryType()
+ 'Polygon'
+
+ Thus, we need to convert it back to a pyrate object like so (keep in mind that we now need a polygon):
+
+ >>> from pyrate.plan.geometry.polygon import CartesianPolygon
+ >>> buffered_pyrate = CartesianPolygon.from_shapely(buffered)
+ >>> buffered.equals(buffered_pyrate)
+ True
+
+ Args:
+ east: The easting of the location in meters
+ north: The northing of the location in meters
+ origin: A reference that can be used to project this cartesian representation (back)
+ into a polar one
+ location_type: The type of this location
+ name: An optional name of this location
+ identifier: An optional unique identifier for this object, in :math:`[0, 2**63)`, i.e. 64 signed bits
+ """
+
+ def __init__( # pylint: disable=too-many-arguments
+ self,
+ east: float,
+ north: float,
+ origin: Optional["PolarLocation"] = None,
+ location_type: LocationType = LocationType.UNKNOWN,
+ name: Optional[str] = None,
+ identifier: Optional[int] = None,
+ ) -> None:
+ # Set attribute
+ self.origin = origin
+
+ # Typing hints (actually defined by shapely)
+ self.x: float
+ self.y: float
+
+ # Initialize the super classes
+ Point.__init__(self, east, north)
+ Geospatial.__init__(self, location_type=location_type, name=name, identifier=identifier)
+
+ #: Named access to the internal shapely point ``x``. Readonly.
+ east: float = Point.x
+
+ #: Named access to the internal shapely point ``y``. Readonly.
+ north: float = Point.y
+
+ def to_polar(self, origin: Optional["PolarLocation"] = None) -> PolarLocation:
+ """Computes the polar representation of this point.
+
+ Args:
+ origin: The global reference to be used for back-projection, must be set if and only if
+ :attr:`~pyrate.plan.geometry.CartesianLocation.origin` is ``None``
+
+ Returns:
+ The global, polar representation of this point
+ """
+
+ if origin is None:
+ if self.origin is None:
+ raise ValueError("need to give an explicit origin when the instance does not have one")
+ origin = self.origin
+ elif self.origin is not None:
+ raise ValueError("provided an explicit origin while the instance already has one")
+
+ # convert to cartesian
+ longitude, latitude = origin.projection(self.east, self.north, inverse=True)
+
+ return PolarLocation(
+ longitude=longitude,
+ latitude=latitude,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ )
+
+ @classmethod
+ def from_shapely(cls, point: Point, *args, **kwargs) -> "CartesianLocation":
+ """Create a cartesian location from a shapely point.
+
+ Args:
+ point: A shapely point
+ *args: Positional arguments to be passed to :class:`~CartesianLocation`
+ **kwargs: Keyword arguments to be passed to :class:`~CartesianLocation`
+
+ Returns:
+ The cartesian location created from the given geometry and other parameters
+ """
+ return cls(point.x, point.y, *args, **kwargs)
+
+ def translate(self, direction: float, distance: float) -> Tuple["CartesianLocation", ndarray]:
+ """Translates this location.
+
+ Args:
+ direction: The direction angle in degrees (``0`` is north, clockwise)
+ distance: The distance to translate by in meters
+
+ Returns:
+ The translated location and the translation vector ``(x_offset, y_offset)`` in meters
+ that can be used to reconstruct the original location
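+
+        Example:
+            A small sketch; the translation happens in the flat cartesian plane::
+
+                moved, offset = CartesianLocation(east=0.0, north=0.0).translate(direction=90.0, distance=10.0)
+                # moved is at roughly (east=10.0, north=0.0) and offset is roughly [-10.0, 0.0]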
+ """
+
+ x_offset = sin(radians(direction)) * distance
+ y_offset = cos(radians(direction)) * distance
+
+ return (
+ CartesianLocation.from_shapely(
+ translate(Point(self.east, self.north), xoff=x_offset, yoff=y_offset),
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ origin=self.origin,
+ ),
+ array([-x_offset, -y_offset]),
+ )
+
+ @property
+ def __geo_interface__(self) -> Dict[str, Any]:
+ return {"type": "Point", "coordinates": (self.east, self.north)}
+
+ def __copy__(self) -> "CartesianLocation":
+ return CartesianLocation(
+ east=self.east,
+ north=self.north,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ origin=self.origin,
+ )
+
+ def __deepcopy__(self, memodict: Dict) -> "CartesianLocation":
+ return CartesianLocation(
+ east=deepcopy(self.east, memodict),
+ north=deepcopy(self.north, memodict),
+ location_type=deepcopy(self.location_type, memodict),
+ name=deepcopy(self.name, memodict),
+ identifier=deepcopy(self.identifier, memodict),
+ origin=deepcopy(self.origin, memodict),
+ )
+
+ def __eq__(self, other: Any) -> bool:
+ return self.equals_exact(other, tolerance=0.0)
+
+ # Inherits the docstring
+ def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
+ return (
+ isinstance(other, CartesianLocation)
+ and Point.equals(self, other)
+ and Geospatial.__eq__(self, other)
+ and self.origin == other.origin
+ )
+
+ # Inherits the docstring
+ def equals_exact( # pylint: disable=missing-function-docstring
+ self, other: Any, tolerance: float
+ ) -> bool:
+ return (
+ isinstance(other, CartesianLocation)
+ and Point.equals_exact(self, other, tolerance)
+ and Geospatial.__eq__(self, other)
+ and self.origin == other.origin
+ )
+
+ def __repr__(self) -> str:
+ origin = f", origin={self.origin}" if self.origin is not None else ""
+ return f"CartesianLocation(east={self.east}, north={self.north}{origin}{self._repr_extras})"
+
+ def __str__(self) -> str:
+ # this is required to override shapely.geometry.Point.__str__()
+ return self.__repr__()
diff --git a/pyrate/pyrate/plan/geometry/polygon.py b/pyrate/pyrate/plan/geometry/polygon.py
new file mode 100644
index 0000000..93d62b0
--- /dev/null
+++ b/pyrate/pyrate/plan/geometry/polygon.py
@@ -0,0 +1,624 @@
+"""This module implements abstractions for geospatial, polygonal shapes in WGS84 and local cartesian
+coordinates using shapely.
+
+Two polygons are ``==`` if and only if they are equal according to ``equals_exact()``.
+"""
+
+# Python standard library
+from copy import deepcopy
+from math import cos
+from math import radians
+from math import sin
+
+# Typing
+from typing import Any
+from typing import cast
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Tuple
+
+# Data modelling
+from numpy import array
+from numpy import isfinite
+from numpy import ndarray
+from shapely.affinity import translate
+from shapely.geometry import Polygon
+
+# Geospatial basis
+from .geospatial import Geospatial
+from .geospatial import LocationType
+
+# Geospatial helpers
+from .helpers import mean_coordinate
+from .helpers import translate_numpy
+from .helpers import WGS84_PYPROJ_GEOD
+
+# Location representation
+from .location import CartesianLocation
+from .location import PolarLocation
+
+
+class PolarPolygon(Geospatial):
+
+ """A polygon based on WGS84 coordinates.
+
+ An object with only a single point may be represented by a polygon with three times the same location.
+
+ Args:
+ locations: The points that make up this polygon; see :attr:`~.locations`
+ location_type: The type of this polygon
+ name: An optional name of this polygon
+ identifier: The polygon's optional unique identifier, in :math:`[0, 2**63)`, i.e. 64 signed bits
+ """
+
+ def __init__(
+ self,
+ locations: List[PolarLocation],
+ location_type: LocationType = LocationType.UNKNOWN,
+ name: Optional[str] = None,
+ identifier: Optional[int] = None,
+ ) -> None:
+ # Type hints
+ self._locations: List[PolarLocation]
+
+ # Attributes setup
+ self.locations = locations
+ self._mean_location: Optional[PolarLocation] = None
+
+ super().__init__(location_type=location_type, name=name, identifier=identifier)
+
+ @property
+ def locations(self) -> List[PolarLocation]:
+ """The points that make up this polygon.
+
+ Getter:
+ At least three points are returned.
+
+ Setter:
+ The list is closed if not already done, such that the first and last points in the list always
+ match exactly. Raises an :class:`AssertionError` if fewer than three points are given.
+ """
+
+ return self._locations
+
+ @locations.setter
+ def locations(self, locations: List[PolarLocation]) -> None:
+ assert len(locations) >= 3, "a polygon must contain at least three points"
+
+ # close the ring as shapely would do it
+ # comparison is done by exact comparison
+ if (
+ locations[0].latitude != locations[-1].latitude
+ or locations[0].longitude != locations[-1].longitude
+ ):
+ locations.append(locations[0])
+
+ self._locations = locations
+ self._mean_location = None
+
+ def to_cartesian(self, origin: PolarLocation) -> "CartesianPolygon":
+ """Projects this polygon to a cartesian representation according to the given global reference.
+
+ Args:
+ origin: The reference point by which to project onto the local tangent plane
+
+ Returns:
+ The cartesian representation of this polygon with the given reference point being set
+ """
+
+ # convert to cartesian
+ coordinates = self.to_numpy()
+ coordinates[:, 0], coordinates[:, 1] = origin.projection(coordinates[:, 0], coordinates[:, 1])
+
+ return CartesianPolygon.from_numpy(
+ coordinates,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ origin=origin,
+ )
+
+ def distance_to_vertices(self, location: PolarLocation, approximate: bool = False) -> float:
+ """Computes the distance of the given location to the nearest vertex of this polygon.
+
+ Args:
+ location: The location to compute the distance from
+ approximate: Whether to use a less precise, faster method or not
+ """
+
+ return min([location.distance(loc, approximate) for loc in self.locations])
+
+ @property
+ def area(self) -> float:
+ """Returns the area of the polygon in :math:`meters^2`.
+
+ Only simple polygons are supported, i.e. not self-intersecting ones.
+ See :meth:`pyproj.Geod.polygon_area_perimeter` for the implementation.
+ The returned value is always non-negative.
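+
+        Example:
+            A rough sketch of the expected magnitude; a one-by-one degree "square" at the equator covers
+            roughly 1.2e10 square meters (about 12 300 km²)::
+
+                PolarPolygon([
+                    PolarLocation(0.0, 0.0), PolarLocation(0.0, 1.0),
+                    PolarLocation(1.0, 1.0), PolarLocation(1.0, 0.0),
+                ]).area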
+ """
+
+ _, area = WGS84_PYPROJ_GEOD.polygon_area_perimeter(
+ lons=[location.longitude for location in self.locations],
+ lats=[location.latitude for location in self.locations],
+ radians=False,
+ )
+ # pyproj is not typed as of now
+ # the returned area is signed with the ordering of the points
+ return abs(area)
+
+ @property
+ def is_valid(self) -> bool:
+ """Whether this geometry is valid according to :mod:`shapely`. Quite expensive, not cached.
+
+ Invalid ones might cross themselves or have zero area. Other tools might still refuse it, like *GEOS*.
+ """
+
+ return cast(bool, self.to_cartesian(self.mean).is_valid)
+
+ def simplify(self, tolerance: float, preserve_topology: bool = True) -> "PolarPolygon":
+ """Creates a simplified copy analogous to :meth:`shapely.geometry.Polygon.simplify`.
+
+ The simplification is achieved by reducing its number of vertices in a way that least deforms the
+ shape.
+
+ Args:
+ tolerance: This is passed to :meth:`shapely.geometry.Polygon.simplify`:
+ "All points in the simplified object will be within the tolerance distance of the
+ original geometry."
+ preserve_topology: This is passed to :meth:`shapely.geometry.Polygon.simplify`:
+ "By default a slower algorithm is used that preserves topology."
+
+ Returns:
+ A simplified version of the polygon with the same other attributes
+ """
+
+ projection_center = self.mean
+ cartesian = self.to_cartesian(projection_center)
+
+ simplified = cartesian.simplify(tolerance, preserve_topology)
+ coords = array(simplified.exterior.xy).T # this is the fastest known method
+
+ result_cartesian = CartesianPolygon.from_numpy(
+ coords, location_type=self.location_type, name=self.name, identifier=self.identifier
+ )
+ return result_cartesian.to_polar(projection_center)
+
+ def translate(self, direction: float, distance: float) -> Tuple["PolarPolygon", ndarray]:
+ """Translates this location and returns the new polygon and back-azimuth.
+
+ See :func:`pyrate.plan.geometry.helpers.translate_numpy` for details.
+ """
+
+ new_coordinates, back_azimuth_array = translate_numpy(self.to_numpy(), direction, distance)
+
+ new_polygon = PolarPolygon.from_numpy(
+ new_coordinates,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ )
+
+ return new_polygon, back_azimuth_array
+
+ def to_numpy(self) -> ndarray:
+ """Converts the coordinates defining this polygon into a :class:`numpy.ndarray`.
+
+ Returns:
+ An array with shape ``(number of locations, 2)``, where each location is represented by a pair of
+ ``(longitude, latitude)``, each in degrees.
+
+ See Also:
+ :meth:`~from_numpy`
+ """
+
+ return array(
+ [(location.longitude, location.latitude) for location in self.locations],
+ dtype="float64",
+ order="C",
+ )
+
+ @classmethod
+ def from_numpy(cls, data: ndarray, *args, **kwargs) -> "PolarPolygon":
+ """Create a polar polygon from a numpy representation.
+
+ Args:
+ data: An array with shape ``(number of locations, 2)``, where each location is represented by a
+ pair of ``(longitude, latitude)``, each in degrees.
+ args: positional arguments to be passed to :class:`~PolarPolygon`
+ kwargs: keyword arguments to be passed to :class:`~PolarPolygon`
+
+ Returns:
+ The polar polygon created from the given coordinates and other parameters
+
+ Raises:
+ AssertionError: If the shape of ``data`` is invalid or contains non-finite values
+
+ See Also:
+ :meth:`~to_numpy`
+ """
+
+ assert len(data.shape) == 2
+ assert data.shape[1] == 2
+ assert isfinite(data).all(), "Invalid values in CartesianPolygon.from_numpy()"
+
+ return cls([PolarLocation(latitude=lat, longitude=lon) for (lon, lat) in data], *args, **kwargs)
+
+ @property
+ def mean(self) -> PolarLocation:
+ """Computes a reasonable mean location of the polygon, if possible. The result is cached.
+
+ Raises:
+ ValueError: If no meaningful mean (of the longitudes) can be computed.
+ See :func:`pyrate.plan.geometry.helpers.mean_angle`.
+ """
+
+ if self._mean_location is None:
+ coordinates = self.to_numpy()
+ latitude, longitude = mean_coordinate(latitudes=coordinates[:, 1], longitudes=coordinates[:, 0])
+ name = f"{self.name} - mean" if self.name else "mean"
+ self._mean_location = PolarLocation(latitude, longitude, name=name)
+
+ return self._mean_location
+
+ @property
+ def __geo_interface__(self) -> Dict[str, Any]:
+ return {
+ "type": "Polygon",
+ "coordinates": [
+ # the inner array is only the exterior ring,
+ # and we don't have an interior one
+ [(location.longitude, location.latitude) for location in self.locations]
+ ],
+ }
+
+ def __eq__(self, other: Any) -> bool:
+ return self.equals_exact(other, tolerance=0.0)
+
+ def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
+ return (
+ isinstance(other, PolarPolygon)
+ and self.to_cartesian(self.mean).equals(other.to_cartesian(self.mean))
+ and Geospatial.__eq__(self, other)
+ )
+
+ equals.__doc__ = PolarLocation.equals.__doc__
+
+ def equals_exact(self, other: Any, tolerance: float) -> bool:
+ # pylint: disable=missing-function-docstring
+
+ return (
+ isinstance(other, PolarPolygon)
+ and self.to_cartesian(self.mean).equals_exact(other.to_cartesian(self.mean), tolerance)
+ and Geospatial.__eq__(self, other)
+ )
+
+ equals_exact.__doc__ = PolarLocation.equals_exact.__doc__
+
+ def equals_almost_congruent(
+ self, other: Any, rel_tolerance: float = 1e-6, abs_tolerance: float = 1e-6
+ ) -> bool:
+ """Returns whether two objects are approximately congruent and their attributes equal exactly.
+
+ See :meth:`~almost_congruent` for details on the specific definition of congruence and the tolerances.
+
+ Args:
+ other: The object to compare with
+ rel_tolerance: The relative tolerance (relative to the larger area)
+ abs_tolerance: The absolute area of tolerance in square meters
+
+ Returns:
+ Whether this and the ``other`` polygon are approximately congruent and all attributes are equal.
+ Returns ``False`` if ``other`` is not a :class:`~PolarPolygon`.
+ """
+
+ return (
+ isinstance(other, PolarPolygon)
+ and self.almost_congruent(other, rel_tolerance=rel_tolerance, abs_tolerance=abs_tolerance)
+ and Geospatial.__eq__(self, other)
+ )
+
+ def almost_congruent(
+ self, other: "PolarPolygon", rel_tolerance: float = 1e-6, abs_tolerance: float = 1e-6
+ ) -> bool:
+ """Returns whether two polygons are approximately congruent while allowing for small differences.
+
+ This function is not directly part of shapely and is somewhat costly to compute. It has to:
+ - Project both polygons to cartesian coordinates (to continue with shapely calculations).
+ - Calculate the area of the symmetric difference between this and the other polygon.
+ - Calculate the area of both individual polygons.
+
+ The arguments follow the style of :func:`math.isclose`.
+
+ Args:
+ other: The polygon to compare with
+ rel_tolerance: The relative tolerance (relative to the larger area)
+ abs_tolerance: The absolute area of tolerance in square meters
+
+ Returns:
+ Whether this and the other polygon are approximately congruent. The larger one of the relative
+ and absolute tolerance is used.
+ """
+
+ return self.to_cartesian(self.mean).almost_congruent(
+ other.to_cartesian(self.mean), rel_tolerance=rel_tolerance, abs_tolerance=abs_tolerance
+ )
+
+ def __repr__(self) -> str:
+ locations = ", ".join(str(loc) for loc in self.locations)
+
+ return f"PolarPolygon(locations=[{locations}]{self._repr_extras})"
+
+
+class CartesianPolygon(Geospatial, Polygon):
+ """A cartesian polygon based on local coordinates with an optional global reference.
+
+ Note:
+ For the sake of simplicity and performance, this class does not store the given
+ :class:`~pyrate.plan.geometry.location.CartesianLocation` instances directly,
+ but only their coordinates.
+ Thus, when reading back attributes like ``origin``, ``name``, etc. of the locations they are derived
+ from the polygon instance and not from the individual locations.
+
+ Args:
+ locations: The list of locations that this shape consists of; see :attr:`~.locations`
+ location_type: The type of this polygon
+ name: The name of this polygon
+ identifier: The polygon's optional unique identifier, in :math:`[0, 2**63)`, i.e. 64 signed bits
+ origin: A reference that can be used to project this cartesian representation (back)
+ into a polar one
+ """
+
+ def __init__( # pylint: disable=too-many-arguments
+ self,
+ locations: List[CartesianLocation],
+ location_type: LocationType = LocationType.UNKNOWN,
+ name: Optional[str] = None,
+ identifier: Optional[int] = None,
+ origin: Optional[PolarLocation] = None,
+ ) -> None:
+ self.origin = origin
+
+ if isinstance(locations, list):
+ Polygon.__init__(self, [location.coords[0] for location in locations])
+ else:
+ # this is required for an efficient implementation of CartesianPolygon.from_numpy
+ # we do not add this possibility to the type signature to make people use from_numpy().
+ Polygon.__init__(self, locations)
+
+ Geospatial.__init__(self, location_type=location_type, name=name, identifier=identifier)
+
+ @property
+ def locations(self) -> List[CartesianLocation]:
+ """Get the locations of this polygon. See the class description for caveats."""
+
+ return [
+ CartesianLocation(
+ x,
+ y,
+ origin=self.origin,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ )
+ for (x, y) in self.exterior.coords
+ ]
+
+ def to_polar(self, origin: Optional[PolarLocation] = None) -> PolarPolygon:
+ """Computes the polar representation of this shape.
+
+ Args:
+ origin: The global reference to be used for back-projection, must be set if and only if
+ :attr:`~pyrate.plan.geometry.CartesianPolygon.origin` is ``None``
+
+ Returns:
+ The global, polar representation of this geometry
+ """
+
+ if origin is None:
+ if self.origin is None:
+ raise ValueError("need to give an explicit origin when the instance does not have one")
+ origin = self.origin
+ elif self.origin is not None:
+ raise ValueError("provided an explicit origin while the instance already has one")
+
+ # convert to cartesian
+ coordinates = self.to_numpy()
+ coordinates[:, 0], coordinates[:, 1] = origin.projection(
+ coordinates[:, 0], coordinates[:, 1], inverse=True
+ )
+
+ return PolarPolygon.from_numpy(
+ coordinates, location_type=self.location_type, name=self.name, identifier=self.identifier
+ )
+
+ def to_numpy(self) -> ndarray:
+ """Converts the coordinates defining this polygon into a :class:`numpy.ndarray`.
+
+ Returns:
+ An array with shape ``(number of locations, 2)``, where each location is represented by a pair of
+ ``(east, north)``, each in meters.
+
+ See Also:
+ :meth:`~from_numpy`
+ """
+
+ return array(self.exterior.coords, dtype="float64", order="C")
+
+ @classmethod
+ def from_numpy(cls, data: ndarray, *args, **kwargs) -> "CartesianPolygon":
+ """Create a cartesian polygon from a numpy representation.
+
+ Args:
+ data: An array with shape ``(number of locations, 2)``, where each location is represented by a
+ pair of ``(east, north)``, each in meters.
+ *args: Positional arguments to be passed to :class:`~CartesianPolygon`
+ **kwargs: Keyword arguments to be passed to :class:`~CartesianPolygon`
+
+ Returns:
+ The cartesian polygon created from the given coordinates and other parameters
+
+ Raises:
+ AssertionError: If the shape of ``data`` is invalid or contains non-finite values
+
+ See Also:
+ :meth:`~to_numpy`
+ """
+
+ assert len(data.shape) == 2
+ assert data.shape[1] == 2
+ assert isfinite(data).all(), "Invalid values in PolarPolygon.from_numpy()"
+
+ return cls(data, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_shapely(cls, polygon: Polygon, *args, **kwargs) -> "CartesianPolygon":
+ """Create a cartesian polygon from a shapely polygon.
+
+ Args:
+ polygon: A shapely polygon
+ *args: Positional arguments to be passed to :class:`~CartesianPolygon`
+ **kwargs: Keyword arguments to be passed to :class:`~CartesianPolygon`
+
+ Returns:
+ The cartesian polygon created from the given geometry and other parameters
+ """
+ return cls.from_numpy(array(polygon.exterior.xy).T, *args, **kwargs)
+
+ def translate(self, direction: float, distance: float) -> Tuple["CartesianPolygon", ndarray]:
+ """Translates this polygon.
+
+ Args:
+ direction: The direction angle in degrees (``0`` is north, clockwise)
+ distance: The distance to translate by in meters
+
+ Returns:
+ The translated polygon and the translation vector ``(x_offset, y_offset)`` in meters
+ that can be used to reconstruct the original polygon
+ """
+
+ x_offset = sin(radians(direction)) * distance
+ y_offset = cos(radians(direction)) * distance
+
+ return (
+ CartesianPolygon.from_shapely(
+ translate(Polygon(self.to_numpy()), xoff=x_offset, yoff=y_offset),
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ origin=self.origin,
+ ),
+ array([-x_offset, -y_offset]),
+ )
+
+ @property
+ def __geo_interface__(self) -> Dict[str, Any]:
+ return {
+ "type": "Polygon",
+ "coordinates": [
+ # the inner array is only the exterior ring,
+ # and we don't have an interior one
+ list(self.exterior.coords),
+ ],
+ }
+
+ def __copy__(self) -> "CartesianPolygon":
+ return CartesianPolygon(
+ locations=self.locations,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ origin=self.origin,
+ )
+
+ def __deepcopy__(self, memodict: Dict) -> "CartesianPolygon":
+ return CartesianPolygon(
+ locations=deepcopy(self.locations, memodict),
+ location_type=deepcopy(self.location_type, memodict),
+ name=deepcopy(self.name, memodict),
+ identifier=deepcopy(self.identifier, memodict),
+ origin=deepcopy(self.origin, memodict),
+ )
+
+ def __eq__(self, other: Any) -> bool:
+ return self.equals_exact(other, tolerance=0.0)
+
+ # Inherits the docstring
+ def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
+ return (
+ isinstance(other, CartesianPolygon)
+ and Polygon.equals(self, other)
+ and Geospatial.__eq__(self, other)
+ and self.origin == other.origin
+ )
+
+ # Inherits the docstring
+ def equals_exact( # pylint: disable=missing-function-docstring
+ self, other: Any, tolerance: float
+ ) -> bool:
+ return (
+ isinstance(other, CartesianPolygon)
+ and Polygon.equals_exact(self, other, tolerance)
+ and Geospatial.__eq__(self, other)
+ and self.origin == other.origin
+ )
+
+ def equals_almost_congruent(
+ self, other: Any, rel_tolerance: float = 1e-6, abs_tolerance: float = 1e-6
+ ) -> bool:
+ """Returns whether two objects are approximately congruent and their attributes equal exactly.
+
+ See :meth:`~almost_congruent` for details on the specific definition of congruence and the tolerances.
+
+ Args:
+ other: The object to compare with
+ rel_tolerance: The relative tolerance (relative to the larger area)
+ abs_tolerance: The absolute area of tolerance in square meters
+
+ Returns:
+ Whether this and the ``other`` polygon are approximately congruent and all attributes are equal.
+ Returns ``False`` if ``other`` is not a :class:`~CartesianPolygon`.
+ """
+
+ return (
+ isinstance(other, CartesianPolygon)
+ and self.almost_congruent(other, rel_tolerance=rel_tolerance, abs_tolerance=abs_tolerance)
+ and Geospatial.__eq__(self, other)
+ and self.origin == other.origin
+ )
+
+ def almost_congruent(
+ self, other: "CartesianPolygon", rel_tolerance: float = 1e-6, abs_tolerance: float = 1e-6
+ ) -> bool:
+ """Returns whether two polygons are approximately congruent while allowing for small differences.
+
+ This function is not directly part of shapely and is somewhat costly to compute. It has to:
+ - Calculate the area of the symmetric difference between this and the ``other`` polygon.
+ - Calculate the area of both individual polygons.
+
+ The arguments follow the style of :func:`math.isclose`.
+
+ Args:
+ other: The polygon to compare with
+ rel_tolerance: The relative tolerance (relative to the larger area)
+ abs_tolerance: The absolute area of tolerance in square meters
+
+ Returns:
+ Whether this and the ``other`` polygon are approximately congruent. The larger one of the relative
+ and absolute tolerance is used.
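+
+        Example:
+            A small sketch: a 10 m by 10 m square shifted east by one millimetre is still considered
+            congruent when allowing an absolute tolerance of one square metre::
+
+                square = CartesianPolygon.from_numpy(
+                    numpy.array([[0.0, 0.0], [0.0, 10.0], [10.0, 10.0], [10.0, 0.0]])
+                )
+                shifted, _ = square.translate(direction=90.0, distance=0.001)
+                square.almost_congruent(shifted, abs_tolerance=1.0)  # True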
+ """
+
+ rel_tolerance_as_abs: float = max(self.area, other.area) * rel_tolerance
+ tolerance: float = max(rel_tolerance_as_abs, abs_tolerance)
+ difference: float = self.symmetric_difference(other).area
+ return difference <= tolerance
+
+ def __repr__(self) -> str:
+ origin = f", origin={self.origin}" if self.origin is not None else ""
+ locations = ", ".join(f"({x}, {y})" for x, y in self.exterior.coords)
+
+ return f"CartesianPolygon(locations=[{locations}]{origin}{self._repr_extras})"
+
+ def __str__(self) -> str:
+ # this is required to override shapely.geometry.Polygon.__str__()
+ return self.__repr__()
diff --git a/pyrate/pyrate/plan/geometry/route.py b/pyrate/pyrate/plan/geometry/route.py
new file mode 100644
index 0000000..4b1bf55
--- /dev/null
+++ b/pyrate/pyrate/plan/geometry/route.py
@@ -0,0 +1,466 @@
+"""This module implements abstractions for geospatial routes (line strings) in WGS84 and local coordinate
+frames.
+
+Two routes are ``==`` if and only if they are equal according to ``equals_exact()``.
+"""
+
+# Python standard library
+from copy import deepcopy
+from math import cos
+from math import radians
+from math import sin
+
+# Typing
+from typing import Any
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Tuple
+
+# Data modelling
+from numpy import array
+from numpy import isfinite
+from numpy import ndarray
+from shapely.affinity import translate
+from shapely.geometry import LineString
+
+# Geospatial basis
+from .geospatial import Geospatial
+from .geospatial import LocationType
+
+# Geospatial helpers
+from .helpers import mean_coordinate
+from .helpers import translate_numpy
+
+# Location representation
+from .location import CartesianLocation
+from .location import PolarLocation
+
+
+class PolarRoute(Geospatial):
+
+ """A route (line string) based on WGS84 coordinates.
+
+ Note:
+ This class does not yet support simplification as it was not required so far.
+
+ Args:
+ locations: The two or more points that make up this route; see :attr:`~.locations`
+ location_type: The type of this route
+ name: An optional name of this route
+ identifier: The route's optional unique identifier, in :math:`[0, 2**63)`, i.e. 64 signed bits
+ """
+
+ def __init__(
+ self,
+ locations: List[PolarLocation],
+ location_type: LocationType = LocationType.UNKNOWN,
+ name: Optional[str] = None,
+ identifier: Optional[int] = None,
+ ) -> None:
+ # Type hints
+ self._locations: List[PolarLocation]
+
+ # Attributes setup
+ self.locations = locations
+ self._mean_location: Optional[PolarLocation] = None
+
+ super().__init__(location_type=location_type, name=name, identifier=identifier)
+
+ # See Shapely issue
+ if self.length(approximate=True) < 1e-9:
+ raise ValueError(f"(Nearly) zero-length line strings are not allowed by Shapely; got {locations}")
+
+ @property
+ def locations(self) -> List[PolarLocation]:
+ """The points that make up this route.
+
+ Getter:
+ At least two points are returned.
+
+ Setter:
+ Raises an :class:`AssertionError` if fewer than two points are given.
+ """
+
+ return self._locations
+
+ @locations.setter
+ def locations(self, locations: List[PolarLocation]) -> None:
+ assert len(locations) >= 2, "a route must contain at least two points"
+ self._locations = locations
+ self._mean_location = None
+
+ def distance_to_vertices(self, location: PolarLocation, approximate: bool = False) -> float:
+ """Computes the distance of the given ``location`` to the nearest vertex of this route.
+
+ Args:
+ location: The location to compute the distance from
+ approximate: Whether to use a less precise, faster method or not
+ """
+
+ return min([location.distance(loc, approximate) for loc in self.locations])
+
+ def length(self, approximate: bool = False) -> float:
+ """Compute the length of this route from start to end.
+
+ Args:
+ approximate: Whether to use a less precise, faster method or not
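+
+        Example:
+            A rough sketch of the expected magnitude::
+
+                PolarRoute([PolarLocation(0.0, 0.0), PolarLocation(0.0, 1.0)]).length()
+                # roughly 111 km (one degree of longitude along the equator), in meters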
+ """
+
+ return sum([a.distance(b, approximate) for (a, b) in zip(self.locations[:-1], self.locations[1:])])
+
+ def to_cartesian(self, origin: PolarLocation) -> "CartesianRoute":
+ """Projects this route to a cartesian representation according to the given global reference.
+
+ Args:
+ origin: The reference by which to project onto the local tangent plane
+
+ Returns:
+ The cartesian representation of this route with the given reference point being set
+ """
+
+ # convert to cartesian
+ coordinates = self.to_numpy()
+ coordinates[:, 0], coordinates[:, 1] = origin.projection(coordinates[:, 0], coordinates[:, 1])
+
+ return CartesianRoute.from_numpy(
+ coordinates,
+ origin=origin,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ )
+
+ def to_numpy(self) -> ndarray:
+ """Converts the coordinates defining this route into a :class:`numpy.ndarray`.
+
+ Returns:
+ An array with shape ``(number of locations, 2)``, where each location is represented by a pair of
+ ``(longitude, latitude)``, each in degrees.
+
+ See Also:
+ :meth:`~from_numpy`
+ """
+
+ return array(
+ [(location.longitude, location.latitude) for location in self.locations],
+ dtype="float64",
+ order="C",
+ )
+
+ @classmethod
+ def from_numpy(cls, data: ndarray, *args, **kwargs) -> "PolarRoute":
+ """Create a polar route from a numpy representation.
+
+ Args:
+ data: An array with shape ``(number of locations, 2)``, where each location is represented by a
+ pair of ``(longitude, latitude)``, each in degrees.
+ *args: Positional arguments to be passed to :class:`~PolarRoute`
+ **kwargs: Keyword arguments to be passed to :class:`~PolarRoute`
+
+ Returns:
+ The polar route created from the given coordinates and other parameters
+
+ Raises:
+ AssertionError: If the shape of ``data`` is invalid or contains non-finite values
+
+ See Also:
+ :meth:`~to_numpy`
+ """
+
+ assert len(data.shape) == 2
+ assert data.shape[1] == 2
+ assert isfinite(data).all(), "Invalid values in CartesianRoute.from_numpy()"
+
+ return cls([PolarLocation(latitude=lat, longitude=lon) for (lon, lat) in data], *args, **kwargs)
+
+ def translate(self, direction: float, distance: float) -> Tuple["PolarRoute", ndarray]:
+ """Translates this location and returns the new route and back-azimuth.
+
+ See :func:`pyrate.plan.geometry.helpers.translate_numpy` for details.
+ """
+
+ new_coordinates, back_azimuth_array = translate_numpy(self.to_numpy(), direction, distance)
+
+ new_route = PolarRoute.from_numpy(
+ new_coordinates,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ )
+
+ return new_route, back_azimuth_array
+
+ @property
+ def mean(self) -> PolarLocation:
+ """Computes a reasonable mean location of the route, if possible. The result is cached.
+
+ Raises:
+ ValueError: If no meaningful mean (of the longitudes) can be computed.
+ See :func:`pyrate.plan.geometry.helpers.mean_angle`.
+ """
+
+ if self._mean_location is None:
+ coordinates = self.to_numpy()
+ latitude, longitude = mean_coordinate(latitudes=coordinates[:, 1], longitudes=coordinates[:, 0])
+ name = f"{self.name} - mean" if self.name else "mean"
+ self._mean_location = PolarLocation(latitude, longitude, name=name)
+
+ return self._mean_location
+
+ @property
+ def __geo_interface__(self) -> Dict[str, Any]:
+ return {
+ "type": "LineString",
+ "coordinates": [(location.longitude, location.latitude) for location in self.locations],
+ }
+
+ def __eq__(self, other: Any) -> bool:
+ return self.equals_exact(other, tolerance=0.0)
+
+ def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
+ return (
+ isinstance(other, PolarRoute)
+ and self.to_cartesian(self.mean).equals(other.to_cartesian(self.mean))
+ and Geospatial.__eq__(self, other)
+ )
+
+ equals.__doc__ = PolarLocation.equals.__doc__
+
+ def equals_exact(self, other: Any, tolerance: float) -> bool:
+ # pylint: disable=missing-function-docstring
+
+ return (
+ isinstance(other, PolarRoute)
+ and self.to_cartesian(self.mean).equals_exact(other.to_cartesian(self.mean), tolerance)
+ and Geospatial.__eq__(self, other)
+ )
+
+ equals_exact.__doc__ = PolarLocation.equals_exact.__doc__
+
+ def __repr__(self) -> str:
+ locations = ", ".join(str(loc) for loc in self.locations)
+ return f"PolarRoute(locations=[{locations}]{self._repr_extras})"
+
+
+class CartesianRoute(Geospatial, LineString):
+
+ """A cartesian route (line string) in local coordinates, optionally with a global reference point.
+
+ Note:
+ For the sake of simplicity and performance, this class does not store the given
+ :class:`~pyrate.plan.geometry.location.CartesianLocation` instances directly,
+ but only their coordinates.
+ Thus, when reading back attributes like ``origin``, ``name``, etc. of the locations they are derived
+ from the route instance and not from the individual locations.
+
+ Args:
+ locations: The list of two or more locations that this shape consists of; see :attr:`~locations`
+ location_type: The type of this route
+ name: The name of this route
+ identifier: The route's optional unique identifier, in :math:`[0, 2**63)`, i.e. 64 signed bits
+ origin: A reference that can be used to project this cartesian representation (back)
+ into a polar one
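+
+ Examples:
+ A minimal usage sketch (the coordinates are hypothetical ``(x, y)`` values in meters and only
+ serve as an illustration):
+
+ >>> route = CartesianRoute([CartesianLocation(0.0, 0.0), CartesianLocation(100.0, 50.0)])
+ >>> route.to_numpy().shape
+ (2, 2)
+ >>> len(route.locations)
+ 2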
+ """
+
+ def __init__( # pylint: disable=too-many-arguments
+ self,
+ locations: List[CartesianLocation],
+ location_type: LocationType = LocationType.UNKNOWN,
+ name: Optional[str] = None,
+ identifier: Optional[int] = None,
+ origin: Optional[PolarLocation] = None,
+ ) -> None:
+ # Store attributes
+ self.origin = origin
+
+ if isinstance(locations, list):
+ LineString.__init__(self, [location.coords[0] for location in locations])
+ else:
+ # this is required for an efficient implementation of CartesianRoute.from_numpy
+ # we do not add this possibility to the type signature to make people use from_numpy().
+ LineString.__init__(self, locations)
+
+ Geospatial.__init__(self, location_type=location_type, name=name, identifier=identifier)
+
+ # See Shapely issue
+ if self.length < 1e-9:
+ raise ValueError(f"(Nearly) zero-length line strings are not allowed by Shapely; got {locations}")
+
+ @property
+ def locations(self) -> List[CartesianLocation]:
+ """Get the locations of this route. See the class description for caveats."""
+
+ return [
+ CartesianLocation(
+ x,
+ y,
+ origin=self.origin,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ )
+ for (x, y) in self.coords
+ ]
+
+ def to_polar(self, origin: Optional[PolarLocation] = None) -> PolarRoute:
+ """Computes the polar representation of this route.
+
+ Args:
+ origin: The global reference to be used for back-projection, must be set if and only if
+ :attr:`~origin` is ``None``
+
+ Returns:
+ The global, polar representation of this route
+ """
+
+ if origin is None:
+ if self.origin is None:
+ raise ValueError("need to give an explicit origin when the instance does not have one")
+ origin = self.origin
+ elif self.origin is not None:
+ raise ValueError("provided an explicit origin while the instance already has one")
+
+ # convert to cartesian
+ coordinates = self.to_numpy()
+ coordinates[:, 0], coordinates[:, 1] = origin.projection(
+ coordinates[:, 0], coordinates[:, 1], inverse=True
+ )
+
+ return PolarRoute.from_numpy(
+ coordinates, location_type=self.location_type, name=self.name, identifier=self.identifier
+ )
+
+ def to_numpy(self) -> ndarray:
+ """Converts the coordinates defining this route into a :class:`numpy.ndarray`.
+
+ Returns:
+ An array with shape ``(number of locations, 2)``, where each location is represented by a pair of
+ ``(x, y)`` coordinates, each in meters.
+
+ See Also:
+ :meth:`~from_numpy`
+ """
+
+ return array(self.coords, dtype="float64", order="C")
+
+ @classmethod
+ def from_numpy(cls, data: ndarray, *args, **kwargs) -> "CartesianRoute":
+ """Create a cartesian route from a numpy representation.
+
+ Args:
+ data: An array with shape ``(number of locations, 2)``, where each location is represented by a
+ pair of ``(x, y)`` coordinates, each in meters.
+ *args: positional arguments to be passed to :class:`~CartesianRoute`
+ **kwargs: keyword arguments to be passed to :class:`~CartesianRoute`
+
+ Returns:
+ The cartesian route created from the given coordinates and other parameters
+
+ Raises:
+ AssertionError: If the shape of ``data`` is invalid or contains non-finite values
+
+ See Also:
+ :meth:`~to_numpy`
+ """
+
+ assert len(data.shape) == 2
+ assert data.shape[1] == 2
+ assert isfinite(data).all(), "Invalid values in PolarRoute.from_numpy()"
+
+ return cls(data, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_shapely(cls, line_string: LineString, *args, **kwargs) -> "CartesianRoute":
+ """Create a cartesian route from a shapely line string.
+
+ Args:
+ line_string: A shapely line_string
+ *args: Positional arguments to be passed to :class:`~CartesianRoute`
+ **kwargs: Keyword arguments to be passed to :class:`~CartesianRoute`
+
+ Returns:
+ The cartesian route created from the given geometry and other parameters
+ """
+ return cls.from_numpy(array(line_string.xy).T, *args, **kwargs)
+
+ def translate(self, direction: float, distance: float) -> Tuple["CartesianRoute", ndarray]:
+ """Translates this route.
+
+ Args:
+ direction: The direction angle in degrees (``0`` is north, clockwise)
+ distance: The distance to translate in meters
+
+ Returns:
+ The translated route and the translation vector ``(x_offset, y_offset)`` in meters
+ that can be used to reconstruct the original route
+ """
+
+ x_offset = sin(radians(direction)) * distance
+ y_offset = cos(radians(direction)) * distance
+
+ return (
+ CartesianRoute.from_shapely(
+ translate(LineString(self.to_numpy()), xoff=x_offset, yoff=y_offset),
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ origin=self.origin,
+ ),
+ array([-x_offset, -y_offset]),
+ )
+
+ @property
+ def __geo_interface__(self) -> Dict[str, Any]:
+ return {"type": "LineString", "coordinates": self.coords}
+
+ def __copy__(self) -> "CartesianRoute":
+ return CartesianRoute(
+ locations=self.locations,
+ location_type=self.location_type,
+ name=self.name,
+ identifier=self.identifier,
+ origin=self.origin,
+ )
+
+ def __deepcopy__(self, memodict: Dict) -> "CartesianRoute":
+ return CartesianRoute(
+ locations=deepcopy(self.locations, memodict),
+ location_type=deepcopy(self.location_type, memodict),
+ name=deepcopy(self.name, memodict),
+ identifier=deepcopy(self.identifier, memodict),
+ origin=deepcopy(self.origin, memodict),
+ )
+
+ def __eq__(self, other: Any) -> bool:
+ return self.equals_exact(other, tolerance=0.0)
+
+ # Inherits the docstring
+ def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
+ return (
+ isinstance(other, CartesianRoute)
+ and LineString.equals(self, other)
+ and Geospatial.__eq__(self, other)
+ and self.origin == other.origin
+ )
+
+ # Inherits the docstring
+ def equals_exact( # pylint: disable=missing-function-docstring
+ self, other: Any, tolerance: float
+ ) -> bool:
+ return (
+ isinstance(other, CartesianRoute)
+ and LineString.equals_exact(self, other, tolerance)
+ and Geospatial.__eq__(self, other)
+ and self.origin == other.origin
+ )
+
+ def __repr__(self) -> str:
+ origin = f", origin={self.origin}" if self.origin is not None else ""
+ locations = ", ".join(f"({x}, {y})" for x, y in self.coords)
+
+ return f"CartesianRoute(locations=[{locations}]{origin}{self._repr_extras})"
+
+ def __str__(self) -> str:
+ # this is required to override shapely.geometry.LineString.__str__()
+ return self.__repr__()
diff --git a/pyrate/pyrate/plan/graph/__init__.py b/pyrate/pyrate/plan/graph/__init__.py
new file mode 100644
index 0000000..7685eea
--- /dev/null
+++ b/pyrate/pyrate/plan/graph/__init__.py
@@ -0,0 +1,26 @@
+"""
+The ``graph`` module provides navigation tools where the world is modeled as a graph.
+This includes generating a graph, assigning properties to nodes of the graph and finding good paths on it.
+
+Two graph models are provided:
+:class:`~pyrate.plan.graph.graph.NavigationGraph` is a generic implementation and in
+:class:`~pyrate.plan.graph.geo_graph.GeoNavigationGraph`, nodes are referenced to geographical locations.
+"""
+
+from .graph import NavigationGraph
+
+from .geo_graph import GeoNavigationGraph
+
+from .generate import angular_distance_for
+from .generate import create_earth_graph
+from .generate import great_circle_distance_distance_for
+from .generate import min_required_frequency
+
+__all__ = [
+ "GeoNavigationGraph",
+ "NavigationGraph",
+ "angular_distance_for",
+ "create_earth_graph",
+ "great_circle_distance_distance_for",
+ "min_required_frequency",
+]
diff --git a/pyrate/pyrate/plan/graph/generate.py b/pyrate/pyrate/plan/graph/generate.py
new file mode 100644
index 0000000..ae26362
--- /dev/null
+++ b/pyrate/pyrate/plan/graph/generate.py
@@ -0,0 +1,350 @@
+"""
+Creates a grid on a globe with vertices and edges. Assumes that the earth is a sphere.
+
+
+Examples:
+ The usual approach is to first determine the maximum allowed distance between two nodes of the graph.
+ Note that small distances (e.g. of less than 100km) might take a while to compute and can create very
+ large graphs. See the script
+ :ref:`earth_graph_frequency_statistics.py `
+ for details on the performance and size of the output.
+
+ >>> maximum_node_distance = 100_000 # in meters
+
+ Then, the minimum required ``frequency`` can be computed from that, which is an integer value determining
+ the granularity of the graph.
+ Higher frequencies result in finer graphs.
+
+ >>> frequency = min_required_frequency(maximum_node_distance, in_meters=True)
+
+ We could have also passed the angular distance in radians (by setting ``in_meters=False``).
+ Alternatively, we can now compute the actual angular distance and great-circle distance in meters from the
+ frequency that we now have.
+ It is in general less than (or equal to) the ``maximum_node_distance``, as the ``frequency`` only allows
+ for integer steps in the granularity.
+
+ >>> angular_distance_for(frequency) # doctest: +ELLIPSIS
+ 0.01559...
+ >>> actual_node_distance = great_circle_distance_distance_for(frequency) # again, in meters
+ >>> actual_node_distance # doctest: +ELLIPSIS
+ 99347.242...
+ >>> actual_node_distance <= maximum_node_distance
+ True
+
+ Now, we can finally generate the :class:`~pyrate.plan.graph.geo_graph.GeoNavigationGraph`.
+ If we wanted to have some progress messages printed, we would pass ``print_status=True``.
+
+ >>> graph = create_earth_graph(frequency)
+ >>> len(graph) # the number of nodes
+ 50412
+ >>> graph.num_edges
+ 151230
+
+ Furthermore, the ``graph`` has some specific attributes set, which result from the icosahedron subdivision
+ approach of the algorithm.
+ These allow for certain optimizations and more convenience when applying algorithms as they do not have
+ to be passed explicitly to other functions.
+
+ >>> graph.node_radius * 2 == actual_node_distance
+ True
+ >>> graph.max_neighbors == 6
+ True
+
+Visualization
+-------------
+
+The following visualization shows how the vertices of an earth graph are spread when plotted using the
+`mercator projection `_.
+The vertices and their area near the equator are very evenly spaced.
+However, their positions and shapes get very distorted at high latitudes (i.e. near the north and south
+poles) `due to the projection `_.
+
+.. image:: vertices_distribution_mercator.png
+ :alt: visualization of the vertices of an earth graph, distorted by the mercator projection
+
+The following plot illustrates the area of influence/responsibility around each vertex.
+Again, notice how the quite evenly spaced vertices are very distorted in this projection at high latitudes.
+The visualization was obtained by computing the fraction of navigable area within the nodes' vicinity.
+Navigability was simply determined by the land being below sea level as a rough approximation.
+
+.. image:: vertices_area_of_influence.png
+ :alt: visualization of the area of influence/responsibility of all vertices of an earth graph,
+ distorted by the mercator projection;
+ obtained by computing the fraction of navigable area within the nodes vicinity
+
+Note:
+ Some methods require the `Antiprism `_ software package (version 0.26+).
+ There is a PPA for it available `here `_.
+ Use ``add-apt-repository ppa:antiprism/ppa && apt install antiprism`` on Ubuntu.
+"""
+
+# Standard library
+from math import ceil
+from math import degrees
+import subprocess
+from warnings import warn
+
+# Typing
+from typing import Tuple
+
+# Scientific
+from numpy import compress
+from numpy import empty
+from numpy import empty_like
+from numpy import float64
+from numpy import genfromtxt
+from numpy import maximum
+from numpy import minimum
+from numpy import ndarray
+from numpy import uint32
+from numpy import unique
+
+# Geometry helpers
+from pyrate.plan.geometry.helpers import cartesian_to_spherical
+from pyrate.plan.geometry.helpers import meters2rad
+from pyrate.plan.geometry.helpers import rad2meters
+
+# Graph implementation
+from pyrate.plan.graph import GeoNavigationGraph
+
+
+def create_earth_graph(frequency: int, print_status: bool = False) -> GeoNavigationGraph:
+ """Returns roughly equally spaced points on the earth, creating an *icoshpere* 🌐.
+
+ This basically works by constructing a geodesic polyhedron based on an icosahedron as a starting point,
+ dividing it (with *Class I*) as much as required by the desired distance and then projecting it onto a
+ sphere. The following image visualizes the process for the case ``frequency = 6``:
+
+ .. image:: https://upload.wikimedia.org/wikipedia/commons/f/ff/Geodesic_icosahedral_polyhedron_example.png
+ :alt:
+ Illustration of icosahedron subdivision: (1) create a regular icosahedron, (2) perform 6-frequency
+ subdivision of all faces and (3) project all vertices on a sphere;
+ Licensed under *CC BY-SA 4.0*: Created by *Tomruen* and provided on
+ `Wikipedia `_.
+
+ The implementation is mostly single-threaded. See the script
+ :ref:`earth_graph_frequency_statistics.py `
+ for details on the performance and size of the output.
+
+ *Class I* determines the way that the icosahedron is sliced (see
+ `Wikipedia: Geodesic notation `_).
+ It was chosen since it is apparently used quite often to create regular grids
+ (according to `Wikipedia `_)
+ and since it appears to be very regular (see
+ `this `_ page in the
+ Antiprism documentation for a visualization).
+
+ References:
+ - https://en.wikipedia.org/wiki/Geodesic_grid#Construction
+ - https://en.wikipedia.org/wiki/Geodesic_polyhedron
+ - https://people.sc.fsu.edu/~jburkardt/presentations/sphere_grid_2014_fsu.pdf
+
+ Further ideas:
+ - One could also use `Goldberg polyhedra `_,
+ as those are the `duals `_ to the geodesic spheres used
+ in this implementation and should also work.
+ - Alternatively, one could also use the already implemented
+ `Quaternary Triangular Meshes `_
+
+ Args:
+ frequency: The number of subdivisions per icosahedron edge.
+ Keep in mind that high frequencies could be computationally costly.
+ print_status: If set to ``True``, print human-readable status messages about the progress.
+
+ Returns:
+ A graph covering the entire earth
+ """
+ angular_distance = angular_distance_for(frequency)
+ distance_meters = great_circle_distance_distance_for(frequency)
+ if print_status:
+ print(f"creating an earth grid with a point distance of at most {distance_meters / 1000:.3f} km")
+ print(f"dividing each edge of the base icosahedron into {frequency} parts")
+ print(f"the angular distance of vertices will be ~{degrees(angular_distance):.6f}°")
+
+ if 10_000 < distance_meters < 25_000: # pragma: no cover
+ warn("this might take a while", ResourceWarning)
+ elif distance_meters <= 10_000: # pragma: no cover
+ warn("this might take *very* long", ResourceWarning)
+
+ # this defines how to slice the edges/triangles
+ polyhedron_class = "1"
+
+ if print_status:
+ print('calling Antiprism\'s "geodesic"')
+
+ # check the geodesic/Antiprism version
+ _assert_antiprism_is_installed()
+
+ command = f"geodesic -M s -c {polyhedron_class} -f {frequency} ico"
+ # check_output raises an Error on a non-zero exit code
+ # use ASCII encoding since the resulting file will not contain any Unicode text
+ output = subprocess.check_output(command.split(), encoding="ascii")
+
+ if print_status:
+ print("parsing the resulting OFF file")
+ latitudes, longitudes, edges = _parse_off_file(output)
+
+ if print_status:
+ print("finished earth grid generation")
+
+ return GeoNavigationGraph.from_coordinates_radians(
+ latitudes=latitudes,
+ longitudes=longitudes,
+ edges=edges,
+ max_neighbors=6,
+ node_radius=distance_meters / 2,
+ )
+
+
+#: The approximate angle between two edges on an icosahedron, in radians, about 63.4°
+_ALPHA = 1.1071487177940905030170654601785
+# calculation:
+# (note: we use latitude is in [-pi/2, +pi/2], longitude is in [-pi, +pi])
+# take two edges in spherical coordinates,
+# see https://en.wikipedia.org/wiki/Regular_icosahedron#Spherical_coordinates
+# (in the link, other coordinates are used!)
+# we choose A=(lat_a, lon_a)=(pi/2, 0) and
+# B=(lat_b, lon_b)=(arctan(1/2), 0) for simplicity
+# then the angle between them is given by
+# alpha = lat_a - lat_b = pi/2 - arctan(1/2)
+# result: https://www.wolframalpha.com/input/?i=pi%2F2+-+arctan%281%2F2%29
+
+
+def min_required_frequency(desired_distance: float, in_meters: bool) -> int:
+ """Compute the minimum frequency to reach the ``desired_distance`` by icosahedron subdivision.
+
+ Here, the frequency is the number of cuts to make on an edge of a polyhedron.
+ Higher frequencies result in finer graphs.
+
+ Args:
+ desired_distance: The maximum distance that two neighboring nodes may be apart.
+ Must be a strictly positive number.
+ If ``in_meters`` is ``True``, it is interpreted as a great-circle distance in meters, else as
+ an angular distance in radians.
+ in_meters: Interpret ``desired_distance`` as meters instead of as radians
+
+ Returns:
+ The minimum frequency to reach the ``desired_distance``, at least ``1``
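+
+ Examples:
+ A small sketch that mirrors the module-level example above (a maximum node distance of 100 km):
+
+ >>> min_required_frequency(100_000, in_meters=True)
+ 71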
+ """
+ assert desired_distance > 0, "the desired_angular_distance must be positive"
+
+ if in_meters:
+ desired_angular_distance = meters2rad(desired_distance)
+ else:
+ desired_angular_distance = desired_distance
+
+ # calculate the number of slices per edge (=the frequency) by simple division:
+ frequency = _ALPHA / desired_angular_distance
+
+ # if the distance is too big, we simply do not divide the edges at all
+ frequency = max(frequency, 1.0)
+
+ # then we need to round: we round up since we would rather have
+ # more edges than too few of them
+ return int(ceil(frequency))
+
+
+def great_circle_distance_distance_for(frequency: int) -> float:
+ """The great-circle distance that subdivision with the frequency will result in.
+
+ Args:
+ frequency: The frequency of the subdivision, at least ``1``
+
+ Returns:
+ The great-circle distance that the frequency will result in, in meters
+ """
+ return rad2meters(angular_distance_for(frequency))
+
+
+def angular_distance_for(frequency: int) -> float:
+ """The angular distance that subdivision with the frequency will result in.
+
+ Args:
+ frequency: The frequency of the subdivision, at least ``1``
+
+ Returns:
+ The angular distance that the frequency will result in, in radians
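+
+ Examples:
+ The value for the frequency of the module-level example above (a maximum node distance of 100 km):
+
+ >>> angular_distance_for(71) # doctest: +ELLIPSIS
+ 0.01559...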
+ """
+ assert frequency >= 1, "the frequency must be at least one"
+ return _ALPHA / frequency
+
+
+#: The minimum required Antiprism version
+_ANTIPRISM_REQUIRED_VERSION = (0, 26)
+
+
+def _assert_antiprism_is_installed() -> None:
+ """Raises an exception if *Antiprism* (with the geodesic tool) in not installed in the required version.
+
+ Raises:
+ :class:`AssertionError`: If the *Antiprism* version is insufficient
+ """
+ try:
+ version = subprocess.check_output(["geodesic", "--version"], encoding="utf8").split(" ", 3)[2]
+ except FileNotFoundError as error: # pragma: no cover
+ raise AssertionError(
+ 'Could not call tool "geodesic" from Antiprism, is it installed? (See installation instructions.)'
+ ) from error
+
+ assert tuple(int(v) for v in version.split(".")) >= _ANTIPRISM_REQUIRED_VERSION, (
+ f'tool "geodesic" from Antiprism version >= {_ANTIPRISM_REQUIRED_VERSION} is required, '
+ f"but you have version {version}!"
+ )
+
+
+def _parse_off_file(source_text: str) -> Tuple[ndarray, ndarray, ndarray]:
+ """Parses an Antiprism OFF file and return the result in spherical coordinates.
+
+ Warnings:
+ Assumes that the point :math:`(0, 0, 0)` is not present and that all faces
+ are triangles or "polygons" with fewer vertices.
+
+ Warnings:
+ This is only meant to parse OFF files produced by *Antiprism*.
+ These are not standard OFF files as described
+ `here `_!
+
+ Args:
+ source_text: The raw file content to be parsed
+
+ Returns:
+ The latitudes and longitudes of all vertices (as returned by
+ :func:`~pyrate.plan.geometry.helpers.cartesian_to_spherical`), as well as an array of all edges,
+ each given by the zero-based indices of its two endpoint vertices.
+ """
+ # split
+ source = source_text.splitlines()
+ assert len(source) >= 2, "OFF file must have at least two lines"
+
+ # check header
+ assert source[0] == "OFF", 'file does not start with "OFF"'
+
+ # get size of file
+ # note: num_edges is usually not set to a correct value, so we ignore the last value
+ num_vertices, num_faces, _ = map(int, source[1].split())
+
+ # get the vertices
+ points = genfromtxt(source[2:], max_rows=num_vertices, dtype=float64)
+ latitudes, longitudes = cartesian_to_spherical(points)
+
+ # get faces
+ faces = genfromtxt(source[2 + num_vertices :], max_rows=num_faces, usecols=(0, 1, 2, 3), dtype=uint32)
+ triangles = compress(faces[:, 0] == 3, faces[:, 1:4], axis=0)
+ del faces # free this memory
+ count = len(triangles)
+
+ # now we want to transform each triangle into three edges
+ edges = empty([count * 3, 2], dtype=uint32)
+ edges[0:count, :] = triangles[:, (0, 1)]
+ edges[count : 2 * count, :] = triangles[:, (1, 2)]
+ edges[2 * count : 3 * count, :] = triangles[:, (0, 2)]
+
+ # then we filter out duplicates or wrong values
+ # sort the IDs in each row in ascending order to find duplicates, since the graph is undirected
+ # one could also use `np.sort`
+ sorted_edges = empty_like(edges)
+ sorted_edges[:, 0] = minimum(edges[:, 0], edges[:, 1])
+ sorted_edges[:, 1] = maximum(edges[:, 0], edges[:, 1])
+ edges = unique(sorted_edges, axis=0)
+
+ return latitudes, longitudes, edges
diff --git a/pyrate/pyrate/plan/graph/geo_graph.py b/pyrate/pyrate/plan/graph/geo_graph.py
new file mode 100644
index 0000000..c36d9e4
--- /dev/null
+++ b/pyrate/pyrate/plan/graph/geo_graph.py
@@ -0,0 +1,248 @@
+"""This module provides geo-referenced navigation graphs."""
+
+# Typing
+from typing import Any
+from typing import List
+from typing import Optional
+from typing import Sequence
+from typing import Union
+
+# Scientific
+import numpy
+from numpy import degrees
+from numpy import ndarray
+from numpy import radians
+
+# Scientific
+from pandas import DataFrame
+from pandas import Series
+
+# Progress bars
+from tqdm import tqdm
+
+# Own typing
+from pyrate.common.raster_datasets import BaseTransformer
+from pyrate.plan.graph.graph import NavigationGraph
+
+
+class GeoNavigationGraph(NavigationGraph):
+ """An undirected navigation graph specifically for geo-referenced graphs.
+
+ It is similar to the more generic :class:`~pyrate.plan.graph.graph.NavigationGraph` but ensures that the
+ property dataframe always contains columns `Latitude (radians)` and `Longitude (radians)`.
+ Not providing these when creating the graph will result in an :class:`AssertionError`.
+
+ This class also provides useful methods for adding new properties and plotting the graph.
+
+ Examples:
+ This creates a very simple graph with two connected nodes at *Darmstadt* and *Griesheim*.
+
+ >>> import numpy
+ >>> nodes = DataFrame(data={'Latitude (radians)': numpy.radians([49.872222, 49.863889]), \
+ 'Longitude (radians)': numpy.radians([ 8.652778, 8.563889])})
+ >>> edges = numpy.array([[0, 1], ])
+ >>> graph = GeoNavigationGraph(nodes, edges)
+ >>> graph.neighbors
+ array([[1],
+ [0]], dtype=int32)
+ >>> graph.latitudes_degrees
+ 0 49.872222
+ 1 49.863889
+ Name: Latitude (radians), dtype: float64
+
+ Alternatively, such a graph can be created using `GeoNavigationGraph.from_coordinates_*`
+
+ >>> same_graph = GeoNavigationGraph.from_coordinates_degrees( \
+ latitudes=[49.872222, 49.863889], longitudes=[ 8.652778, 8.563889], edges=edges)
+ >>> graph == same_graph
+ True
+
+ Args:
+ nodes: See :class:`~pyrate.plan.graph.graph.NavigationGraph`.
+ This must contain columns ``"Latitude (radians)"`` (with values in :math:`[-π/2, +π/2]`) and
+ ``"Longitude (radians)"`` (with values in :math:`[-π, +π)`).
+ edges: See :class:`~pyrate.plan.graph.graph.NavigationGraph`.
+ neighbours: See :class:`~pyrate.plan.graph.graph.NavigationGraph`.
+ max_neighbors: See :class:`~pyrate.plan.graph.graph.NavigationGraph`.
+ node_radius: The radius around each node of the area on the globe that it should represent, in meters,
+ non-negative. It can be interpreted as the radius of influence or of responsibility.
+ It may be an array of shape ``(num_nodes, )`` or a single scalar if the radius is uniform
+ across all nodes.
+ Setting this allows it to be omitted in some methods of this class, like in
+ :meth:`~append_property`.
+ """
+
+ def __init__(self, *args, node_radius: Optional[Union[float, ndarray]] = None, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ assert node_radius is None or numpy.all(node_radius >= 0)
+ self.node_radius = node_radius
+
+ assert "Latitude (radians)" in self.nodes, 'column "Latitude (radians)" missing'
+ assert "Longitude (radians)" in self.nodes, 'column "Longitude (radians)" missing'
+
+ @classmethod
+ def from_coordinates_radians(
+ cls,
+ latitudes: ndarray,
+ longitudes: ndarray,
+ node_properties: Optional[DataFrame] = None,
+ **kwargs: Any,
+ ) -> "GeoNavigationGraph":
+ """Creates a new geo-referenced navigation graph from the given coordinates and node properties.
+
+ The same as the constructor of :class:`GeoNavigationGraph`, except that the latitude, longitude and
+ properties of the nodes can be given separately. For clarity, everything should be passed as
+ keyword arguments.
+
+ Args:
+ latitudes: The latitudes of all nodes in radians in :math:`[-π/2, +π/2]`
+ longitudes: The longitudes of all nodes in radians in :math:`[-π, +π)`
+ node_properties: The properties of all nodes (will be modified if not set to ``None``)
+ **kwargs: Passed to the constructor of :class:`GeoNavigationGraph`
+
+ Returns:
+ A newly created graph
+ """
+ if node_properties is None:
+ node_properties = DataFrame() # create an empty one
+
+ node_properties["Latitude (radians)"] = latitudes
+ node_properties["Longitude (radians)"] = longitudes
+
+ assert "nodes" not in kwargs, (
+ "do not pass nodes, instead explicitly set them via latitudes, "
+ "longitudes and node_properties or directly use the constructor instead"
+ )
+
+ return cls(node_properties, **kwargs)
+
+ @classmethod
+ def from_coordinates_degrees(
+ cls, latitudes: ndarray, longitudes: ndarray, **kwargs: Any
+ ) -> "GeoNavigationGraph":
+ """The same as :func:`~from_coordinates_radians` except that the coordinates are in degrees.
+
+ For clarity, everything should be passed as keyword arguments.
+
+ Args:
+ latitudes: The latitudes of all nodes in degrees in :math:`[-90, +90]`
+ longitudes: The longitudes of all nodes in degrees in :math:`[-180, +180)`
+ **kwargs: Passed to :func:`~from_coordinates_radians`
+ """
+ return GeoNavigationGraph.from_coordinates_radians(radians(latitudes), radians(longitudes), **kwargs)
+
+ @staticmethod
+ def _serialized_attributes() -> List[str]:
+ """The list of attributes that shall be (de)serialized (on top of the nodes and edges)."""
+ return NavigationGraph._serialized_attributes() + ["node_radius"]
+
+ @property
+ def latitudes_radians(self) -> Series:
+ """The latitudes of all nodes in radians in :math:`[-π/2, +π/2]`."""
+ return self.nodes["Latitude (radians)"]
+
+ @property
+ def longitudes_radians(self) -> Series:
+ """The longitudes of all nodes in radians in :math:`[-π, +π)`."""
+ return self.nodes["Longitude (radians)"]
+
+ @property
+ def latitudes_degrees(self) -> Series:
+ """The latitudes of all nodes in degrees in :math:`[-90, +90]`."""
+ return degrees(self.latitudes_radians)
+
+ @property
+ def longitudes_degrees(self) -> Series:
+ """The longitudes of all nodes in degrees in :math:`[-180, +180)`."""
+ return degrees(self.longitudes_radians)
+
+ @property
+ def node_properties(self) -> DataFrame:
+ """The properties of all nodes as a view (as opposed to a copy).
+
+ This is the same as :attr:`~nodes`, but without the latitude and longitude values.
+ """
+ return self.nodes.drop(columns=["Latitude (radians)", "Longitude (radians)"])
+
+ def clear_node_properties(self) -> None:
+ """Deletes all properties but retains the coordinate values."""
+ self.nodes = self.nodes[["Latitude (radians)", "Longitude (radians)"]]
+
+ def append_property(
+ self,
+ transformer: BaseTransformer,
+ node_radius: Optional[Union[float, ndarray]] = None,
+ show_progress: bool = False,
+ ) -> None:
+ """Append the properties given by the transformer.
+
+ The name and data type are taken from the given ``transformer``.
+
+ Args:
+ transformer: The dimension/property that shall be queried for each node
+ node_radius: The radius around each node of the area on the globe that it should represent,
+ in meters, non-negative.
+ It may be an array of shape ``(num_nodes, )`` or a single scalar if the radius is
+ uniform across all nodes.
+ This value is uniform across all ``transformers``.
+ It may be omitted if :attr:`~node_radius` is set.
+ show_progress: Whether to print a simple progress bar
+
+ See Also:
+ :meth:`~append_properties`
+
+ Raises:
+ :class:`ValueError`: if a property with that name is already present
+ """
+ node_radius = node_radius if node_radius is not None else self.node_radius
+ assert node_radius is not None, (
+ "parameter node_radius must be set either with the method or the object attribute but is "
+ "missing on both"
+ )
+
+ with transformer:
+ new = transformer.get_transformed_at_nodes(
+ self.latitudes_radians, self.longitudes_radians, node_radius, show_progress=show_progress
+ )
+ self.nodes = self.nodes.join(new)
+
+ def append_properties(
+ self,
+ transformers: Sequence[BaseTransformer],
+ node_radius: Optional[Union[float, ndarray]] = None,
+ show_progress: bool = False,
+ ) -> None:
+ """Append multiple properties at once. This has the benefit of printing a combined progress bar.
+
+ Args:
+ transformers: The dimensions/properties that shall be queried for each node
+ node_radius: The radius around each node of the area on the globe that it should represent,
+ in meters, non-negative.
+ It may be an array of shape ``(num_nodes, )`` or a single scalar if the radius is
+ uniform across all nodes.
+ This value is uniform across all ``transformers``.
+ It may be omitted if :attr:`~node_radius` is set.
+ show_progress: Whether to print a simple progress bar
+
+ See Also:
+ :meth:`~append_property`
+
+ Raises:
+ ValueError: if a property with any given name is already present
+ """
+ node_radius = node_radius if node_radius is not None else self.node_radius
+ assert node_radius is not None, (
+ "parameter node_radius must be set either with the method or the object attribute but is "
+ "missing on both"
+ )
+
+ for transformer in tqdm(transformers, unit=" transformers", colour="blue", disable=not show_progress):
+ self.append_property(transformer, node_radius, show_progress)
+
+ def __eq__(self, other: Any) -> bool:
+ return (
+ isinstance(other, GeoNavigationGraph)
+ and NavigationGraph.__eq__(self, other)
+ and self.node_radius == other.node_radius
+ )
diff --git a/pyrate/pyrate/plan/graph/graph.py b/pyrate/pyrate/plan/graph/graph.py
new file mode 100644
index 0000000..5bcb3e9
--- /dev/null
+++ b/pyrate/pyrate/plan/graph/graph.py
@@ -0,0 +1,245 @@
+"""This module provides generic navigation graphs."""
+
+# Typing
+from typing import Any
+from typing import List
+from typing import Optional
+from typing import Sized
+from typing import Type
+from typing import TypeVar
+
+# Mathematics
+from numpy import array_equal
+from numpy import asarray
+from numpy import compress
+from numpy import cumsum
+from numpy import empty
+from numpy import int32
+from numpy import logical_and
+from numpy import logical_not
+from numpy import ndarray
+
+# Scientific
+import h5py
+from pandas import DataFrame
+from pandas import read_hdf
+
+
+NavigationGraphSubclass = TypeVar("NavigationGraphSubclass", bound="NavigationGraph")
+
+
+class NavigationGraph(Sized):
+ """A generic undirected graph that can be used for navigation.
+
+ It is represented by nodes and their properties as rows in a pandas dataframe, and edges as an array of
+ indices of nodes for connections. Additionally, a :attr:`~neighbors` array is provided, which lists the
+ neighbors of every node for faster access during graph search.
+
+ Args:
+ nodes: the nodes as a dataframe where each row is a node
+ edges: the edges of shape ``(number_of_edges, 2)``, where each row contains the indices of two
+ neighboring nodes
+ neighbours: the neighbors of all nodes with shape ``(number_of_nodes, max_neighbors_per_node)``,
+ where each row contains the indices of all neighbors of the node, filled with ``-1`` at
+ the end
+ max_neighbors: the maximum number of neighbors of any node (optional); this can be set to allow for
+ some optimizations (e.g. in the neighbor search)
+
+ Examples:
+ This creates a very simple graph where ``0`` and ``1`` as well as ``1`` and ``2`` are connected to form
+ a small chain.
+
+ >>> nodes = DataFrame(data={'property_1': [1, 2, 3], 'property_2': [10, 20, 30]})
+ >>> edges = asarray([[0, 1], [1, 2]])
+ >>> graph = NavigationGraph(nodes, edges)
+ >>> graph.neighbors
+ array([[ 1, -1],
+ [ 0, 2],
+ [ 1, -1]], dtype=int32)
+ >>> len(graph)
+ 3
+ >>> graph.num_edges
+ 2
+
+ See Also:
+ :class:`~pyrate.plan.graph.geo_graph.GeoNavigationGraph`:
+ A more specific implementation that references nodes to geographic locations and contains
+ useful methods for adding properties from datasets and plotting the graph
+ """
+
+ def __init__(
+ self,
+ nodes: DataFrame,
+ edges: ndarray,
+ neighbours: Optional[ndarray] = None,
+ max_neighbors: Optional[int] = None,
+ ) -> None:
+ super().__init__()
+
+ self.nodes = nodes
+
+ assert (
+ len(edges.shape) == 2 and edges.shape[1] == 2
+ ), "the edges must be a 2D-array of shape (number_of_edges, 2)"
+ self.edges = edges
+
+ assert neighbours is None or neighbours.shape[0] == len(nodes)
+ self._neighbors = neighbours
+
+ assert max_neighbors is None or max_neighbors >= 0, "max_neighbors must be non-negative"
+ self.max_neighbors = max_neighbors
+
+ @property
+ def neighbors(self) -> ndarray:
+ """The list of neighbors of each node, identified by their node index.
+
+ An array of dimension ``(number_of_nodes, max_neighbors_per_node)``, with each row containing the
+ indices of the neighbors of the node at that position and the rest of the row filled with ``-1``.
+
+ This might take a short while to be computed for the first time but the result is cached and
+ also serialized if present at the point of saving it to disk.
+ See :ref:`script-benchmark_graph_neighbor_search` for performance measurements and a link to an issue
+ about speeding up this search for neighbors.
+ """
+ if self._neighbors is not None:
+ return self._neighbors
+
+ if self.nodes.empty: # this requires special case
+ self._neighbors = empty((0, 0), dtype=int32)
+
+ else:
+ # each row/inner list contains the neighbors of the node at the index
+ # and the rest of the row is filled with -1s
+ neighbors: List[List[int]] = [[] for _ in range(len(self))]
+
+ for from_node, to_node in self.edges:
+ neighbors[from_node].append(to_node)
+ neighbors[to_node].append(from_node)
+
+ # calculate length of maximal list
+ longest = len(max(neighbors, key=len))
+ # make the lists equal in length by filling with -1
+ neighbors = [x + [-1] * (longest - len(x)) for x in neighbors]
+
+ self._neighbors = asarray(neighbors, dtype=int32)
+
+ return self._neighbors
+
+ def prune_nodes(self, keep_condition: ndarray) -> None:
+ """Only retain the given nodes with their properties and appropriately update all edges and neighbors.
+
+ For example, this should decrease the number of nodes and edges by about 30% when filtering with the
+ ``keep_condition`` set to ``my_graph.nodes["elevation_to_sea_level"] < 0.0`` on a graph representing
+ earth.
+
+ Args:
+ keep_condition: a boolean numpy array indicating which nodes to keep
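+
+ Examples:
+ A minimal sketch based on the three-node chain from the class docstring; dropping the middle
+ node also removes both edges:
+
+ >>> graph = NavigationGraph(DataFrame(data={'p': [1, 2, 3]}), asarray([[0, 1], [1, 2]]))
+ >>> graph.prune_nodes(asarray([True, False, True]))
+ >>> (len(graph), graph.num_edges)
+ (2, 0)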
+ """
+ assert keep_condition.shape == (len(self),), "keep condition shape does not match nodes"
+
+ # filter points
+ self.nodes = self.nodes[keep_condition]
+
+ # filter edges
+ keep_condition_edges = logical_and(keep_condition[self.edges[:, 0]], keep_condition[self.edges[:, 1]])
+ self.edges = compress(keep_condition_edges, self.edges, axis=0)
+ # then correct the indices that the entries in filtered_edges refer to by subtracting the number of
+ # removed entries before each one
+ index_shift = cumsum(logical_not(keep_condition)).astype(self.edges.dtype)
+ self.edges -= index_shift[self.edges]
+
+ # reset neighbors
+ self._neighbors = None
+
+ @staticmethod
+ def _serialized_attributes() -> List[str]:
+ """The list of attributes that shall be (de)serialized (on top of the nodes and edges)."""
+ return ["max_neighbors"]
+
+ def to_disk(self, file_path: str, overwrite_existing: bool = False) -> None:
+ """Save the graph to disk. Possibly missing parent directories are automatically created.
+
+ The data is stored in an interoperable `HDF5 `_ file with the
+ keys ``nodes``, ``edges`` and optionally ``neighbors``.
+
+ The ``nodes`` are compressed using the default settings of :meth:`pandas.DataFrame.to_hdf`.
+ The ``edges`` (and ``neighbors`` if present) are slightly compressed using the library
+ `h5py `_ (using GZIP level 4).
+ See also `the available options in h5py
+ `_.
+
+ Args:
+ file_path: the path to the file where to store the graph; usually ends with ``.hdf5``
+ overwrite_existing: whether to overwrite the file if it already exists; else, this causes an error
+ to be raised
+
+ Raises:
+ IOError: when the file cannot be accessed or written to, or it already exists and
+ ``overwrite_existing`` is not set
+
+ See Also:
+ :meth:`~from_disk`
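+
+ Examples:
+ A round-trip sketch; the file path is only illustrative, and the I/O lines are marked with
+ ``+SKIP`` so that they are not executed as doctests:
+
+ >>> graph = NavigationGraph(DataFrame(data={'p': [1, 2]}), asarray([[0, 1]]))
+ >>> graph.to_disk("/tmp/example_graph.hdf5", overwrite_existing=True) # doctest: +SKIP
+ >>> NavigationGraph.from_disk("/tmp/example_graph.hdf5") == graph # doctest: +SKIP
+ True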
+ """
+ compression_options = {"compression": "gzip", "compression_opts": 4}
+
+ with h5py.File(file_path, "w" if overwrite_existing else "w-") as graph_file:
+ graph_file.create_dataset("edges", data=self.edges, **compression_options)
+ if self._neighbors is not None:
+ graph_file.create_dataset("neighbors", data=self.neighbors, **compression_options)
+
+ # Serialize attributes
+ for attribute in self._serialized_attributes():
+ graph_file.attrs[attribute] = getattr(self, attribute)
+
+ # pandas automatically chooses an appropriate compression
+ self.nodes.to_hdf(file_path, key="nodes", mode="r+", append=True)
+
+ @classmethod
+ def from_disk(cls: Type[NavigationGraphSubclass], file_path: str) -> NavigationGraphSubclass:
+ """Reads a file from disk.
+
+ Assumes an HDF5-based format compatible with the one created by :meth:`~to_disk`.
+
+ Args:
+ file_path: the path to the file where to read the graph from; usually ends with ``.hdf5``
+
+ Raises:
+ IOError: when the file cannot be accessed or read from
+
+ Returns:
+ The newly loaded navigation graph, which will be of a subclass of :class:`NavigationGraph` if this
+ method was called on that class.
+
+ See Also:
+ :meth:`~to_disk`
+ """
+ with h5py.File(file_path, "r") as graph_file:
+ edges = graph_file["edges"][:]
+ neighbors = graph_file["neighbors"][:] if "neighbors" in graph_file else None
+
+ # Deserialize attributes
+ attributes = {
+ attribute: graph_file.attrs[attribute] for attribute in cls._serialized_attributes()
+ }
+
+ nodes = read_hdf(file_path, key="nodes")
+ assert isinstance(nodes, DataFrame)
+
+ return cls(nodes, edges, neighbors, **attributes)
+
+ @property
+ def num_edges(self) -> int:
+ """Returns the number of edges. The number of nodes can be obtained via ``len(graph)``."""
+ return self.edges.shape[0]
+
+ def __len__(self) -> int:
+ return len(self.nodes)
+
+ def __eq__(self, other: Any) -> bool:
+ return (
+ isinstance(other, NavigationGraph)
+ and self.nodes.equals(other.nodes)
+ and array_equal(self.edges, other.edges)
+ # no need to check array_equal(self.neighbors, other.neighbors) as it is a derived property
+ and self.max_neighbors == other.max_neighbors
+ )
diff --git a/pyrate/pyrate/plan/nearplanner/__init__.py b/pyrate/pyrate/plan/nearplanner/__init__.py
new file mode 100644
index 0000000..ca18a44
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/__init__.py
@@ -0,0 +1,42 @@
+"""Provides the means to describe possible routes (via TimingFrames) in a given environment (CostModel)
+and find a possible route (via Optimizer) in a given context.
+
+:class:`~near_planner.planning_backend.timing_frame.TimingFrame` encapsulates a
+:class:`~pyrate.plan.geometry.route.CartesianRoute` instance. The class further provides an option to
+simulate the route with the help of a :class:`~near_planner.planning_backend.polar_model.PolarModel`
+instance, simulating timing and speed information. This is done via the method
+:meth:`~near_planner.planning_backend.timing_frame.TimingFrame.update_times`. Using a
+:class:`~near_planner.planning_backend.cost_model.CostModel`, a
+:class:`~near_planner.planning_backend.timing_frame.TimingFrame` can be evaluated to an
+:class:`~near_planner.planning_backend.timing_frame.EvaluatedTimingFrame`.
+
+The subclass :class:`~near_planner.planning_backend.timing_frame.EvaluatedTimingFrame` allows for further
+saving of information on possible collisions and distances to
+:class:`~near_planner.planning_backend.obstacle.Obstacle`\\ s. These are conveniently provided via
+properties. If the information is deemed incorrect, the frame can be revoked via
+:meth:`~near_planner.planning_backend.timing_frame.EvaluatedTimingFrame.revoke` and turned back into a
+:class:`~near_planner.planning_backend.timing_frame.TimingFrame`.
+
+The :class:`~near_planner.planning_backend.cost_model.CostModel` class allows one to conveniently store
+obstacles and environment information. A :class:`~near_planner.planning_backend.timing_frame.TimingFrame`
+can be turned into an :class:`~near_planner.planning_backend.timing_frame.EvaluatedTimingFrame` via the
+:meth:`~near_planner.planning_backend.cost_model.CostModel.evaluate` method. This method also calculates
+the cost of taking the TimingFrame according to various metrics and stores it in
+:attr:`~near_planner.planning_backend.timing_frame.EvaluatedTimingFrame.actual_cost`. The calculated cost is
+also returned.
+
+The :class:`~near_planner.planning_backend.optimizer.Optimizer` class allows determining (hopefully
+optimal) routes towards a goal, given by a :class:`pyrate.plan.geometry.location.CartesianLocation`. This
+is done by initializing the :class:`~near_planner.planning_backend.optimizer.Optimizer` with information
+about the current environment. After that a goal and
+:class:`~near_planner.planning_backend.holders.OptimizationParameters` can be provided to the method
+:meth:`~near_planner.planning_backend.optimizer.Optimizer.optimizer` which generates a hopefully optimal
+route candidate. The :class:`~near_planner.planning_backend.optimizer.Optimizer` is furthermore able to
+react to a changing :class:`~near_planner.planning_backend.obstacle.Obstacle` population by changing its
+state via several methods.
+
+The package further provides various (data)classes and methods to help in the interaction with the core
+classes. This includes several cost functions to equip
+:class:`~near_planner.planning_backend.obstacle.Obstacle`\\ s with, functions to merge numpy arrays, and
+dataclasses to wrap various parameters.
+"""
diff --git a/pyrate/pyrate/plan/nearplanner/cost_functions.py b/pyrate/pyrate/plan/nearplanner/cost_functions.py
new file mode 100644
index 0000000..4f74323
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/cost_functions.py
@@ -0,0 +1,180 @@
+"""This module contains a collection of cost functions."""
+
+# Support for abstract classes
+from abc import ABC
+from abc import abstractmethod
+
+# Static Typing
+from typing import Optional
+from typing import Tuple
+
+# Scientific Computing
+import numpy as np
+from scipy import linalg
+
+# Backend
+from .evaluated_timing_frame import EvaluatedTimingFrame
+
+
+class CostFunction(ABC):
+
+ """Class to encapsulate different types of cost functions for different types of obstacles.
+
+ A cost function describes the cost of passing by an obstacle at a given distance as a unit-less
+ measurement. Cost functions must be differentiable with respect to their argument ``dist``.
+ """
+
+ #: A human-readable name of the cost function
+ name: str
+
+ @abstractmethod
+ def cost(self, dist: float) -> np.floating:
+ """Calculates the cost of an obstacle based on a distance.
+
+ Args:
+ dist: distance to evaluate the cost from
+
+ Returns:
+ The evaluated cost
+ """
+
+ @abstractmethod
+ def cost_grad(self, dist: float) -> Tuple[np.floating, np.floating]:
+ """Calculates the cost AND the derivative of it w.r.t. to the distance.
+
+ Args:
+ dist: distance to base calculation upon
+
+ Returns:
+ A tuple of ``(cost, gradient)``
+ """
+
+
+class CostFunctionLinear(CostFunction):
+
+ """Class that represents a linear Cost Function.
+
+ This function is bounded from above by the parameter :attr:`maximum_cost`.
+ It reproduces the values of the function ``f(x) = maximum_cost - fact * x``.
+
+ Args:
+ fact: factor for cost decay
+ maximum_cost: maximal cost
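+
+ Examples:
+ A minimal sketch; the value follows directly from the formula above:
+
+ >>> float(CostFunctionLinear(fact=2.0, maximum_cost=100).cost(10.0))
+ 80.0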
+ """
+
+ def __init__(self, fact: float = 1.0, maximum_cost: float = 100) -> None:
+ self.fact: np.floating = np.float64(fact)
+ self.maximum_cost = maximum_cost
+ self.name = "linear"
+
+ def cost(self, dist: float) -> np.floating:
+ return np.float64(-self.fact * np.float64(dist) + self.maximum_cost)
+
+ def cost_grad(self, dist: float) -> Tuple[np.floating, np.floating]:
+ return np.float64(-self.fact * dist + self.maximum_cost), np.float64(-self.fact)
+
+
+class CostFunctionInverse(CostFunction):
+
+ """Class that represents a inversely proportional cost function.
+
+ This functions produces values in the interval :math:`[0, ∞)`.
+ Reproduces the values of the function :math:`f(x) = 1/x`.
+
+ Args:
+ fact: undecayed cost for a unit of distance
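+
+ Examples:
+ A minimal sketch; the value follows directly from the formula above:
+
+ >>> float(CostFunctionInverse(fact=2.0).cost(4.0))
+ 0.5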
+ """
+
+ def __init__(self, fact: float = 1.0) -> None:
+ self.fact: np.floating = np.float64(fact)
+ self.name = "linear"
+
+ def cost(self, dist: float) -> np.floating:
+ return np.float64(self.fact / np.float64(dist))
+
+ def cost_grad(self, dist: float) -> Tuple[np.floating, np.floating]:
+ return np.float64(self.fact / np.float64(dist)), np.float64(self.fact / np.float64(dist**2))
+
+
+class CostFunctionExp(CostFunction):
+
+ """Simple Exponential cost function for use with gradient and cost calculations in TimingFrame
+
+ Args:
+ safety_dist: safety distance the ship should hold
+ clip: clip to determine accuracy
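+
+ Examples:
+ A minimal sketch using the default parameters: at zero distance the cost is capped at ``clip``:
+
+ >>> float(CostFunctionExp().cost(0.0))
+ 100000.0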
+ """
+
+ def __init__(
+ self, safety_dist: float = 3.0, clip: float = 100000, linear_scale: float = 100, scale: float = 1
+ ) -> None:
+ self.name = "Exponential cost"
+
+ self.clip: np.floating = np.float64(clip)
+ self.scale: np.floating = np.float64(scale)
+ self.linear_scale: np.floating = np.float64(linear_scale)
+ self.safety_dist: np.floating = np.float64(safety_dist)
+
+ def cost(self, dist: float) -> np.floating:
+ if dist == 0:
+ return self.clip
+
+ with np.errstate(over="ignore"):
+ temp = np.clip(np.exp(self.scale * self.safety_dist / dist), None, self.clip)
+
+ return np.float64(temp * self.linear_scale)
+
+ def cost_grad(self, dist: float) -> Tuple[np.floating, np.floating]:
+ with np.errstate(over="ignore"):
+ cost = np.exp(self.scale * self.safety_dist / dist) if dist > 0 else self.clip
+
+ grad = -cost * self.scale * self.safety_dist / dist**2 if dist > 0 else 0
+ cost = np.clip(cost, None, self.clip)
+ grad = np.clip(grad, -self.clip, self.clip)
+
+ assert not np.isnan(grad).any()
+ assert not np.isnan(cost).any()
+
+ return np.float64(cost), np.float64(grad)
+
+
+def default_cache_metric(frame: EvaluatedTimingFrame, lock: Optional[EvaluatedTimingFrame]) -> float:
+
+ """This is a simple demo cost function for the route cache.
+
+ It determines what the 'best' TimingFrame is with respect to the environment (already
+ captured in ``frame.actual_cost``) and, in this case, the route we are currently pursuing.
+
+ Args:
+ frame: frame that is to be judged
+ lock: frame the route cache is locked onto (last recommended frame)
+
+ Returns:
+ score for the TimingFrame
+ """
+
+ angle = 0
+ if lock:
+ list_of_locations = frame.route.locations
+ heading = np.array(
+ [
+ list_of_locations[1].east - list_of_locations[0].east,
+ list_of_locations[1].north - list_of_locations[0].north,
+ ]
+ )
+
+ list_of_locations2 = lock.route.locations
+ heading2 = np.array(
+ [
+ list_of_locations2[1].east - list_of_locations2[0].east,
+ list_of_locations2[1].north - list_of_locations2[0].north,
+ ]
+ )
+ # normalize both headings so that the dot product below is the cosine of the angle between them
+ heading = heading / linalg.norm(heading)
+ heading2 = heading2 / linalg.norm(heading2)
+
+ angle = np.degrees(np.arccos(np.dot(heading, heading2)))
+
+ assert frame.actual_cost != 0.0
+
+ return float(frame.actual_cost + np.clip(angle * 10, a_min=0, a_max=frame.actual_cost * 0.2))
diff --git a/pyrate/pyrate/plan/nearplanner/cost_model.py b/pyrate/pyrate/plan/nearplanner/cost_model.py
new file mode 100644
index 0000000..c9fe57a
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/cost_model.py
@@ -0,0 +1,576 @@
+"""
+Cost model code for use in the near planner.
+"""
+
+# Dataclass Support
+from dataclasses import fields
+
+# Static Typing
+from typing import cast
+from typing import Dict
+from typing import Generator
+from typing import Iterable
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+# Scientific Computing
+import numpy as np
+from numpy.typing import NDArray
+
+from scipy import linalg
+
+from .evaluated_timing_frame import CollisionData
+from .evaluated_timing_frame import EvaluatedTimingFrame
+
+from .timing_frame import TimingFrame
+
+from .obstacle import Obstacle
+from .polar_model import PolarModel
+from . import utils
+
+# Custom Type for later use
+TimingFrameGradients = Tuple[NDArray[np.floating], NDArray[np.floating], NDArray[np.floating]]
+
+
+class CostModel:
+ """Class for evaluating and optimizing cost of timed nautical routes.
+
+ Each route is modeled as a :class:`~.timing_frame.TimingFrame`.
+
+ Args:
+ obstacles:
+ Dictionary containing :class:`~.obstacle.Obstacle`\\ s to regard in future route evaluations,
+ indexed by unique keys.
+ polar_model: polar of the boat to simulate
+ """
+
+ def __init__(self, obstacles: Dict[str, Obstacle], polar_model: PolarModel) -> None:
+ self._obstacles = obstacles
+ self._model = polar_model
+
+ self._points_per_obstacle: List[int] = []
+
+ self._obstacles_edges: NDArray[np.floating] = np.array([], dtype=np.float64)
+ self._obstacles_points: NDArray[np.floating] = np.array([], dtype=np.float64)
+ self._obstacles_speeds: NDArray[np.floating] = np.array([], dtype=np.float64)
+ self._obstacles_inner_normals: NDArray[np.floating] = np.array([], dtype=np.float64)
+
+ self._obstacles_squared_edges_length: Optional[NDArray[np.floating]] = np.array([], dtype=np.float64)
+
+ self._update()
+
+ @property
+ def obstacles(self) -> Dict[str, Obstacle]:
+ """Dictionary of IDs with :class:`~.obstacle.Obstacle`\\ s represented by this :class:`CostModel`."""
+ return self._obstacles
+
+ def _update(self) -> None:
+ """Updates and recalculates geometric relationships between :class:`~.obstacle.Obstacle`\\ s.
+
+ Warning:
+ To be called at any change to :attr:`obstacles` by the internal methods.
+ """
+
+ if not self.obstacles:
+ return
+
+ self.distance_matrix = self._check_obstacles()
+
+ self._obstacles_points = np.concatenate(
+ [obstacle.unique_points for obstacle in self._obstacles.values()], axis=0
+ )
+ self._obstacles_inner_normals = np.concatenate(
+ [o.inner_normals for o in self._obstacles.values()], axis=0
+ )
+ self._obstacles_edges = np.concatenate([o.edges for o in self._obstacles.values()])
+ self._points_per_obstacle = [obstacle.unique_points.shape[0] for obstacle in self._obstacles.values()]
+
+ self._obstacles_speeds = np.array(
+ [
+ v
+ for i, o in enumerate(self._obstacles.values())
+ for v in [o.speed] * self._points_per_obstacle[i]
+ ]
+ )
+
+ # check case if no obstacle
+ self._obstacles_speeds = (
+ np.array([[]]) if len(self._obstacles_speeds.shape) == 1 else self._obstacles_speeds
+ )
+ self._obstacles_points = (
+ np.array([[]]) if len(self._obstacles_points.shape) == 1 else self._obstacles_points
+ )
+
+ self._obstacles_squared_edges_length = np.concatenate(
+ [o.squared_edges_length for o in self._obstacles.values()]
+ )
+
+ def _check_obstacles(self) -> NDArray[np.floating]:
+ obstacle_list = list(self._obstacles.values())
+ distance_matrix = np.zeros((len(obstacle_list), len(obstacle_list)))
+ for obs1 in range(len(obstacle_list)):
+ for obs2 in range(len(obstacle_list)):
+ if not obs1 == obs2:
+ distance = np.float64(obstacle_list[obs1].shape.distance(obstacle_list[obs2].shape))
+ distance_matrix[obs1][obs2] = distance
+ if distance == 0:
+ # TODO: join obs1 and obs2
+ pass
+
+ return distance_matrix
+
+ def evaluate(
+ self, frame_to_evaluate: TimingFrame, initial_cost: float = 0
+ ) -> Tuple[EvaluatedTimingFrame, np.floating]:
+ """Evaluates a timed route given in form of a :class:`~.timing_frame.TimingFrame`.
+
+ Upgrades his name to a :class:`~.evaluated_timing_frame.EvaluatedTimingFrame` and also enters a valid
+ value for :attr:`~.evaluated_timing_frame.EvaluatedTimingFrame.actual_cost`.
+
+ Args:
+ frame_to_evaluate:
+ The :class:`~.timing_frame.TimingFrame` to evaluate with regard to the model's properties
+ initial_cost: The base cost of a route; just an optional way to offset the route cost
+
+ Returns:
+ Tuple of the :class:`~.evaluated_timing_frame.EvaluatedTimingFrame` and its actual cost for
+ convenience
+ """
+
+ if isinstance(frame_to_evaluate, EvaluatedTimingFrame):
+ return frame_to_evaluate, frame_to_evaluate.actual_cost
+
+ # Evaluate the frame
+ frame_to_evaluate.update_times(self._model)
+ data = self._collision_eval(frame_to_evaluate)
+
+ # Wrap in the new object
+ evaluated_frame = EvaluatedTimingFrame(
+ frame_to_evaluate.route, data, start_time=frame_to_evaluate.start_time
+ )
+ evaluated_frame.update_times(self._model)
+
+ # Update the actual_cost
+ cost = np.float64(initial_cost)
+ for key, obstacle in self._obstacles.items():
+ min_dist = np.amin(evaluated_frame.data_on_collisions[key].min_dist)
+ cost += obstacle.cost(min_dist)
+ cost += evaluated_frame.times[-1]
+ evaluated_frame.actual_cost = cost
+
+ return evaluated_frame, cost
+
+ def add_obstacle(self, obstacles_to_add: Dict[str, Obstacle]) -> None:
+ """Adds an obstacle to the model.
+
+ Args:
+ obstacles_to_add:
+ A :class:`dict` of :class:`~.obstacle.Obstacle`\\ s to add to the model,
+ indexed by their unique IDs
+ """
+ self._obstacles.update(obstacles_to_add)
+ self._update()
+
+ def contains_obstacle(self, obstacle_id_to_check: str) -> bool:
+ """Checks if the model describes a given obstacle id.
+
+ This is the case if and only if the id is a key of the internal obstacle dictionary.
+
+ Args:
+ obstacle_id_to_check: string key of the obstacle to check if held
+
+ Returns:
+ Whether the obstacle is described by this instance
+ """
+ return obstacle_id_to_check in self._obstacles.keys()
+
+ def delete_obstacle(self, id_s_to_delete: Union[str, List[str]]) -> List[Obstacle]:
+ """Takes either a key or a list of keys to delete
+
+ Args:
+ id_s_to_delete: list or single key of obstacle(s) to delete from model
+
+ Returns:
+ The deleted obstacles
+ """
+ obstacles_deleted = []
+ if isinstance(id_s_to_delete, list): # do not test for __iter__ here, since strings are iterable too
+ id_s_to_delete = cast(List[str], id_s_to_delete)
+ for iter_id in id_s_to_delete:
+ deleted_obstacle = self._obstacles.pop(iter_id, None)
+
+ assert deleted_obstacle is not None, "tried to delete a non-existent identifier"
+ obstacles_deleted.append(deleted_obstacle)
+ else:
+ id_s_to_delete = cast(str, id_s_to_delete)
+
+ obstacles_deleted.append(self._obstacles[id_s_to_delete])
+ del self._obstacles[id_s_to_delete]
+ self._update()
+ return obstacles_deleted
+
+ def rebase_obstacles(self, obstacles_to_rebase: Dict[str, Obstacle]) -> None:
+ """Completely rebases this model on the given obstacles.
+
+ Args:
+ obstacles_to_rebase: Dictionary of :class:`~.obstacle.Obstacle`\\ s with keys to rebase on
+ """
+ self._obstacles = obstacles_to_rebase
+ self._update()
+
+ # ---- Route Evaluation
+
+ def _collision_eval(self, frame: TimingFrame) -> Dict[str, CollisionData]:
+ """Private collision evaluation logic
+
+ Args:
+ frame: The :class:`~.timing_frame.TimingFrame` to be evaluated
+
+ Returns:
+ Collision data for each obstacle, packed as :class:`~.holders.CollisionData` with the obstacle id as key
+ """
+ data = CollisionData()
+
+ if not isinstance(frame, EvaluatedTimingFrame):
+
+ if len(self._obstacles.items()) == 0:
+ return {}
+
+ seg_points = np.concatenate(
+ (utils.shapely_point_to_ndarray(frame.position)[None, :], frame._segment_points), axis=0
+ )
+
+ points_on_route, times, delta_times, speeds = cast(
+ Tuple[NDArray[np.floating], NDArray[np.floating], NDArray[np.floating], NDArray[np.floating]],
+ (
+ np.array(list(seg_points), dtype=np.float64)[:-1],
+ np.array(list(frame.times[:]), dtype=np.float64)[:-1],
+ np.array(list(np.diff(frame.times)), dtype=np.float64)[:],
+ np.array(list(frame.speeds[:]))[:],
+ ),
+ )
+
+ #delta_times = np.append(delta_times, np.array([0]), axis=0)[1:]
+
+ edges, data.delta_speed = self.obstacle_time_projection(times, speeds)
+
+ data.delta_points = points_on_route[:, :, None] - edges
+
+ # find time and place of collision
+ # normal dist
+ data.normal_distances = np.einsum(
+ "ix, txi-> ti", self._obstacles_inner_normals, data.delta_points
+ )
+ # normal speed/vectors
+ data.normal_vectors = np.einsum("ix, txi-> ti", self._obstacles_inner_normals, data.delta_speed)
+
+ with np.errstate(divide="ignore", invalid="ignore"):
+ data.time_collision = np.nan_to_num(-1 * data.normal_distances / data.normal_vectors)
+
+ with np.errstate(over="ignore"):
+ x_coll: NDArray[np.floating] = (
+ np.einsum(
+ "txi, ix-> ti",
+ data.delta_points + data.time_collision[:, None, :] * data.delta_speed,
+ self._obstacles_edges,
+ )
+ / self._obstacles_squared_edges_length
+ )
+
+ data.collision_status = (
+ (x_coll >= 0)
+ * (x_coll <= 1)
+ * (data.time_collision >= 0)
+ * (data.time_collision <= delta_times[:, None])
+ )
+
+ for obs_iter in self._obstacles.values():
+ if obs_iter.shape.contains(frame.route.locations[0]) and obs_iter.shape.contains(
+ frame.route.locations[-1]
+ ):
+ data.collision_status[0][0] = True
+
+ # determine nearest point between time points
+ dv2 = np.einsum("ijk, ijk-> ik", data.delta_speed, data.delta_speed)
+ with np.errstate(divide="ignore", invalid="ignore"):
+ data.time_min = np.nan_to_num(
+ -1 * np.einsum("ijk, ijk-> ik", data.delta_points, data.delta_speed) / dv2
+ )
+
+ data.time_min[dv2[:, 0] == 0, :] = 0.0
+ data.time_min[(data.time_min < 0) + (data.time_min > delta_times[:, None])] = 0
+
+ # case 3: nearest point between obstacle points
+ with np.errstate(divide="ignore", invalid="ignore"):
+ x_min = (
+ np.einsum("ijk, kj->ik", data.delta_points, self._obstacles_edges)
+ / self._obstacles_squared_edges_length
+ )
+ x_min[:, self._obstacles_squared_edges_length == 0] = 0.0
+ x_min[(x_min >= 1) + (x_min < 0)] = 0
+
+ # difference vector
+ assert self._obstacles is not None
+ data.distance_vectors = np.stack(
+ (
+ data.delta_points + data.delta_speed * data.time_min[:, None, :],
+ data.delta_points - self._obstacles_edges.T[None, :, :] * x_min[:, None, :],
+ ),
+ axis=-2,
+ )
+
+ data.min_dist = np.amin(linalg.norm(data.distance_vectors, axis=1), axis=-2)
+ data.min_dist[data.collision_status] = 0
+
+ data.x_min = x_min
+ data.speeds = speeds[:, :, None]
+
+ return self._pack_collision_data(data)
+
+ return frame.data_on_collisions
+
+ def _pack_collision_data(self, data: CollisionData) -> Dict[str, CollisionData]:
+ """Packs collision data dictionary created by :meth:`_collision_eval` into a dictionary.
+
+ Done for each :class:`~.obstacle.Obstacle` allows for obstacle specific querying.
+
+ Args:
+ data: Dictionary of collision data created by :meth:`_collision_eval`
+
+ Returns:
+ Dictionary with obstacle IDs as keys and the :class:`~.holders.CollisionData` as values
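+
+        Example:
+            Illustration of the index arithmetic: with ``_points_per_obstacle == [4, 3]`` the cumulative
+            sums are ``[0, 4, 7]``, so the first obstacle receives the slice ``0:4`` and the second the
+            slice ``4:7`` of the last axis of every stacked array.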
+ """
+
+ cumulative_points = np.cumsum([0] + self._points_per_obstacle)
+ indices = [
+ (cumulative_points[i], cumulative_points[i + 1]) for i in range(len(self._points_per_obstacle))
+ ]
+
+ return_val: Dict[str, CollisionData] = {}
+
+ for obstacle_iter, ind in zip(self._obstacles.keys(), indices):
+ collision_iter: CollisionData = CollisionData()
+ for attr in fields(collision_iter):
+ data_value_of_attr: np.ndarray = getattr(data, attr.name)
+ if data_value_of_attr.shape[-1] > 1:
+ setattr(collision_iter, attr.name, data_value_of_attr.T[ind[0] : ind[1]].T) # noqa: E203
+ else:
+ setattr(collision_iter, attr.name, data_value_of_attr.T[0].T)
+ return_val[obstacle_iter] = collision_iter
+
+ return return_val
+
+ def obstacle_time_projection(
+ self, times: NDArray[np.floating], speeds: NDArray[np.floating]
+ ) -> Tuple[NDArray[np.floating], NDArray[np.floating]]:
+ """Projects saved obstacles in their predicted locations at given times.
+
+ Also calculates their vectorized relative speed on route segments relative to each obstacles. For
+ the following dimensional information let x denote the cumulated sum of all unique points of all
+ obstacles currently stored in the cost model.
+
+ Args:
+ times: Time constraints of route with shape ``(number of time points, )``
+ speeds: speeds of route ``()``
+
+ Return: A tuple of ``(points, speeds)`` containing a tensor of projected obstacle coordinates at
+ each time point of the route``points`` , has dimension ``(number of time points, x, 2)``,
+ and ``speeds`` has dimension ``(number of time points, 2, number of obstacles stored, 2)``.
+
+ """
+
+ if len(self._obstacles_points.T.shape) == 1:
+ assert len(self._obstacles_speeds.T.shape) == 1, (self._obstacles_points, self._obstacles_speeds)
+            # in the case of no obstacles, numpy would implicitly cast dimensions, resulting in an error
+
+ return np.array([[[]]], dtype=np.float64), np.array([[[]]], dtype=np.float64)
+
+ assert len(self._obstacles_speeds.T.shape) == 2, self._obstacles_speeds.shape
+ assert len(self._obstacles_points.T.shape) == 2, self._obstacles_points.shape
+
+ projected_points = (
+ self._obstacles_points.T[None, :, :] + self._obstacles_speeds.T[None, :, :] * times[:, None, None]
+ )
+ #print("+"*10)
+ #print(times)
+ #print(speeds)
+ #print("#"*10)
+ relative_speeds = speeds[:, :, None] - self._obstacles_speeds.T[None, :, :]
+ return projected_points, relative_speeds
+
+ # ---- Gradient Calculation
+
+ def gradients( # pylint: disable=too-many-locals
+ self, frame_s: Union[TimingFrame, List[TimingFrame]]
+ ) -> Union[Tuple[List[float], List[NDArray[np.floating]]], Tuple[float, NDArray[np.floating]]]:
+ """Calculates the route gradients of the route with respect to full route cost.
+
+ Arguments:
+ frame_s: frame(s) to calculate gradients from
+
+ Returns:
+ Tuple of cost of taking :class:`~.timing_frame.TimingFrame`\\ (s) and cost derivative of
+ :class:`~.timing_frame.TimingFrame`\\ (s)
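+
+        Example:
+            A hedged usage sketch, assuming ``model`` is a :class:`CostModel` and ``frame`` an already
+            simulated :class:`~.timing_frame.TimingFrame`::
+
+                cost, gradient = model.gradients(frame)
+                # the gradient can then drive a gradient descent update of the frame's sub goals,
+                # as done in the optimizer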
+ """
+ if not hasattr(frame_s, "__iter__"):
+ frames = [cast(TimingFrame, frame_s)]
+ else:
+ frames = cast(List[TimingFrame], frame_s)
+
+ evaluated_frames: List[EvaluatedTimingFrame] = []
+ for frame in frames:
+ frame.update_times(self._model)
+ evaluated, _ = self.evaluate(frame)
+ evaluated_frames.append(evaluated)
+
+        if not self._obstacles:
+ cost_return, grad_return = [r.cost for r in frames], [r.cost_grad() for r in frames]
+
+ if len(evaluated_frames) == 1:
+ assert len(grad_return) == 1
+ return cost_return[0], grad_return[0]
+ return cost_return, grad_return
+
+ costs_array, obs_gradients = self._calc_gradients(evaluated_frames)
+ costs = [c + r.cost for c, r in zip(costs_array, evaluated_frames)]
+ gradients = []
+ for frame, (grad_subs, grad_times, grad_speeds) in zip(evaluated_frames, obs_gradients):
+ assert not np.isnan(grad_times).any(), grad_times
+ gradients.append(
+ frame.cost_grad(
+ other_cost_dtimes=grad_times, dcost_dspeed=grad_speeds, dcost_dpoints_ext=grad_subs
+ )
+ )
+
+ if len(evaluated_frames) == 1:
+ assert len(gradients) == 1
+ return costs[0], gradients[0]
+ return costs, gradients
+
+ def _calc_gradients(
+ self, frames: List[EvaluatedTimingFrame]
+ ) -> Tuple[NDArray[np.floating], Iterable[TimingFrameGradients]]:
+ """Calculates cost derivatives in respect to several aspects of a list of frames
+
+ Args:
+ frames: frames to calculate Gradients
+
+ Returns: Tuple of cumulative cost and an iterable that yields a tuple of gradients. The first
+ entry in the gradient tuple describes the route gradient w.r.t. the locations of the stored
+ obstacles. The second entry describes the gradient w.r.t the simulated times and the third
+ gradient is calculated w.r.t. the obstacle speeds under an assumption of constant speed. Each
+ gradient should've the shape ``(number of route segments, 2)``. The length of the iterable
+ is the same as the given list of frames.
+ """
+
+ # pylint: disable-msg=too-many-locals
+        # This is acceptable since the logic is already split into several hierarchical methods and the
+        # number of local variables cannot be reduced further without hurting readability.
+
+ # evaluate the attributes at the lowest distance, needed for gradient calculation
+ array_shape = (len(frames), len(self._obstacles))
+ time_seg, obst_seg, min_dist, grad_distances, t_min, cost = [np.zeros(array_shape) for _ in range(6)]
+ speeds, dist_vect = [np.zeros((*array_shape, 2)) for _ in range(2)]
+ for i, frame in enumerate(frames):
+
+ for j, (obstacle_key, obstacle) in enumerate(self._obstacles.items()):
+ data = frame.data_on_collisions[obstacle_key]
+
+ # pylint: disable-msg=unbalanced-tuple-unpacking
+ # this is safe due to how data is created
+ frame_index, obstacle_index = np.unravel_index(
+ np.argmin(data.min_dist, axis=None), data.min_dist.shape
+ )
+ min_dist[i, j] = data.min_dist[frame_index, obstacle_index]
+
+ assert not (np.isinf(min_dist).any()), data
+
+ t_min[i, j] = data.time_min[frame_index, obstacle_index]
+ cost[i, j], grad_distances[i, j] = obstacle.distance_gradient(min_dist[i, j])
+
+ # if obstacle.soft and min_dist[i, j] == 0:
+ # TO (BJK): add soft obstacles (this is a leftover from old code)
+ # cost[i, j] += obstacle.duration_cost.cost(self._duration_eval(route, obstacle))
+ # grad_distances[i, j] = 0
+
+ if min_dist[i, j] == 0:
+ min_dist[i, j] = 1e-3
+ time_seg[i, j] = frame_index
+ obst_seg[i, j] = obstacle_index
+
+ speeds[i, j, :] = data.speeds[frame_index, :]
+
+ assert data.distance_vectors.shape[2] == 2, "case not handled"
+
+ distance_index = data.distance_vectors[frame_index, :, :, obstacle_index]
+ dist_vect[i, j, :] = min(distance_index[:, 0], distance_index[:, 1], key=linalg.norm)
+
+ # chain rule for derivation
+ assert not (np.isnan(grad_distances).any()), grad_distances
+ assert not (np.isnan(min_dist).any()), min_dist
+ assert not (np.isnan(dist_vect).any()), dist_vect
+
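+        # d(cost)/d(distance vector) = d(cost)/d(min_dist) * d(min_dist)/d(distance vector),
+        # where d(min_dist)/d(distance vector) = dist_vect / ||dist_vect|| = dist_vect / min_dist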
+ dcost_d_distvec = np.nan_to_num((grad_distances / min_dist))[:, :, None] * dist_vect
+
+ assert not (np.isnan(dcost_d_distvec).any()), min_dist
+
+ dcost_d_deltapoints = dcost_d_distvec
+ dcost_d_speed = dcost_d_distvec * t_min[:, :, None]
+ dcost_d_times = np.sum(dcost_d_deltapoints * (-speeds), axis=-1)
+
+ assert not (np.isnan(dcost_d_times).any()), (speeds, grad_distances, min_dist, dist_vect)
+
+ return (
+ cast(NDArray[np.floating], cost.sum(axis=1)),
+ self._split_grads((dcost_d_deltapoints, dcost_d_times, dcost_d_speed), time_seg, frames),
+ )
+
+ def _split_grads(
+ self,
+ gradients: Tuple[NDArray[np.floating], NDArray[np.floating], NDArray[np.floating]],
+ time_seg: NDArray[np.floating],
+ frames: List[EvaluatedTimingFrame],
+ ) -> Generator[TimingFrameGradients, None, None]:
+ """Split combined gradients into separate gradients for each different routes.
+
+ Args:
+ gradients: gradients (grad points, grad times, grad velocity) each gradient of the shape
+ ``(number of obstacles, number of route segments, 2)``
+ time_seg: indices of minimal distance time segment
+ frames: frames of the gradients
+
+ Yields: Gradients split per route as a Tuple. The first entry in the gradient tuple describes the
+ route gradient w.r.t. the locations of the stored obstacles. The second entry describes the
+            gradient w.r.t. the simulated times and the third gradient is calculated w.r.t. the obstacle
+            speeds under an assumption of constant speed. Each gradient should have the shape
+            ``(number of route segments, 2)``.
+ """
+ for i, frame in enumerate(frames):
+ frame_length = frame.times.shape[0]
+
+ grad_subs, grad_times, grad_speeds = (
+ np.zeros((frame_length - 1, 2)),
+ np.zeros(frame_length),
+ np.zeros((frame_length, 2)),
+ )
+ for j, _ in enumerate(self._obstacles):
+                # (leftover from old code; may save time if soft obstacles are added)
+ # if False:
+ # if obst.soft and frame_index.data_on_collision[on].any():
+ # g_subs, g_times, g_speeds = self.duration_grad(frame_index, on, obst)
+ # grad_times += g_times
+ # grad_speeds += g_speeds
+ # grad_subs += g_subs
+ # else:
+
+ time_segment = int(time_seg[i, j])
+ if time_segment:
+ grad_subs[time_segment - 1] += gradients[0][i, j, :]
+ grad_times[time_segment] += gradients[1][i, j]
+ grad_speeds[time_segment, :] += gradients[2][i, j, :]
+
+ yield (
+ cast(NDArray[np.floating], grad_subs),
+ cast(NDArray[np.floating], grad_times),
+ cast(NDArray[np.floating], grad_speeds),
+ )
diff --git a/pyrate/pyrate/plan/nearplanner/evaluated_timing_frame.py b/pyrate/pyrate/plan/nearplanner/evaluated_timing_frame.py
new file mode 100644
index 0000000..6e7cf72
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/evaluated_timing_frame.py
@@ -0,0 +1,163 @@
+"""Contains evaluated timing frames."""
+
+# Static Typing
+from typing import cast
+from typing import Dict
+from typing import Optional
+from typing import Set
+from typing import Tuple
+from typing import Union
+
+import numpy.typing as npt
+
+# Scientific Computing
+import numpy as np
+
+# Geometry
+from pyrate.plan.geometry.location import CartesianLocation
+from pyrate.plan.geometry.route import CartesianRoute
+
+from .holders import CollisionData
+from .timing_frame import TimingFrame
+
+
+class EvaluatedTimingFrame(TimingFrame):
+ """Class used as a wrapper for evaluated :class:`~.timing_frame.TimingFrame`.
+
+    It is guaranteed that instances of this class possess a :attr:`data_on_collisions` evaluation.
+    For :class:`~.timing_frame.TimingFrame` instances upgraded by a cost model, simulated times are also
+    guaranteed.
+
+ Args:
+ route: route simulated by the :class:`EvaluatedTimingFrame`
+ data_on_collisions: data on possible collision to encapsulate
+ start_time: start time to start route at
+
+ Attributes:
+ actual_cost: accumulated cost of time taken, distance to obstacles and their speed
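+
+    Example:
+        Instances are normally produced by a cost model rather than constructed directly. A hedged
+        sketch, assuming ``cost_model`` is a configured cost model and ``frame`` a simulated
+        :class:`~.timing_frame.TimingFrame`::
+
+            evaluated, cost = cost_model.evaluate(frame)
+            if evaluated.valid:
+                print(evaluated.collision_times)  # all values are infinite if no collision occurs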
+ """
+
+ def __init__(
+ self, route: CartesianRoute, data_on_collisions: Dict[str, CollisionData], start_time: float = 0
+ ) -> None:
+ self._data_on_collisions = data_on_collisions
+ self._revoked = False
+ self.actual_cost = np.float64(0.0)
+ super().__init__(route, start_time)
+
+ def __str__(self) -> str:
+ return f"Evaluated{super().__str__()}"
+
+ def revoke(self, new_position: CartesianLocation) -> TimingFrame:
+ """Revokes the `~.timing_frame.TimingFrame`.
+
+ This results in this instance becoming invalid
+ and returns a new non evaluated :class:`~.timing_frame.TimingFrame` with adapted first point.
+
+ Args:
+ new_position: new first position of the revoked :class:`~.timing_frame.TimingFrame`
+
+ Returns:
+ stripped :class:`~.timing_frame.TimingFrame`
+ """
+ self.position = new_position
+ new_frame: TimingFrame = TimingFrame(self.route, start_time=self.start_time)
+ self._revoked = True
+ return new_frame
+
+ @property
+ def valid(self) -> bool:
+ """True iff :class:`~.timing_frame.TimingFrame` was already evaluated and no collision was found"""
+ if self._data_on_collisions is not None and not self._revoked:
+ return not any(o.collision_status.any() for k, o in self._data_on_collisions.items())
+ return False
+
+ @property
+ def imminent_collision(self) -> Tuple[str, float]:
+ """Collision time for imminent collisions
+
+ Raises:
+ AssertionError: If attribute is called before TimingFrame has been evaluated
+
+ Return:
+ tuple of the obstacle id and time of the next collision
+ """
+ assert self.data_on_collisions is not None
+ temp = {}
+
+ for key, obstacle_data in self.data_on_collisions.items():
+ if not obstacle_data.collision_status.any():
+                temp[key] = np.inf
+ else:
+ temp[key] = (obstacle_data.time_collision + self._times[1:, None])[
+ obstacle_data.collision_status
+ ].min()
+
+        return cast(Tuple[str, float], min(temp.items(), key=lambda x: x[1]))
+
+ @property
+ def collision_times(self) -> Dict[str, float]:
+ """Times for collisions with each obstacle
+
+ Raises:
+            AssertionError: If the attribute is accessed before the TimingFrame has been evaluated
+
+ Return:
+ dictionary that maps an obstacle id to the time of collision if route is taken
+ """
+ assert self.data_on_collisions is not None
+ temp = {}
+ for key, obstacle_data in self.data_on_collisions.items():
+ if not obstacle_data.collision_status.any():
+ temp[key] = np.inf
+ else:
+ temp[key] = (obstacle_data.time_collision + self._times[:-1, None])[
+ obstacle_data.collision_status
+ ].min()
+ return cast(Dict[str, float], temp)
+
+ @property
+ def collision_segment(self) -> Dict[str, Optional[npt.NDArray[np.floating]]]:
+ """Segments where collision happens as a dictionary with obstacle ids as key.
+
+ Raises:
+            AssertionError: If the attribute is accessed before the TimingFrame has been evaluated
+
+ Return:
+ Dictionary mapping obstacle IDs to index of route segment where collision is predicted
+ """
+ assert self.data_on_collisions is not None
+ temp: Dict[str, Optional[npt.NDArray[np.floating]]] = {}
+ for key, obstacle_data in self.data_on_collisions.items():
+ if not obstacle_data.collision_status.any():
+ temp[key] = None
+ else:
+ temp[key] = np.argmax(obstacle_data.collision_status.sum(axis=1)) # type: ignore
+ return temp
+
+ @property
+ def minimal_dist(
+ self,
+ ) -> Union[Set[np.floating], Dict[str, Tuple[np.floating, Optional[npt.NDArray[np.floating]]]]]:
+ """Dictionary of minimal distances per obstacle ID.
+
+ Return:
+            If not yet evaluated ``{0}``, else a mapping from obstacle ID to a tuple of the minimal
+            distance and the index at which it is attained (``None`` if a collision occurs).
+ """
+ if self.data_on_collisions is None:
+ return {np.float64(0.0)}
+ temp: Dict[str, Tuple[np.floating, Optional[npt.NDArray[np.floating]]]] = {}
+ for key, obstacle_data in self.data_on_collisions.items():
+ if obstacle_data.collision_status.any():
+ temp[key] = np.float64(0.0), None
+ else:
+ temp[key] = (
+ np.float64(obstacle_data.min_dist.min()),
+ np.argmin(obstacle_data.min_dist.min(axis=1)).astype(np.float64),
+ ) # type: ignore
+ return temp
+
+ @property
+ def data_on_collisions(self) -> Dict[str, CollisionData]:
+ """Information about collisions with each obstacle, index by obstacle keys."""
+ return self._data_on_collisions
diff --git a/pyrate/pyrate/plan/nearplanner/exceptions.py b/pyrate/pyrate/plan/nearplanner/exceptions.py
new file mode 100644
index 0000000..047405d
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/exceptions.py
@@ -0,0 +1,17 @@
+"""This module defines exceptions for use in the near-planning backend."""
+
+
+class PlanningError(Exception):
+ """Base Class for all exceptions throw during route planning"""
+
+
+class NoRouteFoundException(PlanningError):
+ """Exception thrown if route discovery failed
+
+ Args:
+ message: message to display
+ """
+
+ def __init__(self, message: str):
+ self.message = message
+ super().__init__(message)
diff --git a/pyrate/pyrate/plan/nearplanner/holders.py b/pyrate/pyrate/plan/nearplanner/holders.py
new file mode 100644
index 0000000..5618b71
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/holders.py
@@ -0,0 +1,92 @@
+"""collection of dataclasses used in the planning backend and node-service of the near-planner node"""
+
+# Dataclass Support
+from dataclasses import dataclass
+
+from typing import Optional
+
+# Scientific Computing
+import numpy as np
+
+# Static Typing
+import numpy.typing as npt
+
+
+@dataclass
+class CollisionData:
+ """Holds information on possible future collisions or collision avoidance with a single obstacle.
+
+    The stored information is not intended to be read directly from this dataclass, but to be stored
+    and post-processed internally in an :class:`~.evaluated_timing_frame.EvaluatedTimingFrame`
+    and accessed through its properties.
+ """
+
+ collision_status: npt.NDArray[np.bool_] = np.array([], dtype=np.bool_)
+ """array, whose entries describe if a segment i of a frame has a collision with an obstacle """
+ min_dist: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ """array, whose entries describe the minimal distances at each time steps to the obstacle"""
+ normal_distances: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ """array, whose entries describe the normal distances of obstacle towards the boat on each segment"""
+ normal_vectors: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ """array, whose entries describe the normal vectors of each obstacle at each time step"""
+ time_collision: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ """array, whose entries describe the time of a future collision with an object"""
+    time_min: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+    """array, whose entries describe the time of closest approach to the obstacle on each segment"""
+    x_min: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+    """array, whose entries describe the relative position along the obstacle edges where the minimal
+    distance is attained"""
+ delta_speed: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ """array, describing the relative speed of route segments towards the obstacle"""
+ delta_points: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ """array, describing relative position of route towards obstacle on each time step"""
+ speeds: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ """array, describing relative speeds towards obstacle"""
+ distance_vectors: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ """array, describing the vectorized distances towards the given obstacle"""
+
+
+@dataclass
+class EstimationParameters:
+ """Holds estimation parameters for finding first initial routes.
+
+ This class is mainly used by instances of :class:`~.optimizer.Optimizer` while searching an initial guess
+ for a valid :class:`~.timing_frame.TimingFrame` that later can be optimized.
+ """
+
+ first_try_construction: bool = True
+ """if first to try deterministic construction by obstacle hulls"""
+
+ max_len_relative: float = 0.1
+ """Maximum fraction of the length of a new edge to explore, relative to the distance to the goal"""
+ max_count: int = 300
+ """maximal number of potential nodes are explored before resulting in a failure"""
+ p_goal: float = 0.2
+ """probability the goal is chosen as the next node"""
+
+
+@dataclass
+class OptimizationParameters:
+ """Holds parameters for running :class:`~.optimizer.Optimizer`."""
+
+ estimation_parameters: EstimationParameters
+ """parameters used for initial exploration"""
+ n_samples: int = 25
+ """maximum number of initial samples that will be selected, optimized and the results compared"""
+ inital_stepsize: float = 0.1
+ """fraction of the direct distance to goal which will be used as the initial step size for optimization"""
+ n_iter_grad: int = 500
+ """maximum number of steps used by the gradient descent algorithm"""
+ n_break: int = 10
+ """maximum number of initial samples which will be selected to be optimized and compared"""
+    adaptive_step_size: bool = True
+    """whether to use an adaptive step size during gradient descent"""
+    prune: bool = True
+    """whether to prune the resulting routes"""
+    verbose: bool = False
+    """whether to print additional information"""
+ overwrite_grad_lim: Optional[float] = None
+ """optional parameter to overwrite to cut of the gradient at each optimization step"""
+ adaptive_data_rate_penalty_on_loss_sign_change: float = 2.5
+ """penalty applied to lossful sign changes during adaptive step size optimization"""
+ adaptive_data_rate_general_regulization_factor: float = 1.5
+ """regularization positive and negative used to adapt step size based on whether cost in/decreases"""
diff --git a/pyrate/pyrate/plan/nearplanner/obstacle.py b/pyrate/pyrate/plan/nearplanner/obstacle.py
new file mode 100644
index 0000000..2bfa424
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/obstacle.py
@@ -0,0 +1,137 @@
+"""
+code for the Obstacle
+"""
+
+# Static Typing
+from typing import cast
+from typing import Tuple
+
+from numpy.typing import NDArray
+
+# Scientific Computing
+import numpy as np
+
+from scipy import linalg
+
+# Geometry
+from pyrate.plan.geometry.polygon import CartesianPolygon
+
+from .cost_functions import CostFunction
+from .cost_functions import CostFunctionLinear
+
+
+class Obstacle:
+ """An obstacle with a polygon shape and vector speed.
+
+ Wrapper class around a :class:`~pyrate.plan.geometry.polygon.CartesianPolygon`. Assumes constant speed.
+
+ Args:
+ polygon: Polygon shape obstacle shall be based on
+ speed: 2D speed vector describing the obstacle's movement in ``(east, north)`` direction.
+ cost_function: The cost function that shall apply to this :class:`~.obstacle.Obstacle`
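+
+    Example:
+        A hedged construction sketch; the coordinates and the speed vector are illustrative, and it is
+        assumed that :meth:`~pyrate.plan.geometry.polygon.CartesianPolygon.from_numpy` accepts an
+        ``(n, 2)`` vertex array::
+
+            import numpy as np
+
+            from pyrate.plan.geometry.polygon import CartesianPolygon
+
+            shape = CartesianPolygon.from_numpy(np.array([[0.0, 0.0], [10.0, 0.0], [5.0, 8.0]]))
+            drifting_buoy = Obstacle(shape, speed=np.array([0.2, -0.1]))
+            shifted = drifting_buoy.project_at_time(30.0)  # vertex coordinates 30 s into the future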
+ """
+
+ def __init__(
+ self,
+ polygon: CartesianPolygon,
+ speed: NDArray[np.floating],
+ cost_function: CostFunction = CostFunctionLinear(),
+ ) -> None:
+ self.shape = polygon
+ self.speed = speed if speed is not None else np.zeros(2)
+ self.cost_function = cost_function
+
+ def __call__(self, time: float) -> "Obstacle":
+ """Transposes Obstacle to a given time.
+
+ Args:
+ time: time to transpose to
+
+ Returns:
+ A copy of this object transposed to given time
+ """
+ projection = self.project_at_time(time)
+ cartesian_poly = CartesianPolygon.from_numpy(projection)
+ return Obstacle(cartesian_poly, self.speed, cost_function=self.cost_function)
+
+ def distance_gradient(self, min_dist: float) -> Tuple[np.floating, np.floating]:
+ """Calculates the distance gradient.
+
+ Args:
+ min_dist: minimal distance to calculate distance gradient from
+
+ Returns:
+            tuple of the cost and the derivative of the cost with respect to the minimal distance
+ """
+ return self.cost_function.cost_grad(min_dist)
+
+ def cost(self, min_distance: float) -> np.floating:
+ """Calculates the cost of the obstacle in respect to the minimal distance.
+
+ Args:
+ min_distance: minimal distance to base the cost of the obstacle on
+
+ Returns:
+ cost of taking a route in the minimal distance of this obstacle
+ """
+ return self.cost_function.cost(min_distance)
+
+ def project_at_time(self, time: float) -> NDArray[np.floating]:
+ """Projects the obstacle shape at a given time.
+
+ Args:
+ time: time to project the obstacle shape to
+
+ Returns:
+ numpy.ndarray with coordinates of the projected shape
+ """
+ return cast(NDArray[np.floating], self.shape.to_numpy() + self.speed * time)
+
+ @property
+ def unique_points(self) -> NDArray[np.floating]:
+ """only unique points of polygon shape
+
+ Returns:
+ numpy-array of unique points that make up the polygon
+ """
+ return cast(NDArray[np.floating], self.shape.to_numpy()[:-1, :])
+
+ @property
+ def edges(self) -> NDArray[np.floating]:
+ """method for getting edges in a numpy array
+
+ Returns:
+ numpy-array of polygon edges
+ """
+ temp = self.shape.to_numpy()
+ return cast(NDArray[np.floating], (np.concatenate((temp[1:, :], temp[:1, :])) - temp)[:-1, :])
+
+ @property
+ def squared_edges_length(self) -> NDArray[np.floating]:
+ """method for getting the squared length of polygon edges in a numpy array
+
+ Returns:
+ numpy-array of squared lengths of polygon edges
+ """
+ return cast(NDArray[np.floating], linalg.norm(self.edges, axis=1) ** 2)
+
+ @property
+ def edges_length(self) -> NDArray[np.floating]:
+ """method for getting edges length in a numpy array
+
+ Returns:
+ numpy-array of lengths of polygon edges
+ """
+ return cast(NDArray[np.floating], linalg.norm(self.edges, axis=1))
+
+ @property
+ def inner_normals(self) -> NDArray[np.floating]:
+ """method for getting normals in numpy-array
+
+ Returns:
+ numpy-array of edge normals
+ """
+ diffs = self.edges
+ normals = np.concatenate((diffs[:, 1:2], -diffs[:, 0:1]), axis=1)
+ with np.errstate(divide="ignore"):
+ _ = cast(NDArray[np.floating], normals / linalg.norm(normals, axis=1, keepdims=True))
+ return _
diff --git a/pyrate/pyrate/plan/nearplanner/optimizer.py b/pyrate/pyrate/plan/nearplanner/optimizer.py
new file mode 100644
index 0000000..24facbf
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/optimizer.py
@@ -0,0 +1,737 @@
+"""
+optimizer code for use in the near-planner ros-node
+"""
+
+# Dataclass Support
+from dataclasses import dataclass
+from typing import Dict
+from typing import List
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+# Static Typing
+from typing import cast
+
+# Numerical Computing
+import numpy as np
+import shapely
+import shapely.affinity
+import shapely.ops
+from numpy import typing as npt
+
+# Geometry
+from pyrate.plan.geometry.route import CartesianRoute
+from scipy import linalg
+from scipy.special import softmax
+from shapely.geometry import Point
+
+from . import utils
+from .cost_model import CostModel
+from .evaluated_timing_frame import EvaluatedTimingFrame
+from .exceptions import NoRouteFoundException
+from .holders import EstimationParameters
+from .holders import OptimizationParameters
+from .obstacle import Obstacle
+from .polar_model import PolarModel
+from .timing_frame import TimingFrame
+
+
+class Optimizer:
+ """Debug class for interactive interpreter.
+
+ Object for initializing and optimizing (near) routes / planning based on obstacles and polar diagrams.
+ Provides optimization regarding sailing time and obstacle danger, which is evaluated
+ via cost functions.
+
+ Args:
+ wind_information: optional current tuple of ``(angle, speed)``, in (rad, strength in m/s)
+ obstacles: optional dict mapping obstacle keys to obstacle objects
+ heading: optional current heading angle of the boat, in radians, right hand from true north
+ position: optional current position of the sailboat, in ``(east, north)``/``(x, y)`` coordinates
+ """
+
+ def __init__( # pylint: disable=R0913
+ self,
+ position: Optional[Point] = None,
+ obstacles: Optional[Dict[str, Obstacle]] = None,
+ heading: Optional[npt.NDArray[np.floating]] = None,
+ wind_information: Optional[Tuple[float, float]] = None,
+ ) -> None:
+ # environment variables
+ wind_angle, wind_speed = wind_information if wind_information is not None else (0, 0)
+
+ self._wind_angle: np.floating = np.float64(wind_angle)
+ self._wind_speed: np.floating = np.float64(wind_speed)
+
+ self._grad_lim: np.floating = np.float32(5.0)
+
+ # boat model and goal
+ self.boat_polar: PolarModel = PolarModel(wind_speed, wind_angle)
+
+ # obstacles
+ self._obstacles: Dict[str, Obstacle] = obstacles or {}
+
+ self.cost_model: CostModel = CostModel(self._obstacles, self.boat_polar)
+
+ # boat position
+ self._heading: Optional[npt.NDArray[np.floating]] = heading
+
+ self.heading_dir: Optional[npt.NDArray[np.floating]] = np.array([1, 0])
+ if heading is not None:
+ self.heading_dir = np.array([np.cos(heading), np.sin(heading)])
+
+ self._position: Point = position if position is not None else Point(0, 0)
+
+ self._goal: Optional[Point] = None
+
+ def __str__(self):
+ return f"Optimizer(wind=({self._wind_angle},{self._wind_speed}), position={self._position}, goal={self._goal}, obs={self._obstacles.items()})"
+
+ # ---- UPDATE INTERFACES ---- #
+ @property
+ def position(self) -> Point:
+ """Position of the boat held by the optimizer"""
+ return self._position
+
+ @position.setter
+ def position(self, value: Point) -> None:
+ self._position = value
+
+ @property
+ def goal(self) -> Optional[Point]:
+ """Current sub goal we have to create a route to"""
+ return self._goal
+
+ @goal.setter
+ def goal(self, value: Point) -> None:
+ self._goal = value
+
+ @property
+ def heading(self) -> Optional[npt.NDArray[np.floating]]:
+ """Current heading of the optimizer"""
+ return self._heading
+
+ @heading.setter
+ def heading(self, value: Point) -> None:
+ self._heading = value
+ self.heading_dir = np.array([np.cos(value), np.sin(value)])
+
+ def on_added_obstacles(self, obstacles: Dict[str, Obstacle]) -> None:
+ """Event to call if obstacles added to simulation.
+
+ This event changes the underlying :class:`CostModel` to adapt to deleted obstacles.
+
+ Args:
+ obstacles: obstacles, optionally keyed by ids, to add
+ """
+ self.cost_model.add_obstacle(obstacles)
+
+ def on_reset_obstacles(self, obstacles: Dict[str, Obstacle]) -> None:
+ """Convenience Event to call if a drastic change in obstacle population has happened to simulation.
+
+ The same effects of this method can be achieved by separately calling :meth:`on_added_obstacles` and
+ :meth:`on_deleted_obstacles`. As the behaviours of these methods overlap this method provides a more
+ efficient, joint and easier way to deal with a reset or large movement in the stored obstacle
+ population.
+
+ Args:
+ obstacles: dictionary of obstacles to reset the stored obstacle population upon, keyed by
+ obstacle ids
+ """
+ self.cost_model.rebase_obstacles(obstacles)
+
+ def on_deleted_obstacles(self, obstacle_id_s: Union[str, List[str]]) -> Optional[List[Obstacle]]:
+ """Event to call to delete obstacles from simulation.
+
+ This event changes the underlying :class:`CostModel` to adapt to deleted obstacles.
+
+        Args:
+            obstacle_id_s: id or list of ids of the obstacle(s) to delete
+
+        Returns:
+            the deleted obstacles
+        """
+ deleted_obs = self.cost_model.delete_obstacle(obstacle_id_s)
+ return deleted_obs
+
+ @property
+ def wind_angle(self) -> float:
+ """Wind angle to simulate"""
+ return float(self._wind_angle)
+
+ @wind_angle.setter
+ def wind_angle(self, value: float) -> None:
+ self._wind_angle = np.float64(value)
+ self.boat_polar.wind_direction = np.float64(value)
+
+ @property
+ def wind_speed(self) -> float:
+ """Wind speed to simulate"""
+ return float(self._wind_speed)
+
+ @wind_speed.setter
+ def wind_speed(self, value: float) -> None:
+ self._wind_speed = np.float64(value)
+ self.boat_polar.wind_speed = np.float64(value)
+
+ # ---- OPTIMIZE LOGIC ---- #
+
+ def optimize( # noqa: C901
+ self,
+ goal: Point,
+ optimization_parameters: OptimizationParameters,
+ heading: Optional[npt.NDArray[np.floating]] = None,
+ custom: bool = True,
+ ) -> Tuple[Optional[EvaluatedTimingFrame], Optional[List[EvaluatedTimingFrame]]]:
+ """Main optimize method of :class:`Optimizer`.
+
+ It uses different methods to determine an optimized
+ :class:`~.evaluated_timing_frame.EvaluatedTimingFrame`
+ from a given heading, goal and optimization parameters.
+
+ Args:
+ goal: :class:`shapely.geometry.Point` specifying the route goal in local cartesian coordinates
+            optimization_parameters: :class:`~.holders.OptimizationParameters` holding the parameters
+                for optimization
+            heading: :class:`np.ndarray` 2d vector describing the current heading of the boat
+            custom: whether to use the custom method (:meth:`optimize_gradient`)
+
+ Returns:
+ optimized :class:`~.evaluated_timing_frame.EvaluatedTimingFrame` and an optional list of
+ intermediate results produced during the optimization and selection algorithm
+
+ Raises:
+ NoRouteFoundException: if no route was found during route discovery
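+
+        Example:
+            A hedged end-to-end sketch, assuming ``optimizer`` is a configured :class:`Optimizer`; the
+            goal coordinates, parameter values and import paths are illustrative::
+
+                from shapely.geometry import Point
+
+                from pyrate.plan.nearplanner.holders import EstimationParameters
+                from pyrate.plan.nearplanner.holders import OptimizationParameters
+
+                parameters = OptimizationParameters(estimation_parameters=EstimationParameters())
+                best_frame, intermediates = optimizer.optimize(Point(50.0, 120.0), parameters)
+                if best_frame is not None:
+                    print(best_frame.actual_cost)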
+ """
+ # pylint: disable-msg=too-many-locals
+
+ if goal is not None:
+ self._goal = goal
+ if heading is not None:
+ self._heading = heading
+
+ assert self._goal is not None
+ assert self.wind_angle is not None
+ assert self.wind_speed is not None
+ assert self.position is not None
+
+ # prepare initial guess
+ init_frames: List[EvaluatedTimingFrame] = []
+ final_frames: List[EvaluatedTimingFrame] = []
+ costs = []
+ construction_successful = False
+
+ if optimization_parameters.overwrite_grad_lim:
+            self._grad_lim = np.float64(optimization_parameters.overwrite_grad_lim)
+ # todo (BEN)
+
+ if optimization_parameters.estimation_parameters.first_try_construction:
+ constructed_routes = self._construct_init(
+ goal=self._goal, parameters=optimization_parameters.estimation_parameters
+ )
+
+ # remove cycles
+ constructed_routes = [f.remove_single_cycles() for f in constructed_routes]
+
+ # simulate
+ _ = [f.update_times(boat_model=self.boat_polar) for f in constructed_routes]
+ # evaluate
+ init_frames = [self.cost_model.evaluate(f)[0] for f in constructed_routes]
+
+ # throw away invalid
+ init_frames = list(filter(lambda frame: frame.valid, init_frames))
+
+ construction_successful = len(init_frames) != 0
+
+ if optimization_parameters.verbose:
+ print(f"[C] Construction finished with {len(init_frames)} remaining")
+
+ if not construction_successful:
+ for _ in range(optimization_parameters.n_samples):
+ frame = self._sample_tree_init(
+ self._goal, parameters=optimization_parameters.estimation_parameters
+ )
+ if frame is None:
+ continue
+
+ frame = frame.remove_single_cycles()
+ frame.update_times(boat_model=self.boat_polar)
+ evaluated, _ = self.cost_model.evaluate(frame)
+
+ init_frames.append(evaluated)
+ if len(init_frames) >= optimization_parameters.n_break:
+ break
+
+ assert all(f.valid for f in init_frames)
+
+ if len(init_frames) == 0:
+ raise NoRouteFoundException("route discovery failed")
+
+ # optimize
+ log_grad = []
+
+ for frame in init_frames:
+ if custom:
+ optimized_frame, list_of_steps = self.optimize_gradient(
+ frame,
+ step_size_parameter=optimization_parameters.inital_stepsize
+ * linalg.norm(self._goal - self.position),
+ optimization_parameters=optimization_parameters, # TODO (BEN) here was something missing
+ )
+ log_grad.append(list_of_steps)
+ else:
+ raise NotImplementedError
+
+ optimized_frame = optimized_frame.remove_single_cycles() # todo execute sometimes in iteration?
+ optimized_frame.update_times(boat_model=self.boat_polar)
+ optimized_frame_evaluated, _ = self.cost_model.evaluate(optimized_frame)
+ costs.append(optimized_frame_evaluated.actual_cost)
+ final_frames.append(optimized_frame_evaluated)
+
+ if optimization_parameters.prune:
+ for frame in final_frames:
+ frame.prune(eps=0.5 / 180 * np.pi)
+ frame.update_times(boat_model=self.boat_polar)
+ frame, _ = self.cost_model.evaluate(frame)
+
+ # handle optimization and initialization failure
+ if np.amin(costs) > 1e10:
+ return None, None
+
+ # return TimingFrame with minimal cost and the attempts needed to reach it
+ return final_frames[np.argmin(costs)], log_grad[np.argmin(costs)]
+
+ def optimize_gradient( # noqa: C901
+ self,
+ init_frame: EvaluatedTimingFrame,
+ optimization_parameters: OptimizationParameters,
+ step_size_parameter: float = 10.0,
+ only_attempt: bool = False,
+ ) -> Tuple[EvaluatedTimingFrame, List[EvaluatedTimingFrame]]:
+ """Optimization routine for gradient based optimization
+
+ Args:
+ only_attempt: if to assure computed route is valid
+ init_frame: initial frame to optimized
+ step_size_parameter: step size for gradient descend
+ optimization_parameters: parameters for optimization bundled in a dataclass
+
+ Returns:
+ optimized :class:`~.timing_frame.TimingFrame`
+ """
+ # pylint: disable-msg=too-many-locals
+ # pylint: disable-msg=too-many-statements
+ step_size: npt.NDArray[np.floating] = step_size_parameter * np.ones(
+ (init_frame.points.shape[0] - 2, 2)
+ )
+
+ last_grad: Optional[npt.NDArray[np.floating]] = None
+ last_subgoals: Optional[npt.NDArray[np.floating]] = None
+
+ cost_list = []
+ result_list = [init_frame]
+ frame = init_frame
+ frame.update_times(self.boat_polar)
+
+ assert init_frame.valid or only_attempt
+
+ last_cost: Optional[np.floating] = None
+ for i in range(optimization_parameters.n_iter_grad):
+ cost, grad = cast(Tuple[np.floating, npt.NDArray[np.floating]], self.cost_model.gradients(frame))
+ if optimization_parameters.verbose:
+ print(
+ f" [C] cost {cost} at stepsize {step_size} with gradient <{grad}> \n subgoals {last_subgoals}"
+ )
+ if self._heading is not None:
+ assert self._heading is not None
+ grad[0, :] = self._heading * np.clip(
+ np.sum(self._heading * grad[0, :]),
+ -10,
+ min(10.0, linalg.norm((frame.points[1, :] - self.position) / step_size[0, :])),
+ )
+ frame.update_times(self.boat_polar)
+
+ frame_evaluated, _ = self.cost_model.evaluate(frame)
+ # step size adaption:
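+            # Per-coordinate step sizes grow where the gradient keeps its sign and shrink where it
+            # flips; if the cost increased (or the frame became invalid), the step is rolled back to
+            # the last accepted sub goals with a reduced step size.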
+ if optimization_parameters.adaptive_step_size:
+
+ if last_cost is not None:
+ assert last_cost is not None
+ if cost - last_cost > 1e-6 or not frame_evaluated.valid:
+ result_list.append(frame)
+ cost_list.append(cost)
+ step_size /= optimization_parameters.adaptive_data_rate_general_regulization_factor
+ assert last_grad is not None
+ step_size[
+ grad * last_grad < 0
+ ] /= optimization_parameters.adaptive_data_rate_penalty_on_loss_sign_change
+ assert last_subgoals is not None and last_grad is not None
+ new_subs = cast(
+ npt.NDArray[np.floating],
+ last_subgoals
+ - np.concatenate(
+ (
+ np.zeros((1, 2)),
+ step_size * np.clip(last_grad, -self._grad_lim, self._grad_lim),
+ np.zeros((1, 2)),
+ ),
+ axis=0,
+ ),
+ )
+ frame = TimingFrame( # type:ignore
+ CartesianRoute.from_numpy(new_subs)
+ )
+ frame.update_times(self.boat_polar)
+ # print(f" ->{last_cost}: {last_grad}")
+ continue
+
+ assert last_grad is not None
+
+ step_size[
+ grad * last_grad < 0
+ ] /= optimization_parameters.adaptive_data_rate_general_regulization_factor
+ step_size[
+ grad * last_grad > 0
+ ] *= optimization_parameters.adaptive_data_rate_general_regulization_factor
+
+ if (step_size < 1e-6).all():
+ break
+ last_cost = cost
+ cost_list.append(cost)
+ last_subgoals = frame.points
+ result_list.append(frame)
+ last_grad = grad
+ # print(f" ->last state committed {last_cost}: {last_grad} : {last_subgoals}")
+ # print(f"apply grad<{grad}> {step_size * np.clip(grad, -self._grad_lim, self._grad_lim)}")
+ new_subs = frame.points - np.concatenate(
+ (
+ np.zeros((1, 2)),
+ step_size * np.clip(grad, -self._grad_lim, self._grad_lim),
+ np.zeros((1, 2)),
+ ),
+ axis=0,
+ )
+
+ assert not (np.isnan(new_subs).any()) or only_attempt, grad
+
+ frame = TimingFrame(CartesianRoute.from_numpy(new_subs)) # type: ignore
+ frame.update_times(self.boat_polar)
+
+ frame_evaluated, actual_cost = self.cost_model.evaluate(frame)
+ cost = np.float64(actual_cost)
+
+ if last_cost is not None and cost - last_cost > 0 or not frame_evaluated.valid: # got worse
+ assert last_subgoals is not None
+ frame = TimingFrame(CartesianRoute.from_numpy(last_subgoals)) # type: ignore
+ frame.update_times(self.boat_polar)
+ frame_evaluated, cost = self.cost_model.evaluate(frame) # evaluate the frame
+ if not only_attempt:
+ assert frame_evaluated.valid
+ cost = np.float32(cost)
+
+ frame_evaluated.actual_cost = cost
+ return frame_evaluated, result_list
+
+ # --- SOLUTION SPACE EXPLORATION --- #
+
+ @dataclass
+ class _RRTNode:
+ """Class describing solution node for initial guess."""
+
+ position: Point
+ time: float
+ distance: float
+ angle: Optional[float]
+ points: List
+ node_identifier: int
+
+ def _sample_tree_init(
+ self, goal: Point, parameters: EstimationParameters = EstimationParameters()
+ ) -> Optional[TimingFrame]:
+ """Initial guessing method implementation
+
+ Args:
+ goal: Shapely Point to guess towards to
+ parameters: dictionary of addition parameters
+
+ Returns:
+ initial TimingFrame guesses, None if no TimingFrame found
+ """
+
+ distance_to_goal = goal.distance(self.position)
+
+ nodes = [
+ self._RRTNode(
+ position=self.position,
+ time=0,
+ distance=distance_to_goal,
+ angle=None,
+ points=[],
+ node_identifier=0,
+ )
+ ]
+ segments = []
+
+ for _ in range(parameters.max_count):
+
+ new_point = self._sample_new_node(goal, p_goal=parameters.p_goal)
+ new_point, node = self._select_expansion_node(
+ new_point, nodes, max_len=distance_to_goal * parameters.max_len_relative
+ )
+
+ new_frame = TimingFrame(
+ CartesianRoute.from_numpy(
+ np.concatenate(
+ [utils.shapely_point_to_ndarray(self.position)[None, :]]
+ + [utils.shapely_point_to_ndarray(p)[None, :] for p in node.points]
+ + [utils.shapely_point_to_ndarray(new_point)[None, :]]
+ )
+ )
+ )
+
+ new_frame.update_times(self.boat_polar)
+
+ new_frame_evaluated, _ = self.cost_model.evaluate(new_frame)
+
+ if new_frame_evaluated.valid:
+ segments.append(new_frame_evaluated)
+ new_distance = goal.distance(new_point)
+ if new_distance < 0.1:
+ return TimingFrame(
+ CartesianRoute.from_numpy(
+ np.concatenate(
+ [utils.shapely_point_to_ndarray(self.position)[None, :]]
+ + [utils.shapely_point_to_ndarray(p)[None, :] for p in node.points]
+ + [utils.shapely_point_to_ndarray(new_point)[None, :]]
+ )
+ )
+ )
+
+ new_node = self._RRTNode(
+ position=new_point,
+ time=new_frame_evaluated.times[-1],
+ distance=new_distance,
+ angle=None,
+ points=node.points + [new_point],
+ node_identifier=0,
+ )
+ nodes.append(new_node)
+
+ # print("Counter exceeded")
+ return None
+
+ def _sample_new_node(self, goal: Point, p_dir_goal=0.5, p_goal=0.2, std=None) -> Point:
+ """sampling of a new :class:`_RRTNode`
+
+ Args:
+ goal: shapely point to develop towards
+ p_dir_goal: probability to choose the goal direction as next direction
+ p_goal: probability to choose the goal as next node
+ std: minimum derivation from current direction
+ """
+ if np.random.rand() < p_goal: # sample the goal position
+ return goal
+ if np.random.rand() < p_dir_goal:
+ mean = goal
+ else:
+ mean = self.position
+ if std is None:
+ dist = goal.distance(self.position)
+ std = dist / 1.5
+ return Point(mean.x + std * np.random.randn(), mean.y + std * np.random.randn())
+
+ def _select_expansion_node(
+ self, new_point: Point, nodes: List[_RRTNode], max_len: float
+ ) -> Tuple[Point, _RRTNode]:
+ """Selects a new node to expand towards. Takes the :attr:`wind_angle` into account.
+
+ Args:
+ new_point: shapely point describing relative movements towards the goal taken in the next step
+            nodes: list of :class:`_RRTNode`\\s which have already been selected in the past
+ max_len: The maximum absolute length to walk towards the goal
+
+ Returns:
+ A new point to expand to and the corresponding node. This point now contains the absolute
+ positional information of the next point.
+ """
+
+ def distance_metric(nodes_to_compare: List[Optimizer._RRTNode], point: Point) -> List[float]:
+ """Just a little vectorized version of point.distance().
+
+ One could have solved that with .vectorize() but this is more declarative.
+
+ Args:
+ nodes_to_compare: nodes to apply metric onto
+ point: points to measure towards
+
+ Returns:
+ distance by metric
+ """
+ return [point.distance(n.position) for n in nodes_to_compare]
+
+ # calculate distances from each possible node to current node and determine therefore
+ # their probabilities to progress to them
+
+ distances = distance_metric(nodes, new_point)
+
+ probabilities = np.nan_to_num(softmax(-10 / np.amin(distances) * np.array(distances)))
+
+ exp_node: Optimizer._RRTNode = np.random.choice(nodes, p=probabilities) # type: ignore
+
+ delta_new_exp = utils.shapely_point_to_ndarray(new_point) - utils.shapely_point_to_ndarray(
+ exp_node.position
+ )
+ distance_to_new, angle_toward_new = (
+ linalg.norm(delta_new_exp),
+ np.arctan2(delta_new_exp[1], delta_new_exp[0]),
+ )
+
+ wind_angle = self._wind_angle # save it for thread safety
+ if wind_angle is not None:
+ d_angle = (angle_toward_new - wind_angle + np.pi) % (2 * np.pi) - np.pi
+
+ if d_angle > np.pi * 5 / 6.0:
+ angle_toward_new = wind_angle + np.pi * 3 / 4.0
+ elif d_angle < -np.pi * 5 / 6.0:
+ angle_toward_new = wind_angle - np.pi * 3 / 4.0
+
+ distance_to_new = min(distance_to_new, max_len)
+
+ projected_array = np.array(
+ [distance_to_new * np.cos(angle_toward_new), distance_to_new * np.sin(angle_toward_new)]
+ )
+
+ new_point_clipped = Point(
+ projected_array[0] + exp_node.position.x, projected_array[1] + exp_node.position.y
+ )
+ return new_point_clipped, exp_node
+
+ # --- ROUTE CONSTRUCTION --- #
+
+ def _get_intersecting_obstacle_ids(
+ self, route: shapely.geometry.LineString
+ ) -> List[Tuple[int, Obstacle]]:
+ to_return = []
+ for i, ob in enumerate(self.cost_model.obstacles.values()):
+ if ob.shape.intersects(route):
+ to_return.append((i, ob))
+ return to_return
+
+ def _tangent_route_on_obstacle(
+ self, route_seg: shapely.geometry.LineString, obstacle_index: int, scale: float = 1
+ ) -> List[CartesianRoute]:
+
+ assert scale > 0
+ assert scale < 2
+
+ obstacle = list(self.cost_model.obstacles.values())[obstacle_index]
+ distances = self.cost_model.distance_matrix[obstacle_index]
+ center = obstacle.shape.centroid
+
+ min_distance = np.min(distances[np.nonzero(distances)], initial=np.inf)
+ if min_distance == np.inf:
+ min_distance = 0
+ max_distance_from_center = max([ver.distance(center) for ver in obstacle.shape.locations])
+
+ scale = scale * (min_distance / max_distance_from_center + 1)
+ intersection = cast(shapely.geometry.LineString, obstacle.shape.intersection(route_seg))
+ boundary_paths: List[CartesianRoute] = []
+ for poly in shapely.ops.split(obstacle.shape, intersection):
+ poly = cast(shapely.geometry.Polygon, poly)
+            boundary = poly.exterior.difference(intersection)
+            if isinstance(boundary, shapely.geometry.multilinestring.MultiLineString):
+                boundary = shapely.ops.linemerge(boundary)
+            boundary = cast(shapely.geometry.linestring.LineString, boundary)
+            # noinspection PyUnresolvedReferences
+            boundary = shapely.affinity.scale(boundary, xfact=scale, yfact=scale, zfact=scale, origin=center)
+            line_coords = list(boundary.coords)
+
+ line_array = np.array(line_coords)
+
+ boundary_paths.append(CartesianRoute.from_numpy(line_array))
+ boundary_paths.append(CartesianRoute.from_numpy(line_array[::-1]))
+
+ return boundary_paths
+
+ def _construct_init(
+ self, goal: Point, parameters: EstimationParameters = EstimationParameters()
+ ) -> Optional[List[TimingFrame]]:
+ """
+
+ Args:
+ goal:
+ parameters:
+
+ Returns:
+
+ """
+
+ subgoals = np.concatenate(
+ (
+ utils.shapely_point_to_ndarray(self.position)[None, :],
+ utils.shapely_point_to_ndarray(goal)[None, :],
+ ),
+ axis=0,
+ )
+
+ inital_frame: TimingFrame = TimingFrame(CartesianRoute.from_numpy(subgoals))
+ inital_frame.update_times(self.boat_polar)
+ inital_frame_evaluated: EvaluatedTimingFrame = self.cost_model.evaluate(inital_frame)[0]
+
+ collision_times = inital_frame_evaluated.collision_times
+
+ _ = list(collision_times.values())
+ collisions_in_order = np.array(list(np.argsort(np.array(_))))
+ collisions_in_order = collisions_in_order[: np.sum(np.isfinite(np.array(_)))]
+
+ frame_parts = {
+ i: self._tangent_route_on_obstacle(inital_frame.route, i, scale=1)
+ for i, intersecting_obstacle in self._get_intersecting_obstacle_ids(inital_frame.route)
+ }
+
+ if len(frame_parts) == 0:
+ route_vec = utils.shapely_point_to_ndarray(goal) - utils.shapely_point_to_ndarray(self.position)
+ route_vect_r = route_vec + np.array([-0.01, 0])
+ route_vect_l = route_vec + np.array([0.01, 0])
+ simple_route = np.concatenate(
+ (
+ utils.shapely_point_to_ndarray(self.position)[None, :],
+ utils.shapely_point_to_ndarray(self.position)[None, :] + 0.2 * route_vect_l[None, :],
+ utils.shapely_point_to_ndarray(self.position)[None, :] + 0.4 * route_vect_r[None, :],
+ utils.shapely_point_to_ndarray(self.position)[None, :] + 0.6 * route_vect_l[None, :],
+ utils.shapely_point_to_ndarray(self.position)[None, :] + 0.8 * route_vect_r[None, :],
+ utils.shapely_point_to_ndarray(goal)[None, :],
+ ),
+ axis=0,
+ )
+
+ simple_frame = TimingFrame(CartesianRoute.from_numpy(simple_route))
+
+ return [simple_frame]
+
+ # frame_parts[0] exists
+
+ frames_on_bench = []
+ for obs_already_finished, obs_ind in enumerate(collisions_in_order):
+ if obs_already_finished == 0:
+ for first_part in frame_parts[obs_ind]:
+ frames_on_bench.append(TimingFrame(first_part))
+ else:
+ temp = []
+ for obstacle_hull in frame_parts[obs_ind]:
+ for old_frame in frames_on_bench:
+ temp.append(old_frame.append(obstacle_hull))
+ frames_on_bench = temp
+
+ constructed_route_arrays = [
+ np.concatenate(
+ (
+ utils.shapely_point_to_ndarray(self.position)[None, :],
+ f.route.to_numpy(),
+ utils.shapely_point_to_ndarray(goal)[None, :],
+ ),
+ axis=0,
+ )
+ for f in frames_on_bench
+ ]
+ constructed_route: List[TimingFrame] = [
+ TimingFrame(CartesianRoute.from_numpy(array)) for array in constructed_route_arrays
+ ]
+ return constructed_route
diff --git a/pyrate/pyrate/plan/nearplanner/polar_model.py b/pyrate/pyrate/plan/nearplanner/polar_model.py
new file mode 100644
index 0000000..b6c5890
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/polar_model.py
@@ -0,0 +1,172 @@
+"""
+naive code for polar model used in the optimizer and cost model names
+"""
+
+# Dataclass Support
+from abc import ABC
+from abc import abstractmethod
+
+# Static Typing
+from typing import cast
+
+from numpy.typing import NDArray
+
+# Scientific Computing
+import numpy as np
+
+
+from scipy.interpolate import UnivariateSpline
+
+
+class BoatSpeedPolar(ABC):
+ """An exemplary boat speed polar.
+
+    It provides the boat speed based on the boat's speed polar diagram
+    (wind direction and strength dependent boat speed).
+
+    For the future: improve the simulation capabilities or include an interface to a simulation node.
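+
+    Example:
+        A hedged query sketch using the concrete :class:`PolarModel` defined below; the wind values
+        and query angles are illustrative::
+
+            import numpy as np
+
+            model = PolarModel(wind_speed=5.0, wind_direction=np.pi / 2)
+            boat_speeds = model.speed(np.array([0.0, np.pi / 4, np.pi / 2]))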
+ """
+
+ def __init__(self) -> None:
+ # spline
+ self.values = {"alpha": [np.pi * i / 5.0 for i in range(6)], "v": [0.9, 1.6, 1.5, 1.0, 0, 0]}
+
+ alphas = np.array(self.values["alpha"][:-1] + [0.85 * np.pi] + [self.values["alpha"][-1]])
+ self.alphas = np.concatenate((np.flipud(-alphas[1:-1]), alphas, 2 * np.pi + np.flipud(-alphas[1:-1])))
+
+ speeds = np.array(self.values["v"][:-1] + [-0.04] + [self.values["v"][-1]])
+ self.speeds = np.concatenate((np.flipud(speeds[1:-1]), speeds, np.flipud(speeds[1:-1])))
+ self.speed_polar = UnivariateSpline(self.alphas, self.speeds, s=0)
+
+ @abstractmethod
+ def speed(self, angles: NDArray[np.floating]) -> NDArray[np.floating]:
+ """Returns simulated speed of the boat at a given angle.
+
+ Args:
+ angles: angles to simulate speed at
+
+ Returns:
+ simulated speed
+ """
+
+ @abstractmethod
+ def speed_grad(self, angles: NDArray[np.floating]) -> NDArray[np.floating]:
+ """Returns gradient at a given array of angles
+
+ Args:
+ angles: angles to simulate speed at
+
+ Returns:
+ gradient of the spline model to use in general cost gradient calculation
+ """
+
+ def _speed(
+ self, angle: NDArray[np.floating], wind_speed: np.floating, wind_direction: np.floating
+ ) -> NDArray[np.floating]:
+ """Return the boat speed at given heading, wind speed and wind angle.
+
+ Spline interpolation regarding angle and quadratic model for wind speed
+
+ Args:
+ angle: wind angle to evaluate spline model on
+ wind_speed: wind speed to evaluate spline model on
+ wind_direction: wind direction to evaluate spline model on
+
+ Returns:
+ predicted speed for the boat at given circumstance
+ """
+ max_speed = 15
+ speed_clipped = np.clip(wind_speed, np.float64(0), np.float64(max_speed))
+ return np.multiply(
+ self._spline_eval(angle - wind_direction), 1 - (max_speed - speed_clipped) / max_speed
+ )
+
+ def _speed_grad(
+ self, angle: NDArray[np.floating], wind_speed: np.floating, wind_direction: np.floating
+ ) -> NDArray[np.floating]:
+ """Return the boat speed derivative.
+
+ Does this with respect to the angle at given heading, wind speed and wind angle
+ spline interpolation regarding angle and quadratic model for wind speed
+
+ Args:
+ angle: angle to determine speed derivative from
+ wind_speed: wind speed to determine speed derivative from
+ wind_direction: wind direction to determine speed derivative from
+ """
+ max_speed = 15
+ speed_clipped = np.clip(wind_speed, np.float64(0), np.float64(max_speed))
+ return np.multiply(
+ self._spline_der(angle - np.float32(wind_direction)), 1 - (max_speed - speed_clipped) / max_speed
+ )
+
+ def _spline_eval(self, angle: NDArray[np.floating]) -> NDArray[np.floating]:
+ """Evaluate spline at a given angle.
+
+ Args:
+ angle: angle to evaluate spline at
+
+ Returns:
+ spline evaluation at given angle
+ """
+ ang = np.add(angle, np.pi) % (2 * np.pi) - np.pi
+
+ return cast(NDArray[np.floating], self.speed_polar(np.abs(ang)))
+
+ def _spline_der(self, angle: NDArray[np.floating]) -> NDArray[np.floating]:
+ """Evaluates spline derivative at given angle.
+
+ Args:
+ angle: angle to evaluate spline derivative at
+
+ Returns:
+ spline derivative at given angle
+ """
+ ang = (angle + np.pi) % (2 * np.pi) - np.pi
+
+ return cast(NDArray[np.floating], self.speed_polar(np.abs(ang), nu=1) * np.sign(ang))
+
+
+class PolarModel(BoatSpeedPolar):
+ """A very simple implementation of a polar model.
+
+ Args:
+        wind_speed: The speed of the wind, in m/s
+ wind_direction: The direction from which the wind is blowing, in radians
+ manoeuvre_time: approximate time each turning manoeuvre takes
+ """
+
+ def __init__(
+ self, wind_speed: float = 0, wind_direction: float = 0, manoeuvre_time: float = 10.0
+ ) -> None:
+ super().__init__()
+
+ self.wind_speed = np.float64(wind_speed)
+ self.wind_direction = np.float64(wind_direction)
+ self.manoeuvre_time = np.float64(manoeuvre_time)
+
+ def speed(self, angles: NDArray[np.floating]) -> NDArray[np.floating]:
+ """Returns simulated speed of the boat at a given angle.
+
+ Args:
+ angles: angles to simulate speed at
+
+ Returns:
+ simulated speed
+ """
+ return self._speed(angles, self.wind_speed, self.wind_direction)
+
+ def speed_grad(self, angles: NDArray[np.floating]) -> NDArray[np.floating]:
+ """Returns gradient at a given array of angles.
+
+ Args:
+ angles: angles to simulate speed at
+
+ Returns:
+ gradient of the spline model to use in general cost gradient calculation
+ """
+ #print(f"windA {angles} {self.wind_direction}")
+ _ = self._speed_grad(angles, self.wind_speed, self.wind_direction)
+ #print(f"grad {_}")
+ return _
diff --git a/pyrate/pyrate/plan/nearplanner/timing_frame.py b/pyrate/pyrate/plan/nearplanner/timing_frame.py
new file mode 100644
index 0000000..798e1a0
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/timing_frame.py
@@ -0,0 +1,582 @@
+"""Contains timing frames."""
+
+import itertools
+
+# Static Typing
+from copy import copy
+from typing import cast
+from typing import Optional
+from typing import Tuple
+from typing import Union
+
+import numpy.typing as npt
+
+# Scientific Computing
+import numpy as np
+
+from scipy import linalg
+
+import shapely.ops
+from shapely.coords import CoordinateSequence
+from shapely.geometry import LineString, Point
+
+# Geometry
+from pyrate.plan.geometry.location import CartesianLocation
+from pyrate.plan.geometry.route import CartesianRoute
+
+from . import utils
+
+from .polar_model import PolarModel
+
+
+class TimingFrame:
+ """A wrapper class around CartesianRoutes under corresponding time and speed constraints.
+
+ Args:
+ route: The :class:`~pyrate.plan.geometry.route.CartesianRoute` to be wrapped
+ start_time: Optional clock time in seconds the frame should start at
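+
+ Examples:
+ An illustrative sketch only (assumes a valid route built from cartesian waypoints;
+ the resulting timings depend on the polar model):
+
+ >>> import numpy as np
+ >>> route = CartesianRoute.from_numpy(np.array([[0.0, 0.0], [10.0, 0.0], [10.0, 10.0]]))  # doctest: +SKIP
+ >>> frame = TimingFrame(route)  # doctest: +SKIP
+ >>> duration = frame.update_times(PolarModel(wind_speed=5.0))  # doctest: +SKIP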
+ """
+
+ def __init__(self, route: CartesianRoute, start_time: float = 0):
+ self.route = route
+
+ self.identifier: int = 0
+ self.simulated = False
+
+ self._respect_manoeuvre = True
+
+ self._start_time = start_time
+
+ self._delta_times: npt.NDArray[np.floating] = np.array([0])
+ self._scalar_speeds: npt.NDArray[np.floating] = np.array([0])
+ self._times: npt.NDArray[np.floating] = np.arange(0, route.to_numpy().shape[0] - 1, dtype=np.float64)
+ self._times_without_manoeuvre: npt.NDArray[np.floating] = self.times
+
+ self._model: Optional[PolarModel] = None
+ self._angles: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ self._distances: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ self._end_times: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ self._directions: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ self._start_times: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ self._delta_positions: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+
+ self._segment_points: npt.NDArray[np.floating] = utils.merge_numpy_array(
+ self.route.to_numpy(), self.route.to_numpy()[:, :]
+ )
+
+ self._speeds: npt.NDArray[np.floating] = np.array([], dtype=np.float64)
+ if not self._speeds.shape[0] == self.route.to_numpy().shape[0]:
+ temp: npt.NDArray[np.floating] = route.to_numpy()
+ self._speeds = cast(
+ npt.NDArray[np.floating], np.append((temp[1:, :] - temp[:-1, :]), [[0, 0]], axis=0)
+ )
+
+ def __str__(self) -> str:
+ return f"TimingFrame({self.route.to_numpy()})"
+
+ def __call__(self, time_to_fetch: float) -> npt.NDArray[np.floating]:
+ """Give the position of the sailboat following this route at a given time
+
+ Args:
+ time_to_fetch: point in time for query
+
+ Returns:
+ Numpy array with cartesian coordinates (dim 2)
+ """
+ time = cast(np.floating, np.float32(time_to_fetch))
+
+ if not self.simulated:
+ assert time >= self._times[0]
+ assert time <= self._times[-1]
+
+ index: int = int(np.argmax(self._times >= time) - 1)
+ partition = (time - self._times[index]) / (self._times[index + 1] - self._times[index])
+ if index == 0:
+ before = utils.shapely_point_to_ndarray(self.position)
+ else:
+ before = cast(npt.NDArray[np.floating], self.route.to_numpy()[index - 1])
+
+ return cast(
+ npt.NDArray[np.floating],
+ before + partition * (cast(npt.NDArray[np.floating], self.route.to_numpy()[index]) - before),
+ )
+
+ assert self.simulated
+
+ assert self._angles is not None
+ assert self._delta_positions is not None
+ assert self._distances is not None
+ assert self._directions is not None
+ assert self._end_times is not None
+ assert self._start_times is not None
+
+ assert time >= self._start_times[0]
+ assert time <= self._end_times[-1]
+
+ index = int(np.argmax(self._start_times >= time) - 1)
+ if not (self._start_times >= time).any():
+ index = self._start_times.shape[0] - 1
+ with np.errstate(divide="ignore", invalid="ignore"):
+ partition = (time - self._start_times[index]) / (
+ self._end_times[index] - self._start_times[index]
+ )
+ partition = 1.0 if partition > 1.0 else partition
+ if index == 0:
+ before = utils.shapely_point_to_ndarray(self.position)
+ else:
+ before = cast(npt.NDArray[np.floating], self.route.to_numpy()[index - 1])
+ return cast(
+ npt.NDArray[np.floating],
+ before + partition * (cast(npt.NDArray[np.floating], self.route.to_numpy()[index]) - before),
+ )
+
+ @property
+ def points(self) -> npt.NDArray[np.floating]:
+ """Points of the route in a numpy array."""
+ return cast(npt.NDArray[np.floating], self.route.to_numpy())
+
+ @property
+ def position(self) -> CartesianLocation:
+ """first point of the route"""
+ return CartesianLocation(east=self.points[0][0], north=self.points[0][1])
+
+ @position.setter
+ def position(self, value: CartesianLocation) -> None:
+ tmp = self.points
+
+ tmp[0][0] = value.east
+ tmp[0][1] = value.north
+
+ self.route = CartesianRoute.from_numpy(tmp)
+
+ @property
+ def start_time(self) -> float:
+ """Start time of the :class:`TimingFrame`."""
+ return self._start_time
+
+ @property
+ def speeds(self) -> npt.NDArray[np.floating]:
+ """Speeds of the route segments in a numpy array."""
+ return self._speeds
+
+ @property
+ def times(self) -> npt.NDArray[np.floating]:
+ """Returns the timing of the route segments.
+
+ After :meth:`update_times` has been called, this includes manoeuvre times.
+ """
+ return self._times
+
+ @property
+ def segment_points(self) -> npt.NDArray[np.floating]:
+ """The points of each segment as a numpy array representation."""
+ return self._segment_points
+
+ @property
+ def valid(self) -> bool:
+ """``True`` iff it was already evaluated and no collision has been found."""
+ return False  # Overridden in subclasses
+
+ @property
+ def cost(self) -> float:
+ """Cost of the route. This is only describes the time it needs to take the route.
+
+ Warning:
+ Only really accurate after :meth:`update_times` has been called
+ """
+ return float(self._times[-1])
+
+ def update_times(self, boat_model: Optional[PolarModel] = None) -> float:
+ """Updates the timing constraint of a :class:`TimingFrame` to adapt to a polar model.
+
+ This fills in manoeuvre times, angles and speed information.
+
+ Args:
+ boat_model: boat model to adapt to
+
+ Returns:
+ time needed to reach goal under given constraints
+ """
+ if boat_model is not None:
+ self._model = boat_model
+ else:
+ assert self._model
+
+ self._delta_positions = cast(
+ npt.NDArray[np.floating],
+ self.route.to_numpy()[1:] - self.route.to_numpy()[:-1, :],
+ )
+
+ self._distances = cast(npt.NDArray[np.floating], linalg.norm(self._delta_positions, axis=1))
+
+ with np.errstate(divide="ignore", invalid="ignore"):
+ self._directions = cast(
+ npt.NDArray[np.floating], self._delta_positions / self._distances[:, None]
+ )
+ self._directions[self._distances == 0, :] = 0
+
+ self._angles = cast(
+ npt.NDArray[np.floating], np.arctan2(self._delta_positions[:, 1], self._delta_positions[:, 0])
+ )
+
+ # apply transformation to right hand y axis
+ self._angles = np.vectorize(utils.transform_angles_leftx_to_righty, signature="()->()")(self._angles)
+
+ self._scalar_speeds = cast(
+ npt.NDArray[np.floating], np.clip(self._model.speed(self._angles), 1e-3, None).astype(np.float64)
+ )
+
+ self._delta_times = cast(
+ npt.NDArray[np.floating], np.clip(self._distances / self._scalar_speeds, 0, None)
+ )
+
+ self._speeds = cast(npt.NDArray[np.floating], self._scalar_speeds[:, None] * self._directions)
+
+ self._speeds[self._directions == 0] = 0
+
+ self._times = np.concatenate((np.zeros(1), np.cumsum(self._delta_times)))
+ self._times += self._start_time
+
+ self._times_without_manoeuvre = self._times
+
+ self._add_manoeuvre_time(self._angles)
+
+ self.simulated = True
+
+ return float(self.times[-1])
+
+ def prune(self, eps: float = 0.01) -> None:
+ """Function to delete subgoals with neglectable direction changes, to reduce route points.
+
+ Args:
+ eps: angle difference threshold for deciding on the edge deletion (in radians)
+ """
+ delta_pos = self.points - np.concatenate(
+ (utils.shapely_point_to_ndarray(self.position)[None, :], self.points[:-1, :]), axis=0
+ )
+ angles = np.arctan2(delta_pos[:, 1], delta_pos[:, 0])
+ # calculate angle differences
+ delta_ang = np.abs(angles[1:] - angles[:-1])
+ # create a mask from angle differences between each segment and given threshold
+ mask = np.append(delta_ang > eps, True)
+ mask[0] = True
+ mask[-1] = True
+
+ self.route = CartesianRoute.from_numpy(self.route.to_numpy()[mask])
+
+ def cost_grad(
+ self,
+ other_cost_dtimes: Optional[npt.NDArray[np.floating]] = None,
+ dcost_dpoints_ext: Optional[npt.NDArray[np.floating]] = None,
+ dcost_dspeed: Optional[npt.NDArray[np.floating]] = None,
+ time_cost: float = 2,
+ ) -> npt.NDArray[np.floating]:
+ """Calculates the cost gradient(derivative) of a ``TimingFrame`` with to subgoal timings.
+
+ If other time gradients are given returns gradient with respect to all variables.
+
+ Args:
+ other_cost_dtimes: calculated gradient w.r.t timings of ``(number of segments, 2)``
+ dcost_dpoints_ext: calculated gradient w.r.t location of obstacles of ``(number of segments, 2)``
+ dcost_dspeed: calculated gradient in respect to speed of obstacles of ``(number of segments, 2)``
+ time_cost: coefficient for cost per timing interval
+
+ Returns: cost gradient of the ``TimingFrame`` cumulated w.r.t. the subgoal timings, segment speeds,
+ segment times and optionally additionally given obstacle gradients. This gradient is of the shape
+ ``(number of segments, 2)``
+ """
+
+ grad = np.zeros(self._times.shape[0] - 1)
+
+ grad[-1] = np.float64(time_cost)
+
+ # handle case if no partial gradients are given
+ if other_cost_dtimes is None or dcost_dpoints_ext is None or dcost_dspeed is None:
+ return self._gradients(grad)
+
+ if self._respect_manoeuvre:
+ dcost_dpoints = dcost_dpoints_ext[:-1:2] + dcost_dpoints_ext[1::2]
+
+ else:
+ dcost_dpoints = dcost_dpoints_ext
+
+ # assure mypy and us that no invalid values (NaNs) were produced
+ assert not np.isnan(grad + other_cost_dtimes[:-1]).any(), (grad, other_cost_dtimes[:-1])
+
+ return self._gradients(grad + other_cost_dtimes[:-1], dcost_dspeed) + cast(
+ npt.NDArray[np.floating], dcost_dpoints
+ )
+
+ def _gradients(
+ self,
+ dcost_dtimes: npt.NDArray[np.floating],
+ dcost_speed: npt.NDArray[np.floating] = np.array([], dtype=np.float64),
+ ) -> npt.NDArray[np.floating]:
+ """Calculates the gradients of the time events with respect to the route edge coordinates.
+
+ Also calculates the gradient of the vector speeds for each sequence.
+
+ Args:
+ dcost_dtimes: previously calculated gradient with respect to timings
+ dcost_speed: previously calculated gradient with respect to the speed of obstacles and the speed
+ on route segments
+
+ Returns: cost gradient of the :class:`TimingFrame` for time and speed, of the shape
+ ``(number of segments, 2)``
+ """
+
+ if dcost_speed.size == 0:
+ dcost_speed = np.zeros((dcost_dtimes.shape[0], 2))
+
+ cost_grad_delta_times = np.flipud(np.cumsum(np.flipud(dcost_dtimes)))
+
+ if not self._respect_manoeuvre:
+ dcost_dtimes_basic = cost_grad_delta_times
+ else:
+ dcost_dtimes_basic = cost_grad_delta_times[::2]
+ dcost_speed = dcost_speed[::2, :]
+
+ assert not np.isnan(dcost_dtimes_basic).any(), dcost_dtimes
+
+ dcost_ddist = dcost_dtimes_basic / self._scalar_speeds
+ dcost_dscal_speed = -dcost_dtimes_basic * self._distances / self._scalar_speeds ** 2
+ dcost_dscal_speed += np.einsum("ij, ij-> i", dcost_speed, self._directions)
+
+ dcost_dscal_speed = np.nan_to_num(dcost_dscal_speed)
+
+ dcost_ddirections = dcost_speed * self._scalar_speeds[:, None]
+
+ assert self._model is not None
+ # print("*"*30)
+ # print(dcost_speed)
+ # print(dcost_dscal_speed)
+ # print(self._scalar_speeds)
+ # print(self._directions)
+ # print("_"*30)
+ # print(dcost_ddist)
+ # print("-*-"*10)
+ # print(dcost_dscal_speed)
+ # print("/"*30)
+ # print(self._model.speed_grad(self._angles))
+ dcost_dang = dcost_dscal_speed * self._model.speed_grad(self._angles)
+ # print("-o-"*10)
+ # print(dcost_dang)
+ if self._respect_manoeuvre:
+ dcost_dang += self._manoeuvre_time(self._angles, cost_grad_delta_times[1::2])[1]
+
+ dcost_ddist = np.nan_to_num(dcost_ddist)
+
+ assert not np.isnan(dcost_ddist).any(), self._scalar_speeds
+ assert not np.isnan(dcost_dang).any(), (dcost_dscal_speed, dcost_dang)
+ assert not np.isnan(dcost_ddirections).any(), dcost_ddirections
+
+ return self._transpose_gradients_to_polar(dcost_ddist, dcost_dang, dcost_ddirections)
+
+ def _add_manoeuvre_time(self, angles: npt.NDArray[np.floating]) -> None:
+ """Calculates and appends manoeuvre time to timing vector of the :class:`TimingFrame`
+
+ Args:
+ angles: angles ot the :class:`TimingFrame` of shape ``(number of segments - 1, )``
+ """
+
+ delta_times_manoeuvre = self._manoeuvre_time(angles)
+ # print("+-+"*10)
+ # print(delta_times_manoeuvre)
+ # print("+-+"*10)
+ times_manoeuvre = np.cumsum(delta_times_manoeuvre)
+
+ self._start_times = self._times_without_manoeuvre[:-1] + np.concatenate(
+ (np.zeros(1), times_manoeuvre)
+ )
+ self._end_times = self._times_without_manoeuvre[1:] + np.concatenate((np.zeros(1), times_manoeuvre))
+
+ # print(f"{self._start_times} -<>- {self._end_times}")
+ self._times = utils.merge_numpy_array(self._start_times, self._end_times)
+ # print(f"{self._times}")
+ self._speeds = utils.merge_numpy_array(self._speeds, np.zeros(self._speeds.shape)[:-1, :])
+ self._segment_points = utils.merge_numpy_array(self.route.to_numpy()[1:], self.route.to_numpy()[1:-1])
+
+ def _manoeuvre_time(
+ self,
+ angles: npt.NDArray[np.floating],
+ dcost_dtimes: npt.NDArray[np.floating] = np.array([], dtype=np.float64),
+ time_loss: Optional[np.floating] = None,
+ c_angle: float = 10.0,
+ ) -> Union[npt.NDArray[np.floating], Tuple[npt.NDArray[np.floating], npt.NDArray[np.floating]]]:
+ """Calculates the needed time for each maneuver at each subgoal.
+
+ If the cost gradient with respect to time is given also returns the partial cost derivative
+ with regards to manoeuvre simulation.
+
+ Args:
+ angles: array of turning angles at each subgoal of the shape ``(number of route segments, )``
+ dcost_dtimes: previously calculated cost derivative w.r.t time ``(number of route segments, )``
+ time_loss: time loss coefficient to be applied at each turning
+ c_angle: a coefficient for scaling the angle
+
+ Returns:
+ Calculated additional time needed for each turning as an array of shape ``(number of route
+ segments, )``. If the partial derivative w.r.t. time is given, also returns an additional gradient
+ of shape ``(number of route segments, )`` w.r.t. the angles. The manoeuvre time is modelled via an
+ exponential cost function.
+ """
+
+ if time_loss is None:
+ assert self._model is not None
+ time_loss = self._model.manoeuvre_time
+ d_angle = angles[1:] - angles[:-1]
+
+ d_angle = -np.pi + ((d_angle + np.pi) % (2 * np.pi))
+
+ scale = c_angle / 180.0 * np.pi
+
+ times = time_loss * (1 - np.exp(-(d_angle ** 2) / scale ** 2))
+
+ if dcost_dtimes.shape[0] == 0:
+ return cast(npt.NDArray[np.floating], times)
+
+ d_dang = time_loss * np.exp(-(d_angle ** 2) / scale ** 2) / scale ** 2 * dcost_dtimes * 2 * d_angle
+ return times, np.append(-d_dang, 0) + np.concatenate((np.zeros(1), d_dang))
+
+ def _transpose_gradients_to_polar(
+ self,
+ dcost_ddist: npt.NDArray[np.floating],
+ dcost_dang: npt.NDArray[np.floating],
+ dcost_ddirections: npt.NDArray[np.floating],
+ ) -> npt.NDArray[np.floating]:
+ """Calculate the cumulated gradient for the cartesian to polar transform according to chain rule.
+
+ Args:
+ dcost_ddist: cost gradient w.r.t distance vectors of shape ``(number of route segments, 2)``
+ dcost_dang: cost gradient w.r.t segment angles ``(number of route segments, )``
+ dcost_ddirections: cost gradient w.r.t direction vectors ``(number of route segments, 2)``
+
+ Returns:
+ transposed and cumulated gradient of shape ``(number of route segments, 2)``
+ """
+
+ with np.errstate(divide="ignore", invalid="ignore"):
+ addition = -(
+ np.einsum("ij, ij-> i", dcost_ddirections, self._delta_positions) / self._distances ** 2
+ )
+ addition = np.nan_to_num(addition)
+
+ assert not np.isnan(addition).any(), addition
+
+ dcost_ddist += addition
+
+ # print(f"dc_ddist {dcost_ddist}")
+ # print(f"dc_ddist {dcost_ddirections}")
+ # print(f"dc_dang {dcost_dang}")
+
+ # print(f"addition {addition}")
+ # print(f"distances {self._distances}")
+
+ # print(f"rotdelta {np.concatenate((-self._delta_positions[:, 1:2], self._delta_positions[:, 0:1]), axis=1)}")
+ # print(f"unclipped {(dcost_dang / self._distances**2)}")
+ # print(f"clipped {np.clip((dcost_dang / self._distances**2), None, 1e6)}")
+ # assert non NaN values
+
+ assert not np.isnan(dcost_ddirections).any(), dcost_ddirections
+ assert not np.isnan(dcost_ddist).any(), (dcost_ddist, self._distances)
+ assert not np.isnan(dcost_dang).any(), dcost_dang
+
+ grad_delta: npt.NDArray[np.floating] = (
+ dcost_ddirections / self._distances[:, None]
+ + (dcost_ddist / self._distances)[:, None] * self._delta_positions
+ + np.concatenate((-self._delta_positions[:, 1:2], self._delta_positions[:, 0:1]), axis=1)
+ * np.clip((dcost_dang / self._distances ** 2), None, 1e6)[:, None]
+ )
+
+ # print(f"graddelta {grad_delta}")
+ grad_delta[self._distances == 0, :] = 0.0
+
+ # remove NaN values because the first manoeuvre shouldn't be moved
+
+ assert not np.isnan(grad_delta).any(), grad_delta
+
+ grad = -grad_delta[1:, :] + grad_delta[:-1, :]
+
+ # print(f"grad {grad}")
+ assert not np.isnan(grad).any(), (grad, grad_delta)
+
+ return cast(npt.NDArray[np.floating], grad)
+
+ @staticmethod
+ def detect_crossing(linestring: LineString) -> Optional[CartesianLocation]:
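+ """Finds the first self-intersection of a line string, if any.
+
+ Args:
+ linestring: the line string to check for self-intersections
+
+ Returns:
+ The location of the first detected crossing, or ``None`` if the line string does not
+ cross itself
+ """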
+ segments = list(map(LineString, zip(linestring.coords[:-1], linestring.coords[1:])))
+
+ for seg1, seg2 in itertools.combinations(segments, 2):
+ if seg1.coords[0] == seg2.coords[0]:
+ return CartesianLocation.from_shapely(Point(seg1.coords[0]))
+ # return CartesianLocation.from_shapely(Point(seg1.coords[-0])), CartesianLocation.from_shapely(Point(seg2.coords[-0]))
+ if seg1.crosses(seg2):
+ return CartesianLocation.from_shapely(seg1.intersection(seg2))
+ # return seg1.intersection(seg2), seg2.intersection(seg1)
+ # todo (BEN) refactor
+
+ return None
+
+ def remove_single_cycles(self) -> "TimingFrame":
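+ """Removes simple cycles (loops caused by self-intersections) from the wrapped route.
+
+ Returns:
+ A new :class:`TimingFrame` whose route no longer crosses itself
+ """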
+
+ simple_route = []
+ route_to_work = self.route
+ crossing = TimingFrame.detect_crossing(route_to_work)
+
+ # while there are crossings
+ while crossing:
+ line_strings_start, remaining_route = list(shapely.ops.split(route_to_work, crossing))
+ # keep before self intersection
+ simple_route.append(line_strings_start)
+
+ # if the last point / line is the line / point being crossed there is no element after.
+ if len(remaining_route.coords) <= 2:
+ final_route_elements = []
+ for part in simple_route:
+ final_route_elements += list(part.coords)
+ final_route_elements += remaining_route.coords[-1:]
+ linestring_without_cycle = LineString(final_route_elements)
+
+ return TimingFrame(CartesianRoute.from_shapely(linestring_without_cycle))
+ # todo (BEN) Refactor
+
+ remaining_route = LineString(remaining_route.coords[1:][::-1]) # start from next coordinates
+ buffed_crossing = crossing.buffer(0.001)
+ line_string_end = shapely.ops.split(remaining_route, buffed_crossing)[0]
+ line_string_end = LineString(line_string_end.coords[::-1])
+
+ if len(line_string_end.coords) <= 2:
+ final_route_elements = []
+ for part in simple_route:
+ final_route_elements += list(part.coords)
+ final_route_elements += line_string_end.coords[-1:]
+ linestring_without_cycle = LineString(final_route_elements)
+ return TimingFrame(CartesianRoute.from_shapely(linestring_without_cycle))
+
+ line_string_end = LineString(line_string_end.coords[1:])
+ # repeat
+
+ crossing = TimingFrame.detect_crossing(line_string_end)
+ route_to_work = line_string_end
+ # if there are no more cycles keep the rest
+ simple_route.append(route_to_work)
+
+ final_route_elements = []
+ for part in simple_route:
+ final_route_elements += list(part.coords)
+ linestring_without_cycle = LineString(final_route_elements)
+ return TimingFrame(CartesianRoute.from_shapely(linestring_without_cycle))
+
+ def append(self, route: CartesianRoute) -> "TimingFrame":
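+ """Appends the given route to the end of this frame's route.
+
+ Args:
+ route: the :class:`~pyrate.plan.geometry.route.CartesianRoute` to append
+
+ Returns:
+ A new :class:`TimingFrame` with the combined route and this frame's start time
+ """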
+ combined = np.concatenate((self.route.to_numpy(), route.to_numpy()), axis=0)
+ return TimingFrame(CartesianRoute.from_numpy(combined), start_time=self.start_time)
+
+ def prepend(self, route: CartesianRoute) -> "TimingFrame":
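+ """Prepends the given route to the beginning of this frame's route.
+
+ Args:
+ route: the :class:`~pyrate.plan.geometry.route.CartesianRoute` to prepend
+
+ Returns:
+ A new :class:`TimingFrame` with the combined route and this frame's start time
+ """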
+ combined = np.concatenate((route.to_numpy(), self.route.to_numpy()), axis=0)
+ return TimingFrame(CartesianRoute.from_numpy(combined), start_time=self.start_time)
+
+ def inject(self, ind: int, route: CartesianRoute):
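+ """Splices this frame's route into the given route around index ``ind``.
+
+ Args:
+ ind: index in ``route`` around which this frame's route is inserted
+ route: the :class:`~pyrate.plan.geometry.route.CartesianRoute` to inject into
+
+ Returns:
+ A new :class:`TimingFrame` with the combined route and this frame's start time
+ """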
+ combined = np.concatenate((route.to_numpy()[0 : ind - 1, :], self.route.to_numpy()), axis=0)
+ combined = np.concatenate((combined, route.to_numpy()[ind - 1 :, :]), axis=0)
+ return TimingFrame(CartesianRoute.from_numpy(combined), start_time=self.start_time)
diff --git a/pyrate/pyrate/plan/nearplanner/utils.py b/pyrate/pyrate/plan/nearplanner/utils.py
new file mode 100644
index 0000000..8765a8b
--- /dev/null
+++ b/pyrate/pyrate/plan/nearplanner/utils.py
@@ -0,0 +1,76 @@
+"""
+utilities module for the planning backend
+"""
+
+# Scientific Computing
+import numpy as np
+import numpy.typing as npt
+
+# Geometry
+from shapely.geometry import Point
+
+
+def angle_between(vector_a: np.ndarray, vector_b: np.ndarray) -> float:
+ """Code snippet to determine right-hand angle between two numpy arrays.
+
+ Args:
+ vector_a: the first vector, of shape ``(spacial_dimension, )``
+ vector_b: the second vector, of shape ``(spacial_dimension, )``
+
+ Returns:
+ the right-hand angle encompassed between ``vector_a`` and ``vector_b``, in degrees
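+
+ Examples:
+ For two perpendicular vectors:
+
+ >>> angle_between(np.array([0.0, 1.0]), np.array([1.0, 0.0]))
+ 90.0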
+ """
+
+ ang1 = np.arctan2(*vector_a[::-1])
+ ang2 = np.arctan2(*vector_b[::-1])
+ return float(np.rad2deg((ang1 - ang2) % (2 * np.pi)))
+
+
+def transform_angles_leftx_to_righty(angle: np.float64) -> np.float64:
+ """Transpose angle from lefthand x axis to righthand y-axis angle (to north)
+
+ Args:
+ angle: lefthand angle measured from x axis [-pi:pi]
+ Returns:
+ right hand angle referenced from y axis [-pi:pi]
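+
+ Examples:
+ For an angle of zero (along the x-axis):
+
+ >>> round(float(transform_angles_leftx_to_righty(np.float64(0.0))), 4)
+ 1.5708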
+ """
+ mapped_angle = 0.5 * np.pi - angle
+ if mapped_angle > 0.5 * np.pi:
+ mapped_angle = -(2 * np.pi - mapped_angle)
+ return mapped_angle
+
+
+def merge_numpy_array(
+ array_a: npt.NDArray[np.floating], array_b: npt.NDArray[np.floating]
+) -> npt.NDArray[np.floating]:
+ """Merges two numpy arrays into one along the first axis.
+
+ Args:
+ array_a: numpy array a to merge
+ array_b: numpy array b to merge
+
+ Returns:
+ merged numpy array
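+
+ Examples:
+ For two one-dimensional arrays:
+
+ >>> merge_numpy_array(np.array([1.0, 2.0]), np.array([9.0])).tolist()
+ [1.0, 9.0, 2.0]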
+ """
+ # merge two numpy arrays in alternating order along the first dimension
+ if len(array_b) == 0:
+ return array_a
+ shape = array_a.shape[0] + array_b.shape[0]
+ res = np.zeros([shape] + list(array_a.shape[1:]), dtype=np.float64)
+ res[::2] = array_a
+ res[1::2] = array_b
+
+ return res
+
+
+def shapely_point_to_ndarray(point: Point) -> npt.NDArray[np.floating]:
+ """Projects a :mod:`shapely`/:mod:`pyrate.plan.geometry` point onto a numpy array.
+
+ Args:
+ point: point to be projected
+
+ Returns:
+ The numpy-array representation of the point in ``(x, y)`` format
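+
+ Examples:
+ For a simple point:
+
+ >>> shapely_point_to_ndarray(Point(1.0, 2.0)).tolist()
+ [1.0, 2.0]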
+ """
+
+ return np.array([point.x, point.y], dtype=np.float64)
diff --git a/pyrate/pyrate/py.typed b/pyrate/pyrate/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/pyrate/sense/__init__.py b/pyrate/pyrate/sense/__init__.py
new file mode 100644
index 0000000..c6f278f
--- /dev/null
+++ b/pyrate/pyrate/sense/__init__.py
@@ -0,0 +1 @@
+"""This package provides methods for state estimation, visual perception, mapping and similar."""
diff --git a/pyrate/pyrate/sense/filters/__init__.py b/pyrate/pyrate/sense/filters/__init__.py
new file mode 100644
index 0000000..e83e125
--- /dev/null
+++ b/pyrate/pyrate/sense/filters/__init__.py
@@ -0,0 +1,9 @@
+"""This package provides filters for state estimations based on noisy measurements."""
+
+from .extended import ExtendedKalman
+from .extended_gmphd import ExtendedGaussianMixturePHD
+from .gmphd import GaussianMixturePHD
+from .kalman import Kalman
+from .unscented import UnscentedKalman
+
+__all__ = ["Kalman", "ExtendedKalman", "UnscentedKalman", "GaussianMixturePHD", "ExtendedGaussianMixturePHD"]
diff --git a/pyrate/pyrate/sense/filters/extended.py b/pyrate/pyrate/sense/filters/extended.py
new file mode 100644
index 0000000..71426d9
--- /dev/null
+++ b/pyrate/pyrate/sense/filters/extended.py
@@ -0,0 +1,165 @@
+"""This module implements the extended Kalman filter for non-linear state
+ estimation."""
+
+# Standard library
+from copy import deepcopy
+
+# Typing
+from typing import Callable
+from typing import Union
+
+# Mathematics
+from numpy.linalg import inv
+from numpy import ndarray
+
+# Data modelling
+from pandas import concat
+from pandas import DataFrame
+
+# Pyrate
+from pyrate.common.math import Gaussian
+
+
+class ExtendedKalman:
+
+ """The extended Kalman filter for non-linear state estimation.
+
+ This filter behaves similarly to the standard Kalman filter, but utilizes nonlinear
+ models and their jacobian matrix to estimate state variables whose process and/or relation to
+ the measured properties cannot be accurately described by a linear model.
+
+ Examples:
+ Start by importing the necessary numpy functions.
+
+ >>> from numpy import array
+ >>> from numpy import cos
+ >>> from numpy import eye
+ >>> from numpy import sin
+ >>> from numpy import vstack
+
+ Set up the model. In this case, we track a sine wave.
+ Thereby we choose the transition model and its jacobian, as well as the linear
+ measurement model, like so.
+
+ >>> f = lambda x: sin(x)
+ >>> F = lambda x: array([cos(x)])
+ >>> H = lambda x: array([[1.0]])
+ >>> h = lambda x: x
+
+ Furthermore, we assume the following noise on the process and measurements.
+
+ >>> Q = eye(1)
+ >>> R = eye(1)
+
+ Our initial belief is at 0.
+
+ >>> mean = vstack([0.0])
+ >>> covariance = array([[1.0]])
+ >>> estimate = Gaussian(mean, covariance)
+
+ Then, we initialize the filter.
+
+ >>> kalman = ExtendedKalman(F, f, estimate, H, h, Q, R)
+
+ We first predict with the provided model and then correct the prediction with a
+ measurement of the true position.
+
+ >>> kalman.predict()
+ >>> kalman.correct(array([5.]))
+
+ Args:
+ F: Linearized state transition model, i.e. the jacobi matrix of f (n, n)
+ f: Non-linear state transition model that describes the state's evolution
+ from one timestep to the next
+ estimate: Initial belief, i.e. the gaussian that describes your initial guess
+ on the state and your uncertainty
+ H: Linearized measurement model, i.e. the jacobi matrix of h (m, n)
+ h: Non-linear measurement model that maps a state variable into the measured space
+ Q: Process noise matrix, i.e. the covariance of the state transition (n, n)
+ R: Measurement noise matrix, i.e. the covariance of the sensor readings (m, m)
+ keep_trace: Flag for tracking filter process
+
+ References:
+ - https://en.wikipedia.org/wiki/Extended_Kalman_filter
+ """
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+ # pylint: disable=too-many-instance-attributes, too-many-arguments
+
+ def __init__(
+ self,
+ F: Union[ndarray, Callable[..., ndarray]],
+ f: Callable[..., ndarray],
+ estimate: Gaussian,
+ H: Union[ndarray, Callable[..., ndarray]],
+ h: Callable[..., ndarray],
+ Q: ndarray,
+ R: ndarray,
+ keep_trace: bool = False,
+ ):
+ # Initial belief
+ self.estimate = deepcopy(estimate)
+ self.prediction = deepcopy(estimate)
+
+ # Model specification
+ self.f = f
+ self.F = F
+ self.h = h
+ self.H = H
+ self.Q = Q
+ self.R = R
+
+ # Residual and its covariance matrix
+ self.y: ndarray
+ self.S: ndarray
+
+ # Kalman gain
+ self.K: ndarray
+
+ # Objects for process tracing
+ self.keep_trace = keep_trace
+ self.predictions = DataFrame(columns=["x", "P", "F"])
+ self.estimates = DataFrame(columns=["x", "P", "z"])
+
+ def predict(self, **kwargs) -> None:
+ """Predict a future state based on a linear forward model with optional system input."""
+
+ # Linearize and predict state transition
+ self.prediction.x = self.f(x=self.estimate.x, **kwargs)
+ F = self.F(self.prediction.x, **kwargs) if callable(self.F) else self.F
+ self.prediction.P = F @ self.estimate.P @ F.T + self.Q
+
+ # Append prediction data to trace
+ if self.keep_trace:
+ new = DataFrame(
+ {"x": (self.prediction.x.copy(),), "P": (self.prediction.P.copy(),), "F": (F.copy(),)}
+ )
+ self.predictions = concat([self.predictions, new], ignore_index=True)
+
+ def correct(self, z: ndarray, **kwargs) -> None:
+ """Correct a state prediction based on a measurement."""
+
+ # Check for differing measurement model
+ H, h = kwargs.pop("H", self.H), kwargs.pop("h", self.h)
+
+ # Approximate about predicted state
+ H_x: ndarray = H(self.prediction.x, **kwargs) if callable(H) else H
+
+ # Compute the residual and its covariance
+ self.y = z - h(self.prediction.x, **kwargs)
+ self.S = H_x @ self.prediction.P @ H_x.T + self.R
+
+ # Compute the new Kalman gain
+ self.K = self.prediction.P @ H_x.T @ inv(self.S)
+
+ # Estimate new state
+ self.estimate.x = self.prediction.x + self.K @ self.y
+ self.estimate.P = self.prediction.P - self.K @ self.S @ self.K.T
+
+ # Append estimation data to trace
+ if self.keep_trace:
+ new = DataFrame(
+ {"x": (self.estimate.x.copy(),), "P": (self.estimate.P.copy(),), "z": (z.copy(),)}
+ )
+ self.estimates = concat([self.estimates, new], ignore_index=True)
diff --git a/pyrate/pyrate/sense/filters/extended_gmphd.py b/pyrate/pyrate/sense/filters/extended_gmphd.py
new file mode 100644
index 0000000..443374d
--- /dev/null
+++ b/pyrate/pyrate/sense/filters/extended_gmphd.py
@@ -0,0 +1,210 @@
+"""This module implements the extended Gaussian Mixture PHD filter for linear
+ multi target tracking.."""
+
+# Standard library
+from copy import deepcopy
+
+# Typing
+from typing import Callable
+from typing import List
+from typing import Union
+
+# Mathematics
+from numpy.linalg import inv
+from numpy import ndarray
+
+# Gaussians for state representation
+from pyrate.common.math import Gaussian
+
+# Base class
+from .gmphd import GaussianMixturePHD
+
+
+class ExtendedGaussianMixturePHD(GaussianMixturePHD):
+
+ """The extended gaussian mixture PHD filter for non-linear multi-target tracking.
+
+ The extended gaussian mixture PHD filter is a multi target tracker for non-linear state space models.
+ It can be regarded as an extension of the extended Kalman filter formulas to so-called random
+ finite sets (RFS). The PHD filter follows the same prediction-correction scheme for state
+ estimation as the single target extended Kalman filters. As an additional part of the interface,
+ the internal model for the filter's belief needs to be pruned regularly as to limit
+ the computational complexity. The extraction of a state estimate is similarly more
+ sophisticated in the PHD filter and requires the use of a dedicated procedure.
+
+ Examples:
+ Start by importing the necessary numpy functions.
+
+ >>> from numpy import array
+ >>> from numpy import cos
+ >>> from numpy import eye
+ >>> from numpy import sin
+ >>> from numpy import vstack
+
+ Set up the model. In this case, we track sine waves.
+ Thereby we choose the transition model and its jacobian, as well as the linear
+ measurement model, like so.
+
+ >>> f = lambda x: sin(x)
+ >>> F = lambda x: array([cos(x)])
+ >>> H = lambda x: array([[1.0]])
+ >>> h = lambda x: x
+
+ Furthermore, we assume the following noise on the process and measurements.
+
+ >>> Q = eye(1)
+ >>> R = eye(1)
+
+ Our belief of how targets are generated is that they start with
+ a position of zero.
+
+ >>> mean = vstack([0.0])
+ >>> covariance = array([[1.0]])
+ >>> birth_belief = [Gaussian(mean, covariance)]
+
+ We need to tell the filter how certain we are to detect targets and whether they survive.
+ Also, the amount of clutter in the observed environment is quantized.
+
+ >>> survival_rate = 0.99
+ >>> detection_rate = 0.99
+ >>> intensity = 0.01
+
+ Then, we initialize the filter. This model has no input, so we ignore B.
+
+ >>> phd = ExtendedGaussianMixturePHD(
+ ... birth_belief,
+ ... survival_rate,
+ ... detection_rate,
+ ... intensity,
+ ... F,
+ ... f,
+ ... H,
+ ... h,
+ ... Q,
+ ... R
+ ... )
+
+ We first predict with the provided model and then correct the prediction with a
+ measurement, in this case of a single target's position.
+
+ >>> phd.predict()
+ >>> phd.correct([array([5.])])
+
+ Args:
+ birth_belief: GMM of target births
+ survival_rate: Probability of a target to survive a timestep
+ detection_rate: Probability of a target to be detected at a timestep
+ intensity: Clutter intensity
+ F: Linearized state transition model, i.e. the jacobi matrix of f (n, n)
+ f: Non-linear state transition model that describes the state's evolution
+ from one timestep to the next
+ H: Linearized measurement model, i.e. the jacobi matrix of h (m, n)
+ h: Non-linear measurement model that maps a state variable into the measured space
+ Q: Process noise matrix (n, n)
+ R: Measurement noise matrix (m, m)
+ """
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+ # pylint: disable=too-many-arguments
+
+ def __init__(
+ self,
+ birth_belief: List[Gaussian],
+ survival_rate: float,
+ detection_rate: float,
+ intensity: float,
+ F: Union[ndarray, Callable[..., ndarray]],
+ f: Callable[..., ndarray],
+ H: Union[ndarray, Callable[..., ndarray]],
+ h: Callable[..., ndarray],
+ Q: ndarray,
+ R: ndarray,
+ ):
+ # Extended filter specification
+ self.f = f
+ self.h = h
+
+ # Initializes internal linear model
+ super().__init__(birth_belief, survival_rate, detection_rate, intensity, F, H, Q, R)
+
+ def predict(self, **kwargs) -> None:
+ """Predict the future state."""
+
+ # Spontaneous birth
+ born = deepcopy(self.birth_belief)
+
+ # Spawning off of existing targets
+ # Not implemented at this point in time
+ spawned: List[Gaussian] = []
+
+ # Prediction for existing targets
+ for component in self.gmm:
+ component.x = self.f(x=component.x, **kwargs)
+ F = self.F(component.x, **kwargs) if callable(self.F) else self.F
+ component.P = F @ component.P @ F.T + self.Q
+ component.w *= self.survival_rate
+
+ # Concatenate with newborn and spawned target components
+ self.gmm += born + spawned
+
+ def correct(self, measurements: ndarray, **kwargs) -> None:
+ """Correct the former prediction based on a sensor reading.
+
+ Args:
+ measurements: Measurements at this timestep
+ **kwargs: Optional measurement models H and/or h as well as their parameters
+ """
+
+ # pylint: disable=too-many-locals
+
+ # Check for differing measurement model
+ H, h = kwargs.pop("H", self.H), kwargs.pop("h", self.h)
+
+ # ######################################
+ # Construction of update components
+
+ mu: List[ndarray] = [] # Means mapped to measurement space
+ S: List[ndarray] = [] # Residual covariance
+ K: List[ndarray] = [] # Gains
+ P: List[ndarray] = [] # Covariance
+
+ for i, component in enumerate(self.gmm):
+ # Approximate about predicted state
+ H_x: ndarray = H(component.x, **kwargs) if callable(H) else H
+
+ mu.append(h(component.x, **kwargs))
+ S.append(self.R + H_x @ component.P @ H_x.T)
+ K.append(component.P @ H_x.T @ inv(S[i]))
+ P.append(component.P - K[i] @ S[i] @ K[i].T)
+
+ # ######################################
+ # Update
+
+ # Undetected assumption
+ updated = deepcopy(self.gmm)
+ for component in updated:
+ component.w *= 1 - self.detection_rate
+
+ # Measured assumption
+ for z in measurements:
+ # Fill batch with corrected components
+ batch = [
+ Gaussian(
+ self.gmm[i].x + K[i] @ (z - mu[i]),
+ P[i],
+ self.detection_rate * self.gmm[i].w * Gaussian(mu[i], S[i])(z),
+ )
+ for i in range(len(self.gmm))
+ ]
+
+ # Normalize weights
+ sum_of_weights = sum([c.w for c in batch])
+ for component in batch:
+ component.w /= self.intensity + sum_of_weights
+
+ # Append batch to updated GMM
+ updated += batch
+
+ # Set updated as new gaussian mixture model
+ self.gmm = updated
diff --git a/pyrate/pyrate/sense/filters/gmphd.py b/pyrate/pyrate/sense/filters/gmphd.py
new file mode 100644
index 0000000..508615a
--- /dev/null
+++ b/pyrate/pyrate/sense/filters/gmphd.py
@@ -0,0 +1,287 @@
+"""This module implements the Gaussian Mixture PHD filter for linear
+ multi target tracking.."""
+
+# Standard library
+from copy import deepcopy
+
+# Typing
+from typing import Callable
+from typing import List
+from typing import Union
+
+# Mathematics
+from numpy import array
+from numpy.linalg import inv
+from numpy import ndarray
+
+# Filter basis
+from pyrate.common.math import Gaussian
+
+
+class GaussianMixturePHD:
+
+ """The gaussian mixture PHD filter for linear multi-target tracking.
+
+ The gaussian mixture PHD filter is a multi target tracker for linear state space models.
+ It can be regarded as an extension of the Kalman filter formulas to so-called random
+ finite sets (RFS). The PHD filter follows the same prediction-correction scheme for state
+ estimation as the single target Kalman filters. As an additional part of the interface,
+ the internal model for the filter's belief needs to be pruned regularly as to limit
+ the computational complexity. The extraction of a state estimate is similarly more
+ sophisticated in the PHD filter and requires the use of a dedicated procedure.
+
+ Examples:
+ Start by importing the necessary numpy functions.
+
+ >>> from numpy import array
+ >>> from numpy import eye
+ >>> from numpy import vstack
+
+ Set up the model.
+ In this case, we track 1D positions with constant velocities.
+ Thereby we choose the transition model like so.
+
+ >>> F = array([[1.0, 1.0], [0.0, 0.0]])
+
+ The measurements will be positions and no velocities.
+
+ >>> H = array([[1.0, 0.0]])
+
+ Furthermore, we assume the following noise on the process and measurements.
+
+ >>> Q = eye(2)
+ >>> R = eye(1)
+
+ Our belief of how targets are generated is that they start with
+ a position and velocity of 0.
+
+ >>> mean = vstack([0.0, 0.0])
+ >>> covariance = array([[1.0, 0.0], [0.0, 1.0]])
+ >>> birth_belief = [Gaussian(mean, covariance)]
+
+ We need to tell the filter how certain we are to detect targets and whether they survive.
+ Also, the amount of clutter in the observed environment is quantified.
+
+ >>> survival_rate = 0.99
+ >>> detection_rate = 0.99
+ >>> intensity = 0.01
+
+ Then, we initialize the filter. This model has no input, so we ignore B.
+
+ >>> phd = GaussianMixturePHD(
+ ... birth_belief,
+ ... survival_rate,
+ ... detection_rate,
+ ... intensity,
+ ... F,
+ ... H,
+ ... Q,
+ ... R
+ ... )
+
+ We first predict with the provided model and then correct the prediction with a
+ measurement, in this case of a single target's position.
+
+ >>> phd.predict()
+ >>> phd.correct([array([5.])])
+
+ Args:
+ birth_belief: GMM of target births
+ survival_rate: Probability of a target to survive a timestep
+ detection_rate: Probability of a target to be detected at a timestep
+ intensity: Clutter intensity
+ F: Linear state transition model (n, n)
+ H: Linear measurement model (m, n)
+ Q: Process noise matrix (n, n)
+ R: Measurement noise matrix (m, m)
+ """
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+ # pylint: disable=too-many-arguments
+
+ def __init__(
+ self,
+ birth_belief: List[Gaussian],
+ survival_rate: float,
+ detection_rate: float,
+ intensity: float,
+ F: Union[ndarray, Callable[..., ndarray]],
+ H: Union[ndarray, Callable[..., ndarray]],
+ Q: ndarray,
+ R: ndarray,
+ ):
+ # Filter specification
+ self.F = F
+ self.H = H
+ self.Q = Q
+ self.R = R
+
+ # Gaussian mixture model for spontaneous birth of new targets
+ self.birth_belief = birth_belief
+
+ # Rates of survival, detection and clutter intensity
+ self.survival_rate = survival_rate
+ self.detection_rate = detection_rate
+ self.intensity = intensity
+
+ # Gaussian mixture model
+ self.gmm: List[Gaussian] = []
+
+ def extract(self, threshold: float = 0.5) -> List[ndarray]:
+ """Extract a state representation based on spikes in the current GMM.
+
+ Args:
+ threshold: Weight that a component needs to have to be considered a target state
+ """
+
+ # Memory for all estimated states
+ states: List[ndarray] = []
+
+ # Every component with sufficient weight is considered to be a target
+ for component in self.gmm:
+ if component.w > threshold:
+ # A component with weight over 1 represents multiple targets
+ states += [component.x for _ in range(int(round(component.w)))]
+
+ # Return all extracted states
+ return states
+
+ def prune(self, threshold: float, merge_distance: float, max_components: int) -> None:
+ """Reduces the number of gaussian mixture components.
+
+ Args:
+ threshold: Truncation threshold s.t. components with weight < threshold are removed
+ merge_distance: Merging threshold s.t. components 'close enough' will be merged
+ max_components: Maximum number of gaussians after pruning
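+
+ Examples:
+ An illustrative sketch only (assumes ``phd`` is a filter that has already processed
+ some measurements):
+
+ >>> phd.prune(threshold=1e-5, merge_distance=0.5, max_components=50)  # doctest: +SKIP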
+ """
+
+ # Select a subset of components to be pruned
+ selected = [component for component in self.gmm if component.w > threshold]
+
+ # Create new list for pruned mixture model
+ pruned: List[Gaussian] = []
+
+ # While candidates for pruning exist ...
+ while selected:
+ # Find mean of component with maximum weight
+ index = max(range(len(selected)), key=lambda index: selected[index].w)
+
+ mean = selected[index].x
+
+ # Select components to be merged and remove merged from selected
+ mergeable = [
+ c for c in selected if ((c.x - mean).T @ inv(c.P) @ (c.x - mean)).item() <= merge_distance
+ ]
+ selected = [c for c in selected if c not in mergeable]
+
+ # Compute new mixture component
+ merged_weight = sum([component.w for component in mergeable])
+ merged_mean = array(sum([component.w * component.x for component in mergeable]) / merged_weight)
+ merged_covariance = array(
+ sum(
+ [
+ component.w * (component.P + (mean - component.x) @ (mean - component.x).T)
+ for component in mergeable
+ ]
+ )
+ / merged_weight
+ )
+
+ # Store the component
+ pruned.append(Gaussian(merged_mean, merged_covariance, merged_weight))
+
+ # Remove components with minimum weight if maximum number is exceeded
+ while len(pruned) > max_components:
+ # Find index of component with minimum weight
+ index = min(range(len(pruned)), key=lambda index: pruned[index].w)
+
+ # Remove the component
+ del pruned[index]
+
+ # Update GMM with pruned model
+ self.gmm = deepcopy(pruned)
+
+ def predict(self, **kwargs) -> None:
+ """Predict the future state."""
+
+ # Compute F if additional parameters are needed
+ if callable(self.F):
+ F = self.F(**kwargs)
+ else:
+ F = self.F
+
+ # Spontaneous birth
+ born = deepcopy(self.birth_belief)
+
+ # Spawning off of existing targets
+ # Not implemented at this point in time
+ spawned: List[Gaussian] = []
+
+ # Prediction for existing targets
+ for component in self.gmm:
+ component.x = F @ component.x
+ component.P = F @ component.P @ F.T + self.Q
+ component.w *= self.survival_rate
+
+ # Concatenate with newborn and spawned target components
+ self.gmm += born + spawned
+
+ def correct(self, measurements: ndarray, **kwargs) -> None:
+ """Correct the former prediction based on a sensor reading.
+
+ Args:
+ measurements: Measurements at this timestep
+ """
+
+ # Check for differing measurement model
+ H = kwargs.pop("H", self.H)
+
+ # Compute H if additional parameters are needed
+ if callable(H):
+ H = H(**kwargs)
+
+ # ######################################
+ # Construction of update components
+
+ mu: List[ndarray] = [] # Means mapped to measurement space
+ S: List[ndarray] = [] # Residual covariance
+ K: List[ndarray] = [] # Gains
+ P: List[ndarray] = [] # Covariance
+
+ for i, component in enumerate(self.gmm):
+ mu.append(H @ component.x)
+ S.append(self.R + H @ component.P @ H.T)
+ K.append(component.P @ H.T @ inv(S[i]))
+ P.append(component.P - K[i] @ S[i] @ K[i].T)
+
+ # ######################################
+ # Update
+
+ # Undetected assumption
+ updated = deepcopy(self.gmm)
+ for component in updated:
+ component.w *= 1 - self.detection_rate
+
+ # Measured assumption
+ for z in measurements:
+ # Fill batch with corrected components
+ batch = [
+ Gaussian(
+ self.gmm[i].x + K[i] @ (z - mu[i]),
+ P[i],
+ self.detection_rate * self.gmm[i].w * Gaussian(mu[i], S[i])(z),
+ )
+ for i in range(len(self.gmm))
+ ]
+
+ # Normalize weights
+ sum_of_weights = sum([c.w for c in batch])
+ for component in batch:
+ component.w /= self.intensity + sum_of_weights
+
+ # Append batch to updated GMM
+ updated += batch
+
+ # Set updated as new gaussian mixture model
+ self.gmm = updated
diff --git a/pyrate/pyrate/sense/filters/kalman.py b/pyrate/pyrate/sense/filters/kalman.py
new file mode 100644
index 0000000..70370ba
--- /dev/null
+++ b/pyrate/pyrate/sense/filters/kalman.py
@@ -0,0 +1,178 @@
+"""This module implements the Kalman filter for state estimation based on
+ linear state transition and measurement models."""
+
+# Standard library
+from copy import deepcopy
+
+# Typing
+from typing import Callable
+from typing import Optional
+from typing import Union
+
+# Mathematics
+from numpy.linalg import inv
+from numpy import ndarray
+
+# Data modelling
+from pandas import concat
+from pandas import DataFrame
+
+# Pyrate
+from pyrate.common.math import Gaussian
+
+
+class Kalman:
+
+ """The Kalman filter for linear state estimation.
+
+ The Kalman filter is a single target tracker for linear state space models, i.e. models that
+ describe the transition of a state variable and its relationship to sensor readings
+ as matrix-vector-multiplications.
+ Additionally, the Kalman filter is based on the assumption that the state process and
+ measurements are sampled from a Gaussian distribution.
+
+ Examples:
+ First, import some helper functions from numpy.
+
+ >>> from numpy import array
+ >>> from numpy import eye
+ >>> from numpy import vstack
+
+ Then, set up the system's model.
+ In this case, we track a 1D position that we assume to have a constant velocity.
+ Thereby we choose the transition model and measurement function like so.
+
+ >>> F = array([[1.0, 1.0], [0.0, 0.0]])
+ >>> H = array([[1.0, 0.0]])
+
+ Furthermore, we assume the following covariance matrices to model
+ the noise in our model and measurements.
+
+ >>> Q = eye(2)
+ >>> R = eye(1)
+
+ Our initial belief is a position and velocity of 0.
+
+ >>> mean = vstack([0.0, 0.0])
+ >>> covariance = array([[1.0, 0.0], [0.0, 1.0]])
+ >>> estimate = Gaussian(mean, covariance)
+
+ Then, we initialize the filter.
+ Since this model has no input, we can ignore the control function B.
+
+ >>> kalman = Kalman(F, estimate, H, Q, R)
+
+ Now, we can predict based on the provided model and correct predictions with
+ measurements of the true position.
+
+ >>> kalman.predict()
+ >>> kalman.correct(array([5.]))
+
+ Predictions and corrections do not need to alternate every time.
+ As an example, you can predict the state multiple times should your measurements be
+ unavailable for an extended period of time.
+
+ Args:
+ F: State transition model, i.e. the change of x in a single timestep (n, n)
+ estimate: Initial belief, i.e. the gaussian distribution that describes your initial guess
+ on the target's state
+ H: Measurement model, i.e. a mapping from a state to measurement space (m, n)
+ Q: Process noise matrix, i.e. the covariance of the state transition (n, n)
+ R: Measurement noise matrix, i.e. the covariance of the sensor readings (m, m)
+ B: Input dynamics model, i.e. the influence of a set system input on the state transition (1, k)
+ keep_trace: Flag for tracking filter process
+
+ References:
+ - https://en.wikipedia.org/wiki/Kalman_filter
+ """
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+ # pylint: disable=too-many-instance-attributes, too-many-arguments
+
+ def __init__(
+ self,
+ F: Union[ndarray, Callable[..., ndarray]],
+ estimate: Gaussian,
+ H: Union[ndarray, Callable[..., ndarray]],
+ Q: ndarray,
+ R: ndarray,
+ B: Optional[ndarray] = None,
+ keep_trace: bool = False,
+ ):
+ # Initial belief
+ self.estimate = deepcopy(estimate)
+ self.prediction = deepcopy(estimate)
+
+ # Model specification
+ self.F = F
+ self.B = B
+ self.H = H
+ self.Q = Q
+ self.R = R
+
+ # Residual and its covariance matrix
+ self.y: ndarray
+ self.S: ndarray
+
+ # Kalman gain
+ self.K: ndarray
+
+ # Objects for process tracing
+ self.keep_trace = keep_trace
+ self.predictions = DataFrame(columns=["x", "P", "F"])
+ self.estimates = DataFrame(columns=["x", "P", "z"])
+
+ def predict(self, **kwargs) -> None:
+ """Predict a future state based on a linear forward model with optional system input."""
+
+ # Compute F if additional parameters are needed
+ F = self.F(**kwargs) if callable(self.F) else self.F
+
+ # Predict next state
+ self.prediction.x = F @ self.estimate.x
+ self.prediction.P = F @ self.estimate.P @ F.T + self.Q
+
+ # Consider system input
+ u = kwargs.pop("u", None)
+ if u is not None:
+ self.prediction.x += self.B @ u
+
+ # Append prediction data to trace
+ if self.keep_trace:
+ new = DataFrame(
+ {"x": (self.prediction.x.copy(),), "P": (self.prediction.P.copy(),), "F": (F.copy(),)}
+ )
+ self.predictions = concat([self.predictions, new], ignore_index=True)
+
+ def correct(self, z: ndarray, **kwargs) -> None:
+ """Correct a state prediction based on a measurement.
+
+ Args:
+ z: The measurement taken at this timestep
+ """
+
+ # Check for differing measurement model
+ H = kwargs.pop("H", self.H)
+
+ # Compute H if additional parameters are needed
+ if callable(H):
+ H = H(**kwargs)
+
+ # Compute the residual and its covariance
+ self.y = z - H @ self.prediction.x
+ self.S = H @ self.prediction.P @ H.T + self.R
+
+ # Compute the new Kalman gain
+ self.K = self.prediction.P @ H.T @ inv(self.S)
+
+ # Estimate new state
+ self.estimate.x = self.prediction.x + self.K @ self.y
+ self.estimate.P = self.prediction.P - self.K @ self.S @ self.K.T
+
+ # Append estimation data to trace
+ if self.keep_trace:
+ new = DataFrame(
+ {"x": (self.estimate.x.copy(),), "P": (self.estimate.P.copy(),), "z": (z.copy(),)}
+ )
+ self.estimates = concat([self.estimates, new], ignore_index=True)
diff --git a/pyrate/pyrate/sense/filters/unscented.py b/pyrate/pyrate/sense/filters/unscented.py
new file mode 100644
index 0000000..588679e
--- /dev/null
+++ b/pyrate/pyrate/sense/filters/unscented.py
@@ -0,0 +1,238 @@
+"""This module provides an implementation of the Unscented Kalman filter
+ for non-linear state estimation."""
+
+# Standard library
+from copy import deepcopy
+
+# Typing
+from typing import Callable
+
+# Mathematics
+from numpy import array
+from numpy import hstack
+from numpy.linalg import inv
+from numpy import ndarray
+from numpy import outer
+from numpy import tensordot
+from numpy import vectorize
+from numpy import vstack
+from scipy.linalg import cholesky
+
+# Data modelling
+from pandas import concat
+from pandas import DataFrame
+
+# Pyrate
+from pyrate.common.math import Gaussian
+
+
+class UnscentedKalman:
+
+ """The unscented Kalman filter for non-linear state estimation.
+
+ This filter behaves similarly to the standard Kalman filter, but utilizes the so-called
+ unscented transform to approximate gaussian distributions by sampling from the given
+ nonlinear models to estimate state variables whose process and/or relation to
+ the measured properties cannot be accurately described by a linear model.
+
+ Examples:
+ To use the UKF, we first import some of numpy's functionality.
+
+ >>> from numpy import array
+ >>> from numpy import cos
+ >>> from numpy import eye
+ >>> from numpy import sin
+ >>> from numpy import vstack
+
+ Set up the model. In this case, we track a sine wave.
+ Thereby we choose the non-linear transition and measurement models like so.
+ Note that, unlike the extended Kalman filter, no Jacobians are required.
+
+ >>> f = lambda x: sin(x)
+ >>> h = lambda x: x
+
+ Furthermore, we assume the following noise on the process and measurements.
+
+ >>> Q = eye(1)
+ >>> R = eye(1)
+
+ Our initial belief is at 0.
+
+ >>> mean = vstack([0.0])
+ >>> covariance = array([[1.0]])
+ >>> estimate = Gaussian(mean, covariance)
+
+ Then, we initialize the filter. This model has no input, so we ignore B.
+
+ >>> kalman = UnscentedKalman(f, estimate, h, Q, R)
+
+ We first predict with the provided model and then correct the prediction with a
+ measurement of the true position.
+
+ >>> kalman.predict()
+ >>> kalman.correct(array([5.]))
+
+ Args:
+ f: Non-linear state transition model that describes the state's evolution
+ from one timestep to the next
+ estimate: Initial belief, i.e. the gaussian that describes your initial guess
+ on the state and your uncertainty
+ h: Non-linear measurement model that maps a state variable into the measured space
+ Q: Process noise matrix, i.e. the covariance of the state transition (n, n)
+ R: Measurement noise matrix, i.e. the covariance of the sensor readings (m, m)
+ alpha: Spread of sample points, pick between 0. and 1.
+ beta: Sigma point parameter, 2 is optimal for gaussian problems
+ kappa: Sigma point parameter, a common choice for kappa is 3 minus your
+ state's dimension
+ keep_trace: Flag for tracking filter process
+
+ References:
+ - https://en.wikipedia.org/wiki/Unscented_Kalman_filter
+ """
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+ # pylint: disable=too-many-instance-attributes, too-many-arguments
+ # The lambdas used below are required; pylint flags them incorrectly
+ # pylint: disable=unnecessary-lambda
+
+ def __init__(
+ self,
+ f: Callable[..., ndarray],
+ estimate: Gaussian,
+ h: Callable[..., ndarray],
+ Q: ndarray,
+ R: ndarray,
+ alpha: float = 1.0,
+ beta: float = 2.0,
+ kappa: float = 1.0,
+ keep_trace: bool = False,
+ ):
+ # Initial belief
+ self.estimate = deepcopy(estimate)
+ self.prediction = deepcopy(estimate)
+
+ # Model specification
+ self.f = f
+ self.h = h
+ self.Q = Q
+ self.R = R
+ self.alpha = alpha
+ self.beta = beta
+ self.kappa = kappa
+
+ # Residual and its covariance matrix
+ self.y: ndarray
+ self.S: ndarray
+
+ # Predicted sigma points and measurements
+ self.Y: ndarray
+ self.Z: ndarray
+
+ # Kalman gain
+ self.K: ndarray
+
+ # Merwe initial points and weights
+ self.X: ndarray
+ self.mean_weights: ndarray
+ self.cov_weights: ndarray
+ self.setup_weights()
+
+ # Objects for process tracing
+ self.keep_trace = keep_trace
+ self.predictions = DataFrame(columns=["x", "P", "X", "Y"])
+ self.estimates = DataFrame(columns=["x", "P", "z"])
+
+ def setup_weights(self) -> None:
+ """Computes mean and covariance weights for unscented transform"""
+
+ # Aliases for calculation
+ n = self.estimate.x.size
+ l = self.alpha**2 * n + self.kappa # noqa: E741
+
+ # Weights for mean and covariance
+ self.mean_weights = array([l / (n + l)] + [1 / (2 * (n + l))] * (2 * n))
+ self.cov_weights = array(
+ [l / (n + l) + 1 - self.alpha**2 + self.beta] + [1 / (2 * (n + l))] * (2 * n)
+ )
+
+ def compute_sigma_points(self) -> None:
+ """Calculates van der Merwe's sigma points"""
+
+ # Compute the distances for each point
+ distance_factor = self.estimate.x.size * (1 + self.alpha**2) + self.kappa
+ distances = cholesky(distance_factor * self.estimate.P)
+
+ # Sigma points
+ self.X = hstack([self.estimate.x, self.estimate.x + distances, self.estimate.x - distances])
+
+ def predict(self, **kwargs) -> None:
+ """Predict a future state based on a linear forward model with optional system input.
+
+ Args:
+ **kwargs: Arguments that are passed to forward model
+ """
+
+ # Compute and propagate Merwe points
+ self.compute_sigma_points()
+ self.Y = vectorize(lambda x: self.f(x, **kwargs), signature="(m)->(n)")(self.X.T).T
+
+ # Predict next state as mean of distribution
+ self.prediction.x = vstack(self.mean_weights @ self.Y.T)
+ self.prediction.P = (
+ tensordot(
+ self.cov_weights,
+ [outer(y - self.prediction.x.T, y - self.prediction.x.T) for y in self.Y.T],
+ axes=1,
+ )
+ + self.Q
+ )
+
+ # Append prediction data to trace
+ if self.keep_trace:
+ new = DataFrame(
+ {
+ "x": (self.prediction.x.copy(),),
+ "P": (self.prediction.P.copy(),),
+ "X": (deepcopy(self.X),),
+ "Y": (deepcopy(self.Y),),
+ }
+ )
+ self.predictions = concat([self.predictions, new], ignore_index=True)
+
+ def correct(self, z: ndarray, **kwargs) -> None:
+ """Correct a state prediction based on a measurement."""
+
+ # Check for differing measurement model
+ h = kwargs.pop("h", self.h)
+
+ # Compute measurement distribution
+ self.Z = vectorize(lambda y: h(y, **kwargs), signature="(m)->(n)")(self.Y.T).T
+ mean_Z = vstack(self.mean_weights @ self.Z.T)
+
+ # Compute the residual and its covariance
+ self.y = z - mean_Z
+ self.S = (
+ tensordot(self.cov_weights, [outer(z - mean_Z.T, z - mean_Z.T) for z in self.Z.T], axes=1)
+ + self.R
+ )
+
+ # Compute the new Kalman gain
+ self.K = tensordot(
+ self.cov_weights,
+ [outer(y - self.prediction.x.T, z - mean_Z.T) for y, z in zip(self.Y.T, self.Z.T)],
+ axes=1,
+ ) @ inv(self.S)
+
+ # Estimate new state
+ self.estimate.x = self.prediction.x + self.K @ self.y
+ self.estimate.P = self.prediction.P - self.K @ self.S @ self.K.T
+
+ # Append estimation data to trace
+ if self.keep_trace:
+ new = DataFrame(
+ {"x": (self.estimate.x.copy(),), "P": (self.estimate.P.copy(),), "z": (z.copy(),)}
+ )
+ self.estimates = concat([self.estimates, new], ignore_index=True)
diff --git a/pyrate/pyrate/sense/smoothers/__init__.py b/pyrate/pyrate/sense/smoothers/__init__.py
new file mode 100644
index 0000000..ce84ec2
--- /dev/null
+++ b/pyrate/pyrate/sense/smoothers/__init__.py
@@ -0,0 +1,7 @@
+"""This package provides smoothers for state estimations based on noisy measurements."""
+
+from .extended import ExtendedRts
+from .rts import Rts
+from .unscented import UnscentedRts
+
+__all__ = ["Rts", "ExtendedRts", "UnscentedRts"]
diff --git a/pyrate/pyrate/sense/smoothers/extended.py b/pyrate/pyrate/sense/smoothers/extended.py
new file mode 100644
index 0000000..0fdbccd
--- /dev/null
+++ b/pyrate/pyrate/sense/smoothers/extended.py
@@ -0,0 +1,134 @@
+"""This module implements the Extended Rauch-Tung-Striebel (RTS) filter for state estimation based on
+ linearized state transition and measurement models."""
+
+# Typing
+from typing import Callable
+
+# Mathematics
+from numpy.linalg import inv
+from numpy import ndarray
+
+# Data modelling
+from pandas import DataFrame
+
+# Pyrate
+from pyrate.common.math import Gaussian
+from pyrate.sense.filters import ExtendedKalman
+
+
+class ExtendedRts(ExtendedKalman):
+
+ """The Extended RTS smoother for non-linear state estimation.
+
+ The Extended RTS smoother is a single target state estimator for non-linear
+ models and their jacobi matrix to estimate state variables whose process and/or relation to
+ the measured properties cannot be accurately described by a linear model.
+
+ Examples:
+ Start by importing the necessary numpy functions.
+
+ >>> from numpy import array
+ >>> from numpy import cos
+ >>> from numpy import eye
+ >>> from numpy import sin
+ >>> from numpy import vstack
+
+ Set up the model. In this case, we track a sine wave.
+ We choose the transition model and its Jacobian, as well as the linear
+ measurement model, like so.
+
+ >>> f = lambda x: sin(x)
+ >>> F = lambda x: array([cos(x)])
+ >>> H = lambda x: array([[1.0]])
+ >>> h = lambda x: x
+
+ Furthermore, we assume the following noise on the process and measurements.
+
+ >>> Q = eye(1)
+ >>> R = eye(1)
+
+ Our initial belief is at 0.
+
+ >>> mean = vstack([0.0])
+ >>> covariance = array([[1.0]])
+ >>> estimate = Gaussian(mean, covariance)
+
+ Then, we initialize the smoother. This model has no input, so we ignore B.
+
+ >>> rts = ExtendedRts(F, f, estimate, H, h, Q, R)
+
+ We first predict with the provided model and then correct the prediction with
+ measurements of the true position.
+
+ >>> for i in range(10):
+ ... rts.predict()
+ ... rts.correct(array([5.]))
+
+ So far, this is equivalent to using the standard Extended Kalman filter.
+ We can now get a better estimate of the state trajectory by using the RTS smoothing algorithm.
+ Hereby, old estimates get updated recursively by their successors.
+
+ >>> smooth_estimates = rts.smooth()
+
+ Args:
+ F: Linearized state transition model, i.e. the Jacobian matrix of f (n, n)
+ f: Non-linear state transition model that describes the state's evolution
+ from one timestep to the next
+ estimate: Initial belief, i.e. the gaussian that describes your initial guess
+ on the state and your uncertainty
+ H: Linearized measurement model, i.e. the Jacobian matrix of h (m, n)
+ h: Non-linear measurement model that maps a state variable into the measured space
+ Q: Process noise matrix, i.e. the covariance of the state transition (n, n)
+ R: Measurement noise matrix, i.e. the covariance of the sensor readings (m, m)
+ """
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+ # pylint: disable=too-many-arguments
+
+ def __init__(
+ self,
+ F: Callable[..., ndarray],
+ f: Callable[..., ndarray],
+ estimate: Gaussian,
+ H: Callable[..., ndarray],
+ h: Callable[..., ndarray],
+ Q: ndarray,
+ R: ndarray,
+ ):
+ super().__init__(F, f, estimate, H, h, Q, R, keep_trace=True)
+
+ def smooth(self) -> DataFrame:
+ """Apply RTS smoothing.
+
+ Returns:
+ The smoothed data with columns `"x"` and `"P"`
+ """
+
+ # Dataframe of smoothed estimates
+ # The latest estimate cannot be improved by smoothing
+ smoothed = DataFrame(columns=["x", "P"])
+ smoothed.loc[self.estimates.index[-1]] = {
+ "x": self.estimates.iloc[-1].x,
+ "P": self.estimates.iloc[-1].P,
+ }
+
+ # Recursively go back in time
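+ # Each estimate is refined by propagating the difference between the smoothed successor state
+ # and its prediction backwards through the smoothing gain G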
+ for i in self.estimates.index[-2::-1]:
+ # Access next predictions and estimates for smoothing
+ predicted_x = self.predictions.iloc[i + 1].x
+ predicted_P = self.predictions.iloc[i + 1].P
+ prediction_F = self.predictions.iloc[i + 1].F
+ estimated_x = self.estimates.iloc[i].x
+ estimated_P = self.estimates.iloc[i].P
+
+ # Compute smoothing gain
+ G = estimated_P @ prediction_F @ inv(predicted_P)
+
+ # Append to smoothed DataFrame
+ smoothed.loc[i] = {
+ "x": estimated_x + G @ (smoothed.loc[i + 1].x - predicted_x),
+ "P": estimated_P + G @ (smoothed.loc[i + 1].P - predicted_P) @ G.T,
+ }
+
+ return smoothed
diff --git a/pyrate/pyrate/sense/smoothers/rts.py b/pyrate/pyrate/sense/smoothers/rts.py
new file mode 100644
index 0000000..90c2e7a
--- /dev/null
+++ b/pyrate/pyrate/sense/smoothers/rts.py
@@ -0,0 +1,136 @@
+"""This module implements the Rauch-Tung-Striebel (RTS) filter for state estimation based on
+ linear state transition and measurement models."""
+
+# Typing
+from typing import Callable
+from typing import Optional
+from typing import Union
+
+# Mathematics
+from numpy.linalg import inv
+from numpy import ndarray
+
+# Data modelling
+from pandas import DataFrame
+
+# Pyrate
+from pyrate.common.math import Gaussian
+from pyrate.sense.filters import Kalman
+
+
+class Rts(Kalman):
+
+ """The RTS smoother for linear state estimation.
+
+ The RTS smoother is a single target state estimator for linear state space models, i.e. models that
+ describe the transition of a state variable and its relationship to sensor readings
+ as matrix-vector-multiplications.
+ Additionally, the RTS smoother is based on the assumption that the state process and
+ measurements are sampled from a Gaussian distribution.
+
+ Examples:
+ First, import some helper functions from numpy.
+
+ >>> from numpy import array
+ >>> from numpy import eye
+ >>> from numpy import vstack
+
+ Then, set up the system's model.
+ In this case, we track a 1D position that we assume to have a constant velocity.
+ Thereby we choose the transition model and measurement function like so.
+
+ >>> F = array([[1.0, 1.0], [0.0, 1.0]])
+ >>> H = array([[1.0, 0.0]])
+
+ Furthermore, we assume the following covariance matrices to model
+ the noise in our model and measurements.
+
+ >>> Q = eye(2)
+ >>> R = eye(1)
+
+ Our initial belief is a position and velocity of 0.
+
+ >>> mean = vstack([0.0, 0.0])
+ >>> covariance = array([[1.0, 0.0], [0.0, 1.0]])
+ >>> estimate = Gaussian(mean, covariance)
+
+ Then, we initialize the smoother.
+ Since this model has no input, we can ignore the control matrix B.
+
+ >>> rts = Rts(F, estimate, H, Q, R)
+
+ Now, we can predict based on the provided model and correct predictions with
+ measurements of the true position.
+
+ >>> for i in range(10):
+ ... rts.predict()
+ ... rts.correct(array([5.]))
+
+ So far, this is equivalent to using the standard Kalman filter.
+ We can now get a better estimate of the state trajectory by using the RTS smoothing algorithm.
+ Hereby, old estimates get updated recursively by their successors.
+
+ >>> smooth_estimates = rts.smooth()
+
+ Args:
+ F: State transition model, i.e. the change of x in a single timestep (n, n)
+ estimate: Initial belief, i.e. the gaussian distribution that describes your initial guess
+ on the target's state
+ H: Measurement model, i.e. a mapping from a state to measurement space (m, n)
+ Q: Process noise matrix, i.e. the covariance of the state transition (n, n)
+ R: Measurement noise matrix, i.e. the covariance of the sensor readings (m, m)
+ B: Input dynamics model, i.e. the influence of a set system input on the state transition (1, k)
+
+ References:
+ - https://en.wikipedia.org/wiki/Kalman_filter#Rauch%E2%80%93Tung%E2%80%93Striebel
+ """
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+ # pylint: disable=too-many-arguments
+
+ def __init__(
+ self,
+ F: Union[ndarray, Callable[..., ndarray]],
+ estimate: Gaussian,
+ H: Union[ndarray, Callable[..., ndarray]],
+ Q: ndarray,
+ R: ndarray,
+ B: Optional[ndarray] = None,
+ ):
+ super().__init__(F, estimate, H, Q, R, B, keep_trace=True)
+
+ def smooth(self) -> DataFrame:
+ """Apply RTS smoothing.
+
+ Returns:
+ The smoothed data with columns `"x"` and `"P"`
+ """
+
+ # Dataframe of smoothed estimates
+ # The latest estimate cannot be improved by smoothing
+ smoothed = DataFrame(columns=["x", "P"])
+ smoothed.loc[self.estimates.index[-1]] = {
+ "x": self.estimates.iloc[-1].x,
+ "P": self.estimates.iloc[-1].P,
+ }
+
+ # Recursively go back in time
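+ # Each estimate is updated with the difference between its smoothed successor and the
+ # corresponding prediction, weighted by the smoothing gain G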
+ for i in self.estimates.index[-2::-1]:
+ # Access next predictions and estimates for smoothing
+ predicted_x = self.predictions.iloc[i + 1].x
+ predicted_P = self.predictions.iloc[i + 1].P
+ prediction_F = self.predictions.iloc[i + 1].F
+ estimated_x = self.estimates.iloc[i].x
+ estimated_P = self.estimates.iloc[i].P
+
+ # Compute smoothing gain
+ G = estimated_P @ prediction_F @ inv(predicted_P)
+
+ # Append to smoothed DataFrame
+ smoothed.loc[i] = {
+ "x": estimated_x + G @ (smoothed.loc[i + 1].x - predicted_x),
+ "P": estimated_P + G @ (smoothed.loc[i + 1].P - predicted_P) @ G.T,
+ }
+
+ return smoothed
diff --git a/pyrate/pyrate/sense/smoothers/unscented.py b/pyrate/pyrate/sense/smoothers/unscented.py
new file mode 100644
index 0000000..6cc6216
--- /dev/null
+++ b/pyrate/pyrate/sense/smoothers/unscented.py
@@ -0,0 +1,145 @@
+"""This module implements the Extended Rauch-Tung-Striebel (RTS) filter for state estimation based on
+ linearized state transition and measurement models."""
+
+# Typing
+from typing import Callable
+
+# Mathematics
+from numpy.linalg import inv
+from numpy import ndarray
+from numpy import outer
+from numpy import tensordot
+
+# Data modelling
+from pandas import DataFrame
+
+# Pyrate
+from pyrate.common.math import Gaussian
+from pyrate.sense.filters import UnscentedKalman
+
+
+class UnscentedRts(UnscentedKalman):
+
+ """The unscented Kalman filter for non-linear state estimation.
+
+ This filter behaves similarly to the standard Rts smoother, but utilizes the so-called
+ unscented transform to approximate gaussian distributions by sampling from the given
+ nonlinear models to estimate state variables whose process and/or relation to
+ the measured properties cannot be accurately described by a linear model.
+
+ Examples:
+ Start by importing the necessary numpy functions.
+
+ >>> from numpy import array
+ >>> from numpy import cos
+ >>> from numpy import eye
+ >>> from numpy import sin
+ >>> from numpy import vstack
+
+ Set up the model. In this case, we track a sine wave.
+ Since the unscented transform only samples the models, no Jacobians are required;
+ we simply choose the non-linear transition and measurement models, like so.
+
+ >>> f = lambda x: sin(x)
+ >>> h = lambda x: x
+
+ Furthermore, we assume the following noise on the process and measurements.
+
+ >>> Q = eye(1)
+ >>> R = eye(1)
+
+ Our initial belief is at 0.
+
+ >>> mean = vstack([0.0])
+ >>> covariance = array([[1.0]])
+ >>> estimate = Gaussian(mean, covariance)
+
+ Then, we initialize the smoother. This model has no input, so we ignore B.
+
+ >>> rts = UnscentedRts(f, estimate, h, Q, R)
+
+ We first predict with the provided model and then correct the prediction with
+ measurements of the true position.
+
+ >>> for i in range(10):
+ ... rts.predict()
+ ... rts.correct(array([5.]))
+
+ So far, this is equivalent to using the standard unscented Kalman filter.
+ We can now get a better estimate of the state trajectory by using the RTS smoothing algorithm.
+ Hereby, old estimates get updated recursively by their successors.
+
+ >>> smooth_estimates = rts.smooth()
+
+ Args:
+ f: Non-linear state transition model that describes the state's evolution
+ from one timestep to the next
+ estimate: Initial belief, i.e. the gaussian that describes your initial guess
+ on the state and your uncertainty
+ h: Non-linear measurement model that maps a state variable into the measured space
+ Q: Process noise matrix, i.e. the covariance of the state transition (n, n)
+ R: Measurement noise matrix, i.e. the covariance of the sensor readings (m, m)
+ alpha: Spread of sample points, pick between 0. and 1.
+ beta: Sigma point parameter, 2 is optimal for Gaussian problems
+ kappa: Sigma point parameter, a common choice for kappa is 3 minus
+ your state's dimension
+ """
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+ # pylint: disable=too-many-arguments
+
+ def __init__(
+ self,
+ f: Callable[..., ndarray],
+ estimate: Gaussian,
+ h: Callable[..., ndarray],
+ Q: ndarray,
+ R: ndarray,
+ alpha: float = 1.0,
+ beta: float = 2.0,
+ kappa: float = 1.0,
+ ):
+ super().__init__(f, estimate, h, Q, R, alpha, beta, kappa, keep_trace=True)
+
+ def smooth(self) -> DataFrame:
+ """Apply RTS smoothing.
+
+ Returns:
+ The smoothed data with columns `"x"` and `"P"`
+ """
+
+ # Dataframe of smoothed estimates
+ # The latest estimate cannot be improved by smoothing
+ smoothed = DataFrame(columns=["x", "P"])
+ smoothed.loc[self.estimates.index[-1]] = {
+ "x": self.estimates.iloc[-1].x,
+ "P": self.estimates.iloc[-1].P,
+ }
+
+ # Recursively go back in time
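+ # Here the smoothing gain is built from the cross-covariance of the propagated sigma points
+ # instead of a linearized transition model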
+ for i in self.estimates.index[-2::-1]:
+ # Access next predictions and estimates for smoothing
+ predicted_x = self.predictions.iloc[i + 1].x
+ predicted_P = self.predictions.iloc[i + 1].P
+ X = self.predictions.iloc[i + 1].X
+ Y = self.predictions.iloc[i + 1].Y
+ estimated_x = self.estimates.iloc[i].x
+ estimated_P = self.estimates.iloc[i].P
+
+ # Compute smoothing gain
+ G = tensordot(
+ self.cov_weights,
+ [outer(x - estimated_x.T, y - predicted_x.T) for x, y in zip(X.T, Y.T)],
+ axes=1,
+ ) @ inv(predicted_P)
+
+ # Append to smoothed DataFrame
+ smoothed.loc[i] = {
+ "x": estimated_x + G @ (smoothed.loc[i + 1].x - predicted_x),
+ "P": estimated_P + G @ (smoothed.loc[i + 1].P - predicted_P) @ G.T,
+ }
+
+ return smoothed
diff --git a/pyrate/pyrate/sense/vision/__init__.py b/pyrate/pyrate/sense/vision/__init__.py
new file mode 100644
index 0000000..1ffbc3f
--- /dev/null
+++ b/pyrate/pyrate/sense/vision/__init__.py
@@ -0,0 +1 @@
+"""This package provides methods for visual perception."""
diff --git a/pyrate/pyrate/sense/vision/image_line.py b/pyrate/pyrate/sense/vision/image_line.py
new file mode 100644
index 0000000..6e71f9b
--- /dev/null
+++ b/pyrate/pyrate/sense/vision/image_line.py
@@ -0,0 +1,180 @@
+"""
+This module implements the ``ImageLine`` used by ``ObstacleLocator`` to construct the horizon in an image.
+"""
+
+from __future__ import annotations
+
+# Standard library
+from math import atan2
+from math import cos
+from math import sin
+from math import sqrt
+from math import tan
+
+# Typing
+from typing import cast
+from typing import Tuple
+
+# Numpy
+import numpy
+from numpy import arange
+from numpy import arctan
+from numpy import diff
+from numpy import linspace
+from numpy import ndarray
+from numpy import pi
+
+
+class ImageLine:
+
+ """Represents a line in the image.
+
+ The line can be constructed from two points, from a height and an angle, or from
+ polar coordinates via the respective ``from_X`` class methods.
+
+ Args:
+ image_shape: Shape of the image ``(width, height)``
+ x_y_coordinates: x- and y-coordinates of both endpoints as ``((x1, x2), (y1, y2))``
+ height: Height of the line in the image space
+ angle: Angle of the line in the image
+ """
+
+ def __init__(
+ self,
+ image_shape: Tuple[int, int],
+ x_y_coordinates: Tuple[Tuple[int, int], Tuple[int, int]],
+ height: int,
+ angle: float,
+ ):
+ self.image_width, self.image_height = image_shape
+ self.x_y_coordinates = x_y_coordinates
+ self.height = height
+ self.angle = angle
+
+ self.x_points = arange(x_y_coordinates[0][0], x_y_coordinates[0][1] + 1).astype(int)
+ self.y_points = numpy.round(linspace(*x_y_coordinates[1], num=len(self.x_points))).astype(int)
+
+ @classmethod
+ def from_points(
+ cls, image_shape: Tuple[int, int], points: Tuple[Tuple[int, int], Tuple[int, int]]
+ ) -> ImageLine:
+ """Construct an ``ImageLine`` from the two endpoints of the line (in the image space).
+
+ Args:
+ image_shape: Shape of the image ``(width, height)``
+ points: End points of the line ``((x1, y1), (x2, y2))``
+
+ Returns:
+ New ``ImageLine`` from two points
+ """
+
+ x_coordinates = cast(Tuple[int, int], tuple(p[0] for p in points))
+ y_coordinates = cast(Tuple[int, int], tuple(p[1] for p in points))
+
+ height = numpy.sum(y_coordinates) / 2
+ angle = atan2(diff(y_coordinates), diff(x_coordinates))
+
+ return cls(
+ image_shape=image_shape,
+ x_y_coordinates=(x_coordinates, y_coordinates),
+ height=height,
+ angle=angle,
+ )
+
+ @classmethod
+ def from_height_angle(
+ cls, image_shape: Tuple[int, int], relative_height: float, angle: float
+ ) -> ImageLine:
+ """Construct an ``ImageLine`` from its relative height and angle.
+
+ Note: This method of creation might create end_points and indices values that are
+ out of bounds of the image if the angle of the represented line is too large.
+
+ Args:
+ image_shape: Shape of the image ``(width, height)``
+ relative_height: Relative height of the line (based on ``floor(image_shape[1] / 2)``)
+ angle: Angle of the line
+
+ Returns:
+ New ``ImageLine`` from height and angle
+ """
+
+ x_coordinates = (0, image_shape[0])
+ diff_height = int(tan(angle) * image_shape[0] / 2)
+ height = int(relative_height + image_shape[1] / 2)
+ y_coordinates = (height - diff_height, height + diff_height)
+
+ return cls(
+ image_shape=image_shape,
+ x_y_coordinates=(x_coordinates, y_coordinates),
+ height=height,
+ angle=angle,
+ )
+
+ @classmethod
+ def from_polar(cls, image_shape: Tuple[int, int], radius: float, alpha: float) -> ImageLine:
+ # pylint: disable=too-many-locals
+ """Construct an ``ImageLine`` from its polar representation.
+
+ The line is represented by its distance from the coordinate origin (radius) and the
+ line rotation angle in radians.
+
+ Note: This method of creation might create end_points and indices values that are
+ out of bounds of the image if the angle of the represented line is too large.
+
+ Args:
+ image_shape: Shape of the image ``(width, height)``
+ radius: Distance from the coordinate origin
+ alpha: Line rotation angle in radians
+
+ Returns:
+ New ``ImageLine`` from the polar representation
+ """
+
+ cos_alpha, sin_alpha = cos(alpha), sin(alpha)
+ # line_end pixels
+ x_coordinates = (0, image_shape[0])
+ y_coordinates = (int(radius / sin_alpha), int((radius - cos_alpha * image_shape[0]) / sin_alpha))
+ # pixels of a line
+ x_points: ndarray = arange(0, image_shape[0])
+ y_points: ndarray = numpy.round((radius - cos_alpha * x_points) / sin_alpha).astype(int)
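+ # This solves radius = x * cos(alpha) + y * sin(alpha) for y, i.e. the (rho, theta)
+ # parameterization returned by OpenCV's HoughLines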
+
+ angle = alpha - pi / 2 # of horizon
+ d_2, alpha_0 = sqrt(image_shape[1] ** 2 + image_shape[0] ** 2) / 2, arctan(
+ image_shape[1] / image_shape[0]
+ )
+ height_rel = radius - d_2 * cos(alpha - alpha_0) # over image center
+ height = int(height_rel + image_shape[1] // 2)
+
+ line = cls(
+ image_shape=image_shape,
+ x_y_coordinates=(x_coordinates, y_coordinates),
+ height=height,
+ angle=angle,
+ )
+ line.x_points = x_points
+ line.y_points = y_points
+ return line
+
+ @property
+ def indices(self) -> Tuple[ndarray, ndarray]:
+ """Returns pixel coordinates of the line on the image
+
+ Returns:
+ two ``ndarray``s containing every x and y coordinate of pixels the line covers
+ """
+
+ return self.x_points, self.y_points
+
+ @property
+ def end_points(self) -> Tuple[Tuple[int, int], Tuple[int, int]]:
+ """Returns the two end points defining the line
+
+ Returns:
+ x, y coordinates of both end points as in ``((x1, x2), (y1, y2))``
+ """
+
+ return (self.x_y_coordinates[0][0], self.x_y_coordinates[1][0]), (
+ self.x_y_coordinates[0][1],
+ self.x_y_coordinates[1][1],
+ )
diff --git a/pyrate/pyrate/sense/vision/image_rectangle.py b/pyrate/pyrate/sense/vision/image_rectangle.py
new file mode 100644
index 0000000..14a1ba8
--- /dev/null
+++ b/pyrate/pyrate/sense/vision/image_rectangle.py
@@ -0,0 +1,54 @@
+"""
+This module implements the representation of an obstacle or object in an image
+(modelled as a bounding box), used by ``ObstacleLocator``.
+"""
+
+# Typing
+from typing import Tuple
+
+
+class ImageRectangle:
+
+ """Represents an object's bounding box in the image plane as a rectangular bounding box.
+
+ Args:
+ rectangle: x and y coordinates, width and height of the rectangle
+ offset: Position offset (e.g. due to a subimage being used for detection)
+ angle: Rotation offset (e.g. due to detection on a rotated image)
+ """
+
+ def __init__(
+ self, rectangle: Tuple[int, int, int, int], offset: Tuple[int, int] = (0, 0), angle: float = 0
+ ) -> None:
+
+ self.x_rel, self.y_rel, self.width, self.height = rectangle
+ self.offset = offset
+ self._x_coord, self._y_coord = self.x_rel + offset[0], self.y_rel + offset[1]
+ self.angle = angle
+
+ def rectangle_to_corner(self, offset: bool = True) -> Tuple[Tuple[int, int], Tuple[int, int]]:
+ """Returns bounding box corners for OpenCV drawing functions.
+
+ Args:
+ offset: If True, return coordinates offset by the offset specified when
+ creating the `ImageRectangle`. If False, ignore the offset.
+
+ Returns:
+ Coordinates of the upper left and lower right points of the rectangle ``(x1, y1), (x2, y2)``
+ """
+
+ if offset:
+ x_res, y_res = self._x_coord, self._y_coord
+ else:
+ x_res, y_res = self.x_rel, self.y_rel
+ return (x_res, y_res), (x_res + self.width, y_res + self.height)
+
+ @property
+ def bottom_center(self) -> Tuple[int, int]:
+ """Returns the bottom center coordinates of the bounding box.
+
+ Returns:
+ x, y coordinates of the bottom center pixel
+ """
+
+ return self._x_coord + self.width // 2, self._y_coord + self.height
diff --git a/pyrate/pyrate/sense/vision/obstacle_locator.py b/pyrate/pyrate/sense/vision/obstacle_locator.py
new file mode 100644
index 0000000..6638865
--- /dev/null
+++ b/pyrate/pyrate/sense/vision/obstacle_locator.py
@@ -0,0 +1,143 @@
+"""
+This module implements a mechanism to construct the horizon line in an image,
+as well as (in the future) find obstacles in the image and return their locations.
+"""
+
+# Typing
+from typing import List
+from typing import Tuple
+
+# Numpy
+import numpy
+from numpy import arctan
+from numpy import ndarray
+from numpy import pi
+from numpy import radians
+
+# Scientific
+from cv2 import blur
+from cv2 import Canny
+from cv2 import HoughLines
+from cv2 import INTER_AREA
+from cv2 import medianBlur
+from cv2 import resize
+
+# Auxiliary modules
+from .image_line import ImageLine
+
+
+class ObstacleLocator: # pylint: disable=too-few-public-methods
+
+ """Canny based horizon extraction and (in the future) obstacle detection class.
+
+ Args:
+ image_width: Width of images in pixels
+ image_height: Height of images in pixels
+ """
+
+ def __init__(self, image_width: int, image_height: int) -> None:
+ # Precompute common image attributes
+ self.image_width, self.image_height = image_width, image_height
+ self._center = image_width // 2, image_height // 2
+ self._min_angle = arctan(self.image_width / self.image_height)
+
+ def _preprocess(self, image: ndarray, box_blur_size: int = 90, median_blur_size: int = 3) -> ndarray:
+ """Preprocesses an image, reduces its resolution and smooths it.
+
+ Args:
+ image: Image to preprocess
+ box_blur_size: The size of the box blur filter is calculated as
+ `image_height // box_blur_size`. Only modify default value with prior testing.
+ median_blur_size: Size of the median filter aperture. Must be
+ odd and greater than 1; only modify default value with prior testing.
+
+ Returns:
+ Preprocessed image
+ """
+
+ image = resize(image, (self.image_width, self.image_height), interpolation=INTER_AREA)
+ blur_size = self.image_height // box_blur_size
+ image = blur(image, (blur_size, blur_size))
+ image = medianBlur(image, median_blur_size)
+ return image
+
+ def _detect_lines( # pylint: disable=too-many-arguments
+ self,
+ image: ndarray,
+ n_max: int,
+ min_line_length: float,
+ canny_threshold: int,
+ angle_resolution: float = 0.2,
+ ) -> Tuple[List[ImageLine], List[int]]:
+ """Detect lines in an image using the Canny and Hough filter
+
+ Args:
+ image: Image where lines should be detected in
+ n_max: Max number of lines that should be returned
+ min_line_length: Line length threshold
+ canny_threshold: Threshold for the hysteresis procedure
+ angle_resolution: Angle resolution of the accumulator in degrees
+
+ Returns:
+ Detected lines and their lengths (the number of edge pixels on each line)
+ """
+
+ # extract lines in image
+ edges = Canny(image, canny_threshold, canny_threshold * 2)
+ lines = HoughLines(
+ edges,
+ 1,
+ radians(angle_resolution),
+ int(self.image_width * min_line_length),
+ min_theta=self._min_angle,
+ max_theta=pi - self._min_angle,
+ )
+ if lines is None:
+ return [], []
+ lines = [
+ ImageLine.from_polar((self.image_width, self.image_height), *line[0]) for line in lines[:n_max]
+ ]
+ line_lengths = self._evaluate_lines(edges, lines)
+
+ return lines, line_lengths
+
+ @staticmethod
+ def _evaluate_lines(
+ edges: ndarray,
+ lines: List[ImageLine],
+ ) -> List[int]:
+ """Evaluate the length and separation of image by each line.
+
+ Args:
+ image: The image the line is drawn on
+ edges: Edges in image
+ lines: ``ImageLine``s that shall be evaluated
+
+ Returns:
+ A tuple containing each line's length and the dissimilarity of the regions separated by the line
+ """
+
+ # number of points on line
+ votes = [numpy.sum(edges[line.y_points, line.x_points] > 0).astype(int) for line in lines]
+ return votes
+
+ def detect_horizon(
+ self, img: ndarray, n_max: int = 3, min_visibility: float = 0.2, canny_threshold: int = 35
+ ) -> Tuple[List[ImageLine], List[int]]:
+ """Detects possible horizon line(s).
+
+ Args:
+ img: Image to analyze
+ n_max: Max number of lines to extract
+ min_visibility: Minimum length of a horizon line to be considered, as a fraction of the image width
+ canny_threshold: Canny threshold for edge detection
+
+ Returns:
+ Horizon line candidates (in the image space resized to ``(self.image_width, self.image_height)``
+ as set in :meth:`__init__`) and their respective lengths
+ (the number of edge pixels supporting each line)
+ """
+
+ img = self._preprocess(img)
+ return self._detect_lines(img, n_max, min_visibility, canny_threshold)
diff --git a/pyrate/resources/logo.svg b/pyrate/resources/logo.svg
new file mode 100644
index 0000000..53908d8
--- /dev/null
+++ b/pyrate/resources/logo.svg
@@ -0,0 +1 @@
+
\ No newline at end of file
diff --git a/pyrate/resources/project_structure.png b/pyrate/resources/project_structure.png
new file mode 100644
index 0000000..dbaa355
Binary files /dev/null and b/pyrate/resources/project_structure.png differ
diff --git a/pyrate/route_generator.py b/pyrate/route_generator.py
new file mode 100644
index 0000000..7496ba5
--- /dev/null
+++ b/pyrate/route_generator.py
@@ -0,0 +1,303 @@
+import glob
+import os
+from typing import Optional
+
+import matplotlib.pyplot as plt
+import numpy as np
+import pandas as pd
+from PIL import ImageDraw
+from PIL import Image
+from shapely.geometry import Polygon, Point
+from shapely.ops import unary_union
+from tqdm import tqdm
+
+import experiments
+from pyrate.plan.nearplanner.timing_frame import TimingFrame
+
+experiments.optimization_param.n_iter_grad = 50
+experiments.optimization_param.verbose = False
+
+SIZE_INNER = 75
+SIZE_ROUTE = 100
+MIN_DESTINATION_DISTANCE = 25
+
+
+# https://stackoverflow.com/questions/16444719/python-numpy-complex-numbers-is-there-a-function-for-polar-to-rectangular-co
+def polar_to_cartesian(
+ radii: np.ndarray,
+ angles: np.ndarray,
+):
+ """Transforms polar coordinates into cartesian coordinates.
+
+ Args:
+ radii: An array of radii.
+ angles: An array of angles, given as fractions of a full turn.
+
+ Returns:
+ An array of cartesian coordinates, encoded as complex numbers.
+ """
+ return radii * np.exp(2j * angles * np.pi)
+
+
+def cartesian_to_polar(
+ x: np.ndarray,
+):
+ """Transforms cartesian coordinates into polar coordinates.
+
+ Args:
+ x: An array of complex numbers to be converted into polar coordinates.
+
+ Returns:
+ A distance array and an angle array (in radians).
+ """
+ return abs(x), np.angle(x)
+
+
+def random_polygon(
+ radius_mean: float = 2,
+ radius_sigma: float = 1.5,
+):
+ """Generates the simplest of polygons, a triangle with a size described by a random polygon.
+
+ Args:
+ radius_mean: The average radius defining a circumcircle of a triangle.
+ radius_sigma: The variance of a radius defining a circumcircle of a triangle.
+
+ Returns:
+ A single triangle.
+ """
+ number_of_corners = np.random.randint(3, 10)
+ array = polar_to_cartesian(
+ np.random.lognormal(radius_mean, radius_sigma),
+ np.sort(np.random.rand(number_of_corners)),
+ )
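+ # All corners lie on a single circumcircle and are sorted by angle,
+ # so the resulting polygon is convex and non-self-intersecting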
+ offset = np.random.randint(low=-SIZE_ROUTE, high=SIZE_ROUTE, size=(2,))
+ return_values = np.zeros((number_of_corners, 2), dtype=float)
+ return_values[:] = offset
+ return_values[:, :] += np.array((np.real(array), np.imag(array))).T
+ return Polygon(return_values)
+
+
+def generate_obstacles(
+ seed=None,
+ number_of_polygons: int = 40,
+ radius_mean: float = 2,
+ radius_sigma: float = 1,
+) -> dict[str, Polygon]:
+ """Generates a set of obstacles from a union of triangles.
+
+ The union of triangles meas that if polygons overlap o polygon containing the union of those polygons is returned.
+ Args:
+ seed: A seed to generate a set of obstacles from.
+ number_of_polygons: The number of polygons that should be drawn.
+ radius_mean: The average radius defining a circumcircle of an obstacle triangle.
+ radius_sigma: The variance of a radius defining a circumcircle of an obstacle triangle.
+
+ Returns:
+ A list of unified obstacles.
+ """
+ if seed is not None:
+ np.random.seed(seed)
+ polygons = []
+ for _ in range(number_of_polygons):
+ poly = random_polygon(radius_mean, radius_sigma)
+ if poly.contains(Point(0, 0)):
+ continue
+ if poly.exterior.distance(Point(0, 0)) < 1:
+ continue
+ polygons.append(poly)
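+ # Merge overlapping polygons into single obstacle shapes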
+ polygon_list = list(unary_union(polygons).geoms)
+ return {str(i): p for i, p in enumerate(polygon_list)}
+
+
+def generate_destination(obstacles: dict[str, Polygon], seed: Optional[int] = None) -> Point:
+ """Generates for a map.
+
+ Can be used to generate a valid destination for list of obstacles.
+ Args:
+ obstacles: A list of obstacles.
+ seed: The seed determining the point.
+
+ Returns:
+ A goal that should be reached by the ship.
+ """
+ # sets the seed
+ if seed is not None:
+ np.random.seed(seed)
+
+ # generates the point
+ point: Optional[Point] = None
+ while (
+ point is None
+ or abs(point.x) < MIN_DESTINATION_DISTANCE
+ or abs(point.y) < MIN_DESTINATION_DISTANCE
+ or any(obstacle.contains(point) for obstacle in obstacles.values())
+ ):
+ point = Point(np.random.randint(-SIZE_INNER, SIZE_INNER, size=(2,), dtype=int))
+ return point
+
+
+def plot_situation(
+ obstacles: dict[str, Polygon],
+ destination: Point,
+ obstacle_color: Optional[str] = None,
+ route: Optional[TimingFrame] = None,
+ legend: bool = True,
+ title: Optional[str] = None,
+) -> None:
+ """PLots the obstacles into a matplotlib plot.
+
+ Args:
+ obstacles: A list of obstacles.
+ destination: The destination that should be reached by the boat.
+ obstacle_color: The color the obstacles should have. Can be None.
+ If none all obstacles will have different colors.
+ route: The route that should be plotted.
+ legend: If true plots a legend.
+ title: The title of the plot.
+ Returns:
+ None
+ """
+ plt.figure(figsize=(8, 8))
+ plt.axis([-SIZE_ROUTE, SIZE_ROUTE, -SIZE_ROUTE, SIZE_ROUTE])
+
+ # Sets a title if one is demanded
+ if title:
+ plt.title(title)
+
+ # Plot the obstacles
+ if obstacles:
+ for polygon in obstacles.values():
+ if obstacle_color is not None:
+ plt.fill(*polygon.exterior.xy, color=obstacle_color, label="Obstacle")
+ else:
+ plt.fill(*polygon.exterior.xy)
+
+ # Plots the wind direction
+ # https://www.geeksforgeeks.org/matplotlib-pyplot-arrow-in-python/
+ plt.arrow(
+ 0,
+ +int(SIZE_ROUTE * 0.9),
+ 0,
+ -int(SIZE_ROUTE * 0.1),
+ head_width=10,
+ width=4,
+ label="Wind (3Bft)",
+ )
+
+ if route:
+ plt.plot(route.points[:, 0], route.points[:, 1], color="BLUE", marker=".")
+
+ # Plot the destination
+ if destination:
+ plt.scatter(*destination.xy, marker="X", color="green", label="Destination")
+ plt.scatter(0, 0, marker="o", color="green", label="Start")
+
+ if legend:
+ # https://stackoverflow.com/questions/13588920/stop-matplotlib-repeating-labels-in-legend
+ handles, labels = plt.gca().get_legend_handles_labels()
+ by_label = dict(zip(labels, handles))
+ plt.legend(by_label.values(), by_label.keys())
+ plt.show()
+
+
+def generate_image_from_map(
+ obstacles: dict[str, Polygon],
+ destination: Point,
+ route: Optional[list[TimingFrame]],
+) -> Image.Image:
+ """Generate an image from the map.
+
+ Can be used to feed an ANN.
+ - Obstacles are marked red.
+ - The destination is marked green.
+ - The points where the route will likely change are marked blue.
+
+ Args:
+ obstacles: A dict of obstacles as shapely Polygons. Keyed as a string.
+ destination: A destination that should be navigated to.
+ route: The route belonging to this map (currently not drawn into the image).
+ """
+ img = Image.new(
+ "RGB",
+ (SIZE_ROUTE * 2, SIZE_ROUTE * 2),
+ "#ffffff",
+ )
+ draw = ImageDraw.Draw(img)
+ for polygon in obstacles.values():
+ draw.polygon(
+ list(np.dstack(polygon.exterior.xy).reshape((-1)) + SIZE_ROUTE),
+ fill="#FF0000",
+ outline="#FF0000",
+ )
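+ # The destination is shifted by 100 (= SIZE_ROUTE) so that the map origin lies in the image center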
+ img.putpixel((int(destination.x) + 100, int(destination.y) + 100), (0, 0xFF, 0))
+ return img
+
+
+def generate_all_to_series(seed: Optional[int] = None, image: bool = False) -> pd.Series:
+ """Generates everything and aggregates all data into a `pd:Series`.
+
+ Args:
+ seed:The seed that should be used to generate map and destination.
+ image: If an image should be generated or if that should be postponed to save memory.
+ Returns:
+ Contains a `pd.Series`containing the following.
+ - The seed tha generated the map.
+ - The destination in x
+ - The destination in y
+ - A list of Obstacle polygons.
+ - The route generated for this map by the roBOOTer navigation system.
+ - Optionally the image containing all the information.
+ Can be generated at a later date without the fear for a loss of accuracy.
+ """
+ obstacles = generate_obstacles(seed)
+ destination = generate_destination(obstacles, seed)
+
+ try:
+ route, _ = experiments.generate_route(
+ position=Point(0, 0), goal=destination, obstacles=obstacles, wind=(18, 180)
+ )
+ except Exception as e:
+ print("Error")
+ print(e)
+ route = None
+ return pd.Series(
+ data={
+ "seed": str(seed),
+ "obstacles": obstacles,
+ "destination_x": destination.x,
+ "destination_y": destination.y,
+ "image": generate_image_from_map(obstacles, destination, route) if image else pd.NA,
+ "route": route.points if route else pd.NA,
+ "cost": route.cost if route else pd.NA,
+ },
+ name=str(seed),
+ )
+
+
+if __name__ == "__main__":
+ save_frequency = int(os.getenv("save_frequency", "50"))
+ start_seed = int(os.getenv("seed_start", "0"))
+ continues = os.getenv("continues", "true").lower() == "true"
+ print(f"Save Frequency: {save_frequency}")
+ print(f"Start seed: {start_seed}")
+ print(f"Continues: {continues}")
+
+ files = glob.glob("data/raw_*.pickle") + glob.glob("data/tmp_*.pickle")
+ seed_groups = {int(file[9:-7]) for file in files}
+ for next_seeds in range(start_seed, 10_000_000_000, save_frequency):
+ if next_seeds in seed_groups:
+ continue
+ print(f"Start generating routes for seed: {next_seeds}")
+ tmp_pickle_str: str = f"data/tmp_{next_seeds:010}.pickle"
+ pd.DataFrame().to_pickle(tmp_pickle_str)
+ df = pd.DataFrame(
+ [
+ generate_all_to_series(i, image=False)
+ for i in tqdm(range(next_seeds, next_seeds + save_frequency, 1))
+ ]
+ ).set_index("seed")
+ pickle_to_file = f"data/raw_{next_seeds:010}.pickle"
+ df.to_pickle(pickle_to_file)
+ os.remove(tmp_pickle_str)
+ if not continues:
+ break
diff --git a/pyrate/scripts/__init__.py b/pyrate/scripts/__init__.py
new file mode 100644
index 0000000..c7bd704
--- /dev/null
+++ b/pyrate/scripts/__init__.py
@@ -0,0 +1 @@
+"""This file is required to mark the directory as a package for the documentation to be able to include it."""
diff --git a/pyrate/scripts/benchmark_db_and_projections.py b/pyrate/scripts/benchmark_db_and_projections.py
new file mode 100644
index 0000000..41839a8
--- /dev/null
+++ b/pyrate/scripts/benchmark_db_and_projections.py
@@ -0,0 +1,149 @@
+#!/usr/bin/env python3
+
+"""This scripts benchmarks both the database reading accesses as well as the
+projections between polar and cartesian representations.
+
+The function :func:`_query_database` queries the database for some location and radius which were deemed a
+realistic load scenario.
+The function :func:`_project_to_cartesian_and_back` takes the result of such a query and projects it to the
+cartesian representation and back.
+Keep in mind that these operations might only be performed very seldom on an actual Atlantic crossing
+(maybe every couple of hours).
+
+The results and details are now included in :ref:`benchmarking-db-and-local-projections`
+(in the documentation of the :mod:`pyrate.plan` module).
+
+This script was initially developed as part of
+`issue #40 `__,
+in order to evaluate whether "custom" local projections are a feasible option on a Raspberry Pi 3B/4B.
+Since then, the implementation has changed.
+In particular, the database creation has been moved into a separate script
+(see :ref:`script-s57_charts_to_db`).
+"""
+
+# Standard library
+from argparse import ArgumentDefaultsHelpFormatter
+from argparse import ArgumentParser
+from time import perf_counter
+
+# Typing
+from typing import Any
+from typing import Callable
+from typing import List
+
+# Data modeling
+import numpy
+
+# Geospatial
+from pyrate.plan.geometry import PolarGeometry
+from pyrate.plan.geometry import PolarLocation
+
+# Database
+from pyrate.common.charts import SpatialiteDatabase
+
+
+#: Around Miami, Florida, US.
+#: The point was chosen simply because charts are available nearby.
+QUERY_AROUND = PolarLocation(longitude=-80.10955810546875, latitude=25.851808634972723)
+
+
+def _query_database(path_to_db: str, around: PolarLocation, radius: float) -> List[PolarGeometry]:
+ """Queries some polygons from the database.
+
+ Args:
+ path_to_db: The path to the database
+ around: The location around which to query for chart objects
+ radius: The radius within which to query for chart objects in meters
+
+ Returns:
+ The resulting polygons
+ """
+ with SpatialiteDatabase(path_to_db) as database:
+ return list(database.read_geometries_around(around=around, radius=radius))
+
+
+def _project_to_cartesian_and_back(data: List[PolarGeometry]) -> None:
+ """Projects some PolarPolygons to their cartesian representation and back to test the performance.
+
+ Args:
+ data: Some polygons to project
+ """
+ assert data # non-emptiness
+
+ first = data[0]
+ center = first if isinstance(first, PolarLocation) else first.locations[0]
+
+ for polygon in data:
+ polygon.to_cartesian(center).to_polar()
+
+
+def _measure_func(func: Callable[..., Any], name: str, iterations: int, *params, **kw_params) -> None:
+ """Measures and prints the running time of a given method.
+
+ Args:
+ func: The callable to execute
+ name: The name to use for printing
+ iterations: The number of iterations to average over
+ *params: Positional arguments to be passed to the callable
+ **kw_params: Keyword arguments to be passed to the callable
+ """
+ results = numpy.empty((iterations,))
+ for i in range(iterations):
+ start = perf_counter()
+ func(*params, **kw_params)
+ end = perf_counter()
+ results[i] = end - start
+
+ print(f'Executed "{name}" {iterations} times:')
+ print(f"\taverage:\t {numpy.mean(results):.6f} seconds")
+ print(f"\tstd dev:\t {numpy.std(results):.6f} seconds")
+ print(f"\tvariance:\t {numpy.var(results):.6f} seconds")
+
+
+def benchmark(path_to_db: str, iterations: int, around: PolarLocation, radius: float) -> None:
+ """Performs the benchmark and prints the results to the console.
+
+ Args:
+ path_to_db: The path to the database
+ iterations: The number of iterations to average over
+ around: The location around which to query for chart objects
+ radius: The radius within which to query for chart objects in meters
+ """
+ print("Information on the setting:")
+
+ with SpatialiteDatabase(path_to_db) as database:
+ print(f"\tnumber of rows/polygons in database:\t\t\t {len(database)}")
+ print(f"\tsum of vertices of all rows/polygons of in database:\t {database.count_vertices()}")
+
+ data = _query_database(path_to_db, around, radius)
+ print(f"\textracted number of polygons:\t\t\t\t {len(data)}")
+ vertex_count = sum(1 if isinstance(poly, PolarLocation) else len(poly.locations) for poly in data)
+ print(f"\textracted total number of vertices:\t\t\t {vertex_count}")
+
+ print() # newline
+
+ _measure_func(_query_database, "query_database", iterations, path_to_db, around, radius)
+
+ print() # newline
+
+ _measure_func(_project_to_cartesian_and_back, "project_to_cartesian_and_back", iterations, data)
+
+
+def _main() -> None:
+ """The main function."""
+ parser = ArgumentParser(
+ description="Benchmark DB queries and projections for a fixed location (see docs/scripts).",
+ formatter_class=ArgumentDefaultsHelpFormatter,
+ )
+ parser.add_argument("path_to_db", type=str)
+ parser.add_argument("--iterations", type=int, default=10)
+ parser.add_argument("--radius", type=float, default=100, help="The query radius in kilometers")
+ args = parser.parse_args()
+
+ benchmark(
+ path_to_db=args.path_to_db, iterations=args.iterations, around=QUERY_AROUND, radius=args.radius * 1000
+ )
+
+
+if __name__ == "__main__":
+ _main()
diff --git a/pyrate/scripts/benchmark_graph_neighbor_search.py b/pyrate/scripts/benchmark_graph_neighbor_search.py
new file mode 100644
index 0000000..572c58f
--- /dev/null
+++ b/pyrate/scripts/benchmark_graph_neighbor_search.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+
+"""Benchmark the neighbor search in graphs.
+
+Initially written as part of
+`Issue #90 `__ to determine
+whether a faster implementation is needed.
+That issue also contains a draft which *might* make it faster if that is required in the future.
+
+Examples:
+ These are the benchmark results when run on a `Lenovo ThinkPad T560 laptop `__
+ with an `Intel(R) Core(TM) i5-6300U CPU @ 2.40GHz
+ `__
+ and 16GB RAM (at commit ``9a8177326dc0d82d0aea4559e6c85071ceebf56f``):
+
+ .. code-block:: bash
+
+ ./scripts/benchmark_graph_neighbor_search.py --iterations 100
+ frequency = 2 for distance 5000 km
+ generated graph in 0.018192768096923828 seconds
+ number of nodes = 42, number of edges = 120
+ non-empty entries in neighbor table = 240
+ computation time = 0.0003081770000221695 (avg. over 100 samples)
+
+ frequency = 8 for distance 1000 km
+ generated graph in 0.0327601432800293 seconds
+ number of nodes = 642, number of edges = 1920
+ non-empty entries in neighbor table = 3840
+ computation time = 0.0033796210000218707 (avg. over 100 samples)
+
+ frequency = 71 for distance 100 km
+ generated graph in 1.8711962699890137 seconds
+ number of nodes = 50412, number of edges = 151230
+ non-empty entries in neighbor table = 302460
+ computation time = 0.30925760600001695 (avg. over 100 samples)
+
+ frequency = 142 for distance 50 km
+ generated graph in 7.630561828613281 seconds
+ number of nodes = 201642, number of edges = 604920
+ non-empty entries in neighbor table = 1209840
+ computation time = 1.1302456550000102 (avg. over 100 samples)
+
+ frequency = 706 for distance 10 km
+ generated graph in 260.7689461708069 seconds
+ number of nodes = 4984362, number of edges = 14953080
+ non-empty entries in neighbor table = 29906160
+ computation time = 27.382845137000004 (avg. over 100 samples)
+
+
+"""
+
+# Standard library
+from argparse import ArgumentDefaultsHelpFormatter
+from argparse import ArgumentParser
+from time import time
+from timeit import timeit
+
+# Scientific
+import numpy
+
+# Graph
+from pyrate.plan.graph.generate import create_earth_graph
+from pyrate.plan.graph.generate import min_required_frequency
+from pyrate.plan.graph import NavigationGraph
+
+
+def _main() -> None:
+ """The main function."""
+ parser = ArgumentParser(
+ description="Benchmark the neighbor search in graphs.", formatter_class=ArgumentDefaultsHelpFormatter
+ )
+ parser.add_argument(
+ "--iterations", type=int, default=100, help="the number of timing samples to collect per graph size"
+ )
+ args = parser.parse_args()
+
+ for distance_km in [5000, 1000, 100, 50, 10]:
+ frequency = min_required_frequency(distance_km * 1000, in_meters=True)
+ print(f"frequency = {frequency} for distance {distance_km} km")
+
+ time_before_generation = time()
+ graph = create_earth_graph(frequency, print_status=False)
+ print(f"generated graph in {time() - time_before_generation} seconds")
+ print(f"number of nodes = {len(graph)}, number of edges = {graph.num_edges}")
+
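+ # The graph is bound via a default argument; setup() resets the internal `_neighbors` cache so
+ # that each timed run of statement() recomputes the neighbor table from scratch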
+ def setup(local_graph: NavigationGraph = graph) -> None:
+ local_graph._neighbors = None # pylint: disable=protected-access
+
+ def statement(local_graph: NavigationGraph = graph) -> None:
+ _ = local_graph.neighbors
+
+ avg_time = timeit(setup=setup, stmt=statement, number=args.iterations)
+ print(f"non-empty entries in neighbor table = {numpy.count_nonzero(graph.neighbors != -1)}")
+ print(f"computation time = {avg_time} (avg. over {args.iterations} samples)")
+
+ print() # empty line between distances
+
+
+if __name__ == "__main__":
+ _main()
diff --git a/pyrate/scripts/create_chart_db_from_geojson.py b/pyrate/scripts/create_chart_db_from_geojson.py
new file mode 100644
index 0000000..9776314
--- /dev/null
+++ b/pyrate/scripts/create_chart_db_from_geojson.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+
+"""Create a database from a given GeoJSON input file. Intended to quickly create test databases.
+
+It assumes the same structure as the one generated by `geojson.io `__ and only supports
+polygons.
+"""
+
+# Standard library
+from argparse import ArgumentDefaultsHelpFormatter
+from argparse import ArgumentParser
+import json
+
+# Database and charts
+from typing import Generator
+
+# Math
+from numpy import array
+
+# Pyrate
+from pyrate.common.charts import SpatialiteDatabase
+from pyrate.plan.geometry import LocationType
+from pyrate.plan.geometry import PolarPolygon
+
+
+def read_geojson(
+ path: str, location_type: LocationType = LocationType.LAND
+) -> Generator[PolarPolygon, None, None]:
+ """Reads a GeoJSON file (only supports specific constructs, see module documentation).
+
+ Args:
+ path: the input file
+ location_type: the location type of all chart objects
+ """
+
+ with open(path, "r", encoding="utf-8") as input_file:
+ json_data = json.load(input_file)
+
+ assert json_data["type"] == "FeatureCollection"
+ for feature in json_data["features"]:
+ assert feature["type"] == "Feature"
+ geometry = feature["geometry"]
+ assert geometry["type"] == "Polygon"
+ coordinates = geometry["coordinates"]
+ assert len(coordinates) == 1, "the polygon may have exactly one exterior and zero interior rings"
+ exterior = coordinates[0]
+ yield PolarPolygon.from_numpy(array(exterior), location_type=location_type)
+
+
+def _main() -> None:
+ """The main function."""
+ parser = ArgumentParser(description=__doc__, formatter_class=ArgumentDefaultsHelpFormatter)
+ parser.add_argument(
+ "path_to_geojson", type=str, help='The input file, usually ends with ".json", UTF-8 encoding'
+ )
+ parser.add_argument("path_to_db", type=str, help='The output file, usually ends with ".sqlite"')
+ args = parser.parse_args()
+
+ with SpatialiteDatabase(args.path_to_db) as database:
+ database.write_geometries(read_geojson(args.path_to_geojson), update=True)
+
+
+if __name__ == "__main__":
+ _main()
diff --git a/pyrate/scripts/create_earth_graph.py b/pyrate/scripts/create_earth_graph.py
new file mode 100644
index 0000000..132342d
--- /dev/null
+++ b/pyrate/scripts/create_earth_graph.py
@@ -0,0 +1,145 @@
+#!/usr/bin/env python3
+
+"""
+Generates the spherical graph that can then be used for navigation.
+The graph gets serialized to disk at the end of the calculation.
+
+Examples:
+ Generate a graph and visualize it. Because it makes later use in search algorithms faster, we also
+ include the neighbor table. The script also prunes land areas by default using the *Earth2014*
+ dataset (variant *TBI*, 1 arc-min resolution).
+
+ .. code-block:: bash
+
+ ./scripts/create_earth_graph.py 500000 earth_graph_500_km.hdf5
+
+"""
+
+# Standard library
+from argparse import ArgumentDefaultsHelpFormatter
+from argparse import ArgumentParser
+import os.path
+
+# Data set access
+from pyrate.common.raster_datasets import DataSetAccess
+from pyrate.common.raster_datasets import transformers_concrete
+
+# Graph generation
+from pyrate.plan.graph import create_earth_graph
+from pyrate.plan.graph import min_required_frequency
+
+# Script visualize_earth_graph
+try:
+ from visualize_earth_graph import dump_2d_plots
+except ImportError:
+ # add scripts folder
+ import sys
+
+ sys.path.append(os.path.abspath(os.path.dirname(__file__)))
+ del sys
+
+ from visualize_earth_graph import dump_2d_plots
+
+
+def calculate_and_save( # pylint: disable=too-many-arguments
+ requested_distance: float,
+ out_graph_file: str,
+ prune_land_areas: bool,
+ bathymetric_dataset: str,
+ generate_neighbors: bool,
+ dump_plots: bool,
+ out_visualization_directory: str,
+) -> None:
+ """Calculates and saves the graph with the given node distance while performing some logging.
+
+ Args:
+ requested_distance: the maximum distance between two neighboring nodes, in meters
+ out_graph_file: the target file where to save the graph to; usually ends in ``.hdf5``
+ prune_land_areas: whether to prune by land areas
+ bathymetric_dataset: the path to the bathymetric dataset if parameter ``prune_land_areas`` is set to
+ ``True``; e.g. the Earth2014 dataset with depth in meters
+ generate_neighbors: whether to generate and serialize all neighbors too
+ dump_plots: whether to dump all plots with the default config using
+ :mod:`scripts.visualize_earth_graph` after completing the graph generation
+ out_visualization_directory: the target directory (may not yet exist) where to save the visualizations
+ to if parameter ``dump_plots`` is set to ``True``
+ """
+
+ print("Starting generation of earth graph")
+ graph = create_earth_graph(min_required_frequency(requested_distance, in_meters=True))
+
+ if prune_land_areas:
+ print("Pruning graph")
+
+ # generate the "keep condition" and then remove the property afterwards
+ data_set = DataSetAccess(bathymetric_dataset)
+ mode = transformers_concrete.BathymetricTransformer.Modes.FRACTION_NAVIGABLE
+ graph.append_properties(
+ [transformers_concrete.BathymetricTransformer(data_set, [mode])], show_progress=True
+ )
+ # keep all nodes that have more than 0% (i.e. that have any) navigable locations
+ keep_condition = graph.node_properties[mode.column_name] > 0.0
+ graph.clear_node_properties()
+
+ graph.prune_nodes(keep_condition.to_numpy())
+
+ if generate_neighbors:
+ print("Generating neighbor table")
+ _ = graph.neighbors
+
+ print("Completed generation of earth graph")
+
+ print(f'Serializing to disk: "{out_graph_file}"')
+ os.makedirs(os.path.dirname(out_graph_file) or ".", exist_ok=True)
+ graph.to_disk(out_graph_file)
+
+ if dump_plots:
+ print(f"Dumping visualizations: {out_visualization_directory}")
+ dump_2d_plots(graph, out_visualization_directory)
+
+
+def _main() -> None:
+ """The main function."""
+ parser = ArgumentParser(
+ description="Create and serialize a graph of the earth. "
+ "Optionally perform pruning of land area, neighbor discovery and "
+ "dumping of visualizations.",
+ formatter_class=ArgumentDefaultsHelpFormatter,
+ )
+ parser.add_argument("requested_distance", type=float, help="The max. distance between nodes in meters")
+ parser.add_argument("out_graph_file", type=str)
+ parser.add_argument("--prune_land_areas", type=bool, default=True)
+ parser.add_argument(
+ "--bathymetric_dataset",
+ type=str,
+ default="../data/topography/earth2014/Earth2014.TBI2014.1min.geod.geo.tif",
+ )
+ parser.add_argument("--generate_neighbors", type=bool, default=True)
+ parser.add_argument("--dump_plots", type=bool, default=True)
+ parser.add_argument(
+ "--visualization_directory",
+ type=str,
+ default=None,
+ help='default: "dirname(out_graph_file)/visualization/"',
+ )
+ args = parser.parse_args()
+
+ out_visualization_directory = (
+ args.visualization_directory
+ if args.visualization_directory is not None
+ else os.path.join(os.path.dirname(args.out_graph_file), "visualization")
+ )
+
+ calculate_and_save(
+ requested_distance=args.requested_distance,
+ out_graph_file=args.out_graph_file,
+ prune_land_areas=args.prune_land_areas,
+ bathymetric_dataset=args.bathymetric_dataset,
+ generate_neighbors=args.generate_neighbors,
+ dump_plots=args.dump_plots,
+ out_visualization_directory=out_visualization_directory,
+ )
+
+
+if __name__ == "__main__":
+ _main()
diff --git a/pyrate/scripts/earth_graph_frequency_statistics.py b/pyrate/scripts/earth_graph_frequency_statistics.py
new file mode 100644
index 0000000..faac7f0
--- /dev/null
+++ b/pyrate/scripts/earth_graph_frequency_statistics.py
@@ -0,0 +1,115 @@
+#!/usr/bin/env python3
+
+"""
+Compute a statistics table for earth-generation with multiple frequencies.
+
+.. _script-earth_graph_frequency_statistics-example:
+
+Examples:
+ .. code-block:: bash
+
+ ./scripts/earth_graph_frequency_statistics.py 300 --step 10
+ Frequency Great Circle Distance (km) # nodes # edges Computation time (sec)
+ 10 705.365422 1002 3000 0.229929
+ 20 352.682711 4002 12000 0.531473
+ 30 235.121807 9002 27000 0.582267
+ 40 176.341356 16002 48000 0.998187
+ 50 141.073084 25002 75000 1.559254
+ 60 117.560904 36002 108000 2.264499
+ 70 100.766489 49002 147000 3.095882
+ 80 88.170678 64002 192000 6.268704
+ 90 78.373936 81002 243000 5.790993
+ 100 70.536542 100002 300000 6.933162
+ 110 64.124129 121002 363000 9.229682
+ 120 58.780452 144002 432000 10.617164
+ 130 54.258879 169002 507000 16.519117
+ 140 50.383244 196002 588000 15.838989
+ 150 47.024361 225002 675000 21.517596
+ 160 44.085339 256002 768000 24.864843
+ 170 41.492084 289002 867000 30.145898
+ 180 39.186968 324002 972000 28.684602
+ 190 37.124496 361002 1083000 27.561629
+ 200 35.268271 400002 1200000 33.667006
+ 210 33.58883 441002 1323000 34.471024
+ 220 32.062065 484002 1452000 37.554208
+ 230 30.668062 529002 1587000 43.1782
+ 240 29.390226 576002 1728000 46.112764
+ 250 28.214617 625002 1875000 44.472765
+ 260 27.129439 676002 2028000 53.084822
+ 270 26.124645 729002 2187000 60.2798
+ 280 25.191622 784002 2352000 63.117184
+ 290 24.322946 841002 2523000 64.914421
+ 300 23.512181 900002 2700000 68.890113
+
+"""
+
+# Standard library
+from argparse import ArgumentDefaultsHelpFormatter
+from argparse import ArgumentParser
+from time import perf_counter
+
+# Typing
+from typing import List
+from typing import Tuple
+
+# Scientific
+import numpy
+import pandas
+
+# Earth graph calculation
+from pyrate.plan.graph import create_earth_graph
+from pyrate.plan.graph import great_circle_distance_distance_for
+
+
+def _main() -> None:
+ """The main function."""
+ parser = ArgumentParser(
+ description="Compute a statistics table for earth-generation with multiple frequencies.",
+ formatter_class=ArgumentDefaultsHelpFormatter,
+ )
+ parser.add_argument("max", type=int, help="the maximum frequency to test")
+ parser.add_argument(
+ "--step", type=int, default=10, help="how large the steps while increasing the frequencies should be"
+ )
+ args = parser.parse_args()
+
+ pandas.set_option("display.max_columns", None)
+ pandas.set_option("display.max_rows", None)
+
+ columns = [
+ ("Frequency", numpy.uint),
+ ("Great Circle Distance (km)", numpy.float64),
+ ("# nodes", numpy.uint),
+ ("# edges", numpy.uint),
+ ("Computation time (sec)", numpy.float64),
+ ]
+
+ records: List[Tuple] = []
+ for frequency in range(0, args.max + 1, args.step):
+ if frequency == 0:
+ continue # the range starts at zero only to get evenly spaced steps; zero itself is not a valid frequency
+
+ start = perf_counter()
+ graph = create_earth_graph(frequency)
+ end = perf_counter()
+ records.append(
+ (
+ frequency,
+ great_circle_distance_distance_for(frequency) / 1000,
+ len(graph),
+ graph.num_edges,
+ end - start,
+ )
+ )
+
+ # re-creating this is inefficient, but it does not matter for small sizes
+ data_frame = pandas.DataFrame.from_records(numpy.array(records, dtype=columns))
+ string = data_frame.iloc[[-1]].to_string(index=False, justify="right")
+ if len(data_frame) == 1: # only the first time
+ print(string)
+ else:
+ print(string.splitlines()[-1])
+
+
+if __name__ == "__main__":
+ _main()
diff --git a/pyrate/scripts/s57_charts_to_db.py b/pyrate/scripts/s57_charts_to_db.py
new file mode 100644
index 0000000..111aba2
--- /dev/null
+++ b/pyrate/scripts/s57_charts_to_db.py
@@ -0,0 +1,72 @@
+#!/usr/bin/env python3
+
+"""Create a database containing all S-57 charts in the given directory.
+
+Optionally simplifies the geometries before saving them.
+See "S57ChartHandler" for supported nautical chart features.
+"""
+
+# Standard library
+from argparse import ArgumentDefaultsHelpFormatter
+from argparse import ArgumentParser
+
+# Typing
+from typing import Optional
+
+# Progress bar
+from tqdm import tqdm
+
+# Database and charts
+from pyrate.common.charts import S57ChartHandler
+from pyrate.common.charts import SpatialiteDatabase
+
+
+def create_db(path_to_raw_charts: str, path_to_db: str, simplify_tolerance: Optional[float] = None) -> None:
+ """Creates a database from all charts in the given directory.
+
+ Args:
+ path_to_raw_charts: the path where to look for the source chart files
+        path_to_db: the path of the target database file
+        simplify_tolerance: the tolerance within which all simplified points shall lie with respect to the
+            original ones, in meters; must be non-negative
+ """
+ files = list(S57ChartHandler.find_chart_files(path_to_raw_charts))
+ print("Scanned for relevant files")
+
+ with SpatialiteDatabase(path_to_db) as database:
+ with database.disable_synchronization():
+ if len(database) != 0:
+ raise RuntimeError("writing to an already existing database, which might be an error")
+ print("Created database")
+
+ handler = S57ChartHandler()
+ for file in tqdm(files, unit=" files"):
+ database.write_geometries(handler.read_chart_file(file), update=False, raise_on_failure=False)
+
+ if simplify_tolerance is not None:
+ vertices_before = database.count_vertices()
+ database.simplify_contents(simplify_tolerance)
+ vertices_after = database.count_vertices()
+ change = (vertices_before - vertices_after) / vertices_before * 100
+ print(f"Reduced the vertex count from {vertices_before} to {vertices_after} (-{change:.3f}%)")
+
+
+def _main() -> None:
+ """The main function."""
+ parser = ArgumentParser(description=__doc__, formatter_class=ArgumentDefaultsHelpFormatter)
+ parser.add_argument("path_to_raw_charts", type=str, help="will be searched recursively")
+ parser.add_argument("path_to_db", type=str, help='usually ends with ".sqlite"')
+ parser.add_argument(
+ "--simplify_tolerance",
+ type=float,
+ default=25.0,
+ help="the simplification tolerance in meters, set to zero to disable",
+ )
+ args = parser.parse_args()
+
+ simplify_tolerance = None if args.simplify_tolerance == 0.0 else args.simplify_tolerance
+ create_db(args.path_to_raw_charts, args.path_to_db, simplify_tolerance)
+
+
+if __name__ == "__main__":
+ _main()
diff --git a/pyrate/scripts/visualize_earth_graph.py b/pyrate/scripts/visualize_earth_graph.py
new file mode 100644
index 0000000..42e9428
--- /dev/null
+++ b/pyrate/scripts/visualize_earth_graph.py
@@ -0,0 +1,252 @@
+#!/usr/bin/env python3
+
+"""
+Visualizes a graph generated by :func:`pyrate.plan.graph.generate.create_earth_graph`, like the one created by
+the script :ref:`script-create_earth_graph`.
+
+Examples:
+ Simply plot the node positions as well as graphs of all properties into a local folder.
+ This will overwrite existing plots and create the target directory if it does not already exist:
+
+ .. code-block:: bash
+
+ ./scripts/visualize_earth_graph.py my_graph.hdf5 visualization/
+
+"""
+
+# Standard library
+from argparse import ArgumentParser
+import os.path
+
+# Typing
+from typing import Any
+from typing import Dict
+from typing import Optional
+from typing import Sequence
+
+# Scientific
+import matplotlib.pyplot as plt
+import numpy as np
+import scipy.interpolate
+
+# Progress bars
+from tqdm import tqdm
+
+# Geography
+from cartopy.crs import PlateCarree
+from cartopy.crs import Robinson
+
+# Own Pyrate code
+from pyrate.plan.graph import GeoNavigationGraph
+
+# pylint: disable=too-many-arguments,too-many-locals
+
+
+def _prepare_2d_plot(
+ central_longitude: float = 0.0,
+ show_gridlines: bool = True,
+ show_gridline_labels: bool = False,
+ show_coastlines: bool = False,
+) -> plt.Axes:
+ """Prepares a 2D plot for visualizing the graph positions or property data.
+
+ Args:
+ central_longitude: the central longitude of the projection, in degrees in ``[-180, +180)``
+ show_gridlines: whether to overlay a grid
+ show_gridline_labels: whether to print labels to a grid (if drawn at all)
+ show_coastlines: whether to outline coastlines
+
+ Returns:
+ The correctly configured axes object
+ """
+ # create frame plot
+ if show_gridlines and show_gridline_labels:
+ # Robinson would be nicer but is not supported by matplotlib.Axis.gridlines()
+ coordinate_reference = PlateCarree(central_longitude=central_longitude)
+ else:
+ coordinate_reference = Robinson(central_longitude=central_longitude)
+ axes = plt.axes(projection=coordinate_reference)
+
+ # create background
+ if show_coastlines:
+ # the resolution may allow only a few specific values:
+ # https://scitools.org.uk/cartopy/docs/latest/matplotlib/geoaxes.html#cartopy.mpl.geoaxes.GeoAxes.coastlines
+ axes.coastlines(resolution="110m", color="black")
+ if show_gridlines:
+ axes.gridlines(crs=coordinate_reference, draw_labels=show_gridline_labels)
+
+ axes.set_xlim(-180.0, +180.0)
+ axes.set_ylim(-90.0, +90.0)
+
+ return axes
+
+
+def plot_node_positions_2d(
+ graph: GeoNavigationGraph,
+ central_longitude: float = 0.0,
+ show_gridlines: bool = True,
+ show_gridline_labels: bool = False,
+ show_coastlines: bool = False,
+) -> plt.Axes:
+ """Visualizes the positions of the nodes on the globe.
+
+ Args:
+ graph: the graph to be visualized
+ central_longitude: the central longitude of the projection, in degrees in ``[-180, +180)``
+ show_gridlines: whether to overlay a grid
+ show_gridline_labels: whether to print labels to a grid (if drawn at all)
+ show_coastlines: whether to outline coastlines
+
+ Returns:
+ The axes object containing the visualization
+ """
+ axes = _prepare_2d_plot(
+ central_longitude=central_longitude,
+ show_gridlines=show_gridlines,
+ show_gridline_labels=show_gridline_labels,
+ show_coastlines=show_coastlines,
+ )
+
+ point_size = 1000 / len(graph) * plt.rcParams["lines.markersize"] ** 2
+ # idea: plt.plot() is faster according to the docs of axes.scatter, so it could be used instead
+ axes.scatter(
+ graph.longitudes_degrees.to_numpy(),
+ graph.latitudes_degrees.to_numpy(),
+ s=point_size,
+ linewidths=0,
+ alpha=0.9,
+ )
+
+ return axes
+
+
+def plot_properties_2d(
+ graph: GeoNavigationGraph,
+ property_column: str,
+ central_longitude: float = 0.0,
+ resolution: int = 10,
+ show_gridlines: bool = True,
+ show_gridline_labels: bool = False,
+ show_coastlines: bool = False,
+ show_legend: bool = True,
+ interpolation_method: str = "nearest",
+ shading_method: str = "nearest",
+) -> plt.Axes:
+ """Creates a 2D plot of the graph and associated data.
+
+ Args:
+ graph: the graph to be visualized
+        property_column: the name of the property column in the node data frame to plot
+ central_longitude: the central longitude of the projection, in degrees in ``[-180, +180)``
+ resolution: the number of points/pixels per degree latitude/longitude
+ show_gridlines: whether to overlay a grid
+ show_gridline_labels: whether to print labels to a grid (if drawn at all)
+ show_coastlines: whether to outline coastlines
+ show_legend: whether to show a legend for the color values of the property
+ interpolation_method: passed to :func:`scipy.interpolate.griddata`; ``"nearest"`` best reflects the
+ nature of the discretized nodes
+ shading_method: passed to :func:`matplotlib.pyplot.pcolormesh`; ``"nearest"`` best reflects the nature
+ of the discretized nodes
+
+ Returns:
+ The axes object containing the visualization
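+
+    Example:
+        Assuming the node data frame has a property column named ``"elevation"`` (the column name here is
+        only illustrative):
+
+        .. code-block:: python
+
+            axes = plot_properties_2d(graph, "elevation", show_coastlines=True)
+            plt.show()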
+ """
+ axes = _prepare_2d_plot(
+ central_longitude=central_longitude,
+ show_gridlines=show_gridlines,
+ show_gridline_labels=show_gridline_labels,
+ show_coastlines=show_coastlines,
+ )
+
+ # re-interpolate data
+ lat = np.linspace(-90, +90, 180 * resolution)
+ lon = np.linspace(-180, +180, 360 * resolution)
+ lat, lon = np.meshgrid(lat, lon)
+ node_coordinates = np.column_stack((graph.latitudes_degrees, graph.longitudes_degrees))
+ grid_data = scipy.interpolate.griddata(
+ node_coordinates, graph.nodes[property_column], (lat, lon), method=interpolation_method
+ )
+
+ # print data
+ axes.pcolormesh(lon, lat, grid_data, alpha=0.9, cmap="seismic", shading=shading_method)
+
+ if show_legend:
+ plt.colorbar(ax=axes)
+
+ return axes
+
+
+def dump_2d_plots(
+ graph: GeoNavigationGraph,
+ path: str,
+ formats: Sequence[str] = ("png",),
+ dpi: int = 500,
+ show_progress: bool = False,
+ kwargs_node_positions: Optional[Dict[str, Any]] = None,
+ kwargs_properties: Optional[Dict[str, Any]] = None,
+) -> None:
+ """Dump 2D plots of the graph positions and all property data into the given directory.
+
+ Args:
+ graph: the graph to be visualized
+        path: the directory to dump the plots into; it is created if it does not yet exist and existing
+            plots are overwritten
+        formats: the file formats to save in, for example *png*, *svg* or *pdf* (any format supported by
+            matplotlib). Keep in mind, however, that usually only raster images work reasonably fast
+ dpi: the dots per inch resolution of the resulting (raster) visualizations
+ show_progress: whether to print a simple progress bar
+ kwargs_node_positions: passed directly to :meth:`~plot_node_positions_2d`
+ kwargs_properties: passed directly to :meth:`~plot_properties_2d`
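+
+    Example:
+        A minimal call could look like the following, assuming ``graph`` is an already loaded
+        :class:`~pyrate.plan.graph.GeoNavigationGraph` (the directory name and keyword arguments are only
+        illustrative):
+
+        .. code-block:: python
+
+            dump_2d_plots(
+                graph,
+                "visualization/",
+                formats=("png", "svg"),
+                kwargs_node_positions={"show_coastlines": True},
+            )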
+ """
+ # create the target directory if it does not already exist
+    assert not os.path.isfile(
+        path
+    ), "the visualization target path must be a directory (possibly not yet existing), not a regular file"
+ os.makedirs(path, exist_ok=True)
+ file_pattern = os.path.join(path, "plot_{name}.{suffix}")
+
+ node_properties = graph.node_properties
+ number_of_properties = node_properties.size
+ with tqdm(
+ total=(number_of_properties + 1) * len(formats), unit=" plots", disable=not show_progress
+ ) as progress_bar:
+
+ figure: plt.Figure = plt.figure() # Reuse it in save_plot
+
+ def save_plot(prepared_axes: plt.Axes, file_path: str) -> None:
+ figure.add_axes(prepared_axes)
+ figure.savefig(file_path, transparent=True, dpi=dpi)
+ figure.clf()
+
+ progress_bar.update() # increment by one
+
+ # plot the node positions
+ for viz_format in formats:
+ axes = plot_node_positions_2d(graph, **(kwargs_node_positions or {}))
+ final_path = file_pattern.format(name="node_positions", suffix=viz_format)
+ save_plot(axes, final_path)
+
+        # plot the properties of the nodes
+        for property_name in node_properties.columns:
+            # this operation might be expensive, so only do it once per property
+            axes = plot_properties_2d(graph, property_name, **(kwargs_properties or {}))
+
+ for viz_format in formats:
+ final_path = file_pattern.format(name=f"property_{property_name}", suffix=viz_format)
+ save_plot(axes, final_path)
+
+
+def _main() -> None:
+ """The main function."""
+ parser = ArgumentParser(description="Visualize a geo-referenced graph.")
+ parser.add_argument("path_to_graph", type=str)
+ parser.add_argument("visualization_output_directory", type=str)
+ args = parser.parse_args()
+
+ graph = GeoNavigationGraph.from_disk(args.path_to_graph)
+ dump_2d_plots(graph, args.visualization_output_directory)
+
+
+if __name__ == "__main__":
+ _main()
diff --git a/pyrate/setup.cfg b/pyrate/setup.cfg
new file mode 100644
index 0000000..8229767
--- /dev/null
+++ b/pyrate/setup.cfg
@@ -0,0 +1,77 @@
+# https://gitlab.com/pycqa/flake8/-/issues/428
+[flake8]
+max-complexity = 10
+max-line-length = 110
+j = 0
+format = pylint
+doctests = true
+ignore =
+ # this entirely overrides the default ignore list
+ # see https://flake8.pycqa.org/en/latest/user/violations.html
+
+ # conflicts with black formatter
+ E203,
+
+ # allow this since PyCharm likes to format it this way and it does not really hurt
+ E126,
+ E127,
+
+    # ignore this since we adopted the newer style of breaking lines *before* binary operators;
+    # its counterpart "[W504] line break after binary operator" stays enabled
+    W503
+
+[mutmut]
+paths_to_mutate=pyrate/
+runner=bash -c "mypy pyrate/ && pytest -x --assert=plain"
+tests_dir=tests/
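+# mutation testing can then be run with e.g. "mutmut run" and inspected with "mutmut results"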
+
+# waits for https://github.com/python/mypy/issues/5205
+[mypy]
+check_untyped_defs = True
+no_implicit_optional = True
+warn_redundant_casts = True
+warn_unused_ignores = True
+warn_return_any = True
+strict_equality = True
+plugins = numpy.typing.mypy_plugin
+
+# Below are the module-specific MyPy ignores:
+
+[mypy-pandas.*]
+ignore_missing_imports = True
+
+[mypy-matplotlib.*]
+ignore_missing_imports = True
+
+[mypy-scipy.*]
+ignore_missing_imports = True
+
+[mypy-geopy.*]
+ignore_missing_imports = True
+
+[mypy-osgeo.*]
+ignore_missing_imports = True
+
+[mypy-shapely.*]
+ignore_missing_imports = True
+
+[mypy-geojson.*]
+ignore_missing_imports = True
+
+[mypy-rasterio.*]
+ignore_missing_imports = True
+
+[mypy-tqdm.*]
+ignore_missing_imports = True
+
+[mypy-cartopy.crs.*]
+ignore_missing_imports = True
+
+[mypy-h5py.*]
+ignore_missing_imports = True
+
+[mypy-cv2.*]
+ignore_missing_imports = True
+
+# Required for the scripts
+[mypy-visualize_earth_graph.*]
+ignore_missing_imports = True
diff --git a/pyrate/setup.py b/pyrate/setup.py
new file mode 100644
index 0000000..879d5c8
--- /dev/null
+++ b/pyrate/setup.py
@@ -0,0 +1,85 @@
+"""This is used until we migrate to ``pyproject.toml``."""
+
+# Standard library
+from platform import system
+import re
+from subprocess import check_output
+
+# Installation & setup tool
+import setuptools
+
+
+# Find Pyrate version and author strings
+with open("pyrate/__init__.py", "r", encoding="utf8") as fd:
+ content = fd.read()
+ version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', content, re.MULTILINE).group(1)
+ author = re.search(r'^__author__\s*=\s*[\'"]([^\'"]*)[\'"]', content, re.MULTILINE).group(1)
+
+# Import readme
+with open("README.md", "r", encoding="utf8") as readme:
+ long_description = readme.read()
+
+# Get GDAL version
+# also cut the trailing line break from the output
+if system() == "Windows":
+ gdal_version_raw = check_output(["gdalinfo", "--version"], universal_newlines=True)[:-1]
+ # Output formatted as: "GDAL 3.1.4, released 2020/10/20"
+    gdal_version = gdal_version_raw.split(" ", maxsplit=3)[1][:-1]  # also strip away the comma at the end
+else:
+ gdal_version = check_output(["gdal-config", "--version"], universal_newlines=True)[:-1]
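+# Note: this requires the GDAL command line tools to be available, e.g. via the libgdal-dev and gdal-bin
+# system packages on Debian-based systems (package names may differ on other platforms)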
+
+setuptools.setup(
+ name="Pyrate",
+ version=version,
+ author=author,
+ author_email="info@sailingteam.tu-darmstadt.de",
+ description="The Pyrate package for autonomous, unmanned surface vehicles.",
+ long_description=long_description,
+ long_description_content_type="text/markdown",
+ url="https://st-darmstadt.de",
+ packages=setuptools.find_packages(),
+ package_data={
+ "pyrate": ["py.typed"], # https://www.python.org/dev/peps/pep-0561/
+ },
+ classifiers=[
+ "Programming Language :: Python :: 3",
+ "License :: TODO",
+ "Operating System :: OS Independent",
+ ],
+ python_requires=">=3.10",
+ install_requires=[
+ # => libraries for actual functionality
+ # -> general tools
+ "tqdm",
+ # -> generic scientific
+ "numpy>=1.21.0",
+ "scipy",
+ "pandas!=1.3.0,!=1.3.1,!=1.3.2", # See issues #128 and #129
+ "h5py",
+ "tables", # needed for pandas' to_hdf5(), see
+ # https://pandas.pydata.org/docs/user_guide/io.html#hdf5-pytables
+ "matplotlib",
+ "opencv-python",
+ # -> geospatial / GIS tools
+ "pyproj",
+ "geopy",
+ "geojson",
+ "shapely<1.8.0", # See #142
+ "cartopy",
+ "rasterio>=1.3b1",
+ "pygeodesy",
+ f"pygdal=={gdal_version}.*", # needs libgdal-dev to be installed via the system package manager
+ # => testing and code quality
+ "black~=22.3",
+ "hypothesis>=6.41.0",
+ "mypy",
+ "pylint~=2.13.4", # Explicitly pin the version since it changes frequently and is disrupting
+ "hacking", # This also installs flake8, mccabe and others
+ "pytest>=6.0.0",
+ "pytest-cov",
+ "pytest-subtests",
+ ],
+ extras_require={
+ "docs": ["sphinx", "sphinx-markdown-builder", "sphinx_rtd_theme", "sphinxcontrib-programoutput"]
+ },
+)
diff --git a/pyrate/tests/__init__.py b/pyrate/tests/__init__.py
new file mode 100644
index 0000000..12aa1ad
--- /dev/null
+++ b/pyrate/tests/__init__.py
@@ -0,0 +1,37 @@
+"""Contains common helpers for tests."""
+
+# Standard library
+from datetime import timedelta
+import os.path
+
+# Hypothesis testing
+from hypothesis import settings
+
+# Geo libraries
+import rasterio
+
+# DataSetAccess for opening the example data set
+from pyrate.common.raster_datasets import DataSetAccess
+
+# Pyrate
+from pyrate.common.testing import IS_EXTENDED_TESTING
+
+
+# do more tests on the CI server as that one has more patience
+# see: https://hypothesis.readthedocs.io/en/latest/settings.html#settings-profiles
+settings.register_profile("normal", deadline=timedelta(seconds=5), print_blob=True, max_examples=500)
+settings.register_profile("extended", parent=settings.get_profile("normal"), max_examples=10_000)
+if IS_EXTENDED_TESTING:
+ settings.load_profile("extended")
+else:
+ settings.load_profile("normal")
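+# a specific profile can also be forced on the command line, e.g.: pytest --hypothesis-profile=extended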
+
+
+def _open_test_geo_dataset() -> DataSetAccess:
+    """Tries to return an Earth2014 30 arc-minute grid resolution dataset."""
+ path = os.path.join(
+ os.path.dirname(__file__), "common/raster_datasets/Earth2014.TBI2014.30min.geod.geo.tif"
+ )
+
+ assert os.path.exists(path), "The downscaled Earth2014 testing dataset is missing"
+ return DataSetAccess(rasterio.open(path))
diff --git a/pyrate/tests/act/__init__.py b/pyrate/tests/act/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/act/test_lqr.py b/pyrate/tests/act/test_lqr.py
new file mode 100644
index 0000000..f7fa259
--- /dev/null
+++ b/pyrate/tests/act/test_lqr.py
@@ -0,0 +1,66 @@
+"""This module asserts correct runtime behaviour of the pyrate.act.pid module."""
+
+# Test environment
+from unittest import TestCase
+
+# Mathematics
+from numpy import array
+from numpy import eye
+from numpy import Inf
+from numpy.linalg import norm
+from numpy import vstack
+
+# Package under test
+from pyrate.act.control import Lqr
+
+
+class TestLqr(TestCase):
+
+ """Test for correct runtime behaviour in pyrate.act.control.lqr."""
+
+ # In this context, we reproduce a common controller notation
+ # pylint: disable=invalid-name
+
+ def setUp(self) -> None:
+ """Setup the LQR specification for testing."""
+
+ # Model specification
+ self.A = array([[0, 1], [0, 0]])
+ self.B = array([0, 1])[:, None]
+ self.C = array([1, 0])[None, :]
+ self.dt = 0.5
+
+ # Cost matrix specification
+ self.Q = eye(2)
+ self.R = array([[1.0]])
+
+ # State and target
+ # Target is already reached in this example
+ self.state = vstack([0.0, 0.0])
+ self.desired = vstack([0.0])
+
+ def test_process_tracking(self) -> None:
+ """Assert that a pandas.DataFrame with process data is created."""
+
+        # Initialize LQR controller
+ lqr = Lqr(self.A, self.B, self.C, self.Q, self.R, self.dt, keep_trace=True)
+
+ # Execute a few control steps
+ state = self.state.copy()
+ previous_error = Inf
+ for _ in range(10):
+ control_signal = lqr.control(desired=self.desired, state=self.state)
+ state += self.A.dot(state) + self.B.dot(control_signal)
+ error = norm(state - self.desired).item() # Convert from numpy scalar to Python float
+ assert error < previous_error or error == 0, "Error did not get lower."
+ previous_error = error
+
+ # Assert correct process tracing with LQR controller
+ assert lqr.process is not None, "LQR did not keep trace of process"
+ assert len(lqr.process.index) == 10, "LQR has not traced enough steps"
+
+        # Reset LQR
+ lqr.reset()
+
+ # Assert correct reset to initial state
+ assert len(lqr.process.index) == 0, "LQR has not dropped process trace properly"
diff --git a/pyrate/tests/act/test_lqr_integral.py b/pyrate/tests/act/test_lqr_integral.py
new file mode 100644
index 0000000..2c8d1d7
--- /dev/null
+++ b/pyrate/tests/act/test_lqr_integral.py
@@ -0,0 +1,117 @@
+"""This module asserts correct runtime behaviour of the pyrate.act.pid module."""
+
+# Test environment
+from unittest import TestCase
+
+# Mathematics
+from numpy import array
+from numpy import eye
+from numpy.linalg import eig
+from numpy import vstack
+
+# Package under test
+from pyrate.act.control import AntiWindupLqr
+
+
+class TestAntiWindupLqr(TestCase):
+
+ """Test for correct runtime behaviour in pyrate.act.control.anti_windup_lqr."""
+
+ # In this context, we reproduce a common controller notation
+ # pylint: disable=invalid-name
+
+ def setUp(self) -> None:
+ """Setup the LQR specification for testing."""
+
+ # Model specification
+ self.A = array([[0, 1], [0, 0]])
+ self.B = array([0, 1])[:, None]
+ self.C = array([1, 0])[None, :]
+ self.max_control = array([1.0])
+ self.dt = 0.5
+
+ # Time discrete model
+ self.Ad = self.dt * eye(2)
+ self.Bd = self.B + self.A @ self.B * self.dt
+
+ # Cost matrix specification
+ self.Q = eye(3)
+ self.R = array([[1.0]])
+
+ # State and target
+ # Target is already reached in this example
+ self.wrong_state = vstack([1.0, 0.0])
+ self.state_small_negative = vstack([-0.001, 0.0])
+ self.state = vstack([0.0, 0.0])
+ self.desired = vstack([0.0])
+ self.desired1 = vstack([1.0])
+
+ def test_lqr_design(self) -> None:
+ """Assert stable controller dynamics"""
+
+ # Initialize controller
+ lqr = AntiWindupLqr(
+ self.A, self.B, self.C, self.Q, self.R, self.max_control, self.dt, keep_trace=True
+ )
+        # check the continuous-time eigenvalues
+        eigen_values, _ = eig(lqr.A - lqr.B @ lqr.K)
+        assert all(ev.real < 0 for ev in eigen_values), "unstable controller"
+
+ def test_anti_windup(self) -> None:
+        """Assert the control signal stays within limits and anti-windup keeps the LQR responsive."""
+
+ # Initialize controller
+ lqr = AntiWindupLqr(
+ self.A, self.B, self.C, self.Q, self.R, self.max_control, self.dt, keep_trace=True
+ )
+
+ # Execute a few control steps
+ for _ in range(10):
+ control_signal = lqr.control(desired=self.desired, state=self.wrong_state)
+ assert abs(control_signal) <= self.max_control, "control limits are not applied"
+
+ assert control_signal == -self.max_control, f"control limits not reached {control_signal}"
+
+ # test stationary summed error
+ summed_error = lqr.summed_error
+ control_signal = lqr.control(desired=self.desired, state=self.wrong_state)
+ assert abs(lqr.summed_error - summed_error) < 1e-6, "summed error changes in saturation"
+
+ # test reactiveness
+ control_signal_back = lqr.control(desired=self.desired, state=self.state_small_negative)
+        assert abs(control_signal_back) < self.max_control, "anti-windup is not working"
+
+ def test_process_tracking(self) -> None:
+ """Assert useful controller behavior and that a pandas.DataFrame with process data is created."""
+
+ # Initialize controller
+ lqr = AntiWindupLqr(
+ self.A, self.B, self.C, self.Q, self.R, self.max_control, self.dt, keep_trace=True
+ )
+
+ # Check zero control
+ control_signal = lqr.control(desired=self.desired, state=self.state)
+ assert control_signal == 0, "Control signal not zero when desired value 0 reached"
+ control_signal = lqr.control(desired=self.desired1, state=self.wrong_state)
+        assert control_signal == 0, "Control signal not zero when desired value 1 reached"
+ lqr.reset()
+
+ # Execute a few control steps
+ state = self.wrong_state.copy()
+ initial_error = abs(self.C @ state - self.desired)
+ for _ in range(10):
+ control_signal = lqr.control(desired=self.desired, state=state)
+ state = self.Ad.dot(state) + self.Bd.dot(control_signal)
+ error = abs(self.C @ state - self.desired)
+            assert error < initial_error or error == 0, "Error exceeds initial - unstable controller?"
+
+ # Assert correct process tracing with LQR controller
+ assert lqr.process is not None, "LQR did not keep trace of process"
+ assert len(lqr.process.index) == 10, "LQR has not traced enough steps"
+
+ # Reset controller
+ lqr.reset()
+
+ # Assert correct reset to initial state
+ assert lqr.summed_error == 0.0, "Integral did not reset summed error properly"
+ assert len(lqr.process.index) == 0, "LQR has not dropped process trace properly"
diff --git a/pyrate/tests/act/test_pid.py b/pyrate/tests/act/test_pid.py
new file mode 100644
index 0000000..3ddcc1f
--- /dev/null
+++ b/pyrate/tests/act/test_pid.py
@@ -0,0 +1,65 @@
+"""This module asserts correct runtime behaviour of the pyrate.act.pid module."""
+
+# Test environment
+from unittest import TestCase
+
+# Mathematics
+from numpy import array
+from numpy import Inf
+from numpy.linalg import norm
+from numpy import vstack
+
+# Package under test
+from pyrate.act.control import Pid
+
+
+class TestPid(TestCase):
+
+ """Test for correct runtime behaviour in pyrate.act.control.pid."""
+
+ # In this context, we reproduce a common PID notation
+ # pylint: disable=invalid-name
+
+ def setUp(self) -> None:
+ """Setup the PID specification for testing."""
+
+ # PID specification
+ self.P = array([[1.0]])
+        self.I = array([[1.0]])  # noqa: E741
+ self.D = array([[1.0]])
+ self.dt = 0.5
+
+ # State and target
+ # Target is already reached in this example
+ self.state = vstack([0.0])
+ self.state_derivative = vstack([0.0])
+ self.desired = vstack([0.0])
+
+ def test_process_tracking(self) -> None:
+ """Assert that a pandas.DataFrame with process data is created."""
+
+ # Initialize PID controller
+ pid = Pid(self.P, self.I, self.D, self.dt, keep_trace=True)
+
+ # Execute a few control steps
+ state = self.state.copy()
+ previous_error = Inf
+ for _ in range(10):
+ control_signal = pid.control(
+ desired=self.desired, state=self.state, state_derivative=self.state_derivative
+ )
+ state += control_signal
+ error = norm(state - self.desired).item() # Convert from numpy scalar to Python float
+ assert error < previous_error or error == 0, "Error did not get lower."
+ previous_error = error
+
+ # Assert correct process tracing with PID controller
+ assert pid.process is not None, "PID did not keep trace of process"
+ assert len(pid.process.index) == 10, "PID has not traced enough steps"
+
+ # Reset PID
+ pid.reset()
+
+ # Assert correct reset to initial state
+ assert pid.summed_error == 0.0, "PID did not reset summed error properly"
+ assert len(pid.process.index) == 0, "PID has not dropped process trace properly"
diff --git a/pyrate/tests/act/test_pid_anti_windup.py b/pyrate/tests/act/test_pid_anti_windup.py
new file mode 100644
index 0000000..2aa694b
--- /dev/null
+++ b/pyrate/tests/act/test_pid_anti_windup.py
@@ -0,0 +1,88 @@
+"""This module asserts correct runtime behaviour of the pyrate.act.pid module."""
+
+# Test environment
+from unittest import TestCase
+
+# Mathematics
+from numpy import array
+from numpy import Inf
+from numpy.linalg import norm
+from numpy import vstack
+
+# Package under test
+from pyrate.act.control import AntiWindupPid
+
+
+class TestAntiWindupPid(TestCase):
+
+ """Test for correct runtime behaviour in pyrate.act.control.anti_windup_pid."""
+
+ # In this context, we reproduce a common PID notation
+ # pylint: disable=invalid-name
+
+ def setUp(self) -> None:
+ """Set up the PID specification for testing."""
+
+ # PID specification
+ self.P = array([[1.0]])
+        self.I = array([[1.0]])  # noqa: E741
+ self.D = array([[1.0]])
+ self.max_control = 1.0
+ self.dt = 0.5
+
+ # State and target
+ # Target is already reached in this example
+ self.state = vstack([0.0])
+ self.state_large = vstack([0.5])
+ self.state_small_neg = vstack([-0.01])
+ self.state_derivative = vstack([0.0])
+ self.desired = vstack([0.0])
+
+ def test_anti_windup(self) -> None:
+        """Assert the control signal stays within limits and anti-windup keeps the PID responsive."""
+ # Initialize PID controller
+ pid = AntiWindupPid(self.P, self.I, self.D, self.max_control, self.dt, keep_trace=True)
+
+ # Execute a few control steps
+ for _ in range(10):
+ control_signal = pid.control(
+ desired=self.desired, state=self.state_large, state_derivative=self.state_derivative
+ )
+ assert abs(control_signal) <= self.max_control, "control limits are not applied"
+ assert abs(control_signal) == self.max_control, "control limits not reached"
+
+ # test reactiveness
+ control_signal_back = pid.control(
+ desired=self.desired, state=self.state_small_neg, state_derivative=self.state_derivative
+ )
+        assert abs(control_signal_back) < self.max_control + 1e-4, "anti-windup is not working"
+
+ def test_process_tracking(self) -> None:
+ """Assert that a pandas.DataFrame with process data is created."""
+
+ # Initialize PID controller
+ pid = AntiWindupPid(self.P, self.I, self.D, self.max_control, self.dt, keep_trace=True)
+
+        # Execute a few control steps
+ state = self.state.copy()
+ previous_error = Inf
+ for _ in range(10):
+ control_signal = pid.control(
+ desired=self.desired, state=self.state, state_derivative=self.state_derivative
+ )
+ state += control_signal
+ error = norm(state - self.desired).item() # Convert from numpy scalar to Python float
+ assert error < previous_error or error == 0, "Error did not decrease"
+ previous_error = error
+
+ # Assert correct process tracing with PID controller
+ assert pid.process is not None, "PID did not keep trace of process"
+ assert len(pid.process.index) == 10, "PID has not traced enough steps"
+
+ # Reset PID
+ pid.reset()
+
+ # Assert correct reset to initial state
+ assert pid.summed_error == 0.0, "PID did not reset summed error properly"
+ assert len(pid.process.index) == 0, "PID has not dropped process trace properly"
diff --git a/pyrate/tests/common/__init__.py b/pyrate/tests/common/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/common/charts/__init__.py b/pyrate/tests/common/charts/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/common/charts/example_charts/README.txt b/pyrate/tests/common/charts/example_charts/README.txt
new file mode 100644
index 0000000..861720d
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/README.txt
@@ -0,0 +1 @@
+See https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/data/-/tree/master/charts/noaa_vector for the license and more information.
diff --git a/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04A.TXT b/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04A.TXT
new file mode 100644
index 0000000..61c9b81
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04A.TXT
@@ -0,0 +1,71 @@
+NOAA ENC
+
+NATIONAL OCEANIC AND ATMOSPHERIC ADMINISTRATION
+
+US1BS04M - BERING SEA NORTHERN PART (EAST)
+
+
+INDEX:
+NOTE A
+AIDS TO NAVIGATION
+CAUTION - TEMPORARY CHANGES
+CAUTION - LIMITATIONS
+NOTE B
+WARNING - PRUDENT MARINER
+AUTHORITIES
+INTERNATIONAL BOUNDARIES
+POLLUTION REPORTS
+CAUTION - DANGER
+ADDITIONAL INFORMATION
+
+
+NOTES:
+NOTE A
+Navigation regulations are published in Chapter 2, U.S. Coast Pilot 9. Additions or revisions to Chapter 2 are published in the Notice to Mariners. Information concerning the regulations may be obtained at the Office of the Commander, 17th Coast Guard District in Juneau, Alaska or at the Office of the District Engineer, Corps of Engineers in Anchorage, Alaska.
+Refer to charted regulation section numbers.
+
+
+AIDS TO NAVIGATION
+Consult U.S. Coast Guard Light List for supplemental information concerning aids to navigation.
+
+See National Geospatial-Intelligence Agency List of Lights and Fog Signals for information not included in the United States Coast Guard Light List.
+
+
+CAUTION - TEMPORARY CHANGES
+Temporary changes or defects in aids to navigation are not indicated. See Local Notice to Mariners.
+
+
+CAUTION - LIMITATIONS
+Limitations on the use of radio signals as aids to marine navigation can be found in the U.S. Coast Guard Light Lists and National Geospatial-Intelligence Agency Publication 117.
+Radio direction-finder bearings to commercial broadcasting stations are subject to error and should be used with caution.
+
+
+NOTE B
+Radio navigational aids on the Russian Arctic coast and adjacent islands north of the Arctic Circle have been omitted due to the lack of reliable information.
+
+
+WARNING - PRUDENT MARINER
+The prudent mariner will not rely solely on any single aid to navigation, particularly on floating aids. See U.S. Coast Guard Light List and U.S. Coast Pilot for details.
+
+
+AUTHORITIES
+Hydrography and topography by the National Ocean Service, Coast Survey, with additional data from the U.S. Coast Guard, National Geospatial Intelligence Agency, and the Japanese Hydrographic Department.
+
+
+INTERNATIONAL BOUNDARIES
+International boundaries as shown are approximate.
+
+
+POLLUTION REPORTS
+Report all spills of oil and hazardous substances to the National Response Center via 1-800-424-8802 (toll free), or to the nearest U.S. Coast Guard facility if telephone communication is impossible (33 CFR 153).
+
+
+CAUTION - DANGER
+Danger, Prohibited, and Restricted Area falling within the limits of the larger scale charts are shown thereon and not repeated.
+
+
+ADDITIONAL INFORMATION
+Additional information can be obtained at www.nauticalcharts.noaa.gov
+
+
+END OF FILE
diff --git a/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04B.TXT b/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04B.TXT
new file mode 100644
index 0000000..44efb5f
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04B.TXT
@@ -0,0 +1,6 @@
+Maritime boundary provisionally applied pending formal exchange of instruments of ratification.
+
+According to Article 3 of the Agreement Between the United States of America and Russia on the Maritime Boundary, signed June 1, 1990:
+
+"1. In any area east of the maritime boundary that lies within 200 nautical miles of the baseline from which the breadth of the territorial sea of Russia is measured but beyond 200 nautical miles of the baselines from which the breadth of the territorial sea of the United States is measured ("eastern special area"), Russia agrees that henceforth the United States may exercise the sovereign rights and jurisdiction derived from exclusive economic zone jurisdiction that Russia would otherwise be entitled to exercise under international law in the absence of the agreement of the Parties on the maritime boundary...
+3. to the extent that either Party exercises the sovereign rights or jurisdiction in the special area or areas on its side of the maritime boundary as provided for in this Article, such exercise of sovereign rights or jurisdiction derives from the agreement of the Parties and does not constitute an extension of its exclusive economic zone. To this end, each Party shall take the necessary steps to ensure that any exercise on its part of such rights or jurisdiction in the special area or areas on its side of the maritime boundary shall be so characterized in its relevant laws, regulations, and charts."
diff --git a/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04C.TXT b/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04C.TXT
new file mode 100644
index 0000000..caa0cb3
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04C.TXT
@@ -0,0 +1,2 @@
+CAUTION - QUALITY OF BATHYMETRIC DATA
+The areas represented by the object M_QUAL (Quality of data) are approximate due to generalizing for clarity. Caution is advised, particularly for nearshore navigation or voyage planning. M_QUAL represents areas of uniform quality of bathymetric data. The CATZOC (Category of zone of confidence in data) attribute of M_QUAL provides an assessment of the overall zone of confidence.
\ No newline at end of file
diff --git a/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04M.000 b/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04M.000
new file mode 100644
index 0000000..88fa287
Binary files /dev/null and b/pyrate/tests/common/charts/example_charts/US1BS04M/US1BS04M.000 differ
diff --git a/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GA.TXT b/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GA.TXT
new file mode 100644
index 0000000..4ed75cd
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GA.TXT
@@ -0,0 +1,97 @@
+NOAA ENC
+
+NATIONAL OCEANIC AND ATMOSPHERIC ADMINISTRATION
+
+US4AK5GM - PORT MOLLER AND HERENDEEN BAY
+
+INDEX:
+
+AIDS TO NAVIGATION
+POLLUTION REPORTS
+CAUTION USE OF RADIO SIGNALS (LIMITATIONS)
+SUPPLEMENTAL INFORMATION
+CAUTION - TEMPORARY CHANGES
+WARNING - PRUDENT MARINER
+ADDITIONAL INFORMATION
+NOTE A
+AUTHORITIES
+CAUTION - LIMITATIONS
+CAUTION - CHANNELS
+RADAR REFLECTORS
+NOAA WEATHER RADIO BROADCASTS
+TIDAL INFORMATION
+ADMINISTRATION AREA
+COLREGS, 82.1705 (see note A)
+
+
+
+NOTES:
+
+AIDS TO NAVIGATION
+Consult U.S. Coast Guard Light List for supplemental information concerning aids to navigation.
+
+
+POLLUTION REPORTS
+Report all spills of oil and hazardous substances to the National Response Center via 1-800-424-8802 (toll free), or to the nearest U.S. Coast Guard facility if telephone communication is impossible (33 CFR 153).
+
+
+CAUTION USE OF RADIO SIGNALS (LIMITATIONS)
+Limitations on the use of radio signals as aids to marine navigation can be found in the U.S. Coast Guard Light Lists and National Geospatial-Intelligence Agency Publication 117. Radio direction-finder bearings to commercial broadcasting stations are subject to error and should be used with caution.
+
+
+SUPPLEMENTAL INFORMATION
+Consult U.S. Coast Pilot 9 for important supplemental information.
+
+
+CAUTION - TEMPORARY CHANGES
+Temporary changes or defects in aids to navigation are not indicated. See Local Notice to Mariners
+
+
+WARNING - PRUDENT MARINER
+The prudent mariner will not rely solely on any single aid to navigation, particularly on floating aids. See U.S. Coast Guard Light List and U.S. Coast Pilot for details.
+
+
+ADDITIONAL INFORMATION
+Additional information can be obtained at www.nauticalcharts.noaa.gov
+
+
+NOTE A
+Navigation regulations are published in Chapter 2, U.S. Coast Pilot 9. Additions or revisions to Chapter 2 are published in the Notice to Mariners. Information concerning
+the regulations may be obtained at the Office of the Commander, 17th Coast Guard District in Juneau, Alaska, or at the Office of the District Engineer, Corps of Engineers in Anchorage, Alaska. Refer to charted regulation section numbers
+
+
+AUTHORITIES
+Hydrography and Topography by the National Ocean Service, Coast Survey, with additional data from the U.S. Coast Guard.
+
+
+CAUTION - LIMITATIONS
+Limitations on the use of radio signals as aids to marine navigation can be found in the U.S. Coast Guard Light Lists and National Geospatial-Intelligence Agency Publication 117. Radio direction-finder bearings to commercial broadcasting stations are subject to error and should be used with caution.CAUTION - CHANNELS Channels are subject to frequent changes due to very strong tidal currents.
+
+
+CAUTION - CHANNELS
+Channels are subject to frequent changes due to very strong tidal currents.
+
+RADAR REFLECTORS
+Radar reflectors have been placed on many floating aids to navigation. Individual radar
+reflector identification on these aids has been omitted from this chart.
+
+
+NOAA WEATHER RADIO BROADCASTS
+The NOAA Weather Radio station listed below provides continuous weather broadcasts. The reception range is typically 20 to 40 nautical miles from the antenna site, but can be as much as 100 nautical miles for stations at high elevations.
+
+Sand Point, AK KSDP 840 AM
+
+
+TIDAL INFORMATION
+For tidal information see the NOS Tide Table publication or go to http://co-ops.nos.noaa.gov
+
+
+ADMINISTRATION AREA
+The entire extent of this ENC cell falls within the limits of an Administration Area. This area covers land, internal waters, and territorial sea. The territorial sea is a maritime zone which the United States exercises sovereignty extending to the airspace as well as to its bed and subsoil. For more information, please refer to the Coast Pilot.
+
+
+COLREGS, 82.1705 (see note A)
+International Regulations for Preventing Collisions at Sea, 1972. The entire area of this chart falls seaward of the COLREGS Demarcation Line.
+
+
+END OF FILE
\ No newline at end of file
diff --git a/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GB.TXT b/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GB.TXT
new file mode 100644
index 0000000..caa0cb3
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GB.TXT
@@ -0,0 +1,2 @@
+CAUTION - QUALITY OF BATHYMETRIC DATA
+The areas represented by the object M_QUAL (Quality of data) are approximate due to generalizing for clarity. Caution is advised, particularly for nearshore navigation or voyage planning. M_QUAL represents areas of uniform quality of bathymetric data. The CATZOC (Category of zone of confidence in data) attribute of M_QUAL provides an assessment of the overall zone of confidence.
\ No newline at end of file
diff --git a/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GM.000 b/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GM.000
new file mode 100644
index 0000000..6ceb97d
Binary files /dev/null and b/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GM.000 differ
diff --git a/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GM.001 b/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GM.001
new file mode 100644
index 0000000..b65d192
Binary files /dev/null and b/pyrate/tests/common/charts/example_charts/US4AK5GM/US4AK5GM.001 differ
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87A.TXT b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87A.TXT
new file mode 100644
index 0000000..926ec96
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87A.TXT
@@ -0,0 +1,144 @@
+NOAA ENC
+
+NATIONAL OCEANIC AND ATMOSPHERIC ADMINISTRATION
+
+US4FL87M - CAPE CANAVERAL TO BETHEL SHOAL
+
+INDEX:
+NOTE A
+AIDS TO NAVIGATION
+NOAA WEATHER BROADCASTS
+CAUTION - TEMPORARY CHANGES
+CAUTION - DREDGED AREAS
+SUPPLEMENTAL INFORMATION
+AUTHORITIES
+POLLUTION REPORTS
+RADAR REFLECTORS
+WARNING - PRUDENT MARINER
+CAUTION - SUBMARINE PIPELINES AND CABLES
+HURRICANES AND TROPICAL STORMS
+CAUTION - LIMITATIONS
+ADDITIONAL INFORMATION
+TIDAL INFORMATION
+CAUTION - USACE HYDROGRAPHIC SURVEYS
+
+
+NOTES:
+NOTE A
+Navigation regulations are published in Chapter 2, U.S.
+Coast Pilot 4. Additions or revisions to Chapter 2 are pub-
+lished in the Notice to Mariners. Information concerning the
+regulations may be obtained at the Office of the Commander,
+7th Coast Guard District in Miami, Florida, or at the Office
+of the District Engineer, Corps of Engineers in Jacksonville,
+Florida.
+Refer to charted regulation section numbers.
+
+AIDS TO NAVIGATION
+Consult U.S. Coast Guard Light List for
+supplemental information concerning aids to
+navigation.
+
+NOAA WEATHER BROADCASTS
+The NOAA Weather Radio stations listed
+below provide continuous weather broadcasts.
+The reception range is typically 20 to 40
+nautical miles from the antenna site, but can be
+as much as 100 nautical miles for stations at
+high elevations.
+
+Melbourne, FL WXJ-70 162.550 MHz
+Fort Pierce, FL WWF-69 162.425 MHz
+
+CAUTION - TEMPORARY CHANGES
+Temporary changes or defects in aids to
+navigation are not indicated.
+See Local Notice to Mariners.
+
+CAUTION - DREDGED AREAS
+Improved channels are
+subject to shoaling, particularly at the edges.
+
+SUPPLEMENTAL INFORMATION
+Consult U.S. Coast Pilot 4 for important
+supplemental information.
+
+AUTHORITIES
+Hydrography and topography by the National Ocean Service, Coast
+Survey, with additional data from the Corps of Engineers, and U.S.
+Coast Guard.
+
+POLLUTION REPORTS
+Report all spills of oil and hazardous sub-
+stances to the National Response Center via
+1-800-424-8802 (toll free), or to the nearest U.S.
+Coast Guard facility if telephone communication
+is impossible (33 CFR 153).
+
+RADAR REFLECTORS
+Radar reflectors have been placed on many
+floating aids to navigation. Individual radar
+reflector identification on these aids has been
+omitted from this chart.
+
+WARNING - PRUDENT MARINER
+The prudent mariner will not rely solely on
+any single aid to navigation, particularly on
+floating aids. See U.S. Coast Guard Light List
+and U.S. Coast Pilot for details.
+
+CAUTION - SUBMARINE PIPELINES AND CABLES
+Additional uncharted submarine pipelines and
+submarine cables may exist within the area of
+this chart. Not all submarine pipelines and sub-
+marine cables are required to be buried, and
+those that were originally buried may have
+become exposed. Mariners should use extreme
+caution when operating vessels in depths of
+water comparable to their draft in areas where
+pipelines and cables may exist, and when
+anchoring, dragging, or trawling.
+Covered wells may be marked by lighted or
+unlighted buoys.
+
+HURRICANES AND TROPICAL STORMS
+Hurricanes, tropical storms and other major storms may
+cause considerable damage to marine structures, aids to
+navigation and moored vessels, resulting in submerged debris
+in unknown locations.
+Charted soundings, channel depths and shoreline may not
+reflect actual conditions following these storms. Fixed aids to
+navigation may have been damaged or destroyed. Buoys may
+have been moved from their charted positions, damaged, sunk,
+extinguished or otherwise made inoperative. Mariners should
+not rely upon the position or operation of an aid to navigation.
+Wrecks and submerged obstructions may have been displaced
+from charted locations. Pipelines may have become uncovered
+or moved.
+Mariners are urged to exercise extreme caution and are
+requested to report aids to navigation discrepancies and
+hazards to navigation to the nearest United States Coast Guard
+unit.
+
+CAUTION - LIMITATIONS
+Limitations on the use of radio signals as
+aids to marine navigation can be found in the
+U.S. Coast Guard Light Lists and National
+Geospatial-Intelligence Agency Publication 117.
+Radio direction-finder bearings to commercial
+broadcasting stations are subject to error and
+should be used with caution.
+
+ADDITIONAL INFORMATION
+Additional information can be obtained at www.nauticalcharts.noaa.gov
+
+TIDAL INFORMATION
+For tidal information see the NOS Tide Table publication or go to http://co-ops.nos.noaa.gov.
+
+CAUTION - USACE HYDROGRAPHIC SURVEYS
+USACE conducts hydrographic surveys to monitor navigation conditions.
+These surveys are not intended to detect underwater features. Uncharted features hazardous to surface navigation are not expected but may exist in federal channels.
+
+
+
+END OF FILE
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87B.TXT b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87B.TXT
new file mode 100644
index 0000000..eb35c8b
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87B.TXT
@@ -0,0 +1,16 @@
+Exclusive Economic Zone (EEZ)
+The EEZ is a zone beyond and adjacent to the territorial sea within which the U.S. has certain
+sovereign rights and jurisdiction. Under some U.S. laws, the inner limit of the EEZ extends landward
+to the seaward limit of the states submerged lands. For more information, please refer to the Coast
+Pilot.
+
+Contiguous Zone
+The Contiguous Zone is a zone contiguous to the territorial sea, in which the United States may
+exercise the control necessary to prevent and punish infringement within its territory or territorial sea
+of its customs, fiscal, immigration, cultural heritage or sanitary laws and regulations. For more
+information, please refer to the Coast Pilot.
+
+Administration Area
+This area covers land, internal waters, and territorial sea. The territorial sea is a maritime zone over
+which the United States exercises sovereignty extending to the airspace as well as to its bed and
+subsoil. For more information, please refer to the Coast Pilot.
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87C.TXT b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87C.TXT
new file mode 100644
index 0000000..f8f0422
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87C.TXT
@@ -0,0 +1,14 @@
+The Inland Navigational Rules Act of 1980 is in effect for vessels transiting this
+area. The seaward boundaries of this area are the COLREGS demarcation lines.
+In the area seaward of the COLREGS demarcation lines, vessels are governed by
+COLREGS: International Regulations for Preventing Collisions at Sea, 1972.
+The COLREGS demarcation lines are defined in 33 CFR 80.727b.
+
+Navigation regulations are published in Chapter 2, U.S.
+Coast Pilot 4. Additions or revisions to Chapter 2 are pub-
+lished in the Notice to Mariners. Information concerning the
+regulations may be obtained at the Office of the Commander,
+7th Coast Guard District in Miami, Florida, or at the Office
+of the District Engineer, Corps of Engineers in Jacksonville,
+Florida.
+Refer to charted regulation section numbers.
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87D.TXT b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87D.TXT
new file mode 100644
index 0000000..e18b018
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87D.TXT
@@ -0,0 +1 @@
+Depths from surveys of 2000-2007 - Regulations for Ocean Dumping Sites are contained in 40 CFR, Parts 220-229. Additional information concerning the regulations and requirements for use of the sites may be obtained from the Environmental Protection Agency (EPA). See U.S. Coast Pilots appendix for addresses of EPA offices. Dumping subsequent to the survey dates may have reduced the depths shown.
\ No newline at end of file
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87E.TXT b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87E.TXT
new file mode 100644
index 0000000..a0a5a57
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87E.TXT
@@ -0,0 +1,2 @@
+This area represents the limits of the Low-Mid Inclination launch hazard areas associated with the majority of launches from Cape Canaveral. Launch debris may fall within these areas. See Notice to Mariners or contact the Coast Guard for launch hazard areas specific to each launch and the times they will be in effect.
+
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87F.TXT b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87F.TXT
new file mode 100644
index 0000000..f0172fe
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87F.TXT
@@ -0,0 +1 @@
+This area represents the limits of the High Inclination launch hazard areas associated with the majority of launches from Cape Canaveral. Launch debris may fall within these areas. See Notice to Mariners or contact the Coast Guard for launch hazard areas specific to each launch and the times they will be in effect.
\ No newline at end of file
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87G.TXT b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87G.TXT
new file mode 100644
index 0000000..6898bf6
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87G.TXT
@@ -0,0 +1,6 @@
+CAUTION QUALITY OF BATHYMETRIC DATA
+The areas represented by the object M_QUAL (Quality of data) are approximate due
+to generalizing for clarity. Caution is advised, particularly for nearshore navigation
+or voyage planning. M_QUAL represents areas of uniform quality of bathymetric data.
+The CATZOC (Category of zone of confidence in data) attribute of M_QUAL provides
+an assessment of the overall zone of confidence.
\ No newline at end of file
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87M.000 b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87M.000
new file mode 100644
index 0000000..0a439c1
Binary files /dev/null and b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87M.000 differ
diff --git a/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87M.001 b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87M.001
new file mode 100644
index 0000000..83427b3
Binary files /dev/null and b/pyrate/tests/common/charts/example_charts/US4FL87M/US4FL87M.001 differ
diff --git a/pyrate/tests/common/charts/example_charts/empty/.gitkeep b/pyrate/tests/common/charts/example_charts/empty/.gitkeep
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70A.TXT b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70A.TXT
new file mode 100644
index 0000000..5f4b3b2
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70A.TXT
@@ -0,0 +1,132 @@
+NOAA ENC
+
+NATIONAL OCEANIC AND ATMOSPHERIC ADMINISTRATION
+
+US4VA70M - CHINCOTEAGUE INLET TO GREAT MACHIPONGO INLET
+
+INDEX
+AUTHORITIES
+AIDS TO NAVIGATION
+NOTE A
+WARNING - PRUDENT MARINER
+POLLUTION REPORTS
+SUPPLEMENTAL INFORMATION
+CAUTION - TEMPORARY CHANGES
+CAUTION - LIMITATIONS
+CAUTION - SUBMARINE PIPELINES AND CABLES
+RADAR REFLECTORS
+NOAA WEATHER BROADCASTS
+CAUTION - DREDGED AREAS
+TIDAL INFORMATION
+ADDITIONAL INFORMATION
+
+
+NOTES:
+AUTHORITIES
+Hydrography and Topography by the National Ocean Service, Coast
+Survey, with additional data from the Corps of Engineers, Geological
+Survey, and U.S. Coast Guard.
+
+
+AIDS TO NAVIGATION
+Consult U.S. Coast Guard Light List for
+supplemental information concerning aids to
+navigation.
+
+
+NOTE A
+Navigation regulations are published in Chapter 2, U.S.
+Coast Pilot 3. Additions or revisions to Chapter 2 are pub-
+lished in the Notice to Mariners. Information concerning the
+regulations may be obtained at the Office of the Commander,
+5th Coast Guard District in Portsmouth, Virginia or at the
+Office of the District Engineer, Corps of Engineers in
+Norfolk, Virginia.
+Refer to charted regulation section numbers.
+
+
+WARNING - PRUDENT MARINER
+The prudent mariner will not rely solely on
+any single aid to navigation, particularly on
+floating aids. See U.S. Coast Guard Light List
+and U.S. Coast Pilot for details.
+
+
+POLLUTION REPORTS
+Report all spills of oil and hazardous substances to the
+National Response Center via 1-800-424-8802 (toll free), or
+to the nearest U.S. Coast Guard facility if telephone com-
+munication is impossible (33 CFR 153).
+
+
+SUPPLEMENTAL INFORMATION
+Consult U.S. Coast Pilot 3 for important
+supplemental information.
+
+
+CAUTION - TEMPORARY CHANGES
+Temporary changes or defects in aids to
+navigation are not indicated. See
+Local Notice to Mariners.
+
+
+CAUTION - LIMITATIONS
+Limitations on the use of radio signals as
+aids to marine navigation can be found in the
+U.S. Coast Guard Light Lists and National
+Geospatial-Intelligence Agency Publication 117.
+Radio direction-finder bearings to commercial
+broadcasting stations are subject to error and
+should be used with caution.
+
+
+CAUTION - SUBMARINE PIPELINES AND CABLES
+Additional uncharted submarine pipelines and
+submarine cables may exist within the area of
+this chart. Not all submarine pipelines and sub-
+marine cables are required to be buried, and
+those that were originally buried may have
+become exposed. Mariners should use extreme
+caution when operating vessels in depths of
+water comparable to their draft in areas where
+pipelines and cables may exist, and when
+anchoring, dragging, or trawling.
+Covered wells may be marked by lighted or
+unlighted buoys.
+
+
+RADAR REFLECTORS
+Radar reflectors have been placed on many
+floating aids to navigation. Individual radar
+reflector identification on these aids has been
+omitted from this chart.
+
+
+NOAA WEATHER RADIO BROADCASTS
+The NOAA Weather Radio stations listed
+below provide continuous weather broadcasts.
+The reception range is typically 37 to 74 kilometers / 20
+to 40 nautical miles from the antenna site, but can be
+as much as 100 nautical miles / 185 kilometers for stations at
+high elevations.
+
+Norfolk, VA KHB-37 162.550 MHz
+Salisbury, MD KEC-92 162.475 MHz
+Heathsville, VA WXM-57 162.400 MHz
+
+
+CAUTION - DREDGED AREAS
+Improved channels are
+subject to shoaling, particularly at the edges.
+
+
+TIDAL INFORMATION
+For tidal information see the NOS tide table publication or go to http://co-ops.nos.noaa.gov.
+
+
+ADDITIONAL INFORMATION
+Additional information can be obtained at www.nauticalcharts.noaa.gov
+
+
+END OF FILE
+
diff --git a/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70B.TXT b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70B.TXT
new file mode 100644
index 0000000..f7f8e2c
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70B.TXT
@@ -0,0 +1,8 @@
+Exclusive Economic Zone (EEZ)
+The EEZ is a zone beyond and adjacent to the territorial sea within which the U.S. has certain sovereign rights and jurisdiction. Under some U.S. laws, the inner limit of the EEZ extends landward to the seaward limit of the states submerged lands. For more information, please refer to the Coast Pilot.
+
+Contiguous Zone
+The Contiguous Zone is a zone contiguous to the territorial sea, in which the United States may exercise the control necessary to prevent and punish infringement within its territory or territorial sea of its customs, fiscal, immigration, cultural heritage or sanitary laws and regulations. For more information, please refer to the Coast Pilot.
+
+Administration Area
+This area covers land, internal waters, and territorial sea. The territorial sea is a maritime zone over which the United States exercises sovereignty extending to the airspace as well as to its bed and subsoil. For more information, please refer to the Coast Pilot.
diff --git a/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70C.TXT b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70C.TXT
new file mode 100644
index 0000000..12b441d
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70C.TXT
@@ -0,0 +1 @@
+Mariners are warned that numerous uncharted duck blinds, stakes, and fishing structures, some submerged, may exist in the fish trap areas. Such structures are not charted unless known to be permanent.
diff --git a/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70D.TXT b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70D.TXT
new file mode 100644
index 0000000..f674a1b
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70D.TXT
@@ -0,0 +1,17 @@
+The Inland Navigational Rules Act of 1980 is in effect for vessels
+transiting this area. The seaward boundaries of this area are the COLREGS
+demarcation lines. In the area seaward of the COLREGS demarcation lines, vessels
+are governed by COLREGS: International Regulations for Prevention of Collisions
+at Sea, 1972. The COLREGS demarcation lines are defined in 33 CFR 80.505c,
+33 CFR 80.505d, 33 CFR 80.505e and 33 CFR 505h.
+
+
+NOTE A
+Navigation regulations are published in Chapter 2, U.S.
+Coast Pilot 3. Additions or revisions to Chapter 2 are pub-
+lished in the Notice to Mariners. Information concerning the
+regulations may be obtained at the Office of the Commander,
+5th Coast Guard District in Portsmouth, Virginia or at the
+Office of the District Engineer, Corps of Engineers in
+Norfolk, Virginia.
+Refer to charted regulation section numbers.
diff --git a/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70E.TXT b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70E.TXT
new file mode 100644
index 0000000..a453f02
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70E.TXT
@@ -0,0 +1,6 @@
+CAUTION - QUALITY OF BATHYMETRIC DATA
+The areas represented by the object M_QUAL (Quality of data) are approximate
+due to generalizing for clarity. Caution is advised, particularly for nearshore
+navigation or voyage planning. M_QUAL represents areas of uniform quality of
+bathymetric data. The CATZOC (Category of zone of confidence in data) attribute
+of M_QUAL provides an assessment of the overall zone of confidence.
\ No newline at end of file
diff --git a/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.000 b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.000
new file mode 100644
index 0000000..2865209
Binary files /dev/null and b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.000 differ
diff --git a/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.001 b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.001
new file mode 100644
index 0000000..ee75e08
Binary files /dev/null and b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.001 differ
diff --git a/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.002 b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.002
new file mode 100644
index 0000000..b77b579
Binary files /dev/null and b/pyrate/tests/common/charts/example_charts/nested_folder/US4VA70M/US4VA70M.002 differ
diff --git a/pyrate/tests/common/charts/example_charts/this is_no_chart.txt b/pyrate/tests/common/charts/example_charts/this is_no_chart.txt
new file mode 100644
index 0000000..a813a31
--- /dev/null
+++ b/pyrate/tests/common/charts/example_charts/this is_no_chart.txt
@@ -0,0 +1 @@
+This should not be found by the discovery chart tests.
diff --git a/pyrate/tests/common/charts/test_db.py b/pyrate/tests/common/charts/test_db.py
new file mode 100644
index 0000000..a3246de
--- /dev/null
+++ b/pyrate/tests/common/charts/test_db.py
@@ -0,0 +1,469 @@
+"""This module tests if the database abstraction is working correctly."""
+
+# Standard library
+from io import StringIO
+from os.path import join
+import sqlite3
+from tempfile import TemporaryDirectory
+import warnings
+
+# Typing
+from typing import cast
+from typing import List
+from typing import Optional
+
+# Unit testing
+from unittest.mock import patch
+from unittest import skip
+from unittest import skipIf
+from unittest import TestCase
+
+# Verification
+from hypothesis import given
+from hypothesis import HealthCheck
+from hypothesis import settings
+import hypothesis.strategies as st
+import numpy
+
+# Geometry
+from shapely.geometry import Point
+
+# Package under test
+from pyrate.common.charts.db import to_wkb
+from pyrate.common.charts import SpatialiteDatabase
+from pyrate.plan.geometry import CartesianLocation
+from pyrate.plan.geometry import CartesianPolygon
+from pyrate.plan.geometry import Geospatial
+from pyrate.plan.geometry import LocationType
+from pyrate.plan.geometry import PolarGeometry
+from pyrate.plan.geometry import PolarLocation
+from pyrate.plan.geometry import PolarPolygon
+from pyrate.plan.geometry import PolarRoute
+
+# Flags and helpers
+from pyrate.common.testing import IS_CI
+from pyrate.common.testing import IS_EXTENDED_TESTING
+from pyrate.common.testing import SPATIALITE_AVAILABLE
+from pyrate.common.testing.strategies.geometry import location_types
+from pyrate.common.testing.strategies.geometry import polar_locations
+from pyrate.common.testing.strategies.geometry import polar_objects
+from pyrate.common.testing.strategies.geometry import polar_polygons
+from pyrate.common.testing.strategies.geometry import polar_routes_stable
+
+
+# force testing this in CI to make sure it is tested regularly at least there
+SKIP_IF_SPATIALITE_IS_MISSING = skipIf(
+ not SPATIALITE_AVAILABLE and not IS_CI, "allow spatialite to be missing and skip the tests in that case"
+)
+
+
+# reduce the example count since otherwise this would take far too long, especially on the extended tests
+TEST_REDUCED_COUNT = settings(
+ max_examples=500 if IS_EXTENDED_TESTING else 50,
+ deadline=1000,
+ suppress_health_check=(HealthCheck.too_slow,),
+)
+
+
+@SKIP_IF_SPATIALITE_IS_MISSING
+class TestDatabase(TestCase):
+ """Test for basic use of the database."""
+
+ @staticmethod
+ def _apply_id_if_missing(geometry: Geospatial, identifier: Optional[int]) -> None:
+ if geometry.identifier is None:
+ geometry.identifier = identifier
+
+ def test_empty_creation(self) -> None:
+ """Tests whether the creation of a new database if not crashing anything."""
+ with SpatialiteDatabase(":memory:") as database:
+ self.assertEqual(len(database), database.count_geometries())
+ self.assertEqual(
+ database.count_geometries(), 0, "a freshly initialized database should contain no polygons"
+ )
+ self.assertEqual(
+ database.count_vertices(), 0, "a freshly initialized database should contain no vertices"
+ )
+
+ def test_disable_issue_create_statement(self) -> None:
+ """Tests whether initializing with ``issue_create_statement=False`` fails."""
+ with SpatialiteDatabase(":memory:", issue_create_statement=False) as database:
+ with self.assertRaises(sqlite3.OperationalError):
+ database.count_geometries()
+
+ def test_write_invalid_geometry_no_update_exception(self) -> None:
+ """Tests that an exception is raised if an invalid geometry is attempted to be written.
+
+ This tests the case where ``update=False``.
+ """
+ with self.assertRaises(ValueError):
+ with SpatialiteDatabase(":memory:") as database:
+ point = PolarLocation(latitude=-70.2, longitude=+120.444, name="Pointy Point")
+ invalid_geometry = PolarPolygon([point, point, point])
+ database.write_geometry(invalid_geometry, update=False)
+
+ def test_write_invalid_geometry_no_update_suppressed(self) -> None:
+ """Tests that NO exception is raised if an invalid geometry is attempted to be written.
+
+ This tests the case where ``update=False`` and ``raise_on_failure=False``.
+ """
+ with self.assertRaises(UserWarning):
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ with SpatialiteDatabase(":memory:") as database:
+ point = PolarLocation(latitude=-70.2, longitude=+120.444, name="Pointy Point")
+ invalid_geometry = PolarPolygon([point, point, point])
+ database.write_geometry(invalid_geometry, update=False, raise_on_failure=False)
+
+ @skip("SpatialiteDatabase.write_geometry() currently does not detect it when update=True")
+ def test_write_invalid_geometry_with_update_exception(self) -> None:
+ """Tests that an exception is raised if an invalid geometry is attempted to be written.
+
+ This tests the case where ``update=True``.
+ """
+ with self.assertRaises(ValueError):
+ with SpatialiteDatabase(":memory:") as database:
+ point = PolarLocation(latitude=-70.2, longitude=+120.444, name="Pointy Point")
+ invalid_geometry = PolarPolygon([point, point, point])
+ database.write_geometry(invalid_geometry, update=True)
+
+ @skip("SpatialiteDatabase.write_geometry() currently does not detect it when update=True")
+ def test_write_invalid_geometry_with_update_suppressed(self) -> None:
+ """Tests that NO exception is raised if an invalid geometry is attempted to be written.
+
+ This tests the case where ``update=True`` and ``raise_on_failure=False``.
+ """
+ with self.assertRaises(UserWarning):
+ with warnings.catch_warnings():
+ warnings.simplefilter("error")
+ with SpatialiteDatabase(":memory:") as database:
+ point = PolarLocation(latitude=-70.2, longitude=+120.444, name="Pointy Point")
+ invalid_geometry = PolarPolygon([point, point, point])
+ database.write_geometry(invalid_geometry, update=True, raise_on_failure=False)
+
+ def test_convert_invalid_geometry_type(self) -> None:
+ """Tests whether converting an unsupported geometry type raises a :class:`NotImplementedError`."""
+ with self.assertRaises(NotImplementedError):
+ # This obviously is a faulty cast, but we need it to trigger the exception
+ polar = cast(PolarLocation, CartesianLocation(55, 55))
+ to_wkb(polar)
+
+ def test_create_twice(self) -> None:
+ """Tests that opening/creating/initializing a database twice does not cause any problems.
+
+ This method checks for output on stdout and stderr since sqlite will sometimes just print some
+ warnings instead of raising exceptions. This is a regression test.
+ """
+        # this creates a shared database (within this process); need to pass uri=True later on
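+        # SQLite treats "file::memory:?cache=shared" (opened with uri=True) as a single shared
+        # in-memory database, so both connections below operate on the same underlying store.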
+ uri = "file::memory:?cache=shared"
+
+ # capture stdout & stderr
+ with patch("sys.stdout", new=StringIO()) as fake_stdout:
+ with patch("sys.stderr", new=StringIO()) as fake_stderr:
+ # open two databases
+ with SpatialiteDatabase(uri, uri=True):
+ with SpatialiteDatabase(uri, uri=True):
+ pass
+
+ # assert that nothing got printed
+ self.assertEqual(len(fake_stdout.getvalue()), 0)
+ self.assertEqual(len(fake_stderr.getvalue()), 0)
+
+ def test_clear_and_count(self) -> None:
+ """Tests :meth:`~.SpatialiteDatabase.clear` and the two counting methods."""
+ with SpatialiteDatabase(":memory:") as database:
+ self.assertEqual(len(database), 0)
+ self.assertEqual(database.count_vertices(), 0)
+
+ poly = PolarPolygon([PolarLocation(0, 0), PolarLocation(0, 1), PolarLocation(1, 1)])
+
+ poly.identifier = 1
+ database.write_geometry(poly)
+
+ self.assertEqual(len(database), 1)
+ self.assertEqual(database.count_vertices(), len(poly.locations))
+
+ poly.identifier = 1
+ database.write_geometry(poly, update=True)
+
+ self.assertEqual(len(database), 1)
+ self.assertEqual(database.count_vertices(), len(poly.locations))
+
+ poly.identifier = 2
+ database.write_geometry(poly)
+
+ self.assertEqual(len(database), 2)
+ self.assertEqual(database.count_vertices(), len(poly.locations) * 2)
+
+ database.clear()
+
+ self.assertEqual(len(database), 0)
+ self.assertEqual(database.count_vertices(), 0)
+
+ def test_result_multi_geometry(self) -> None:
+ """Tests whether the DB can correctly handle query results that are multi-polygons/-routes."""
+
+ # Test with both routes and polygons
+ for geometry_type in (PolarRoute, PolarPolygon):
+ with self.subTest(f"With type {geometry_type.__name__}"):
+
+ with SpatialiteDatabase(":memory:") as database:
+ horseshoe = numpy.array(
+ [
+ [-3.06913376, 47.50722936],
+ [-3.0893898, 47.52566325],
+ [-3.07788849, 47.54640812],
+ [-3.03050995, 47.55278059],
+ [-2.9875946, 47.54675573],
+ [-2.97849655, 47.53006788],
+ [-2.97712326, 47.51801223],
+ [-2.97849655, 47.50908464],
+ [-3.04887772, 47.50653362],
+ [-3.04922104, 47.51047605],
+ [-2.9898262, 47.51349065],
+ [-2.99480438, 47.54084606],
+ [-3.03136826, 47.54698747],
+ [-3.06947708, 47.54327953],
+ [-3.07806015, 47.52763379],
+ [-3.06741714, 47.51198337],
+ [-3.06913376, 47.50722936],
+ ]
+ )
+ touch_point = PolarLocation(longitude=-3.0588340759277344, latitude=47.50943249496333)
+
+ database.write_geometry(geometry_type.from_numpy(horseshoe)) # type: ignore
+ result = list(database.read_geometries_around(touch_point, radius=3_000))
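+                    # Reading around the touch point clips the horseshoe to the query radius, so the
+                    # stored geometry is expected to come back split into 2 or 3 disjoint parts.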
+                    self.assertIn(len(result), {2, 3})
+
+ def _random_insert_and_extract_all_generic(
+ self, geometry: PolarGeometry, location_type: LocationType, update: bool
+ ) -> None:
+ """Tests whether inserting and then reading works in a very basic setting."""
+
+ with SpatialiteDatabase(":memory:") as database:
+ # it should be empty in the beginning
+ self.assertEqual(0, len(database))
+ self.assertEqual(0, database.count_geometries())
+ self.assertEqual(0, database.count_vertices())
+
+ # insert the polygon
+ try:
+ database.write_geometry(geometry, update=update)
+ except ValueError:
+ return # this example is corrupt, try the next one
+ else:
+ if update and len(database) == 0: # Errors cannot be checked if update==True
+ return # this example is corrupt, try the next one
+
+ # now the database should not be empty anymore
+ self.assertEqual(1, len(database))
+ self.assertEqual(1, database.count_geometries())
+
+ if isinstance(geometry, (PolarRoute, PolarPolygon)):
+ # Some repeated points might be removed, so we cannot check the exact number of vertices
+ self.assertGreaterEqual(len(geometry.locations), database.count_vertices())
+ else: # is a PolarLocation
+ self.assertEqual(1, database.count_vertices())
+
+ # the element should be included in "all"
+ read_obstacles = list(database.read_all())
+ self.assertEqual(1, len(read_obstacles))
+
+ # it should only be included if the type matches
+ all_filtered = list(database.read_all(only_location_type=location_type))
+ if geometry.location_type == location_type:
+ self.assertEqual(all_filtered, read_obstacles)
+ else:
+ self.assertEqual(len(all_filtered), 0)
+
+ # and it should be the one that we have written into the database in the first place
+ read_obstacles_single = read_obstacles[0]
+            # the id may be newly generated if the geometry's id is None, so
+ # set it to the generated one for the sake of equality testing
+ TestDatabase._apply_id_if_missing(geometry, read_obstacles_single.identifier)
+
+ assert isinstance(read_obstacles_single, type(geometry)) # Make mypy understand this check
+
+ if isinstance(geometry, PolarPolygon):
+ self.assertTrue(geometry.almost_congruent(read_obstacles_single)) # type: ignore
+ else:
+ self.assertTrue(geometry.equals_exact(read_obstacles_single, tolerance=1e-3))
+
+ @given(polar_locations(), location_types(), st.booleans())
+ @TEST_REDUCED_COUNT
+ def test_random_insert_and_extract_all_locations(
+ self, geometry: PolarGeometry, location_type: LocationType, update: bool
+ ) -> None:
+ """Basic test with locations."""
+ self._random_insert_and_extract_all_generic(geometry, location_type, update)
+
+ @given(polar_routes_stable(), location_types(), st.booleans())
+ @TEST_REDUCED_COUNT
+ def test_random_insert_and_extract_all_routes(
+ self, geometry: PolarGeometry, location_type: LocationType, update: bool
+ ) -> None:
+ """Basic test with routes."""
+ self._random_insert_and_extract_all_generic(geometry, location_type, update)
+
+ @given(polar_polygons(), location_types(), st.booleans())
+ @TEST_REDUCED_COUNT
+ def test_random_insert_and_extract_all_polygons(
+ self, geometry: PolarGeometry, location_type: LocationType, update: bool
+ ) -> None:
+ """Basic test with polygons."""
+ self._random_insert_and_extract_all_generic(geometry, location_type, update)
+
+ def test_copy_to_other_database(self) -> None:
+ """Tests whether database can be copied.
+
+ This test is not performed using hypothesis as it takes too long.
+ """
+
+ location_1 = PolarLocation(latitude=-76.400, longitude=-171.924)
+ location_2 = PolarLocation(latitude=-70.400, longitude=-171.924)
+ location_3 = PolarLocation(latitude=-76.400, longitude=-170.924)
+ polygons = [
+ PolarPolygon(
+ locations=[location_1, location_2, location_3],
+ name="K",
+ identifier=1234145,
+ location_type=LocationType.SHALLOW_WATER,
+ ),
+ PolarPolygon(locations=[location_1, location_2, location_3], identifier=2342),
+ ]
+
+ with TemporaryDirectory() as directory_name:
+ database_file_name = join(directory_name, "other_db.sqlite")
+
+ with SpatialiteDatabase(":memory:") as first_database:
+ with first_database.disable_synchronization():
+ first_database.write_geometries(polygons)
+ first_database.copy_contents_to_database(database_file_name)
+
+ with SpatialiteDatabase(database_file_name) as second_database:
+ read = list(second_database.read_all())
+
+ def sorter(geometry: Geospatial) -> int:
+ return geometry.identifier or -1
+
+ read = list(sorted(read, key=sorter))
+ polygons = list(sorted(polygons, key=sorter))
+
+ self.assertEqual(len(read), len(polygons))
+ for polygon_a, polygon_b in zip(read, polygons):
+ assert isinstance(polygon_a, PolarPolygon) # Make mypy understand this check
+ self.assertTrue(polygon_a.equals_almost_congruent(polygon_b, rel_tolerance=1e-15))
+
+ @settings(max_examples=50 if IS_EXTENDED_TESTING else 10, suppress_health_check=(HealthCheck.too_slow,))
+ @given(st.lists(polar_objects(), min_size=0, max_size=3))
+ def test_database_simplification_zero_tolerance(self, geometries: List[PolarGeometry]) -> None:
+ """Tests whether database objects can be simplified."""
+
+ for index, geometry in enumerate(geometries):
+ geometry.identifier = index
+
+ with SpatialiteDatabase(":memory:") as database:
+ try:
+ database.write_geometries(geometries)
+ except ValueError:
+ return # this example is corrupt, try the next one
+ database.simplify_contents(0.0)
+ read = list(database.read_all())
+
+        # all geometries should come back unchanged
+ self.assertEqual(len(read), len(geometries))
+ for original, read_back in zip(geometries, read):
+ self.assertTrue(original.equals_exact(read_back, tolerance=1e-3))
+
+ def test_database_simplification(self) -> None:
+ """Tests whether database objects can be simplified."""
+
+        # create a circular polygon with a unique identifier
+ points = numpy.array(Point(0, 0).buffer(10_000, resolution=16).exterior.coords)
+ cartesian_polygon = CartesianPolygon.from_numpy(points, identifier=10042)
+ polygon = cartesian_polygon.to_polar(origin=PolarLocation(-50.111, -30))
+ self.assertEqual(len(polygon.locations), 65)
+
+ with SpatialiteDatabase(":memory:") as database:
+ database.write_geometry(polygon)
+ database.simplify_contents(100)
+ read = list(database.read_all())
+
+ # only one polygon should be returned
+ self.assertEqual(len(read), 1)
+ read_polygon = read[0]
+ assert isinstance(read_polygon, PolarPolygon) # Make mypy understand this check
+
+ # That polygon should be similar
+ self.assertEqual(33, len(read_polygon.locations))
+ self.assertAlmostEqual(read_polygon.area, polygon.area, places=-3)
+ self.assertTrue(read_polygon.equals_almost_congruent(polygon, rel_tolerance=0.01))
+
+        # apart from the simplified locations, it should be unchanged
+ polygon.locations = read_polygon.locations
+ self.assertEqual(polygon, read_polygon)
+
+
+@SKIP_IF_SPATIALITE_IS_MISSING
+class TestDatabaseReadAroundHandcrafted(TestCase):
+ """Tests :meth:`pyrate.common.charts.SpatialiteDatabase.read_obstacles_around` with known examples."""
+
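+    # The included polygon surrounds the origin (0° N, 0° E); the excluded one lies near
+    # (56° N, 170° E), far enough away that queries around the origin should never return it.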
+ included_polygon = PolarPolygon(
+ [PolarLocation(1, 1), PolarLocation(1, -1), PolarLocation(-1, -1), PolarLocation(-1, 1)], identifier=1
+ )
+ excluded_polygon = PolarPolygon(
+ [PolarLocation(56, 170), PolarLocation(56.5, 170), PolarLocation(56, 170.2)], identifier=2
+ )
+ both_polygons = [included_polygon, excluded_polygon]
+
+ def setUp(self) -> None:
+ self.database = SpatialiteDatabase(":memory:")
+ self.database.write_geometries(self.both_polygons)
+ super().setUp()
+
+ def tearDown(self) -> None:
+ self.database.close()
+ super().tearDown()
+
+ def test_read_around_includes_full(self) -> None:
+ """Reads a complete polygon."""
+
+ around = list(self.database.read_geometries_around(PolarLocation(0, 2), radius=500_000))
+        self.assertEqual(1, len(around))
+ read = around[0]
+ assert isinstance(read, PolarPolygon) # Make mypy understand this check
+ self.assertTrue(self.included_polygon.almost_congruent(read))
+
+ def test_read_around_includes_nothing(self) -> None:
+ """Tests that :meth:`pyrate.common.charts.SpatialiteDatabase.read_obstacles_around` works correctly.
+
+ Reads nothing.
+ """
+
+ around = list(self.database.read_geometries_around(PolarLocation(0, 2), radius=0))
+ self.assertEqual(0, len(around))
+
+ def test_read_around_includes_partial(self) -> None:
+ """Tests that :meth:`pyrate.common.charts.SpatialiteDatabase.read_obstacles_around` works correctly.
+
+ Reads about half of the polygon.
+ """
+ query_point = PolarLocation(latitude=0.0, longitude=5.0)
+ # this is the distance to center of self.included_polygon
+ radius = query_point.distance(PolarLocation(0.0, 0.0))
+ read = list(self.database.read_geometries_around(query_point, radius=radius))
+ self.assertEqual(1, len(read))
+ read_polygon = read[0]
+ assert isinstance(read_polygon, PolarPolygon) # Make mypy understand this check
+
+ # these shall only be very roughly similar, as half of the polygon is missing
+ # thus, we allow for a very large relative tolerance
+ self.assertTrue(read_polygon.equals_almost_congruent(self.included_polygon, rel_tolerance=0.6))
+
+ # this is roughly the part that should be included in the result
+ eastern_half = PolarPolygon(
+ [PolarLocation(1, 0), PolarLocation(-1, 0), PolarLocation(-1, 1), PolarLocation(1, 1)]
+ )
+ # thus, we allow for less relative tolerance
+ self.assertTrue(read_polygon.almost_congruent(eastern_half, rel_tolerance=0.15))
diff --git a/pyrate/tests/common/charts/test_s57_files.py b/pyrate/tests/common/charts/test_s57_files.py
new file mode 100644
index 0000000..35c449d
--- /dev/null
+++ b/pyrate/tests/common/charts/test_s57_files.py
@@ -0,0 +1,96 @@
+"""Tests whether ``pyrate.common.charts.raw_files`` correctly handles IHO S-57 chart files."""
+
+# Python standard
+from pathlib import Path
+
+# Testing
+from unittest import skipIf
+from unittest import TestCase
+
+# Extra test tooling
+import pytest
+
+# Pyrate library
+from pyrate.common.testing import IS_CI
+from pyrate.plan.geometry import LocationType
+
+# Module under test
+from pyrate.common.charts import ChartFileHandler
+from pyrate.common.charts.s57_files import _OSGEO_PRESENT
+from pyrate.common.charts import S57ChartHandler
+
+
+PATH_TO_EXAMPLES = Path(__file__).parent / "example_charts"
+CHARTS = [
+ PATH_TO_EXAMPLES / "nested_folder/US4VA70M/US4VA70M.000",
+ PATH_TO_EXAMPLES / "US1BS04M/US1BS04M.000",
+ PATH_TO_EXAMPLES / "US4AK5GM/US4AK5GM.000",
+ PATH_TO_EXAMPLES / "US4FL87M/US4FL87M.000",
+]
+CHARTS.sort()
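+# Sorted so the list can be compared directly against the (also sorted) discovery results below.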
+
+
+class TestChartDiscovery(TestCase):
+ """Tests that charts are correctly discovered"""
+
+ def test_discovery(self):
+ """Also checks for nested folders and npn-chart files being present"""
+ discovered = list(S57ChartHandler.find_chart_files(PATH_TO_EXAMPLES))
+ discovered.sort()
+
+ # check that exactly the expected charts have been found
+ self.assertListEqual(discovered, CHARTS)
+
+ # check that they have the characteristic file extension
+ for chart_path in discovered:
+ self.assertTrue(chart_path.name.endswith(".000"))
+
+
+# force testing this in CI to make sure it is tested regularly at least there
+@skipIf(not _OSGEO_PRESENT and not IS_CI, "allow osgeo to be missing and skip the tests in that case")
+class TestReadCharts(TestCase):
+ """Tests whether actually reading the charts works"""
+
+ def setUp(self) -> None:
+ self.handler: ChartFileHandler = S57ChartHandler()
+
+ def test_reading_non_existent_file(self):
+ """Tests reading a chart file that does not exist."""
+ with self.assertRaises(FileNotFoundError):
+ # Wrapping this in a list causes the generator/iterator to be actually evaluated
+ list(self.handler.read_chart_file("/does/surely/not/exist/and/if/it/does/we/have/a/troll"))
+ with self.assertRaises(FileNotFoundError):
+ # Wrapping this in a list causes the generator/iterator to be actually evaluated
+ list(self.handler.read_chart_file("/does/surely/not/exist/and/if/it/does/we/have/a/troll.000"))
+
+ def test_reading_wrong_file_type(self):
+ """Tests reading a chart file that is not an S57 file (this Python program file)."""
+ with self.assertRaises(IOError):
+ # This test python file is not a chart
+ not_a_chart_file = __file__
+ # Wrapping this in a list causes the generator/iterator to be actually evaluated
+ list(self.handler.read_chart_file(not_a_chart_file))
+
+ @pytest.mark.filterwarnings("ignore:Ignoring LineString geometry in chart")
+ def test_reading_contains_all_types(self):
+ """Checks for specific types of entries in the charts.
+
+ Note:
+            Tests for landmasses, shallow-water depth entries, and buoys.
+ """
+
+ all_obstacles = [obstacle for chart in CHARTS for obstacle in self.handler.read_chart_file(chart)]
+ self.assertGreater(len(all_obstacles), 0, "no obstacles were read")
+
+ relevant_types = (
+ (LocationType.LAND, "Landmass"),
+ (LocationType.SHALLOW_WATER, "Depth"),
+ (LocationType.OBSTRUCTION, "Buoy"),
+ )
+ for location_type, name_component in relevant_types:
+ filtered = [
+ o
+ for o in all_obstacles
+ if (o.name is not None and name_component in o.name and location_type == o.location_type)
+ ]
+ self.assertTrue(filtered, f"no obstacle of type {name_component} was found")
diff --git a/pyrate/tests/common/math/__init__.py b/pyrate/tests/common/math/__init__.py
new file mode 100644
index 0000000..bf9f5bc
--- /dev/null
+++ b/pyrate/tests/common/math/__init__.py
@@ -0,0 +1 @@
+"""Tests ``pyrate.common.math.**``."""
diff --git a/pyrate/tests/common/raster_datasets/Earth2014.TBI2014.30min.geod.geo.tif b/pyrate/tests/common/raster_datasets/Earth2014.TBI2014.30min.geod.geo.tif
new file mode 100644
index 0000000..4b3ebf0
Binary files /dev/null and b/pyrate/tests/common/raster_datasets/Earth2014.TBI2014.30min.geod.geo.tif differ
diff --git a/pyrate/tests/common/raster_datasets/__init__.py b/pyrate/tests/common/raster_datasets/__init__.py
new file mode 100644
index 0000000..149ad23
--- /dev/null
+++ b/pyrate/tests/common/raster_datasets/__init__.py
@@ -0,0 +1,6 @@
+"""Tests the raster datasets.
+
+The test file ``Earth2014.TBI2014.30min.geod.geo.tif`` is the *Earth 2014* dataset, exported from the
+`data repository
+`__.
+"""
diff --git a/pyrate/tests/common/raster_datasets/test_geo_datasets.py b/pyrate/tests/common/raster_datasets/test_geo_datasets.py
new file mode 100644
index 0000000..f5cf418
--- /dev/null
+++ b/pyrate/tests/common/raster_datasets/test_geo_datasets.py
@@ -0,0 +1,201 @@
+"""Tests :class:`pyrate.common.raster_datasets.geo_datasets.DataSetAccess`."""
+
+# Standard library
+from math import degrees
+from math import pi
+from math import radians
+
+# Generic testing
+from unittest import TestCase
+
+# Geometry
+from rasterio.windows import intersect
+
+# Hypothesis testing
+from hypothesis import given
+import hypothesis.strategies as st
+
+# Numeric testing
+from numpy import rad2deg
+from numpy.testing import assert_array_almost_equal
+
+# Own Geometry
+from pyrate.plan.geometry.helpers import meters2rad
+from pyrate.plan.geometry.helpers import rad2meters
+from pyrate.plan.geometry import PolarLocation
+
+# Test environment helper
+from ... import _open_test_geo_dataset
+
+
+class TestGeoDataset(TestCase):
+ """Ensure that the :class:`pyrate.plan.graph.generate.geo_datasets.DataSetAccess` works correctly.
+
+ Uses the *Earth2014* dataset.
+ """
+
+ #: The resolution of the dataset in arc-minutes
+ DATASET_RESOLUTION = 30
+ #: The maximal distance of two data points in the dataset in degrees
+ MAX_POINT_DISTANCE_DEG = DATASET_RESOLUTION / 60
+ #: The maximal distance of two data points in the dataset in meters (at the equator)
+ MAX_POINT_DISTANCE = rad2meters(radians(MAX_POINT_DISTANCE_DEG))
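+    # For the 30 arc-minute grid this works out to 0.5 degrees, i.e. roughly 55 km at the equator.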
+
+ # Handle the context manager, see https://stackoverflow.com/a/11180583/3753684
+ def run(self, result=None) -> None:
+ with _open_test_geo_dataset() as dataset:
+ self.dataset = dataset # pylint: disable=attribute-defined-outside-init
+ super().run(result)
+
+ @given(
+ st.floats(min_value=-pi / 2 * 0.75, max_value=+pi / 2 * 0.75),
+ st.floats(min_value=-pi * 0.75, max_value=+pi * 0.75),
+        st.floats(min_value=0.001, max_value=1_000_000.0),
+ )
+ def test_bounding_window_center_of_dataset(
+ self, latitude: float, longitude: float, radius: float
+ ) -> None:
+ """Tests that the bounding box is correctly calculated if the query point is not at the border."""
+ # pylint: disable=too-many-locals
+
+ win_1, win_2 = self.dataset.get_bounding_windows_around(latitude, longitude, radius)
+
+ self.assertIsNone(win_2, "only one window shall be returned")
+
+        # Get the geographical extents in degrees, not radians:
+ left, bottom, right, top = self.dataset.dataset.window_bounds(win_1)
+
+ # Check the position of the window
+ latitude_deg, longitude_deg = degrees(latitude), degrees(longitude)
+
+ self.assertLessEqual(bottom, latitude_deg + 1e-12)
+ self.assertGreaterEqual(top, latitude_deg - 1e-12)
+ self.assertAlmostEqual(
+ (top + bottom) / 2,
+ latitude_deg,
+ delta=self.MAX_POINT_DISTANCE_DEG,
+ msg="window should be vertically centered around the given center point",
+ )
+
+ self.assertLessEqual(left, longitude_deg + 1e-12)
+ self.assertGreaterEqual(right, longitude_deg - 1e-12)
+ self.assertAlmostEqual(
+ (right + left) / 2,
+ longitude_deg,
+ delta=self.MAX_POINT_DISTANCE_DEG,
+ msg="window should be horizontally centered around the given center point",
+ )
+
+ # Check the size of the window
+ self.assertLessEqual(left, right)
+ self.assertLessEqual(bottom, top)
+
+        # North-south distances (along a meridian) are uniform, so the latitude extent can be checked directly
+ radius_deg = degrees(meters2rad(radius))
+ self.assertGreaterEqual(top - bottom, 2 * radius_deg - 1e-12)
+ self.assertLessEqual(top - bottom, 2 * radius_deg + 3 * self.MAX_POINT_DISTANCE_DEG)
+
+        # East-west distances depend on the latitude, so check the longitude extent via an actual distance
+ left_side_center = PolarLocation(latitude=(top + bottom) / 2, longitude=left)
+ right_side_center = PolarLocation(latitude=(top + bottom) / 2, longitude=right)
+ # Use approximate=True for a spherical model
+ distance_horizontal = left_side_center.distance(right_side_center, approximate=True)
+        # The following bounds are deliberately coarse and were determined empirically.
+        # They guarantee that the window is roughly the right size; pinning it down to an exact
+        # number is hard since we round the discrete window bounds, map them to geographical
+        # coordinates and then have to deal with floating point inaccuracies.
+ self.assertGreaterEqual(distance_horizontal, 2 * radius - 6 * self.MAX_POINT_DISTANCE)
+ self.assertLessEqual(distance_horizontal, 2 * radius + 4 * self.MAX_POINT_DISTANCE)
+
+ @given(
+ st.floats(min_value=-pi / 2 * 0.95, max_value=+pi / 2 * 0.95),
+ st.one_of(
+ [st.floats(min_value=-pi, max_value=-pi * 0.995), st.floats(min_value=+pi * 0.995, max_value=+pi)]
+ ),
+ st.floats(min_value=200_000.0, max_value=1_000_000.0),
+ )
+ def test_bounding_window_left_and_right_side(
+ self, latitude: float, longitude: float, radius: float
+ ) -> None:
+ """Tests that the bounding box is correctly calculated if the query point is at the border.
+
+ Very high latitudes (near the poles) are not tested as there might be a single (albeit very wide)
+ window being returned. For the same reason, only moderate radii are tested.
+ """
+ window_1, window_2 = self.dataset.get_bounding_windows_around(latitude, longitude, radius)
+
+ # We need a plain assert here for type checking
+ assert window_2 is not None, "two windows should be returned"
+ self.assertFalse(intersect(window_1, window_2), "windows may never overlap")
+
+        # Also run the same basic checks as in :meth:`~test_bounding_window_general_properties`
+ self.assertEqual(window_1.height, window_2.height)
+ self.assertGreaterEqual(window_1.height, 1)
+ self.assertGreaterEqual(window_1.width, 1)
+ self.assertGreaterEqual(window_2.width, 1)
+ self.assertLessEqual(window_1.height, self.dataset.dataset.height)
+ self.assertLessEqual(window_1.width + window_2.width, self.dataset.dataset.width)
+
+ @given(
+ st.floats(min_value=-pi / 2, max_value=+pi / 2),
+ st.floats(min_value=-pi, max_value=+pi),
+ st.floats(min_value=0.001, max_value=100_000_000.0),
+ )
+ def test_bounding_window_general_properties(
+ self, latitude: float, longitude: float, radius: float
+ ) -> None:
+ """Tests some more general properties that should hold for all windows and window pairs.
+
+ In particular, it makes sure that even if a window pair is very wide, the intersection is always
+ empty.
+ """
+ window_1, window_2 = self.dataset.get_bounding_windows_around(latitude, longitude, radius)
+
+ # Make sure that everything in window_1 is rounded
+ self.assertIsInstance(window_1.col_off, int)
+ self.assertIsInstance(window_1.row_off, int)
+ self.assertIsInstance(window_1.height, int)
+ self.assertIsInstance(window_1.width, int)
+
+ # Test some general properties of window_1
+ self.assertTrue(radius == 0 or window_1.height >= 1)
+ self.assertLessEqual(window_1.height, self.dataset.dataset.height)
+ self.assertTrue(radius == 0 or window_1.width >= 1)
+ self.assertLessEqual(window_1.width, self.dataset.dataset.width)
+
+ if window_2 is not None:
+ self.assertGreater(radius, 0)
+
+ # Make sure that everything in window_2 is rounded
+ self.assertIsInstance(window_2.col_off, int)
+ self.assertIsInstance(window_2.row_off, int)
+ self.assertIsInstance(window_2.height, int)
+ self.assertIsInstance(window_2.width, int)
+
+ # Test some general properties of window_2 in relation to window_1
+ self.assertEqual(window_1.height, window_2.height)
+ self.assertGreaterEqual(window_2.width, 1)
+ self.assertLessEqual(window_1.width + window_2.width, self.dataset.dataset.width)
+
+ self.assertFalse(intersect(window_1, window_2), "windows may never overlap")
+
+ @given(
+ st.floats(min_value=-pi / 2 * 0.75, max_value=+pi / 2 * 0.75),
+ st.floats(min_value=-pi * 0.75, max_value=+pi * 0.75),
+        st.floats(min_value=0.001, max_value=1_000_000.0),
+ )
+ def test_meshgrid_generation(self, latitude: float, longitude: float, radius: float) -> None:
+ """Tests that msehgrids are generated correctly no matter whether radians are used or not.
+
+ Uses the data generation of :meth:`~test_bounding_window_center_of_dataset`.
+ """
+
+ window, window_empty = self.dataset.get_bounding_windows_around(latitude, longitude, radius)
+ self.assertIsNone(window_empty, "only one window shall be returned")
+
+ lats_deg, lons_deg = self.dataset.lat_lon_meshgrid_for(window, window_empty, radians=False)
+ lats_rad, lons_rad = self.dataset.lat_lon_meshgrid_for(window, window_empty, radians=True)
+
+ assert_array_almost_equal(rad2deg(lats_rad), lats_deg)
+ assert_array_almost_equal(rad2deg(lons_rad), lons_deg)
diff --git a/pyrate/tests/common/raster_datasets/test_transformers.py b/pyrate/tests/common/raster_datasets/test_transformers.py
new file mode 100644
index 0000000..63bd9d7
--- /dev/null
+++ b/pyrate/tests/common/raster_datasets/test_transformers.py
@@ -0,0 +1,132 @@
+"""
+Tests the transformers in :mod:`pyrate.common.raster_datasets.transformer_base` and in
+:mod:`pyrate.common.raster_datasets.transformers_concrete`.
+"""
+
+# Testing
+from unittest import TestCase
+
+# Scientific
+from numpy import array
+from numpy import empty
+from numpy import float32
+from numpy import int16
+from numpy.testing import assert_array_equal
+from numpy import uint16
+from numpy import uint32
+from pandas import DataFrame
+from pandas import Series
+from pandas.testing import assert_frame_equal
+from pandas.testing import assert_series_equal
+
+# Module under test
+from pyrate.common.raster_datasets.transformers_concrete import BathymetricTransformer
+from pyrate.common.raster_datasets.transformers_concrete import ConstantTransformer
+
+# Graph generation
+from pyrate.plan.graph import create_earth_graph
+from pyrate.plan.graph import GeoNavigationGraph
+from pyrate.plan.graph import min_required_frequency
+
+# CI/Testing helpers
+from ... import _open_test_geo_dataset
+
+
+class TestGetNodePropertiesWithConstantTransformer(TestCase):
+ """Ensure that the :meth:`pyrate.plan.graph.GeoNavigationGraph.append_properties` works correctly."""
+
+ def test_get_node_properties_empty_coordinates(self) -> None:
+ """Tests getting properties for a graph without nodes."""
+ graph = GeoNavigationGraph.from_coordinates_degrees(
+ latitudes=empty((0,)), longitudes=empty((0,)), edges=empty((0, 2)), node_radius=111.111
+ )
+ transformers = [ConstantTransformer(42, uint32, "prop_1"), ConstantTransformer(43, uint16, "prop_2")]
+ graph.append_properties(transformers)
+ self.assertEqual(len(graph.node_properties), 0)
+ assert_array_equal(graph.node_properties.columns, ["prop_1", "prop_2"])
+
+ def test_get_node_properties_no_transformers(self) -> None:
+ """Tests getting properties without a transformer."""
+ graph = GeoNavigationGraph.from_coordinates_degrees(
+ latitudes=array([0, 1]), longitudes=array([0, 0]), edges=array([[0, 1]]), node_radius=111.111
+ )
+ graph.append_properties([]) # empty!
+ self.assertEqual(len(graph.node_properties), 2)
+ assert_array_equal(graph.node_properties.columns, [])
+
+ def test_get_node_properties_single_transformer(self) -> None:
+ """Tests getting properties using only a single transformer."""
+ graph = GeoNavigationGraph.from_coordinates_degrees(
+ latitudes=array([0, 1]),
+ longitudes=array([0, 0]),
+ edges=array([[0, 1]]),
+ node_radius=0.0, # some weird radius
+ )
+ # now we use `append_property` to append a single one
+ graph.append_property(ConstantTransformer(33, uint32, "prop_1"))
+ self.assertEqual(len(graph.node_properties), 2)
+ assert_frame_equal(graph.node_properties, DataFrame(data={"prop_1": [33, 33]}, dtype=uint32))
+
+ def test_get_node_properties_single_transformer_str_datatype(self) -> None:
+ """Tests getting properties using only a single transformer and a string datatype."""
+ graph = GeoNavigationGraph.from_coordinates_degrees(
+ latitudes=array([0]),
+ longitudes=array([0]),
+ edges=array([[0, 0]]), # edge to itself
+ node_radius=111.111,
+ )
+ # now we use `append_property` to append a single one
+ data_type = "U10" # must give string data type explicitly and not with np.str or "U"
+ graph.append_property(ConstantTransformer("content", data_type, "prop_1"))
+ self.assertEqual(len(graph.node_properties), 1)
+ assert_frame_equal(graph.node_properties, DataFrame(data={"prop_1": ["content"]}, dtype=data_type))
+
+ def test_get_node_properties_multiple_transformers(self) -> None:
+ """Tests getting properties using multiple transformers."""
+ graph = GeoNavigationGraph.from_coordinates_degrees(
+ latitudes=array([0, 1]), longitudes=array([0, 0]), edges=array([[0, 1]]), node_radius=111.111
+ )
+        # now we use `append_properties` to append several transformers at once
+ graph.append_properties(
+ [ConstantTransformer(33, uint32, "prop_1"), ConstantTransformer(99, int16, "prop_2")]
+ )
+ self.assertEqual(len(graph.node_properties), 2)
+ assert_array_equal(graph.node_properties.columns, ["prop_1", "prop_2"])
+ assert_series_equal(
+ graph.node_properties["prop_1"], Series(data=[33, 33], dtype=uint32, name="prop_1")
+ )
+ assert_series_equal(
+ graph.node_properties["prop_2"], Series(data=[99, 99], dtype=int16, name="prop_2")
+ )
+
+
+class TestBathymetricTransformer(TestCase):
+ """Tests :class:`pyrate.common.raster_datasets.transformers_concrete.BathymetricTransformer`."""
+
+ def test_all_modes(self) -> None:
+ """Tests all modes at once."""
+
+ # create a coarse grid
+        distance_meters = 1_000_000
+ graph = create_earth_graph(min_required_frequency(distance_meters, in_meters=True))
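+        # The helper name suggests this picks the coarsest subdivision whose node spacing stays
+        # below ``distance_meters``; a spacing of about 1,000 km keeps the graph small and the test fast.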
+
+ # fetch properties
+ modes = list(BathymetricTransformer.Modes)
+ graph.append_property(BathymetricTransformer(_open_test_geo_dataset(), modes))
+ properties = graph.node_properties
+
+ # check that the returned properties are all floats
+ self.assertTrue((properties.dtypes == float32).all())
+
+ def test_no_data(self) -> None:
+ """Tests that querying for data where there are no data points in the result range raises an error."""
+ for mode in list(BathymetricTransformer.Modes):
+ with self.subTest(mode.name), self.assertRaises(ValueError):
+ with BathymetricTransformer(_open_test_geo_dataset(), [mode]) as transformer:
+ # This works by querying for a point (at 1e-3°N 1e-3°E), where there is no data point
+ # within 1e-9 meters in the underlying dataset
+ # This should trigger an exception (e.g. because the average depth over zero data
+                # points is not well-defined)
+ transformer.get_transformed_at_nodes(
+ latitudes=array([1e-3]), longitudes=array([1e-3]), radius=1e-9
+ )
diff --git a/pyrate/tests/plan/__init__.py b/pyrate/tests/plan/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/plan/geometry/__init__.py b/pyrate/tests/plan/geometry/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/plan/geometry/helpers/__init__.py b/pyrate/tests/plan/geometry/helpers/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/plan/geometry/helpers/test_difference.py b/pyrate/tests/plan/geometry/helpers/test_difference.py
new file mode 100644
index 0000000..7f8c889
--- /dev/null
+++ b/pyrate/tests/plan/geometry/helpers/test_difference.py
@@ -0,0 +1,171 @@
+"""This module asserts correct runtime behaviour of the :mod:`pyrate.plan.geometry.helpers` functions
+for calculating differences.
+"""
+
+# Python standard
+from abc import ABC
+from abc import abstractmethod
+from math import isfinite
+from math import isnan
+import warnings
+
+# Typing
+from typing import Callable
+from typing import Sequence
+from typing import Tuple
+
+# Generic testing
+from unittest import TestCase
+
+# Numeric testing
+from numpy import allclose
+from numpy import array
+
+# Hypothesis testing
+from hypothesis import given
+import hypothesis.strategies as st
+
+# Test helpers
+from pyrate.plan.geometry.helpers import difference_direction
+from pyrate.plan.geometry.helpers import difference_latitude
+from pyrate.plan.geometry.helpers import difference_longitude
+from pyrate.plan.geometry.helpers import ScalarOrArray
+
+
+class TestDifference(TestCase, ABC):
+ """Makes sure the distance measure is well-behaved.
+
+ Keep in mind that it is formally not a metric since the triangle inequality does not hold.
+ """
+
+ @abstractmethod
+ def _get_difference_function(self) -> Callable[[ScalarOrArray, ScalarOrArray], ScalarOrArray]:
+ """Get the function to be tested."""
+
+ @abstractmethod
+ def _get_max(self) -> float:
+ """Get the desired maximum value (inclusive)."""
+
+ @abstractmethod
+ def _get_concrete_examples(self) -> Sequence[Tuple[float, float, float]]:
+ """Get some concrete values to be tested as a sequence of ``(value a, value b, distance between)``."""
+
+ @given(st.floats(), st.floats())
+ def test_distance_measuring_commutes_and_is_in_bounds(self, first: float, second: float) -> None:
+ """Assures flipping the sides when calculating distances does not make a significant difference."""
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ distance_1 = self._get_difference_function()(first, second)
+ distance_2 = self._get_difference_function()(second, first)
+
+        if isfinite(distance_1) and isfinite(distance_2):
+ # make sure it commutes
+ self.assertAlmostEqual(distance_1, distance_2)
+
+ # make sure the distance is always positive
+ self.assertGreaterEqual(distance_1, 0.0)
+ self.assertGreaterEqual(distance_2, 0.0)
+
+ # make sure the distance is within bounds
+ self.assertLessEqual(distance_1, self._get_max())
+ self.assertLessEqual(distance_2, self._get_max())
+
+ else:
+ self.assertTrue(isnan(distance_1))
+ self.assertTrue(isnan(distance_2))
+
+ @given(st.floats())
+ def test_distance_measuring_to_itself_is_zero(self, thing: float) -> None:
+ """Assures flipping the sides when calculating distances does not make a significant difference."""
+
+ with warnings.catch_warnings():
+ warnings.simplefilter("ignore")
+ distance = self._get_difference_function()(thing, thing)
+
+ # make sure the distance is always positive and very close to zero
+ if isfinite(distance):
+ self.assertGreaterEqual(distance, 0.0)
+ self.assertAlmostEqual(distance, 0.0)
+ else:
+ self.assertTrue(isnan(distance))
+
+ def test_concrete_examples(self) -> None:
+ """Checks the result for the concrete examples given in :meth:`~_get_concrete_examples`."""
+ function = self._get_difference_function()
+
+ for index, (value_a, value_b, expected_result) in enumerate(self._get_concrete_examples()):
+ with self.subTest(f"example triple #{index}"):
+ self.assertAlmostEqual(function(value_a, value_b), expected_result, delta=1e-12)
+
+ def test_concrete_examples_as_array(self) -> None:
+ """Checks the result for the concrete examples given in :meth:`~_get_concrete_examples`."""
+ function = self._get_difference_function()
+ data = array(self._get_concrete_examples()).T
+ self.assertTrue(allclose(function(data[0, :], data[1, :]), data[2, :]))
+
+
+class TestDifferenceLatitude(TestDifference):
+ """Tests :func:`pyrate.plan.geometry.helpers.difference_latitude`."""
+
+ def _get_difference_function(self) -> Callable[[ScalarOrArray, ScalarOrArray], ScalarOrArray]:
+ return difference_latitude
+
+ def _get_max(self) -> float:
+ return 180.0
+
+ def _get_concrete_examples(self) -> Sequence[Tuple[float, float, float]]:
+ return [
+ (0, 0, 0),
+ (-90, 90, 180),
+ (-89.5, 0, 89.5),
+ (-89.5, 0.5, 90),
+ (-89.5, -0.5, 89),
+ (-45, 45, 90),
+ ]
+
+
+class TestDifferenceLongitude(TestDifference):
+ """Tests :func:`pyrate.plan.geometry.helpers.difference_longitude`."""
+
+ def _get_difference_function(self) -> Callable[[ScalarOrArray, ScalarOrArray], ScalarOrArray]:
+ return difference_longitude
+
+ def _get_max(self) -> float:
+ return 180.0
+
+ def _get_concrete_examples(self) -> Sequence[Tuple[float, float, float]]:
+ return [
+ (0, 0, 0),
+ (-90, 90, 180),
+ (-89.5, 0, 89.5),
+ (-89.5, 0.5, 90),
+ (-89.5, -0.5, 89),
+ (180, -180, 0),
+ (100, -100, 160),
+ (-45, 45, 90),
+ ]
+
+
+class TestDifferenceDirection(TestDifference):
+ """Tests :func:`pyrate.plan.geometry.helpers.difference_direction`."""
+
+ def _get_difference_function(self) -> Callable[[ScalarOrArray, ScalarOrArray], ScalarOrArray]:
+ return difference_direction
+
+ def _get_max(self) -> float:
+ return 180.0
+
+ def _get_concrete_examples(self) -> Sequence[Tuple[float, float, float]]:
+ return [
+ (0, 0, 0),
+ (-90, 90, 180),
+ (0, 360, 0),
+ (10, -10, 20),
+ (10, 350, 20),
+ (370, 20, 10),
+ ]
+
+
+# Do not execute the base class as a test, see https://stackoverflow.com/a/43353680/3753684
+del TestDifference
diff --git a/pyrate/tests/plan/geometry/helpers/test_distance.py b/pyrate/tests/plan/geometry/helpers/test_distance.py
new file mode 100644
index 0000000..2286929
--- /dev/null
+++ b/pyrate/tests/plan/geometry/helpers/test_distance.py
@@ -0,0 +1,62 @@
+"""This module asserts correct runtime behaviour of the :mod:`pyrate.plan.geometry.helpers` functions
+for calculating distances.
+"""
+
+# Python standard library
+from datetime import timedelta
+from math import radians
+
+# Testing
+from unittest import TestCase
+
+# Hypothesis testing
+from hypothesis import given
+from hypothesis import settings
+import hypothesis.strategies as st
+
+# Scientific (testing)
+import numpy.testing
+
+# Module under test
+from pyrate.plan.geometry.helpers import fast_distance_geo
+from pyrate.plan.geometry.helpers import haversine_numpy
+
+# Own geometry
+from pyrate.plan.geometry.geospatial import MEAN_EARTH_CIRCUMFERENCE
+from pyrate.plan.geometry import PolarLocation
+
+# Test helpers
+from pyrate.common.testing.strategies.geometry import geo_bearings
+from pyrate.common.testing.strategies.geometry import polar_locations
+
+
+class TestDistanceCalculation(TestCase):
+ """Tests the geographic helper methods."""
+
+ @given(polar_locations(), polar_locations())
+ def test_haversine_formula(self, location_1: PolarLocation, location_2: PolarLocation) -> None:
+ """Test the correctness of the haversine formula."""
+ dist = haversine_numpy(
+ radians(location_1.latitude),
+ radians(location_1.longitude),
+ radians(location_2.latitude),
+ radians(location_2.longitude),
+ )
+ self.assertLessEqual(dist, MEAN_EARTH_CIRCUMFERENCE / 2)
+ numpy.testing.assert_allclose(location_1.distance(location_2), dist, atol=5.0, rtol=0.01)
+
+ @given(polar_locations(), geo_bearings(), st.floats(min_value=0.0, max_value=250_000.0))
+ @settings(deadline=timedelta(seconds=1.0))
+ # pylint: disable=no-self-use
+ def test_fast_distance_geo(self, center: PolarLocation, direction: float, distance: float) -> None:
+ """Test the correctness of the fast great-circle approximation."""
+
+ other, _ = center.translate(direction, distance)
+
+ distance_calculated = fast_distance_geo(
+ radians(other.latitude),
+ radians(other.longitude),
+ radians(center.latitude),
+ radians(center.longitude),
+ )
+ numpy.testing.assert_allclose(distance, distance_calculated, atol=0.5, rtol=0.05)
diff --git a/pyrate/tests/plan/geometry/helpers/test_normalize.py b/pyrate/tests/plan/geometry/helpers/test_normalize.py
new file mode 100644
index 0000000..8c9a5de
--- /dev/null
+++ b/pyrate/tests/plan/geometry/helpers/test_normalize.py
@@ -0,0 +1,182 @@
+"""This module asserts correct runtime behaviour of the :mod:`pyrate.plan.geometry.helpers` functions
+for normalization.
+"""
+
+# Python standard
+from abc import ABC
+from abc import abstractmethod
+
+# Typing
+from typing import Callable
+from typing import Sequence
+from typing import Tuple
+
+# Generic testing
+from unittest import TestCase
+
+# Numeric testing
+from numpy import allclose
+from numpy import array
+
+# Hypothesis testing
+from hypothesis import given
+import hypothesis.strategies as st
+
+# Test helpers
+from pyrate.plan.geometry.helpers import normalize_direction
+from pyrate.plan.geometry.helpers import normalize_latitude
+from pyrate.plan.geometry.helpers import normalize_longitude
+from pyrate.plan.geometry.helpers import ScalarOrArray
+
+
+class TestNormalize(TestCase, ABC):
+ """Makes sure the normalizations are well-behaved."""
+
+ @abstractmethod
+ def _get_normalization_function(self) -> Callable[[ScalarOrArray], ScalarOrArray]:
+ """Get the function to be tested."""
+
+ @abstractmethod
+ def _get_min(self) -> float:
+ """Get the desired minimum value (inclusive)."""
+
+ @abstractmethod
+ def _get_max(self) -> float:
+ """Get the desired maximum value, see :meth:`TestNormalize._max_is_inclusive`."""
+
+ def _max_is_inclusive(self) -> bool: # pylint: disable=no-self-use
+ """If :meth:`TestNormalize._get_max` is to be seen as inclusive or exclusive"""
+ return False
+
+ @abstractmethod
+ def _get_concrete_examples(self) -> Sequence[Tuple[float, float]]:
+ """Get some concrete values to be tested as a sequence of ``(non-normalized, normalized)``."""
+
+ @given(st.floats(allow_infinity=False, allow_nan=False))
+ def test_bounds(self, value: float) -> None:
+ """Assures that the normalized value is within its bounds."""
+
+ normalized = self._get_normalization_function()(value)
+
+ # make sure the normalized value is within bounds
+ self.assertGreaterEqual(normalized, self._get_min())
+
+ if self._max_is_inclusive():
+ self.assertLessEqual(normalized, self._get_max())
+ else:
+ self.assertLess(normalized, self._get_max())
+
+ @given(st.floats(allow_infinity=False, allow_nan=False))
+ def test_normalizing_twice(self, value: float) -> None:
+ """Assures that normalizing twice does not really change the value."""
+
+ normalized = self._get_normalization_function()(value)
+ normalized_twice = self._get_normalization_function()(normalized)
+
+ self.assertAlmostEqual(normalized, normalized_twice, places=10)
+
+ @given(st.floats(min_value=-400, max_value=+400))
+ def test_already_normalized_values(self, value: float) -> None:
+ """Assures that values stay unchanged if and only if are already normalized (i.e. within bounds)."""
+ below_max = value < self._get_max() or (self._max_is_inclusive() and value == self._get_max())
+ if self._get_min() <= value and below_max:
+ self.assertAlmostEqual(self._get_normalization_function()(value), value, delta=1e-12)
+ else:
+ self.assertNotEqual(self._get_normalization_function()(value), value)
+
+ def test_concrete_examples(self) -> None:
+ """Checks the result for the concrete examples given in :meth:`~_get_concrete_examples`."""
+ function = self._get_normalization_function()
+
+ for index, (non_normalized, normalized) in enumerate(self._get_concrete_examples()):
+ with self.subTest(f"example triple #{index}"):
+ self.assertAlmostEqual(function(non_normalized), normalized, delta=1e-12)
+
+ def test_concrete_examples_as_array(self) -> None:
+ """Checks the result for the concrete examples given in :meth:`~_get_concrete_examples`."""
+ function = self._get_normalization_function()
+ data = array(self._get_concrete_examples()).T
+ self.assertTrue(allclose(function(data[0, :]), data[1, :]))
+
+
+class TestNormalizeLatitude(TestNormalize):
+ """Tests :func:`pyrate.plan.geometry.helpers.normalize_latitude`."""
+
+ def _get_normalization_function(self) -> Callable[[ScalarOrArray], ScalarOrArray]:
+ return normalize_latitude
+
+ def _get_min(self) -> float:
+ return -90.0
+
+ def _get_max(self) -> float:
+ return 90.0
+
+ def _max_is_inclusive(self) -> bool:
+ return True
+
+ def _get_concrete_examples(self) -> Sequence[Tuple[float, float]]:
+ return [
+ (0, 0),
+ (90, 90),
+ (-90, -90),
+ (100, 80),
+ (180, 0),
+ (270, -90),
+ (-180, 0),
+ (-270, 90),
+ (-10, -10),
+ ]
+
+
+class TestNormalizeLongitude(TestNormalize):
+ """Tests :func:`pyrate.plan.geometry.helpers.normalize_longitude`."""
+
+ def _get_normalization_function(self) -> Callable[[ScalarOrArray], ScalarOrArray]:
+ return normalize_longitude
+
+ def _get_min(self) -> float:
+ return -180.0
+
+ def _get_max(self) -> float:
+ return 180.0
+
+ def _get_concrete_examples(self) -> Sequence[Tuple[float, float]]:
+ return [
+ (0, 0),
+ (90, 90),
+ (-90, -90),
+ (100, 100),
+ (180, -180),
+ (-180, -180),
+ (270, -90),
+ (-10, -10),
+ ]
+
+
+class TestNormalizeDirection(TestNormalize):
+ """Tests :func:`pyrate.plan.geometry.helpers.normalize_direction`."""
+
+ def _get_normalization_function(self) -> Callable[[ScalarOrArray], ScalarOrArray]:
+ return normalize_direction
+
+ def _get_min(self) -> float:
+ return 0.0
+
+ def _get_max(self) -> float:
+ return 360.0
+
+ def _get_concrete_examples(self) -> Sequence[Tuple[float, float]]:
+ return [
+ (0, 0),
+ (90, 90),
+ (-90, 270),
+ (100, 100),
+ (180, 180),
+ (-180, 180),
+ (270, 270),
+ (-10, 350),
+ ]
+
+
+# Do not execute the base class as a test, see https://stackoverflow.com/a/43353680/3753684
+del TestNormalize
diff --git a/pyrate/tests/plan/geometry/helpers/test_other.py b/pyrate/tests/plan/geometry/helpers/test_other.py
new file mode 100644
index 0000000..88f45f3
--- /dev/null
+++ b/pyrate/tests/plan/geometry/helpers/test_other.py
@@ -0,0 +1,120 @@
+"""This module asserts correct runtime behaviour of various additional helpers."""
+
+# Python Standard Library
+from math import tau
+
+# Generic testing
+from unittest import TestCase
+
+# Hypothesis testing
+import hypothesis.extra.numpy as st_numpy
+from hypothesis import given
+import hypothesis.strategies as st
+
+# Scientific
+import numpy as np
+from numpy.testing import assert_almost_equal
+
+# Module under test
+from pyrate.plan.geometry.helpers import cartesian_to_spherical
+from pyrate.plan.geometry.helpers import difference_latitude
+from pyrate.plan.geometry.helpers import difference_longitude
+from pyrate.plan.geometry.helpers import mean_angle
+from pyrate.plan.geometry.helpers import mean_coordinate
+from pyrate.plan.geometry.helpers import meters2rad
+from pyrate.plan.geometry.helpers import rad2meters
+
+# Own strategies
+from pyrate.common.testing.strategies.geometry import geo_bearings
+
+
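+# Finite, non-negative values (in meters) for the conversion tests below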
+_POSITIVE_FLOATS = st.floats(min_value=0.0, max_value=1e9, allow_infinity=False, allow_nan=False)
+
+
+class TestRadiansAndMeterConversion(TestCase):
+ """Makes sure the conversion between meters and radians works."""
+
+ @given(_POSITIVE_FLOATS)
+ def test_is_reversible_float(self, meters: float) -> None:
+ """Tests that the two functions are the reverse of each other."""
+ self.assertAlmostEqual(meters, rad2meters(meters2rad(meters)), places=5)
+
+ @given(st_numpy.arrays(dtype=float, shape=st_numpy.array_shapes(), elements=_POSITIVE_FLOATS))
+ def test_is_reversible_numpy(self, meters: np.ndarray) -> None: # pylint: disable=no-self-use
+ """Tests that the two functions are the reverse of each other."""
+ assert_almost_equal(meters, rad2meters(meters2rad(meters)), decimal=5)
+
+
+class TestCartesianToSpherical(TestCase):
+ """Makes sure the conversion from cartesian to spherical coordinates works."""
+
+ def test_raises_if_not_on_unit_sphere(self) -> None:
+ """Asserts that an exception is raised if values are not on the unit sphere."""
+ with self.assertRaises(AssertionError):
+ cartesian_to_spherical(np.array([(10, 20, 30)]))
+
+ def test_specific_values(self) -> None: # pylint: disable=no-self-use
+ """Asserts that an exception is raised if values are not on the unit sphere."""
+ data_in = np.array([(1, 0, 0), (0, 1, 0), (0, 0, 1), (0.5, 0.5, np.sqrt(1 - 0.5**2 - 0.5**2))])
+ expected_data_out = np.array([(0, 0), (0, np.pi / 2), (-np.pi / 2, 0), (-np.pi / 4, np.pi / 4)]).T
+
+ assert_almost_equal(cartesian_to_spherical(data_in), expected_data_out)
+
+
+class TestAngleAndCoordinateMean(TestCase):
+ """Makes sure the mean computation and angles and coordinates works correctly."""
+
+ @given(geo_bearings(), st.floats(min_value=0.0, max_value=1e-9))
+ def test_raises_if_ambiguous(self, angle: float, noise: float) -> None:
+ """Asserts that an exception is raised if no sensible mean can be calculated."""
+
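+ # Two (nearly) opposite angles have no well-defined mean direction, so an error is expected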
+ ambiguous_pair = np.array([angle, (angle + 180 + noise) % 360])
+ with self.assertRaises(ValueError):
+ mean_angle(np.radians(ambiguous_pair))
+ with self.assertRaises(ValueError):
+ mean_coordinate(np.array([0.0, 67.2]), ambiguous_pair)
+
+ # But the methods should recover from an exception on the latitude mean computation
+ latitude, _ = mean_coordinate(ambiguous_pair, np.array([0.0, 67.2]))
+ self.assertAlmostEqual(latitude, 0.0)
+
+ @given(
+ st_numpy.arrays(
+ elements=st.floats(min_value=0.0, max_value=np.pi), dtype=float, shape=st_numpy.array_shapes()
+ )
+ )
+ def test_mean_angle_is_in_valid_range(self, data: np.ndarray) -> None:
+ """Asserts that means are never negative and always between ``0°`` and ``360°``."""
+
+ try:
+ mean = mean_angle(data)
+ self.assertGreaterEqual(mean, 0.0)
+ self.assertLessEqual(mean, np.pi)
+
+ except ValueError:
+ pass # this might happen with the generated values and is okay
+
+ @given(geo_bearings(), st.floats(min_value=0.0, max_value=170))
+ def test_obvious_values_angle(self, angle: float, difference: float) -> None:
+ """Asserts that the result is sensible for known values."""
+
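+ # The difference stays well below 180° so that the circular mean is unambiguous (cf. test_raises_if_ambiguous)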
+ mean = mean_angle(np.radians(np.array([angle, (angle + difference) % 360])))
+ self.assertAlmostEqual(mean, np.radians((angle + difference / 2)) % tau, delta=1e-6)
+
+ @given(
+ st.floats(min_value=-80.0, max_value=+80.0),
+ st.floats(min_value=-170.0, max_value=+170.0),
+ st.floats(min_value=-9.0, max_value=9.0),
+ st.floats(min_value=-9.0, max_value=9.0),
+ )
+ def test_obvious_values_coordinate(
+ self, latitude: float, longitude: float, lat_delta: float, lon_delta: float
+ ) -> None:
+ """Asserts that the result is sensible for known values."""
+
+ lat_mean, lon_mean = mean_coordinate(
+ latitudes=np.array([latitude, latitude + lat_delta]),
+ longitudes=np.array([longitude, longitude + lon_delta]),
+ )
+ self.assertLessEqual(difference_latitude(lat_mean, (latitude + lat_delta / 2)), 1e-6)
+ self.assertLessEqual(difference_longitude(lon_mean, (longitude + lon_delta / 2)), 1e-6)
diff --git a/pyrate/tests/plan/geometry/helpers/test_translate.py b/pyrate/tests/plan/geometry/helpers/test_translate.py
new file mode 100644
index 0000000..7180a78
--- /dev/null
+++ b/pyrate/tests/plan/geometry/helpers/test_translate.py
@@ -0,0 +1,34 @@
+"""This module asserts correct runtime behaviour of the :mod:`pyrate.plan.geometry.helpers` functions
+for translation.
+
+Note that most of the correctness is asserted by the use in
+:meth:`pyrate.plan.geometry.PolarPolygon.translate` and :meth:`pyrate.plan.geometry.PolarRoute.translate`.
+Also, no extensive tests are needed since we trust the underlying library due to its widespread adoption and
+maturity.
+We only need to check that the conversion of parameters and results works as expected.
+"""
+
+# Testing
+from unittest import TestCase
+
+# Scientific (testing)
+from numpy import array
+
+# Module under test
+from pyrate.plan.geometry.helpers import translate_numpy
+
+
+class TestTranslate(TestCase):
+ """Tests the translation helpers."""
+
+ COORDINATES = array([[1.0, 2.0], [3.0, -4.0], [-5.0, 6.0]])
+ DIRECTIONS = array([0.0, 90.0, -90.0])
+ DISTANCES = array([1.0, 100.0, 10000.0])
+
+ def test_translate_numpy(self) -> None: # pylint: disable=no-self-use
+ """Test that any combination of types of input are accepted."""
+
+ translate_numpy(TestTranslate.COORDINATES, TestTranslate.DIRECTIONS, TestTranslate.DISTANCES)
+ translate_numpy(TestTranslate.COORDINATES, 90, TestTranslate.DISTANCES)
+ translate_numpy(TestTranslate.COORDINATES, TestTranslate.DIRECTIONS, 100)
+ translate_numpy(TestTranslate.COORDINATES, 90, 100)
diff --git a/pyrate/tests/plan/geometry/primitives/__init__.py b/pyrate/tests/plan/geometry/primitives/__init__.py
new file mode 100644
index 0000000..96add55
--- /dev/null
+++ b/pyrate/tests/plan/geometry/primitives/__init__.py
@@ -0,0 +1,53 @@
+"""This module asserts correct runtime behaviour of the :mod:`pyrate.plan.geometry` primitives for
+locations, polygons and trajectories.
+
+Quite a few tests are marked with ``@settings(max_examples=...)`` since this test suite makes
+up a very large part of the total testing time, and some tests simply do not justify spending many
+resources on them because the code under test is very simple.
+"""
+
+# Python standard math
+from math import isclose
+
+# Typing
+from typing import Union
+
+# Hypothesis testing
+from hypothesis import HealthCheck
+from hypothesis import settings
+
+# Package under test
+from pyrate.plan.geometry import PolarLocation
+from pyrate.plan.geometry import PolarPolygon
+from pyrate.plan.geometry import PolarRoute
+
+
+#: Tests that require the generation of cartesian routes are slow since the generation of examples is slow.
+#: Since polar routes, cartesian polygons and polar polygons depend on this, they are also run at a reduced rate.
+slow_route_max_examples = settings(
+ max_examples=int(settings().max_examples * 0.1), suppress_health_check=(HealthCheck.too_slow,)
+)
+
+
+#: A test that only tests very few examples since the property to be tested is rather trivial and we do not
+#: want to invest significant amounts of time into it.
+simple_property_only_few_examples = settings(
+ max_examples=int(max(5, settings().max_examples * 0.001)), suppress_health_check=(HealthCheck.too_slow,)
+)
+
+
+def is_near_special_point(polar_location: PolarLocation, tolerance: float = 1e-6) -> bool:
+ """Checks if the given ``polar_location`` is within ``tolerance`` of the poles or +/- 180° longitude."""
+ return (
+ isclose(polar_location.latitude, -90, abs_tol=tolerance)
+ or isclose(polar_location.latitude, +90, abs_tol=tolerance)
+ or isclose(polar_location.longitude, -180, abs_tol=tolerance)
+ or isclose(polar_location.longitude, +180, abs_tol=tolerance)
+ )
+
+
+def is_any_near_special_point(
+ polar_line_object: Union[PolarPolygon, PolarRoute], tolerance: float = 1e-6
+) -> bool:
+ """Checks if any point in in the given geometry ``is_near_special_point`` within the ``tolerance``."""
+ return any(is_near_special_point(location, tolerance) for location in polar_line_object.locations)
diff --git a/pyrate/tests/plan/geometry/primitives/test_common.py b/pyrate/tests/plan/geometry/primitives/test_common.py
new file mode 100644
index 0000000..5fc2dec
--- /dev/null
+++ b/pyrate/tests/plan/geometry/primitives/test_common.py
@@ -0,0 +1,44 @@
+"""Tests some general properties of geometries."""
+
+# Generic testing
+from unittest import TestCase
+
+# Scientific testing
+from numpy.testing import assert_array_almost_equal
+
+# Hypothesis testing
+from hypothesis import given
+import hypothesis.strategies as st
+
+# Package under test
+from pyrate.plan.geometry import CartesianGeometry
+
+# Test helpers
+from pyrate.common.testing.strategies.geometry import cartesian_objects
+from pyrate.common.testing.strategies.geometry import geo_bearings
+
+
+class TestCartesianGeometries(TestCase):
+ """Asserts general properties of the cartesian geometries."""
+
+ @given(
+ cartesian_objects(),
+ geo_bearings(),
+ st.floats(min_value=1.0, max_value=100_000.0),
+ )
+ def test_translation_is_invertible(
+ self,
+ original: CartesianGeometry,
+ direction: float,
+ distance: float,
+ ) -> None:
+ """Tests that translation is invertible and a valid backwards vector is returned."""
+
+ # translate & translate back
+ translated, back_vector = original.translate(direction, distance)
+ back_direction = (direction + 180) % 360
+ translated_translated, back_back_vector = translated.translate(back_direction, distance)
+
+ # check the result
+ assert_array_almost_equal(back_vector, -back_back_vector, decimal=9)
+ self.assertTrue(original.equals_exact(translated_translated, tolerance=1e-9))
diff --git a/pyrate/tests/plan/geometry/primitives/test_geospatial.py b/pyrate/tests/plan/geometry/primitives/test_geospatial.py
new file mode 100644
index 0000000..62fa471
--- /dev/null
+++ b/pyrate/tests/plan/geometry/primitives/test_geospatial.py
@@ -0,0 +1,217 @@
+"""Tests that the geometry base classes in :mod:`pyrate.plan.geometry.geospatial` work correctly."""
+
+# Python standard
+from copy import copy
+from copy import deepcopy
+from json import loads
+
+# Typing
+from typing import Any
+from typing import Sequence
+from typing import Tuple
+
+# Generic testing
+from unittest import TestCase
+
+# Hypothesis testing
+from hypothesis import given
+from hypothesis import HealthCheck
+from hypothesis import Phase
+from hypothesis import settings
+import hypothesis.strategies as st
+
+# Package under test
+from pyrate.plan.geometry import CartesianLocation
+from pyrate.plan.geometry import CartesianPolygon
+from pyrate.plan.geometry import CartesianRoute
+from pyrate.plan.geometry import Direction
+from pyrate.plan.geometry import Geospatial
+from pyrate.plan.geometry import PolarLocation
+from pyrate.plan.geometry import PolarPolygon
+from pyrate.plan.geometry import PolarRoute
+
+# Hypothesis testing
+from pyrate.common.testing.strategies.geometry import geospatial_objects
+
+
+_CARTESIAN_LOCATION_1 = CartesianLocation(5003.0, 139.231)
+_CARTESIAN_LOCATION_2 = CartesianLocation(600.1, 139.231)
+_POLAR_LOCATION_1 = PolarLocation(65.01, -180.0)
+_POLAR_LOCATION_2 = PolarLocation(-80.3, -180.0)
+
+
+class TestStringRepresentations(TestCase):
+ """Makes sure that the string conversion with ``__str__`` and ``__repr__`` works."""
+
+ _GROUND_TRUTH: Sequence[Tuple[Geospatial, str]] = [
+ (
+ _CARTESIAN_LOCATION_1,
+ "CartesianLocation(east=5003.0, north=139.231)",
+ ),
+ (
+ PolarLocation(65.01, -180.0),
+ "PolarLocation(latitude=65.00999999999999, longitude=-180.0)",
+ ),
+ (
+ CartesianPolygon([_CARTESIAN_LOCATION_1, _CARTESIAN_LOCATION_1, _CARTESIAN_LOCATION_1]),
+ "CartesianPolygon(locations=[(5003.0, 139.231), (5003.0, 139.231), (5003.0, 139.231), "
+ "(5003.0, 139.231)])",
+ ),
+ (
+ PolarPolygon([_POLAR_LOCATION_1, _POLAR_LOCATION_1, _POLAR_LOCATION_1]),
+ "PolarPolygon(locations=[PolarLocation(latitude=65.00999999999999, longitude=-180.0), "
+ "PolarLocation(latitude=65.00999999999999, longitude=-180.0), "
+ "PolarLocation(latitude=65.00999999999999, longitude=-180.0)])",
+ ),
+ (
+ CartesianRoute([_CARTESIAN_LOCATION_1, _CARTESIAN_LOCATION_2, _CARTESIAN_LOCATION_1]),
+ "CartesianRoute(locations=[(5003.0, 139.231), (600.1, 139.231), (5003.0, 139.231)])",
+ ),
+ (
+ PolarRoute([_POLAR_LOCATION_1, _POLAR_LOCATION_2, _POLAR_LOCATION_1, _POLAR_LOCATION_1]),
+ "PolarRoute(locations=[PolarLocation(latitude=65.00999999999999, longitude=-180.0), "
+ "PolarLocation(latitude=-80.3, longitude=-180.0), "
+ "PolarLocation(latitude=65.00999999999999, longitude=-180.0), "
+ "PolarLocation(latitude=65.00999999999999, longitude=-180.0)])",
+ ),
+ ]
+
+ def test_conversions(self) -> None:
+ """Makes sure that all given geospatial objects can be converted."""
+
+ for geospatial, desired_str in TestStringRepresentations._GROUND_TRUTH:
+
+ with self.subTest(f"{type(geospatial)}.__str__"):
+ self.assertEqual(str(geospatial), desired_str)
+
+ with self.subTest(f"{type(geospatial)}.__repr__"):
+ self.assertEqual(repr(geospatial), desired_str)
+
+
+class TestGeoJsonRepresentations(TestCase):
+ """Makes sure that the conversion to GeoJSON via the common property ``__geo_interface__`` works."""
+
+ _GROUND_TRUTH: Sequence[Tuple[Geospatial, str]] = [
+ (
+ _CARTESIAN_LOCATION_1,
+ '{"type": "Feature", "geometry": {"type": "Point", '
+ '"coordinates": [5003.0, 139.231]}, "properties": {}}',
+ ),
+ (
+ PolarLocation(65.01, -180.0),
+ '{"type": "Feature", "geometry": {"type": "Point", "coordinates": [-180.0, 65.01]}, '
+ '"properties": {}}',
+ ),
+ (
+ CartesianPolygon([_CARTESIAN_LOCATION_1, _CARTESIAN_LOCATION_1, _CARTESIAN_LOCATION_1]),
+ '{"type": "Feature", "geometry": {"type": "Polygon", "coordinates": '
+ "[[[5003.0, 139.231], [5003.0, 139.231], [5003.0, 139.231], [5003.0, 139.231]]]}, "
+ '"properties": {}}',
+ ),
+ (
+ PolarPolygon([_POLAR_LOCATION_1, _POLAR_LOCATION_1, _POLAR_LOCATION_1]),
+ '{"type": "Feature", "geometry": {"type": "Polygon", '
+ '"coordinates": [[[-180.0, 65.01], [-180.0, 65.01], [-180.0, 65.01]]]}, "properties": {}}',
+ ),
+ (
+ CartesianRoute([_CARTESIAN_LOCATION_1, _CARTESIAN_LOCATION_2, _CARTESIAN_LOCATION_1]),
+ '{"type": "Feature", "geometry": {"type": "LineString", "coordinates": '
+ '[[5003.0, 139.231], [600.1, 139.231], [5003.0, 139.231]]}, "properties": {}}',
+ ),
+ (
+ PolarRoute([_POLAR_LOCATION_1, _POLAR_LOCATION_2, _POLAR_LOCATION_1]),
+ '{"type": "Feature", "geometry": {"type": "LineString", "coordinates": '
+ '[[-180.0, 65.01], [-180.0, -80.3], [-180.0, 65.01]]}, "properties": {}}',
+ ),
+ ]
+
+ def test_conversions(self) -> None:
+ """Makes sure that all given geospatial objects can be converted."""
+
+ for geospatial, desired_geojson in TestGeoJsonRepresentations._GROUND_TRUTH:
+ for indent in (None, 1, 8):
+ with self.subTest(f"{type(geospatial)} with indent={indent}"):
+ geojson = geospatial.to_geo_json(indent=indent)
+ # load as JSON to get better error messages and to be independent of whitespace
+ self.assertDictEqual(loads(geojson), loads(desired_geojson))
+
+
+class TestIdentifiers(TestCase):
+ """Makes sure that identifiers are validated correctly.
+
+ The test is only performed on polar locations for simplicity and because validation is handled in the
+ abstract common parent class :class:`pyrate.plan.geometry.Geospatial` anyway.
+ """
+
+ @given(st.integers(min_value=0, max_value=(2**63) - 1))
+ def test_on_locations_success(self, integer: int) -> None: # pylint: disable=no-self-use
+ """Tests that valid identifiers are accepted."""
+ PolarLocation(latitude=0.0, longitude=0.0, identifier=integer)
+
+ @given(
+ st.one_of(
+ st.integers(max_value=-1), # negative numbers
+ st.integers(min_value=2**63), # very large numbers
+ )
+ )
+ def test_on_locations_rejected(self, integer: int) -> None:
+ """Tests that invalid identifiers are rejected."""
+ with self.assertRaises(AssertionError):
+ PolarLocation(latitude=0.0, longitude=0.0, identifier=integer)
+
+
+class TestEqualityMethods(TestCase):
+ """Test the various equality methods."""
+
+ @given(geospatial_objects(stable=True))
+ @settings(
+ max_examples=200,
+ suppress_health_check=(HealthCheck.data_too_large,),
+ phases=(Phase.explicit, Phase.reuse, Phase.generate, Phase.target), # Do not shrink as it takes long
+ )
+ def test_equality_after_translation(self, geospatial: Any) -> None:
+ """Tests that translated objects are only equal under sufficient tolerance."""
+
+ # We discard the second output since it differs between cartesian and polar objects
+ translated, _ = geospatial.translate(direction=Direction.North, distance=0.5)
+
+ # Try since generated primitives might cause an exception to be thrown
+ # e.g. if projected routes become length 0
+ try:
+ # They should not be equal
+ self.assertNotEqual(geospatial, translated)
+ self.assertFalse(geospatial.equals(translated))
+ self.assertFalse(geospatial.equals_exact(translated, tolerance=0.0))
+ if hasattr(geospatial, "equals_almost_congruent"):
+ self.assertFalse(
+ geospatial.equals_almost_congruent(translated, abs_tolerance=0.0, rel_tolerance=0.0)
+ )
+
+ # They should be equal within some tolerance (the tolerance needs to be large for the polar
+ # variants)
+ # TODO(Someone): re-enable; see #114
+ # self.assertTrue(geospatial.equals_exact(translated, tolerance=5))
+ # self.assertTrue(geospatial.almost_equals(translated, decimal=-1))
+
+ # We do not use `equals_almost_congruent` as it is not a per-coordinate difference and might cause
+ # a very large symmetric difference on very large objects
+
+ except ValueError:
+ pass
+
+
+class TestCopyAndDeepcopy(TestCase):
+ """Tests that all geometric objects can be deep-copied."""
+
+ @given(geospatial_objects())
+ @settings(max_examples=500)
+ def test_is_copyable(self, geospatial: Any) -> None:
+ """Tests that copies can be made and are equal to the original."""
+
+ # Check copy
+ copied = copy(geospatial)
+ self.assertEqual(geospatial, copied)
+
+ # Check deepcopy
+ deep_copied = deepcopy(geospatial)
+ self.assertEqual(geospatial, deep_copied)
diff --git a/pyrate/tests/plan/geometry/primitives/test_locations.py b/pyrate/tests/plan/geometry/primitives/test_locations.py
new file mode 100644
index 0000000..cb61910
--- /dev/null
+++ b/pyrate/tests/plan/geometry/primitives/test_locations.py
@@ -0,0 +1,173 @@
+"""Tests that the location classes in :mod:`pyrate.plan.geometry.location` work correctly."""
+
+# Python standard math
+from math import isclose
+
+# Typing
+from typing import cast
+
+# Generic testing
+from unittest import TestCase
+
+# Geometry
+from shapely.geometry import Point
+
+# Hypothesis testing
+from hypothesis import given
+from hypothesis import HealthCheck
+from hypothesis import settings
+import hypothesis.strategies as st
+
+# Package under test
+from pyrate.plan.geometry import CartesianLocation
+from pyrate.plan.geometry import PolarLocation
+
+# Test helpers
+from pyrate.common.testing.strategies.geometry import cartesian_locations
+from pyrate.common.testing.strategies.geometry import geo_bearings
+from pyrate.common.testing.strategies.geometry import polar_locations
+
+# Local test helpers
+from . import is_near_special_point
+from . import simple_property_only_few_examples
+
+
+class TestLocationConversion(TestCase):
+ """Test for correct runtime behaviour in :mod:`pyrate.plan` location and shape primitives."""
+
+ @given(cartesian_locations(origin=polar_locations()))
+ @settings(max_examples=20, suppress_health_check=(HealthCheck.data_too_large,)) # this is a slow test
+ def test_projection_and_back_projection_origin_in_route(
+ self, cartesian_location: CartesianLocation
+ ) -> None:
+ """Test the projection with an origin already being present in the geometry."""
+ recreated = cartesian_location.to_polar().to_cartesian(cast(PolarLocation, cartesian_location.origin))
+ self.assertTrue(cartesian_location.equals_exact(recreated, tolerance=1e-6))
+
+ @given(cartesian_locations(origin=st.none()), polar_locations())
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_given_extra(
+ self, cartesian_location: CartesianLocation, origin: PolarLocation
+ ) -> None:
+ """Test the projection with an origin being provided."""
+ recreated = cartesian_location.to_polar(origin).to_cartesian(origin)
+ self.assertTrue(cartesian_location.equals_exact(recreated, tolerance=1e-6))
+
+ @given(cartesian_locations(origin=st.none()))
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_not_given(
+ self, cartesian_location: CartesianLocation
+ ) -> None:
+ """Test the projection with no origin being given."""
+ with self.assertRaises(ValueError):
+ cartesian_location.to_polar()
+
+ @given(cartesian_locations(origin=polar_locations()), polar_locations())
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_given_twice(
+ self, cartesian_location: CartesianLocation, origin: PolarLocation
+ ) -> None:
+ """Test the projection with ambiguous origin being provided."""
+ with self.assertRaises(ValueError):
+ cartesian_location.to_polar(origin)
+
+ def test_distance_measuring_specific(self) -> None:
+ """Tests a specific input/output pair."""
+
+ location_a = PolarLocation(latitude=55.6544, longitude=139.74477)
+ location_b = PolarLocation(latitude=21.4225, longitude=39.8261)
+ distance = location_a.distance(location_b, approximate=False)
+ self.assertAlmostEqual(distance, 8_665_850.116876071)
+
+ @given(
+ polar_locations(),
+ geo_bearings(),
+ st.floats(min_value=1.0, max_value=100_000.0, allow_nan=False, allow_infinity=False),
+ )
+ def test_translation_is_invertible(
+ self, original: PolarLocation, direction: float, distance: float
+ ) -> None:
+ """Tests that translation is invertible and a valid bearing is returned.
+
+ Warning:
+ Only tests in-depth in the case where latitudes and longitudes are not near the poles.
+ """
+
+ # translate
+ translated, back_direction = original.translate(direction, distance)
+ self.assertGreaterEqual(back_direction, 0.0)
+ self.assertLess(back_direction, 360.0)
+
+ # translate back
+ translated_translated, back_back_direction = translated.translate(back_direction, distance)
+ self.assertGreaterEqual(back_back_direction, 0.0)
+ self.assertLess(back_back_direction, 360.0)
+
+ # the method seems to have problems at poles
+ if not is_near_special_point(original) and not is_near_special_point(translated):
+ # the method is rather rough, so we want to add larger tolerances than usual while checking
+ self.assertTrue(isclose(direction, back_back_direction, abs_tol=1e-6))
+ self.assertTrue(original.equals_exact(translated_translated, 1e-6))
+
+ @given(cartesian_locations())
+ def test_from_shapely_conversion(self, cartesian_location: CartesianLocation) -> None:
+ """Test that :meth:`pyrate.plan.geometry.location.CartesianLocation.from_shapely` works."""
+
+ # we only want to compare the coordinates, so create a new instance without the identifier, name, etc.
+ bare = CartesianLocation(cartesian_location.x, cartesian_location.y)
+ bare_shapely = Point(cartesian_location.x, cartesian_location.y)
+ recreated = CartesianLocation.from_shapely(bare_shapely)
+ self.assertEqual(recreated, bare)
+
+
+class TestPolarLocationDistanceIsAMetric(TestCase):
+ """Makes sure that :meth:`~pyrate.plan.geometry.location.PolarLocation.distance` is a metric.
+
+ This should always succeed since we use a very stable external library for this.
+
+ See `Wikipedia `__ for the axioms.
+ """
+
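+ # The tests below check the metric axioms: non-negativity, symmetry, the triangle inequality and
+ # that the distance from a location to itself is zero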
+ @given(polar_locations(), polar_locations(), st.booleans())
+ def test_distance_measuring_commutes_and_sanity_checks(
+ self, location_a: PolarLocation, location_b: PolarLocation, approximate: bool
+ ) -> None:
+ """Assures flipping the sides when calculating distances does not make a significant difference."""
+
+ distance_1 = location_a.distance(location_b, approximate)
+ distance_2 = location_b.distance(location_a, approximate)
+
+ # make sure it commutes
+ self.assertAlmostEqual(distance_1, distance_2)
+
+ # make sure the distance is always positive
+ self.assertGreaterEqual(distance_1, 0.0)
+ self.assertGreaterEqual(distance_2, 0.0)
+
+ @given(polar_locations(), polar_locations(), polar_locations(), st.booleans())
+ def test_distance_measuring_triangle_inequality(
+ self,
+ location_a: PolarLocation,
+ location_b: PolarLocation,
+ location_c: PolarLocation,
+ approximate: bool,
+ ) -> None:
+ """Assures flipping the sides when calculating distances does not make a significant difference."""
+
+ distance_a_b = location_a.distance(location_b, approximate)
+ distance_b_c = location_b.distance(location_c, approximate)
+ distance_a_c = location_a.distance(location_c, approximate)
+
+ # allow for floating point errors
+ abs_tolerance = 1e-6 # 1 micro meter
+ self.assertGreaterEqual(distance_a_b + distance_b_c + abs_tolerance, distance_a_c)
+
+ @given(polar_locations(), st.booleans())
+ def test_distance_measuring_to_itself_is_zero(self, location: PolarLocation, approximate: bool) -> None:
+ """Assures flipping the sides when calculating distances does not make a significant difference."""
+
+ distance = location.distance(location, approximate)
+
+ # make sure the distance is always positive and very close to zero
+ self.assertGreaterEqual(distance, 0.0)
+ self.assertAlmostEqual(distance, 0.0)
diff --git a/pyrate/tests/plan/geometry/primitives/test_polygons.py b/pyrate/tests/plan/geometry/primitives/test_polygons.py
new file mode 100644
index 0000000..db00fca
--- /dev/null
+++ b/pyrate/tests/plan/geometry/primitives/test_polygons.py
@@ -0,0 +1,266 @@
+"""Tests that the polygon classes in :mod:`pyrate.plan.geometry.polygon` work correctly."""
+
+# Python standard math
+from math import sqrt
+
+# Typing
+from typing import cast
+
+# Generic testing
+from unittest import TestCase
+
+# Geometry
+from shapely.geometry import Polygon
+
+# Scientific
+from numpy import array
+
+# Scientific testing
+from numpy.testing import assert_array_less
+
+# Hypothesis testing
+from hypothesis import given
+import hypothesis.strategies as st
+
+# Package under test
+from pyrate.plan.geometry import CartesianPolygon
+from pyrate.plan.geometry import LocationType
+from pyrate.plan.geometry import PolarLocation
+from pyrate.plan.geometry import PolarPolygon
+
+# Test helpers
+from pyrate.common.testing.strategies.geometry import cartesian_polygons
+from pyrate.common.testing.strategies.geometry import geo_bearings
+from pyrate.common.testing.strategies.geometry import polar_locations
+from pyrate.common.testing.strategies.geometry import polar_polygons
+
+# Local test helpers
+from . import is_any_near_special_point
+from . import simple_property_only_few_examples
+from . import slow_route_max_examples
+
+
+class TestPolarPolygons(TestCase):
+ """Asserts general properties of the polar polygons."""
+
+ @given(polar_polygons())
+ @slow_route_max_examples
+ def test_area_is_non_negative(self, polar_polygon: PolarPolygon) -> None:
+ """Tests that all areas are non-negative."""
+ self.assertGreaterEqual(polar_polygon.area, 0, "areas must be non-negative")
+
+ @given(polar_polygons())
+ @slow_route_max_examples
+ def test_is_valid(self, polygon: PolarPolygon) -> None:
+ """Test that the generated polygons are valid."""
+
+ self.assertTrue(polygon.is_valid)
+
+ def test_is_not_valid(self) -> None:
+ """Test that a known invalid polygon is detected as such."""
+
+ location = PolarLocation(12, 23.999)
+ polygon = PolarPolygon([location, location, location])
+ self.assertFalse(polygon.is_valid)
+
+ @given(polar_polygons(), polar_locations(), st.booleans())
+ @slow_route_max_examples
+ def test_distance_to_vertices_is_non_negative(
+ self, polar_polygon: PolarPolygon, polar_location: PolarLocation, approximate: bool
+ ) -> None:
+ """Tests that all distances to vertices are non-negative."""
+ distance = polar_polygon.distance_to_vertices(polar_location, approximate)
+ self.assertGreaterEqual(distance, 0, "distances must be non-negative")
+
+ @given(polar_polygons(max_vertices=50))
+ @slow_route_max_examples
+ def test_simplification(self, original: PolarPolygon) -> None:
+ """Checks the the area change is valid and the rough position is preserved."""
+
+ simplified = original.simplify(tolerance=sqrt(original.area) / 10)
+
+ self.assertLessEqual(len(simplified.locations), len(original.locations))
+ self.assertTrue(original.almost_congruent(simplified, rel_tolerance=0.3))
+
+ @given(polar_polygons())
+ @slow_route_max_examples
+ def test_simplification_artificial(self, original: PolarPolygon) -> None:
+ """This duplicates the first point and looks whether it is removed."""
+
+ locations = original.locations
+ original.locations = [locations[0]] + locations
+
+ simplified = original.simplify(tolerance=sqrt(original.area) / 1000)
+
+ # strictly less, as opposed to test_simplification()
+ self.assertLess(len(simplified.locations), len(original.locations))
+ self.assertTrue(original.almost_congruent(simplified, rel_tolerance=0.05))
+
+ @given(polar_polygons())
+ @simple_property_only_few_examples # this only checks the call signatures so no need for many examples
+ def test_numpy_conversion_invertible(self, polar_polygon: PolarPolygon) -> None:
+ """Tests that the polygon conversion can be inverted."""
+ recreated = PolarPolygon.from_numpy(
+ polar_polygon.to_numpy(),
+ name=polar_polygon.name,
+ location_type=polar_polygon.location_type,
+ identifier=polar_polygon.identifier,
+ )
+ self.assertEqual(polar_polygon, recreated)
+
+ @given(
+ st.sampled_from(
+ [
+ PolarPolygon(
+ locations=[
+ PolarLocation(latitude=-76.40057132099628, longitude=-171.92454675519284),
+ PolarLocation(latitude=-76.40057132099628, longitude=-171.92454675519284),
+ PolarLocation(latitude=-76.40057132099628, longitude=-171.92454675519284),
+ ],
+ name="K",
+ ),
+ PolarPolygon(
+ locations=[
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ ],
+ location_type=LocationType.TESTING,
+ name="_1",
+ ),
+ PolarPolygon(
+ locations=[
+ PolarLocation(latitude=0.0, longitude=0.029771743643124182),
+ PolarLocation(latitude=0.0, longitude=0.029771743643124182),
+ PolarLocation(latitude=0.0, longitude=0.029771743643124182),
+ PolarLocation(latitude=0.0, longitude=0.029771743643124182),
+ ],
+ name="",
+ ),
+ ]
+ ),
+ geo_bearings(),
+ st.floats(min_value=1.0, max_value=100_000.0, allow_nan=False, allow_infinity=False),
+ )
+ def test_translation_is_invertible(
+ self, original: PolarPolygon, direction: float, distance: float
+ ) -> None:
+ """Tests that translation is invertible and a valid bearing is returned.
+
+ Warning:
+ Only tests in-depth in the case where latitudes and longitudes are not near the poles.
+ Since the tests are quite flaky due to the underlying library, we only test specific polygons.
+ """
+ # translate
+ translated, back_direction = original.translate(direction, distance)
+ assert_array_less(0.0 - 1e-12, back_direction)
+ assert_array_less(back_direction, 360.0 + 1e-12)
+
+ # translate back
+ translated_translated, back_back_direction = translated.translate(back_direction[0], distance)
+ assert_array_less(0.0 - 1e-12, back_back_direction)
+ assert_array_less(back_back_direction, 360.0 + 1e-12)
+
+ # the method seems to have problems at poles
+ if not is_any_near_special_point(original) and not is_any_near_special_point(translated):
+ # the method is rather rough, so we want to add larger tolerances than usual while checking
+ self.assertAlmostEqual(direction, back_back_direction[0], delta=0.1)
+ self.assertTrue(original.equals_exact(translated_translated, tolerance=0.1))
+
+ def test_non_finite_from_numpy_raises(self) -> None:
+ """Tests that invalid parameter to :meth:`~PolarPolygon.from_numpy` warn about it."""
+
+ with self.assertRaises(AssertionError):
+ PolarPolygon.from_numpy(array([(1, 2), (2, 4), (4, float("NaN"))]))
+ with self.assertRaises(AssertionError):
+ PolarPolygon.from_numpy(array([(float("Inf"), 2), (2, 4), (4, 1)]))
+ with self.assertRaises(AssertionError):
+ PolarPolygon.from_numpy(array([(1, 2), (2, float("-Inf")), (4, 4)]))
+
+
+class TestCartesianPolygons(TestCase):
+ """Asserts general properties of the cartesian polygons."""
+
+ @given(cartesian_polygons())
+ @simple_property_only_few_examples # this only checks the call signatures so no need for many examples
+ def test_numpy_conversion_invertible(self, cartesian_polygon: CartesianPolygon) -> None:
+ """Tests that the polygon conversion can be inverted."""
+ recreated = CartesianPolygon.from_numpy(
+ cartesian_polygon.to_numpy(),
+ origin=cartesian_polygon.origin,
+ name=cartesian_polygon.name,
+ location_type=cartesian_polygon.location_type,
+ identifier=cartesian_polygon.identifier,
+ )
+
+ self.assertEqual(cartesian_polygon, recreated)
+
+ @given(cartesian_polygons(origin=polar_locations()))
+ @slow_route_max_examples
+ def test_projection_and_back_projection_origin_in_route(
+ self, cartesian_polygon: CartesianPolygon
+ ) -> None:
+ """Test the projection with an origin already being present in the geometry."""
+ recreated = cartesian_polygon.to_polar().to_cartesian(cast(PolarLocation, cartesian_polygon.origin))
+ self.assertTrue(cartesian_polygon.equals_exact(recreated, tolerance=1e-6))
+
+ @given(cartesian_polygons(origin=st.none()), polar_locations())
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_given_extra(
+ self, cartesian_polygon: CartesianPolygon, origin: PolarLocation
+ ) -> None:
+ """Test the projection with an origin being provided."""
+ recreated = cartesian_polygon.to_polar(origin).to_cartesian(origin)
+ self.assertTrue(cartesian_polygon.equals_exact(recreated, tolerance=1e-6))
+
+ @given(cartesian_polygons(origin=st.none()))
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_not_given(
+ self, cartesian_polygon: CartesianPolygon
+ ) -> None:
+ """Test the projection with no origin being given."""
+ with self.assertRaises(ValueError):
+ cartesian_polygon.to_polar()
+
+ @given(cartesian_polygons(origin=polar_locations()), polar_locations())
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_given_twice(
+ self, cartesian_polygon: CartesianPolygon, origin: PolarLocation
+ ) -> None:
+ """Test the projection with ambiguous origin being provided."""
+ with self.assertRaises(ValueError):
+ cartesian_polygon.to_polar(origin)
+
+ @given(cartesian_polygons())
+ @slow_route_max_examples
+ def test_locations_property_attributes(self, cartesian_polygon: CartesianPolygon) -> None:
+ """Test that all contained locations share the same attributes."""
+ for location in cartesian_polygon.locations:
+ self.assertEqual(location.location_type, cartesian_polygon.location_type)
+ self.assertEqual(location.name, cartesian_polygon.name)
+ self.assertEqual(location.identifier, cartesian_polygon.identifier)
+ self.assertEqual(location.origin, cartesian_polygon.origin)
+
+ @given(cartesian_polygons())
+ @slow_route_max_examples
+ def test_from_shapely_conversion(self, cartesian_polygon: CartesianPolygon) -> None:
+ """Test that :meth:`pyrate.plan.geometry.polygon.CartesianPolygon.from_shapely` works."""
+ # we only want to compare the coordinates, so create a new instance without the identifier, name, etc.
+ bare = CartesianPolygon.from_numpy(cartesian_polygon.to_numpy())
+ bare_shapely = Polygon(cartesian_polygon.to_numpy())
+ recreated = CartesianPolygon.from_shapely(bare_shapely)
+ self.assertEqual(recreated, bare)
+
+ def test_non_finite_from_numpy_raises(self) -> None:
+ """Tests that invalid parameter to :meth:`~CartesianPolygon.from_numpy` warn about it."""
+
+ with self.assertRaises(AssertionError):
+ CartesianPolygon.from_numpy(array([(1, 2), (2, 4), (4, float("NaN"))]))
+ with self.assertRaises(AssertionError):
+ CartesianPolygon.from_numpy(array([(float("Inf"), 2), (2, 4), (4, 1)]))
+ with self.assertRaises(AssertionError):
+ CartesianPolygon.from_numpy(array([(1, 2), (2, float("-Inf")), (4, 4)]))
diff --git a/pyrate/tests/plan/geometry/primitives/test_routes.py b/pyrate/tests/plan/geometry/primitives/test_routes.py
new file mode 100644
index 0000000..3a97ff6
--- /dev/null
+++ b/pyrate/tests/plan/geometry/primitives/test_routes.py
@@ -0,0 +1,266 @@
+"""Tests that the route classes in :mod:`pyrate.plan.geometry.route` work correctly."""
+
+# Typing
+from typing import cast
+
+# Generic testing
+from unittest import TestCase
+
+# Geometry
+from shapely.geometry import LineString
+
+# Scientific
+from numpy import array
+
+# Scientific testing
+from numpy.testing import assert_array_less
+
+# Hypothesis testing
+from hypothesis import given
+import hypothesis.strategies as st
+
+# Package under test
+from pyrate.plan.geometry import CartesianLocation
+from pyrate.plan.geometry import CartesianRoute
+from pyrate.plan.geometry import LocationType
+from pyrate.plan.geometry import PolarLocation
+from pyrate.plan.geometry import PolarRoute
+
+# Test helpers
+from pyrate.common.testing.strategies.geometry import cartesian_routes
+from pyrate.common.testing.strategies.geometry import geo_bearings
+from pyrate.common.testing.strategies.geometry import polar_locations
+from pyrate.common.testing.strategies.geometry import polar_routes
+
+# Local test helpers
+from . import is_any_near_special_point
+from . import simple_property_only_few_examples
+from . import slow_route_max_examples
+
+
+class TestPolarRoutes(TestCase):
+ """Asserts general properties of the polar routes."""
+
+ @given(polar_routes())
+ @simple_property_only_few_examples # this only checks the call signatures so no need for many examples
+ def test_numpy_conversion_invertible(self, polar_route: PolarRoute) -> None:
+ """Tests that the route conversion can be inverted."""
+ recreated = PolarRoute.from_numpy(
+ polar_route.to_numpy(),
+ name=polar_route.name,
+ location_type=polar_route.location_type,
+ identifier=polar_route.identifier,
+ )
+ self.assertEqual(polar_route, recreated)
+
+ @given(polar_routes(), polar_locations(), st.booleans())
+ @slow_route_max_examples
+ def test_distance_to_vertices_is_non_negative(
+ self, polar_route: PolarRoute, polar_location: PolarLocation, approximate: bool
+ ) -> None:
+ """Tests that all distances to vertices are non-negative."""
+ distance = polar_route.distance_to_vertices(polar_location, approximate)
+ self.assertGreaterEqual(distance, 0, "distances must be non-negative")
+
+ @given(polar_routes())
+ @slow_route_max_examples
+ def test_length_is_non_negative(self, polar_route: PolarRoute) -> None:
+ """Tests that the length of a route is always non-negative."""
+ self.assertGreaterEqual(polar_route.length(), 0, "lengths must be non-negative")
+
+ @given(polar_routes(min_vertices=3, max_vertices=3))
+ @slow_route_max_examples
+ def test_length_values(self, polar_route: PolarRoute) -> None:
+ """Tests that the length of a route with three locations is plausible."""
+ location_a, location_b, location_c = polar_route.locations
+ distance = location_a.distance(location_b) + location_b.distance(location_c)
+ self.assertAlmostEqual(polar_route.length(), distance, msg="the length must equal the sum of the pairwise distances")
+
+ @given(
+ st.sampled_from(
+ [
+ PolarRoute(
+ locations=[
+ PolarLocation(latitude=-76.40057132099628, longitude=-171.92454675519284),
+ PolarLocation(latitude=-76, longitude=-171),
+ ],
+ name="K",
+ ),
+ PolarRoute(
+ locations=[
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33, longitude=89),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ PolarLocation(latitude=-33.68964326163993, longitude=89.95053943144632),
+ ],
+ location_type=LocationType.TESTING,
+ name="_1",
+ ),
+ PolarRoute(
+ locations=[
+ PolarLocation(latitude=0.0, longitude=0.029771743643124182),
+ PolarLocation(latitude=0.0, longitude=0.0),
+ PolarLocation(latitude=0.0, longitude=0.029771743643124182),
+ PolarLocation(latitude=0.0, longitude=0.029771743643124182),
+ ],
+ name="",
+ ),
+ ]
+ ),
+ geo_bearings(),
+ st.floats(min_value=1.0, max_value=100_000.0),
+ )
+ def test_translation_is_invertible(self, original: PolarRoute, direction: float, distance: float) -> None:
+ """Tests that translation is invertible and a valid bearing is returned.
+
+ Warning:
+ Only tests in-depth in the case where latitudes and longitudes are not near the poles.
+ Since the tests are quite flaky due to the underlying library, we only test specific routes.
+ """
+
+ # translate
+ translated, back_direction = original.translate(direction, distance)
+ assert_array_less(0.0 - 1e-12, back_direction)
+ assert_array_less(back_direction, 360.0 + 1e-12)
+
+ # translate back
+ translated_translated, back_back_direction = translated.translate(back_direction[0], distance)
+ assert_array_less(0.0 - 1e-12, back_back_direction)
+ assert_array_less(back_back_direction, 360.0 + 1e-12)
+
+ # the method seems to have problems at poles
+ if not is_any_near_special_point(original) and not is_any_near_special_point(translated):
+ # the method is rather rough, so we want to add larger tolerances than usual while checking
+ self.assertAlmostEqual(direction, back_back_direction[0], delta=0.1)
+ self.assertTrue(original.equals_exact(translated_translated, tolerance=1e-2 * distance))
+
+ def test_zero_length_route(self) -> None:
+ """Test that :meth:`pyrate.plan.geometry.route.PolarRoute.__init__` raises an exception."""
+
+ with self.assertRaises(ValueError):
+ PolarRoute(locations=[PolarLocation(0.0, 0.0)] * 2)
+
+ def test_non_finite_from_numpy_raises(self) -> None:
+ """Tests that invalid parameter to :meth:`~PolarRoute.from_numpy` warn about it."""
+
+ with self.assertRaises(AssertionError):
+ PolarRoute.from_numpy(array([(1, 2), (2, 4), (4, float("NaN"))]))
+ with self.assertRaises(AssertionError):
+ PolarRoute.from_numpy(array([(float("Inf"), 2), (2, 4), (4, 1)]))
+ with self.assertRaises(AssertionError):
+ PolarRoute.from_numpy(array([(1, 2), (2, float("-Inf")), (4, 4)]))
+
+
+class TestCartesianRoutes(TestCase):
+ """Asserts general properties of the cartesian routes."""
+
+ @given(cartesian_routes())
+ @simple_property_only_few_examples # this only checks the call signatures so no need for many examples
+ def test_numpy_conversion_invertible(self, cartesian_route: CartesianRoute) -> None:
+ """Tests that the polygon conversion can be inverted."""
+
+ recreated = CartesianRoute.from_numpy(
+ cartesian_route.to_numpy(),
+ origin=cartesian_route.origin,
+ name=cartesian_route.name,
+ location_type=cartesian_route.location_type,
+ identifier=cartesian_route.identifier,
+ )
+
+ self.assertEqual(cartesian_route, recreated)
+
+ @given(cartesian_routes(origin=polar_locations()))
+ @slow_route_max_examples
+ def test_projection_and_back_projection_origin_in_route(self, cartesian_route: CartesianRoute) -> None:
+ """Test the projection with an origin already being present in the geometry."""
+
+ # Try since generated primitives might cause an exception to be thrown
+ # e.g. if projected routes become length 0
+ try:
+ recreated = cartesian_route.to_polar().to_cartesian(cast(PolarLocation, cartesian_route.origin))
+ self.assertTrue(cartesian_route.equals_exact(recreated, tolerance=1e-6))
+ except ValueError:
+ pass
+
+ @given(cartesian_routes(origin=st.none()), polar_locations())
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_given_extra(
+ self, cartesian_route: CartesianRoute, origin: PolarLocation
+ ) -> None:
+ """Test the projection with an origin being provided."""
+
+ # Try since generated primitives might cause an exception to be thrown
+ # e.g. if projected routes become length 0
+ try:
+ recreated = cartesian_route.to_polar(origin).to_cartesian(origin)
+ self.assertTrue(cartesian_route.equals_exact(recreated, tolerance=1e-6))
+ except ValueError:
+ pass
+
+ @given(cartesian_routes(origin=st.none()))
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_not_given(self, cartesian_route: CartesianRoute) -> None:
+ """Test the projection with no origin being given."""
+
+ with self.assertRaises(ValueError):
+ cartesian_route.to_polar()
+
+ @given(cartesian_routes(origin=polar_locations()), polar_locations())
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_projection_and_back_projection_origin_given_twice(
+ self, cartesian_route: CartesianRoute, origin: PolarLocation
+ ) -> None:
+ """Test the projection with ambiguous origin being provided."""
+
+ with self.assertRaises(ValueError):
+ cartesian_route.to_polar(origin)
+
+ @given(cartesian_routes())
+ @slow_route_max_examples
+ def test_locations_property_attributes(self, cartesian_route: CartesianRoute) -> None:
+ """Test that all contained locations share the same attributes."""
+
+ for location in cartesian_route.locations:
+ self.assertEqual(location.location_type, cartesian_route.location_type)
+ self.assertEqual(location.name, cartesian_route.name)
+ self.assertEqual(location.identifier, cartesian_route.identifier)
+ self.assertEqual(location.origin, cartesian_route.origin)
+
+ @given(cartesian_routes())
+ @slow_route_max_examples
+ def test_from_shapely_conversion(self, cartesian_route: CartesianRoute) -> None:
+ """Test that :meth:`pyrate.plan.geometry.route.CartesianRoute.from_shapely` works."""
+
+ # we only want to compare the coordinates, so create a new instance without the identifier, name, etc.
+ bare = CartesianRoute.from_numpy(cartesian_route.to_numpy())
+ bare_shapely = LineString(cartesian_route.to_numpy())
+ recreated = CartesianRoute.from_shapely(bare_shapely)
+ self.assertEqual(recreated, bare)
+
+ @given(cartesian_routes())
+ @simple_property_only_few_examples # this only checks very simple additional logic
+ def test_locations_property(self, cartesian_route: CartesianRoute) -> None:
+ """Test that :meth:`pyrate.plan.geometry.route.CartesianRoute.locations` works."""
+
+ locations = cartesian_route.locations
+ self.assertEqual(len(cartesian_route.coords), len(locations))
+ for i, (x, y) in enumerate(cartesian_route.coords):
+ self.assertEqual(x, locations[i].x)
+ self.assertEqual(y, locations[i].y)
+
+ def test_zero_length_route(self) -> None:
+ """Test that :meth:`pyrate.plan.geometry.route.CartesianRoute.__init__` raises an exception."""
+
+ with self.assertRaises(ValueError):
+ CartesianRoute(locations=[CartesianLocation(0.0, 0.0)] * 2)
+
+ def test_non_finite_from_numpy_raises(self) -> None:
+ """Tests that invalid parameter to :meth:`~CartesianRoute.from_numpy` warn about it."""
+
+ with self.assertRaises(AssertionError):
+ CartesianRoute.from_numpy(array([(1, 2), (2, 4), (4, float("NaN"))]))
+ with self.assertRaises(AssertionError):
+ CartesianRoute.from_numpy(array([(float("Inf"), 2), (2, 4), (4, 1)]))
+ with self.assertRaises(AssertionError):
+ CartesianRoute.from_numpy(array([(1, 2), (2, float("-Inf")), (4, 4)]))
diff --git a/pyrate/tests/plan/graph/__init__.py b/pyrate/tests/plan/graph/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/plan/graph/generate/__init__.py b/pyrate/tests/plan/graph/generate/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/plan/graph/generate/example_files/geodesic_-M_s_-c_2_-f_2_ico.off b/pyrate/tests/plan/graph/generate/example_files/geodesic_-M_s_-c_2_-f_2_ico.off
new file mode 100644
index 0000000..4793303
--- /dev/null
+++ b/pyrate/tests/plan/graph/generate/example_files/geodesic_-M_s_-c_2_-f_2_ico.off
@@ -0,0 +1,376 @@
+OFF
+122 252 0
+0 0.5257311121191336 0.85065080835204
+0 0.5257311121191336 -0.85065080835204
+0 -0.5257311121191336 0.85065080835204
+0 -0.5257311121191336 -0.85065080835204
+0.5257311121191336 0.85065080835204 0
+0.5257311121191336 -0.85065080835204 0
+-0.5257311121191336 0.85065080835204 0
+-0.5257311121191336 -0.85065080835204 0
+0.85065080835204 0 0.5257311121191336
+0.85065080835204 0 -0.5257311121191336
+-0.85065080835204 0 0.5257311121191336
+-0.85065080835204 0 -0.5257311121191336
+2.175242402100701e-16 -1.643460219210441e-32 1
+0.3090169943749475 0.8090169943749472 0.5000000000000002
+-0.3090169943749475 0.8090169943749472 0.5000000000000002
+0.4999999999999999 0.3090169943749474 0.8090169943749472
+-0.5000000000000001 0.3090169943749475 0.8090169943749472
+2.175242402100701e-16 1.643460219210441e-32 -1
+0.3090169943749475 0.8090169943749472 -0.5000000000000002
+-0.3090169943749475 0.8090169943749472 -0.5000000000000002
+0.5 0.3090169943749473 -0.8090169943749475
+-0.4999999999999999 0.3090169943749474 -0.8090169943749472
+0.3090169943749473 -0.8090169943749475 0.5
+-0.3090169943749475 -0.8090169943749472 0.5000000000000002
+0.5 -0.3090169943749473 0.8090169943749475
+-0.4999999999999999 -0.3090169943749474 0.8090169943749472
+0.3090169943749473 -0.8090169943749475 -0.5
+-0.3090169943749473 -0.8090169943749475 -0.5
+0.5 -0.3090169943749472 -0.8090169943749475
+-0.5000000000000001 -0.3090169943749475 -0.8090169943749472
+0 1 4.350484804201401e-17
+0.8090169943749475 0.5 0.3090169943749472
+0.8090169943749472 0.4999999999999999 -0.3090169943749473
+0 -1 -4.350484804201401e-17
+0.8090169943749472 -0.4999999999999999 0.3090169943749473
+0.8090169943749475 -0.5 -0.3090169943749472
+-0.8090169943749472 0.4999999999999999 0.3090169943749473
+-0.8090169943749472 0.4999999999999999 -0.3090169943749475
+-0.8090169943749475 -0.5 0.3090169943749472
+-0.8090169943749472 -0.4999999999999999 -0.3090169943749473
+1 2.175242402100701e-16 -1.643460219210441e-32
+-1 -2.175242402100701e-16 -1.643460219210441e-32
+-0.1803319730021167 0.289241011911498 -0.9401170227910867
+-0.35682208977309 -3.124513936890529e-17 -0.9341723589627157
+-0.1803319730021166 -0.2892410119114981 -0.9401170227910867
+-0.6483337612153338 -5.436311068297173e-17 -0.7613562464893677
+-0.1803319730021166 0.2892410119114981 0.9401170227910867
+-0.35682208977309 3.09531117213564e-17 0.9341723589627158
+-0.6483337612153338 5.402340711901317e-17 0.7613562464893677
+-0.1803319730021167 -0.289241011911498 0.9401170227910867
+0.291783261575753 -0.5810242734872509 0.7597850497889703
+0.5773502691896258 -0.5773502691896256 0.5773502691896258
+0.5810242734872511 -0.7597850497889701 0.291783261575753
+0.7597850497889702 -0.291783261575753 0.5810242734872511
+-0.291783261575753 -0.5810242734872509 -0.7597850497889703
+-0.5773502691896258 -0.5773502691896256 -0.5773502691896258
+-0.5810242734872511 -0.7597850497889701 -0.291783261575753
+-0.7597850497889702 -0.291783261575753 -0.5810242734872511
+-2.313323858849861e-18 0.7613562464893674 -0.6483337612153339
+3.124513936890529e-17 0.9341723589627158 -0.3568220897730901
+-0.2892410119114981 0.9401170227910867 -0.1803319730021165
+0.2892410119114981 0.9401170227910867 -0.1803319730021165
+-2.313323858849861e-18 -0.7613562464893674 0.6483337612153339
+3.124513936890529e-17 -0.9341723589627158 0.3568220897730901
+-0.2892410119114981 -0.9401170227910867 0.1803319730021165
+0.2892410119114981 -0.9401170227910867 0.1803319730021165
+0.2917832615757529 -0.5810242734872509 -0.7597850497889704
+0.5773502691896258 -0.5773502691896257 -0.5773502691896258
+0.7597850497889701 -0.2917832615757531 -0.5810242734872512
+0.5810242734872511 -0.7597850497889701 -0.291783261575753
+2.313323858849861e-18 0.7613562464893674 0.6483337612153339
+-3.124513936890529e-17 0.9341723589627158 0.3568220897730901
+0.2892410119114981 0.9401170227910867 0.1803319730021165
+-0.2892410119114981 0.9401170227910867 0.1803319730021165
+-0.2917832615757529 -0.5810242734872509 0.7597850497889704
+-0.5773502691896258 -0.5773502691896257 0.5773502691896258
+-0.7597850497889701 -0.2917832615757531 0.5810242734872512
+-0.5810242734872511 -0.7597850497889701 0.291783261575753
+2.313323858849861e-18 -0.7613562464893674 -0.6483337612153339
+-3.124513936890529e-17 -0.9341723589627158 -0.3568220897730901
+0.2892410119114981 -0.9401170227910867 -0.1803319730021165
+-0.2892410119114981 -0.9401170227910867 -0.1803319730021165
+0.1803319730021167 0.289241011911498 0.9401170227910867
+0.35682208977309 -3.124513936890529e-17 0.9341723589627157
+0.1803319730021166 -0.2892410119114981 0.9401170227910867
+0.6483337612153338 -5.436311068297173e-17 0.7613562464893677
+0.2917832615757529 0.5810242734872509 0.7597850497889704
+0.5773502691896258 0.5773502691896257 0.5773502691896258
+0.7597850497889701 0.2917832615757531 0.5810242734872512
+0.5810242734872511 0.7597850497889701 0.291783261575753
+0.7613562464893677 -0.6483337612153338 5.436311068297173e-17
+0.9341723589627157 -0.35682208977309 3.124513936890529e-17
+0.9401170227910867 -0.1803319730021167 -0.289241011911498
+0.9401170227910867 -0.1803319730021166 0.2892410119114981
+0.291783261575753 0.5810242734872509 -0.7597850497889703
+0.5773502691896258 0.5773502691896256 -0.5773502691896258
+0.5810242734872511 0.7597850497889701 -0.291783261575753
+0.7597850497889702 0.291783261575753 -0.5810242734872511
+0.1803319730021166 0.2892410119114981 -0.9401170227910867
+0.35682208977309 3.09531117213564e-17 -0.9341723589627158
+0.6483337612153338 5.402340711901317e-17 -0.7613562464893677
+0.1803319730021167 -0.289241011911498 -0.9401170227910867
+0.7613562464893677 0.6483337612153338 -5.436311068297173e-17
+0.9341723589627157 0.35682208977309 -3.124513936890529e-17
+0.9401170227910867 0.1803319730021167 0.289241011911498
+0.9401170227910867 0.1803319730021166 -0.2892410119114981
+-0.291783261575753 0.5810242734872509 0.7597850497889703
+-0.5773502691896258 0.5773502691896256 0.5773502691896258
+-0.5810242734872511 0.7597850497889701 0.291783261575753
+-0.7597850497889702 0.291783261575753 0.5810242734872511
+-0.7613562464893677 0.6483337612153338 5.436311068297173e-17
+-0.9341723589627157 0.35682208977309 3.124513936890529e-17
+-0.9401170227910867 0.1803319730021167 -0.289241011911498
+-0.9401170227910867 0.1803319730021166 0.2892410119114981
+-0.2917832615757529 0.5810242734872509 -0.7597850497889704
+-0.5773502691896258 0.5773502691896257 -0.5773502691896258
+-0.7597850497889701 0.2917832615757531 -0.5810242734872512
+-0.5810242734872511 0.7597850497889701 -0.291783261575753
+-0.7613562464893677 -0.6483337612153338 -5.436311068297173e-17
+-0.9341723589627157 -0.35682208977309 -3.124513936890529e-17
+-0.9401170227910867 -0.1803319730021167 0.289241011911498
+-0.9401170227910867 -0.1803319730021166 -0.2892410119114981
+3 42 1 98 0.90196 0.45098 0.00000
+3 21 114 42 0.90196 0.45098 0.00000
+3 42 43 21 0.90196 0.45098 0.00000
+3 43 42 17 0.90196 0.45098 0.00000
+3 17 44 43 0.90196 0.45098 0.00000
+3 44 17 101 0.90196 0.45098 0.00000
+3 45 21 43 0.90196 0.45098 0.00000
+3 43 29 45 0.90196 0.45098 0.00000
+3 29 43 44 0.90196 0.45098 0.00000
+3 54 44 3 0.90196 0.45098 0.00000
+3 11 116 45 0.90196 0.45098 0.00000
+3 57 45 29 0.90196 0.45098 0.00000
+3 46 0 106 0.90196 0.45098 0.00000
+3 12 82 46 0.90196 0.45098 0.00000
+3 46 47 12 0.90196 0.45098 0.00000
+3 47 46 16 0.90196 0.45098 0.00000
+3 16 48 47 0.90196 0.45098 0.00000
+3 48 16 109 0.90196 0.45098 0.00000
+3 49 12 47 0.90196 0.45098 0.00000
+3 47 25 49 0.90196 0.45098 0.00000
+3 25 47 48 0.90196 0.45098 0.00000
+3 76 48 10 0.90196 0.45098 0.00000
+3 2 84 49 0.90196 0.45098 0.00000
+3 74 49 25 0.90196 0.45098 0.00000
+3 50 2 62 0.90196 0.45098 0.00000
+3 24 84 50 0.90196 0.45098 0.00000
+3 50 51 24 0.90196 0.45098 0.00000
+3 51 50 22 0.90196 0.45098 0.00000
+3 22 52 51 0.90196 0.45098 0.00000
+3 52 22 65 0.90196 0.45098 0.00000
+3 53 24 51 0.90196 0.45098 0.00000
+3 51 34 53 0.90196 0.45098 0.00000
+3 34 51 52 0.90196 0.45098 0.00000
+3 90 52 5 0.90196 0.45098 0.00000
+3 8 85 53 0.90196 0.45098 0.00000
+3 93 53 34 0.90196 0.45098 0.00000
+3 54 3 78 0.90196 0.45098 0.00000
+3 29 44 54 0.90196 0.45098 0.00000
+3 54 55 29 0.90196 0.45098 0.00000
+3 55 54 27 0.90196 0.45098 0.00000
+3 27 56 55 0.90196 0.45098 0.00000
+3 56 27 81 0.90196 0.45098 0.00000
+3 57 29 55 0.90196 0.45098 0.00000
+3 55 39 57 0.90196 0.45098 0.00000
+3 39 55 56 0.90196 0.45098 0.00000
+3 118 56 7 0.90196 0.45098 0.00000
+3 11 45 57 0.90196 0.45098 0.00000
+3 121 57 39 0.90196 0.45098 0.00000
+3 58 1 114 0.90196 0.45098 0.00000
+3 18 94 58 0.90196 0.45098 0.00000
+3 58 59 18 0.90196 0.45098 0.00000
+3 59 58 19 0.90196 0.45098 0.00000
+3 19 60 59 0.90196 0.45098 0.00000
+3 60 19 117 0.90196 0.45098 0.00000
+3 61 18 59 0.90196 0.45098 0.00000
+3 59 30 61 0.90196 0.45098 0.00000
+3 30 59 60 0.90196 0.45098 0.00000
+3 73 60 6 0.90196 0.45098 0.00000
+3 4 96 61 0.90196 0.45098 0.00000
+3 72 61 30 0.90196 0.45098 0.00000
+3 62 2 74 0.90196 0.45098 0.00000
+3 22 50 62 0.90196 0.45098 0.00000
+3 62 63 22 0.90196 0.45098 0.00000
+3 63 62 23 0.90196 0.45098 0.00000
+3 23 64 63 0.90196 0.45098 0.00000
+3 64 23 77 0.90196 0.45098 0.00000
+3 65 22 63 0.90196 0.45098 0.00000
+3 63 33 65 0.90196 0.45098 0.00000
+3 33 63 64 0.90196 0.45098 0.00000
+3 81 64 7 0.90196 0.45098 0.00000
+3 5 52 65 0.90196 0.45098 0.00000
+3 80 65 33 0.90196 0.45098 0.00000
+3 66 3 101 0.90196 0.45098 0.00000
+3 26 78 66 0.90196 0.45098 0.00000
+3 66 67 26 0.90196 0.45098 0.00000
+3 67 66 28 0.90196 0.45098 0.00000
+3 28 68 67 0.90196 0.45098 0.00000
+3 68 28 100 0.90196 0.45098 0.00000
+3 69 26 67 0.90196 0.45098 0.00000
+3 67 35 69 0.90196 0.45098 0.00000
+3 35 67 68 0.90196 0.45098 0.00000
+3 92 68 9 0.90196 0.45098 0.00000
+3 5 80 69 0.90196 0.45098 0.00000
+3 90 69 35 0.90196 0.45098 0.00000
+3 70 0 86 0.90196 0.45098 0.00000
+3 14 106 70 0.90196 0.45098 0.00000
+3 70 71 14 0.90196 0.45098 0.00000
+3 71 70 13 0.90196 0.45098 0.00000
+3 13 72 71 0.90196 0.45098 0.00000
+3 72 13 89 0.90196 0.45098 0.00000
+3 73 14 71 0.90196 0.45098 0.00000
+3 71 30 73 0.90196 0.45098 0.00000
+3 30 71 72 0.90196 0.45098 0.00000
+3 61 72 4 0.90196 0.45098 0.00000
+3 6 108 73 0.90196 0.45098 0.00000
+3 60 73 30 0.90196 0.45098 0.00000
+3 74 2 49 0.90196 0.45098 0.00000
+3 23 62 74 0.90196 0.45098 0.00000
+3 74 75 23 0.90196 0.45098 0.00000
+3 75 74 25 0.90196 0.45098 0.00000
+3 25 76 75 0.90196 0.45098 0.00000
+3 76 25 48 0.90196 0.45098 0.00000
+3 77 23 75 0.90196 0.45098 0.00000
+3 75 38 77 0.90196 0.45098 0.00000
+3 38 75 76 0.90196 0.45098 0.00000
+3 120 76 10 0.90196 0.45098 0.00000
+3 7 64 77 0.90196 0.45098 0.00000
+3 118 77 38 0.90196 0.45098 0.00000
+3 78 3 66 0.90196 0.45098 0.00000
+3 27 54 78 0.90196 0.45098 0.00000
+3 78 79 27 0.90196 0.45098 0.00000
+3 79 78 26 0.90196 0.45098 0.00000
+3 26 80 79 0.90196 0.45098 0.00000
+3 80 26 69 0.90196 0.45098 0.00000
+3 81 27 79 0.90196 0.45098 0.00000
+3 79 33 81 0.90196 0.45098 0.00000
+3 33 79 80 0.90196 0.45098 0.00000
+3 65 80 5 0.90196 0.45098 0.00000
+3 7 56 81 0.90196 0.45098 0.00000
+3 64 81 33 0.90196 0.45098 0.00000
+3 82 0 46 0.90196 0.45098 0.00000
+3 15 86 82 0.90196 0.45098 0.00000
+3 82 83 15 0.90196 0.45098 0.00000
+3 83 82 12 0.90196 0.45098 0.00000
+3 12 84 83 0.90196 0.45098 0.00000
+3 84 12 49 0.90196 0.45098 0.00000
+3 85 15 83 0.90196 0.45098 0.00000
+3 83 24 85 0.90196 0.45098 0.00000
+3 24 83 84 0.90196 0.45098 0.00000
+3 50 84 2 0.90196 0.45098 0.00000
+3 8 88 85 0.90196 0.45098 0.00000
+3 53 85 24 0.90196 0.45098 0.00000
+3 86 0 82 0.90196 0.45098 0.00000
+3 13 70 86 0.90196 0.45098 0.00000
+3 86 87 13 0.90196 0.45098 0.00000
+3 87 86 15 0.90196 0.45098 0.00000
+3 15 88 87 0.90196 0.45098 0.00000
+3 88 15 85 0.90196 0.45098 0.00000
+3 89 13 87 0.90196 0.45098 0.00000
+3 87 31 89 0.90196 0.45098 0.00000
+3 31 87 88 0.90196 0.45098 0.00000
+3 104 88 8 0.90196 0.45098 0.00000
+3 4 72 89 0.90196 0.45098 0.00000
+3 102 89 31 0.90196 0.45098 0.00000
+3 90 5 69 0.90196 0.45098 0.00000
+3 34 52 90 0.90196 0.45098 0.00000
+3 90 91 34 0.90196 0.45098 0.00000
+3 91 90 35 0.90196 0.45098 0.00000
+3 35 92 91 0.90196 0.45098 0.00000
+3 92 35 68 0.90196 0.45098 0.00000
+3 93 34 91 0.90196 0.45098 0.00000
+3 91 40 93 0.90196 0.45098 0.00000
+3 40 91 92 0.90196 0.45098 0.00000
+3 105 92 9 0.90196 0.45098 0.00000
+3 8 53 93 0.90196 0.45098 0.00000
+3 104 93 40 0.90196 0.45098 0.00000
+3 94 1 58 0.90196 0.45098 0.00000
+3 20 98 94 0.90196 0.45098 0.00000
+3 94 95 20 0.90196 0.45098 0.00000
+3 95 94 18 0.90196 0.45098 0.00000
+3 18 96 95 0.90196 0.45098 0.00000
+3 96 18 61 0.90196 0.45098 0.00000
+3 97 20 95 0.90196 0.45098 0.00000
+3 95 32 97 0.90196 0.45098 0.00000
+3 32 95 96 0.90196 0.45098 0.00000
+3 102 96 4 0.90196 0.45098 0.00000
+3 9 100 97 0.90196 0.45098 0.00000
+3 105 97 32 0.90196 0.45098 0.00000
+3 98 1 94 0.90196 0.45098 0.00000
+3 17 42 98 0.90196 0.45098 0.00000
+3 98 99 17 0.90196 0.45098 0.00000
+3 99 98 20 0.90196 0.45098 0.00000
+3 20 100 99 0.90196 0.45098 0.00000
+3 100 20 97 0.90196 0.45098 0.00000
+3 101 17 99 0.90196 0.45098 0.00000
+3 99 28 101 0.90196 0.45098 0.00000
+3 28 99 100 0.90196 0.45098 0.00000
+3 68 100 9 0.90196 0.45098 0.00000
+3 3 44 101 0.90196 0.45098 0.00000
+3 66 101 28 0.90196 0.45098 0.00000
+3 102 4 89 0.90196 0.45098 0.00000
+3 32 96 102 0.90196 0.45098 0.00000
+3 102 103 32 0.90196 0.45098 0.00000
+3 103 102 31 0.90196 0.45098 0.00000
+3 31 104 103 0.90196 0.45098 0.00000
+3 104 31 88 0.90196 0.45098 0.00000
+3 105 32 103 0.90196 0.45098 0.00000
+3 103 40 105 0.90196 0.45098 0.00000
+3 40 103 104 0.90196 0.45098 0.00000
+3 93 104 8 0.90196 0.45098 0.00000
+3 9 97 105 0.90196 0.45098 0.00000
+3 92 105 40 0.90196 0.45098 0.00000
+3 106 0 70 0.90196 0.45098 0.00000
+3 16 46 106 0.90196 0.45098 0.00000
+3 106 107 16 0.90196 0.45098 0.00000
+3 107 106 14 0.90196 0.45098 0.00000
+3 14 108 107 0.90196 0.45098 0.00000
+3 108 14 73 0.90196 0.45098 0.00000
+3 109 16 107 0.90196 0.45098 0.00000
+3 107 36 109 0.90196 0.45098 0.00000
+3 36 107 108 0.90196 0.45098 0.00000
+3 110 108 6 0.90196 0.45098 0.00000
+3 10 48 109 0.90196 0.45098 0.00000
+3 113 109 36 0.90196 0.45098 0.00000
+3 110 6 117 0.90196 0.45098 0.00000
+3 36 108 110 0.90196 0.45098 0.00000
+3 110 111 36 0.90196 0.45098 0.00000
+3 111 110 37 0.90196 0.45098 0.00000
+3 37 112 111 0.90196 0.45098 0.00000
+3 112 37 116 0.90196 0.45098 0.00000
+3 113 36 111 0.90196 0.45098 0.00000
+3 111 41 113 0.90196 0.45098 0.00000
+3 41 111 112 0.90196 0.45098 0.00000
+3 121 112 11 0.90196 0.45098 0.00000
+3 10 109 113 0.90196 0.45098 0.00000
+3 120 113 41 0.90196 0.45098 0.00000
+3 114 1 42 0.90196 0.45098 0.00000
+3 19 58 114 0.90196 0.45098 0.00000
+3 114 115 19 0.90196 0.45098 0.00000
+3 115 114 21 0.90196 0.45098 0.00000
+3 21 116 115 0.90196 0.45098 0.00000
+3 116 21 45 0.90196 0.45098 0.00000
+3 117 19 115 0.90196 0.45098 0.00000
+3 115 37 117 0.90196 0.45098 0.00000
+3 37 115 116 0.90196 0.45098 0.00000
+3 112 116 11 0.90196 0.45098 0.00000
+3 6 60 117 0.90196 0.45098 0.00000
+3 110 117 37 0.90196 0.45098 0.00000
+3 118 7 77 0.90196 0.45098 0.00000
+3 39 56 118 0.90196 0.45098 0.00000
+3 118 119 39 0.90196 0.45098 0.00000
+3 119 118 38 0.90196 0.45098 0.00000
+3 38 120 119 0.90196 0.45098 0.00000
+3 120 38 76 0.90196 0.45098 0.00000
+3 121 39 119 0.90196 0.45098 0.00000
+3 119 41 121 0.90196 0.45098 0.00000
+3 41 119 120 0.90196 0.45098 0.00000
+3 113 120 10 0.90196 0.45098 0.00000
+3 11 57 121 0.90196 0.45098 0.00000
+3 112 121 41 0.90196 0.45098 0.00000
+1 0 0.38824 0.60000 0.30196
+1 1 0.38824 0.60000 0.30196
+1 2 0.38824 0.60000 0.30196
+1 3 0.38824 0.60000 0.30196
+1 4 0.38824 0.60000 0.30196
+1 5 0.38824 0.60000 0.30196
+1 6 0.38824 0.60000 0.30196
+1 7 0.38824 0.60000 0.30196
+1 8 0.38824 0.60000 0.30196
+1 9 0.38824 0.60000 0.30196
+1 10 0.38824 0.60000 0.30196
+1 11 0.38824 0.60000 0.30196
diff --git a/pyrate/tests/plan/graph/generate/example_files/geodestic_file_1.off b/pyrate/tests/plan/graph/generate/example_files/geodestic_file_1.off
new file mode 100644
index 0000000..22b17fe
--- /dev/null
+++ b/pyrate/tests/plan/graph/generate/example_files/geodestic_file_1.off
@@ -0,0 +1,826 @@
+OFF
+272 552 0
+0 0.5257311121191336 0.85065080835204
+0 0.5257311121191336 -0.85065080835204
+0 -0.5257311121191336 0.85065080835204
+0 -0.5257311121191336 -0.85065080835204
+0.5257311121191336 0.85065080835204 0
+0.5257311121191336 -0.85065080835204 0
+-0.5257311121191336 0.85065080835204 0
+-0.5257311121191336 -0.85065080835204 0
+0.85065080835204 0 0.5257311121191336
+0.85065080835204 0 -0.5257311121191336
+-0.85065080835204 0 0.5257311121191336
+-0.85065080835204 0 -0.5257311121191336
+2.267469933117213e-16 0.1834794080019837 0.9830235535526306
+1.92245706721902e-16 -0.1834794080019837 0.9830235535526306
+0.2120312799176223 0.7385845055044615 0.6399497359677749
+0.395510687919606 0.8519810158853969 0.3430738175848559
+-0.2120312799176222 0.7385845055044614 0.639949735967775
+-0.395510687919606 0.851981015885397 0.3430738175848558
+0.3430738175848558 0.395510687919606 0.851981015885397
+0.6399497359677748 0.2120312799176223 0.7385845055044618
+-0.3430738175848559 0.3955106879196059 0.851981015885397
+-0.6399497359677748 0.2120312799176223 0.7385845055044615
+1.922457067219021e-16 0.1834794080019837 -0.9830235535526306
+2.267469933117213e-16 -0.1834794080019837 -0.9830235535526306
+0.2120312799176222 0.7385845055044614 -0.639949735967775
+0.395510687919606 0.851981015885397 -0.3430738175848558
+-0.2120312799176223 0.7385845055044615 -0.6399497359677749
+-0.395510687919606 0.8519810158853969 -0.3430738175848559
+0.3430738175848558 0.3955106879196059 -0.8519810158853971
+0.6399497359677748 0.2120312799176222 -0.7385845055044618
+-0.3430738175848558 0.395510687919606 -0.851981015885397
+-0.6399497359677748 0.2120312799176223 -0.7385845055044618
+0.2120312799176222 -0.7385845055044615 0.639949735967775
+0.3955106879196058 -0.851981015885397 0.3430738175848558
+-0.2120312799176223 -0.7385845055044615 0.6399497359677749
+-0.395510687919606 -0.8519810158853969 0.3430738175848559
+0.3430738175848558 -0.3955106879196059 0.8519810158853971
+0.6399497359677748 -0.2120312799176222 0.7385845055044618
+-0.3430738175848558 -0.395510687919606 0.851981015885397
+-0.6399497359677748 -0.2120312799176223 0.7385845055044618
+0.2120312799176222 -0.7385845055044615 -0.639949735967775
+0.3955106879196059 -0.851981015885397 -0.3430738175848558
+-0.2120312799176222 -0.7385845055044615 -0.639949735967775
+-0.3955106879196058 -0.851981015885397 -0.3430738175848558
+0.3430738175848558 -0.3955106879196056 -0.851981015885397
+0.6399497359677748 -0.2120312799176221 -0.7385845055044618
+-0.3430738175848559 -0.3955106879196059 -0.851981015885397
+-0.6399497359677748 -0.2120312799176223 -0.7385845055044615
+0.1834794080019837 0.9830235535526306 3.4096087162373e-17
+-0.1834794080019837 0.9830235535526306 3.4096087162373e-17
+0.7385845055044618 0.6399497359677748 0.2120312799176221
+0.8519810158853971 0.3430738175848559 0.3955106879196058
+0.7385845055044618 0.6399497359677748 -0.2120312799176222
+0.851981015885397 0.3430738175848557 -0.3955106879196058
+0.1834794080019837 -0.9830235535526306 -3.4096087162373e-17
+-0.1834794080019837 -0.9830235535526306 -3.4096087162373e-17
+0.7385845055044618 -0.6399497359677748 0.2120312799176222
+0.851981015885397 -0.3430738175848557 0.3955106879196058
+0.7385845055044618 -0.6399497359677748 -0.2120312799176221
+0.8519810158853971 -0.3430738175848559 -0.3955106879196058
+-0.7385845055044618 0.6399497359677748 0.2120312799176222
+-0.851981015885397 0.3430738175848557 0.3955106879196058
+-0.7385845055044615 0.6399497359677748 -0.2120312799176224
+-0.8519810158853969 0.3430738175848559 -0.395510687919606
+-0.7385845055044618 -0.6399497359677748 0.2120312799176221
+-0.8519810158853971 -0.3430738175848559 0.3955106879196058
+-0.7385845055044618 -0.6399497359677748 -0.2120312799176222
+-0.851981015885397 -0.3430738175848557 -0.3955106879196058
+0.9830235535526306 2.267469933117213e-16 0.1834794080019836
+0.9830235535526306 1.92245706721902e-16 -0.1834794080019837
+-0.9830235535526306 -2.267469933117213e-16 0.1834794080019836
+-0.9830235535526306 -1.92245706721902e-16 -0.1834794080019837
+-0.1194960329361959 0.374843742971558 -0.919354592345948
+-0.2408723836637745 0.1975414971827028 -0.9502409440131212
+-0.1235792137159473 -4.626953174201564e-17 -0.9923347106381738
+-0.4579792781293658 0.2066706056476467 -0.8646052518724022
+-0.35682208977309 -3.124513936890529e-17 -0.9341723589627157
+-0.2408723836637744 -0.1975414971827028 -0.9502409440131212
+-0.1194960329361959 -0.3748437429715579 -0.919354592345948
+-0.5605012402939293 1.966702600323942e-17 -0.8281535845656691
+-0.4579792781293658 -0.2066706056476469 -0.8646052518724022
+-0.7260059495344062 -6.939177849948745e-18 -0.6876884187192956
+-0.1194960329361959 0.3748437429715579 0.919354592345948
+-0.2408723836637744 0.1975414971827029 0.9502409440131212
+-0.4579792781293657 0.2066706056476468 0.8646052518724022
+-0.1235792137159473 4.574573400785218e-17 0.9923347106381738
+-0.35682208977309 3.09531117213564e-17 0.9341723589627158
+-0.5605012402939293 -1.986949146301587e-17 0.8281535845656691
+-0.7260059495344062 7.000242466867724e-18 0.6876884187192956
+-0.2408723836637744 -0.1975414971827029 0.9502409440131212
+-0.4579792781293658 -0.2066706056476467 0.8646052518724022
+-0.1194960329361959 -0.374843742971558 0.919354592345948
+0.1933486428115418 -0.5681923857830995 0.7998585594097523
+0.3897397037191919 -0.5872812009018947 0.7093685603493469
+0.4066259737430365 -0.741026038156455 0.5343554325088083
+0.5343554325088083 -0.4066259737430364 0.7410260381564551
+0.5773502691896258 -0.5773502691896256 0.5773502691896258
+0.5872812009018947 -0.7093685603493469 0.389739703719192
+0.5681923857830998 -0.799858559409752 0.1933486428115418
+0.7093685603493468 -0.3897397037191919 0.5872812009018948
+0.741026038156455 -0.5343554325088083 0.4066259737430364
+0.799858559409752 -0.1933486428115418 0.5681923857830997
+-0.1933486428115418 -0.5681923857830995 -0.7998585594097523
+-0.3897397037191919 -0.5872812009018947 -0.7093685603493469
+-0.4066259737430365 -0.741026038156455 -0.5343554325088083
+-0.5343554325088083 -0.4066259737430364 -0.7410260381564551
+-0.5773502691896258 -0.5773502691896256 -0.5773502691896258
+-0.5872812009018947 -0.7093685603493469 -0.389739703719192
+-0.5681923857830998 -0.799858559409752 -0.1933486428115418
+-0.7093685603493468 -0.3897397037191919 -0.5872812009018948
+-0.741026038156455 -0.5343554325088083 -0.4066259737430364
+-0.799858559409752 -0.1933486428115418 -0.5681923857830997
+2.949150586228217e-17 0.6876884187192956 -0.7260059495344064
+-1.966702600323942e-17 0.8281535845656691 -0.5605012402939293
+-0.2066706056476468 0.8646052518724022 -0.4579792781293658
+0.2066706056476469 0.8646052518724022 -0.4579792781293658
+3.124513936890529e-17 0.9341723589627158 -0.3568220897730901
+-0.1975414971827029 0.9502409440131212 -0.2408723836637746
+-0.374843742971558 0.919354592345948 -0.1194960329361958
+0.1975414971827029 0.9502409440131212 -0.2408723836637745
+2.197802757745743e-17 0.9923347106381738 -0.1235792137159472
+0.374843742971558 0.919354592345948 -0.1194960329361959
+2.949150586228217e-17 -0.6876884187192956 0.7260059495344064
+-1.966702600323942e-17 -0.8281535845656691 0.5605012402939293
+-0.2066706056476468 -0.8646052518724022 0.4579792781293658
+0.2066706056476469 -0.8646052518724022 0.4579792781293658
+3.124513936890529e-17 -0.9341723589627158 0.3568220897730901
+-0.1975414971827029 -0.9502409440131212 0.2408723836637746
+-0.374843742971558 -0.919354592345948 0.1194960329361958
+0.1975414971827029 -0.9502409440131212 0.2408723836637745
+2.197802757745743e-17 -0.9923347106381738 0.1235792137159472
+0.374843742971558 -0.919354592345948 0.1194960329361959
+0.1933486428115418 -0.5681923857830995 -0.7998585594097523
+0.3897397037191918 -0.5872812009018947 -0.7093685603493468
+0.5343554325088081 -0.4066259737430364 -0.7410260381564551
+0.4066259737430364 -0.741026038156455 -0.5343554325088083
+0.5773502691896258 -0.5773502691896257 -0.5773502691896258
+0.7093685603493467 -0.389739703719192 -0.5872812009018948
+0.799858559409752 -0.1933486428115418 -0.5681923857830997
+0.5872812009018947 -0.7093685603493468 -0.3897397037191921
+0.7410260381564551 -0.5343554325088083 -0.4066259737430366
+0.5681923857830998 -0.799858559409752 -0.1933486428115418
+-2.949150586228217e-17 0.6876884187192956 0.7260059495344064
+1.966702600323942e-17 0.8281535845656691 0.5605012402939293
+0.2066706056476468 0.8646052518724022 0.4579792781293658
+-0.2066706056476469 0.8646052518724022 0.4579792781293658
+-3.124513936890529e-17 0.9341723589627158 0.3568220897730901
+0.1975414971827029 0.9502409440131212 0.2408723836637746
+0.374843742971558 0.919354592345948 0.1194960329361958
+-0.1975414971827029 0.9502409440131212 0.2408723836637745
+-2.197802757745743e-17 0.9923347106381738 0.1235792137159472
+-0.374843742971558 0.919354592345948 0.1194960329361959
+-0.1933486428115418 -0.5681923857830995 0.7998585594097523
+-0.3897397037191918 -0.5872812009018947 0.7093685603493468
+-0.5343554325088081 -0.4066259737430364 0.7410260381564551
+-0.4066259737430364 -0.741026038156455 0.5343554325088083
+-0.5773502691896258 -0.5773502691896257 0.5773502691896258
+-0.7093685603493467 -0.389739703719192 0.5872812009018948
+-0.799858559409752 -0.1933486428115418 0.5681923857830997
+-0.5872812009018947 -0.7093685603493468 0.3897397037191921
+-0.7410260381564551 -0.5343554325088083 0.4066259737430366
+-0.5681923857830998 -0.799858559409752 0.1933486428115418
+-2.949150586228217e-17 -0.6876884187192956 -0.7260059495344064
+1.966702600323942e-17 -0.8281535845656691 -0.5605012402939293
+0.2066706056476468 -0.8646052518724022 -0.4579792781293658
+-0.2066706056476469 -0.8646052518724022 -0.4579792781293658
+-3.124513936890529e-17 -0.9341723589627158 -0.3568220897730901
+0.1975414971827029 -0.9502409440131212 -0.2408723836637746
+0.374843742971558 -0.919354592345948 -0.1194960329361958
+-0.1975414971827029 -0.9502409440131212 -0.2408723836637745
+-2.197802757745743e-17 -0.9923347106381738 -0.1235792137159472
+-0.374843742971558 -0.919354592345948 -0.1194960329361959
+0.1194960329361959 0.374843742971558 0.919354592345948
+0.2408723836637745 0.1975414971827028 0.9502409440131212
+0.1235792137159473 -4.626953174201564e-17 0.9923347106381738
+0.4579792781293658 0.2066706056476467 0.8646052518724022
+0.35682208977309 -3.124513936890529e-17 0.9341723589627157
+0.2408723836637744 -0.1975414971827028 0.9502409440131212
+0.1194960329361959 -0.3748437429715579 0.919354592345948
+0.5605012402939293 1.966702600323942e-17 0.8281535845656691
+0.4579792781293658 -0.2066706056476469 0.8646052518724022
+0.7260059495344062 -6.939177849948745e-18 0.6876884187192956
+0.1933486428115418 0.5681923857830995 0.7998585594097523
+0.3897397037191918 0.5872812009018947 0.7093685603493468
+0.5343554325088081 0.4066259737430364 0.7410260381564551
+0.4066259737430364 0.741026038156455 0.5343554325088083
+0.5773502691896258 0.5773502691896257 0.5773502691896258
+0.7093685603493467 0.389739703719192 0.5872812009018948
+0.799858559409752 0.1933486428115418 0.5681923857830997
+0.5872812009018947 0.7093685603493468 0.3897397037191921
+0.7410260381564551 0.5343554325088083 0.4066259737430366
+0.5681923857830998 0.799858559409752 0.1933486428115418
+0.6876884187192954 -0.7260059495344061 6.939177849948745e-18
+0.8281535845656691 -0.5605012402939293 -1.966702600323942e-17
+0.8646052518724022 -0.4579792781293658 -0.2066706056476467
+0.8646052518724022 -0.4579792781293658 0.2066706056476468
+0.9341723589627157 -0.35682208977309 3.124513936890529e-17
+0.9502409440131212 -0.2408723836637745 -0.1975414971827029
+0.919354592345948 -0.1194960329361959 -0.374843742971558
+0.9502409440131212 -0.2408723836637745 0.1975414971827029
+0.992334710638174 -0.1235792137159473 4.511279344846526e-17
+0.919354592345948 -0.1194960329361959 0.3748437429715579
+0.1933486428115418 0.5681923857830995 -0.7998585594097523
+0.3897397037191919 0.5872812009018947 -0.7093685603493469
+0.4066259737430365 0.741026038156455 -0.5343554325088083
+0.5343554325088083 0.4066259737430364 -0.7410260381564551
+0.5773502691896258 0.5773502691896256 -0.5773502691896258
+0.5872812009018947 0.7093685603493469 -0.389739703719192
+0.5681923857830998 0.799858559409752 -0.1933486428115418
+0.7093685603493468 0.3897397037191919 -0.5872812009018948
+0.741026038156455 0.5343554325088083 -0.4066259737430364
+0.799858559409752 0.1933486428115418 -0.5681923857830997
+0.1194960329361959 0.3748437429715579 -0.919354592345948
+0.2408723836637744 0.1975414971827029 -0.9502409440131212
+0.4579792781293657 0.2066706056476468 -0.8646052518724022
+0.1235792137159473 4.574573400785218e-17 -0.9923347106381738
+0.35682208977309 3.09531117213564e-17 -0.9341723589627158
+0.5605012402939293 -1.986949146301587e-17 -0.8281535845656691
+0.7260059495344062 7.000242466867724e-18 -0.6876884187192956
+0.2408723836637744 -0.1975414971827029 -0.9502409440131212
+0.4579792781293658 -0.2066706056476467 -0.8646052518724022
+0.1194960329361959 -0.374843742971558 -0.919354592345948
+0.6876884187192954 0.7260059495344061 -6.939177849948745e-18
+0.8281535845656691 0.5605012402939293 1.966702600323942e-17
+0.8646052518724022 0.4579792781293658 0.2066706056476467
+0.8646052518724022 0.4579792781293658 -0.2066706056476468
+0.9341723589627157 0.35682208977309 -3.124513936890529e-17
+0.9502409440131212 0.2408723836637745 0.1975414971827029
+0.919354592345948 0.1194960329361959 0.374843742971558
+0.9502409440131212 0.2408723836637745 -0.1975414971827029
+0.992334710638174 0.1235792137159473 -4.511279344846526e-17
+0.919354592345948 0.1194960329361959 -0.3748437429715579
+-0.1933486428115418 0.5681923857830995 0.7998585594097523
+-0.3897397037191919 0.5872812009018947 0.7093685603493469
+-0.4066259737430365 0.741026038156455 0.5343554325088083
+-0.5343554325088083 0.4066259737430364 0.7410260381564551
+-0.5773502691896258 0.5773502691896256 0.5773502691896258
+-0.5872812009018947 0.7093685603493469 0.389739703719192
+-0.5681923857830998 0.799858559409752 0.1933486428115418
+-0.7093685603493468 0.3897397037191919 0.5872812009018948
+-0.741026038156455 0.5343554325088083 0.4066259737430364
+-0.799858559409752 0.1933486428115418 0.5681923857830997
+-0.6876884187192954 0.7260059495344061 6.939177849948745e-18
+-0.8281535845656691 0.5605012402939293 -1.966702600323942e-17
+-0.8646052518724022 0.4579792781293658 -0.2066706056476467
+-0.8646052518724022 0.4579792781293658 0.2066706056476468
+-0.9341723589627157 0.35682208977309 3.124513936890529e-17
+-0.9502409440131212 0.2408723836637745 -0.1975414971827029
+-0.919354592345948 0.1194960329361959 -0.374843742971558
+-0.9502409440131212 0.2408723836637745 0.1975414971827029
+-0.992334710638174 0.1235792137159473 4.511279344846526e-17
+-0.919354592345948 0.1194960329361959 0.3748437429715579
+-0.1933486428115418 0.5681923857830995 -0.7998585594097523
+-0.3897397037191918 0.5872812009018947 -0.7093685603493468
+-0.5343554325088081 0.4066259737430364 -0.7410260381564551
+-0.4066259737430364 0.741026038156455 -0.5343554325088083
+-0.5773502691896258 0.5773502691896257 -0.5773502691896258
+-0.7093685603493467 0.389739703719192 -0.5872812009018948
+-0.799858559409752 0.1933486428115418 -0.5681923857830997
+-0.5872812009018947 0.7093685603493468 -0.3897397037191921
+-0.7410260381564551 0.5343554325088083 -0.4066259737430366
+-0.5681923857830998 0.799858559409752 -0.1933486428115418
+-0.6876884187192954 -0.7260059495344061 -6.939177849948745e-18
+-0.8281535845656691 -0.5605012402939293 1.966702600323942e-17
+-0.8646052518724022 -0.4579792781293658 0.2066706056476467
+-0.8646052518724022 -0.4579792781293658 -0.2066706056476468
+-0.9341723589627157 -0.35682208977309 -3.124513936890529e-17
+-0.9502409440131212 -0.2408723836637745 0.1975414971827029
+-0.919354592345948 -0.1194960329361959 0.374843742971558
+-0.9502409440131212 -0.2408723836637745 -0.1975414971827029
+-0.992334710638174 -0.1235792137159473 -4.511279344846526e-17
+-0.919354592345948 -0.1194960329361959 -0.3748437429715579
+3 72 1 212 0.90196 0.45098 0.00000
+3 30 252 72 0.90196 0.45098 0.00000
+3 72 73 30 0.90196 0.45098 0.00000
+3 73 72 22 0.90196 0.45098 0.00000
+3 22 74 73 0.90196 0.45098 0.00000
+3 74 22 215 0.90196 0.45098 0.00000
+3 75 30 73 0.90196 0.45098 0.00000
+3 73 76 75 0.90196 0.45098 0.00000
+3 76 73 74 0.90196 0.45098 0.00000
+3 74 77 76 0.90196 0.45098 0.00000
+3 77 74 23 0.90196 0.45098 0.00000
+3 23 78 77 0.90196 0.45098 0.00000
+3 78 23 221 0.90196 0.45098 0.00000
+3 31 254 75 0.90196 0.45098 0.00000
+3 75 79 31 0.90196 0.45098 0.00000
+3 79 75 76 0.90196 0.45098 0.00000
+3 76 80 79 0.90196 0.45098 0.00000
+3 80 76 77 0.90196 0.45098 0.00000
+3 77 46 80 0.90196 0.45098 0.00000
+3 46 77 78 0.90196 0.45098 0.00000
+3 102 78 3 0.90196 0.45098 0.00000
+3 81 31 79 0.90196 0.45098 0.00000
+3 79 47 81 0.90196 0.45098 0.00000
+3 47 79 80 0.90196 0.45098 0.00000
+3 105 80 46 0.90196 0.45098 0.00000
+3 11 258 81 0.90196 0.45098 0.00000
+3 111 81 47 0.90196 0.45098 0.00000
+3 82 0 232 0.90196 0.45098 0.00000
+3 12 172 82 0.90196 0.45098 0.00000
+3 82 83 12 0.90196 0.45098 0.00000
+3 83 82 20 0.90196 0.45098 0.00000
+3 20 84 83 0.90196 0.45098 0.00000
+3 84 20 235 0.90196 0.45098 0.00000
+3 85 12 83 0.90196 0.45098 0.00000
+3 83 86 85 0.90196 0.45098 0.00000
+3 86 83 84 0.90196 0.45098 0.00000
+3 84 87 86 0.90196 0.45098 0.00000
+3 87 84 21 0.90196 0.45098 0.00000
+3 21 88 87 0.90196 0.45098 0.00000
+3 88 21 241 0.90196 0.45098 0.00000
+3 13 174 85 0.90196 0.45098 0.00000
+3 85 89 13 0.90196 0.45098 0.00000
+3 89 85 86 0.90196 0.45098 0.00000
+3 86 90 89 0.90196 0.45098 0.00000
+3 90 86 87 0.90196 0.45098 0.00000
+3 87 39 90 0.90196 0.45098 0.00000
+3 39 87 88 0.90196 0.45098 0.00000
+3 158 88 10 0.90196 0.45098 0.00000
+3 91 13 89 0.90196 0.45098 0.00000
+3 89 38 91 0.90196 0.45098 0.00000
+3 38 89 90 0.90196 0.45098 0.00000
+3 154 90 39 0.90196 0.45098 0.00000
+3 2 178 91 0.90196 0.45098 0.00000
+3 152 91 38 0.90196 0.45098 0.00000
+3 92 2 122 0.90196 0.45098 0.00000
+3 36 178 92 0.90196 0.45098 0.00000
+3 92 93 36 0.90196 0.45098 0.00000
+3 93 92 32 0.90196 0.45098 0.00000
+3 32 94 93 0.90196 0.45098 0.00000
+3 94 32 125 0.90196 0.45098 0.00000
+3 95 36 93 0.90196 0.45098 0.00000
+3 93 96 95 0.90196 0.45098 0.00000
+3 96 93 94 0.90196 0.45098 0.00000
+3 94 97 96 0.90196 0.45098 0.00000
+3 97 94 33 0.90196 0.45098 0.00000
+3 33 98 97 0.90196 0.45098 0.00000
+3 98 33 131 0.90196 0.45098 0.00000
+3 37 180 95 0.90196 0.45098 0.00000
+3 95 99 37 0.90196 0.45098 0.00000
+3 99 95 96 0.90196 0.45098 0.00000
+3 96 100 99 0.90196 0.45098 0.00000
+3 100 96 97 0.90196 0.45098 0.00000
+3 97 56 100 0.90196 0.45098 0.00000
+3 56 97 98 0.90196 0.45098 0.00000
+3 192 98 5 0.90196 0.45098 0.00000
+3 101 37 99 0.90196 0.45098 0.00000
+3 99 57 101 0.90196 0.45098 0.00000
+3 57 99 100 0.90196 0.45098 0.00000
+3 195 100 56 0.90196 0.45098 0.00000
+3 8 181 101 0.90196 0.45098 0.00000
+3 201 101 57 0.90196 0.45098 0.00000
+3 102 3 162 0.90196 0.45098 0.00000
+3 46 78 102 0.90196 0.45098 0.00000
+3 102 103 46 0.90196 0.45098 0.00000
+3 103 102 42 0.90196 0.45098 0.00000
+3 42 104 103 0.90196 0.45098 0.00000
+3 104 42 165 0.90196 0.45098 0.00000
+3 105 46 103 0.90196 0.45098 0.00000
+3 103 106 105 0.90196 0.45098 0.00000
+3 106 103 104 0.90196 0.45098 0.00000
+3 104 107 106 0.90196 0.45098 0.00000
+3 107 104 43 0.90196 0.45098 0.00000
+3 43 108 107 0.90196 0.45098 0.00000
+3 108 43 171 0.90196 0.45098 0.00000
+3 47 80 105 0.90196 0.45098 0.00000
+3 105 109 47 0.90196 0.45098 0.00000
+3 109 105 106 0.90196 0.45098 0.00000
+3 106 110 109 0.90196 0.45098 0.00000
+3 110 106 107 0.90196 0.45098 0.00000
+3 107 66 110 0.90196 0.45098 0.00000
+3 66 107 108 0.90196 0.45098 0.00000
+3 262 108 7 0.90196 0.45098 0.00000
+3 111 47 109 0.90196 0.45098 0.00000
+3 109 67 111 0.90196 0.45098 0.00000
+3 67 109 110 0.90196 0.45098 0.00000
+3 265 110 66 0.90196 0.45098 0.00000
+3 11 81 111 0.90196 0.45098 0.00000
+3 271 111 67 0.90196 0.45098 0.00000
+3 112 1 252 0.90196 0.45098 0.00000
+3 24 202 112 0.90196 0.45098 0.00000
+3 112 113 24 0.90196 0.45098 0.00000
+3 113 112 26 0.90196 0.45098 0.00000
+3 26 114 113 0.90196 0.45098 0.00000
+3 114 26 255 0.90196 0.45098 0.00000
+3 115 24 113 0.90196 0.45098 0.00000
+3 113 116 115 0.90196 0.45098 0.00000
+3 116 113 114 0.90196 0.45098 0.00000
+3 114 117 116 0.90196 0.45098 0.00000
+3 117 114 27 0.90196 0.45098 0.00000
+3 27 118 117 0.90196 0.45098 0.00000
+3 118 27 261 0.90196 0.45098 0.00000
+3 25 204 115 0.90196 0.45098 0.00000
+3 115 119 25 0.90196 0.45098 0.00000
+3 119 115 116 0.90196 0.45098 0.00000
+3 116 120 119 0.90196 0.45098 0.00000
+3 120 116 117 0.90196 0.45098 0.00000
+3 117 49 120 0.90196 0.45098 0.00000
+3 49 117 118 0.90196 0.45098 0.00000
+3 151 118 6 0.90196 0.45098 0.00000
+3 121 25 119 0.90196 0.45098 0.00000
+3 119 48 121 0.90196 0.45098 0.00000
+3 48 119 120 0.90196 0.45098 0.00000
+3 150 120 49 0.90196 0.45098 0.00000
+3 4 208 121 0.90196 0.45098 0.00000
+3 148 121 48 0.90196 0.45098 0.00000
+3 122 2 152 0.90196 0.45098 0.00000
+3 32 92 122 0.90196 0.45098 0.00000
+3 122 123 32 0.90196 0.45098 0.00000
+3 123 122 34 0.90196 0.45098 0.00000
+3 34 124 123 0.90196 0.45098 0.00000
+3 124 34 155 0.90196 0.45098 0.00000
+3 125 32 123 0.90196 0.45098 0.00000
+3 123 126 125 0.90196 0.45098 0.00000
+3 126 123 124 0.90196 0.45098 0.00000
+3 124 127 126 0.90196 0.45098 0.00000
+3 127 124 35 0.90196 0.45098 0.00000
+3 35 128 127 0.90196 0.45098 0.00000
+3 128 35 161 0.90196 0.45098 0.00000
+3 33 94 125 0.90196 0.45098 0.00000
+3 125 129 33 0.90196 0.45098 0.00000
+3 129 125 126 0.90196 0.45098 0.00000
+3 126 130 129 0.90196 0.45098 0.00000
+3 130 126 127 0.90196 0.45098 0.00000
+3 127 55 130 0.90196 0.45098 0.00000
+3 55 127 128 0.90196 0.45098 0.00000
+3 171 128 7 0.90196 0.45098 0.00000
+3 131 33 129 0.90196 0.45098 0.00000
+3 129 54 131 0.90196 0.45098 0.00000
+3 54 129 130 0.90196 0.45098 0.00000
+3 170 130 55 0.90196 0.45098 0.00000
+3 5 98 131 0.90196 0.45098 0.00000
+3 168 131 54 0.90196 0.45098 0.00000
+3 132 3 221 0.90196 0.45098 0.00000
+3 40 162 132 0.90196 0.45098 0.00000
+3 132 133 40 0.90196 0.45098 0.00000
+3 133 132 44 0.90196 0.45098 0.00000
+3 44 134 133 0.90196 0.45098 0.00000
+3 134 44 220 0.90196 0.45098 0.00000
+3 135 40 133 0.90196 0.45098 0.00000
+3 133 136 135 0.90196 0.45098 0.00000
+3 136 133 134 0.90196 0.45098 0.00000
+3 134 137 136 0.90196 0.45098 0.00000
+3 137 134 45 0.90196 0.45098 0.00000
+3 45 138 137 0.90196 0.45098 0.00000
+3 138 45 218 0.90196 0.45098 0.00000
+3 41 164 135 0.90196 0.45098 0.00000
+3 135 139 41 0.90196 0.45098 0.00000
+3 139 135 136 0.90196 0.45098 0.00000
+3 136 140 139 0.90196 0.45098 0.00000
+3 140 136 137 0.90196 0.45098 0.00000
+3 137 59 140 0.90196 0.45098 0.00000
+3 59 137 138 0.90196 0.45098 0.00000
+3 198 138 9 0.90196 0.45098 0.00000
+3 141 41 139 0.90196 0.45098 0.00000
+3 139 58 141 0.90196 0.45098 0.00000
+3 58 139 140 0.90196 0.45098 0.00000
+3 194 140 59 0.90196 0.45098 0.00000
+3 5 168 141 0.90196 0.45098 0.00000
+3 192 141 58 0.90196 0.45098 0.00000
+3 142 0 182 0.90196 0.45098 0.00000
+3 16 232 142 0.90196 0.45098 0.00000
+3 142 143 16 0.90196 0.45098 0.00000
+3 143 142 14 0.90196 0.45098 0.00000
+3 14 144 143 0.90196 0.45098 0.00000
+3 144 14 185 0.90196 0.45098 0.00000
+3 145 16 143 0.90196 0.45098 0.00000
+3 143 146 145 0.90196 0.45098 0.00000
+3 146 143 144 0.90196 0.45098 0.00000
+3 144 147 146 0.90196 0.45098 0.00000
+3 147 144 15 0.90196 0.45098 0.00000
+3 15 148 147 0.90196 0.45098 0.00000
+3 148 15 191 0.90196 0.45098 0.00000
+3 17 234 145 0.90196 0.45098 0.00000
+3 145 149 17 0.90196 0.45098 0.00000
+3 149 145 146 0.90196 0.45098 0.00000
+3 146 150 149 0.90196 0.45098 0.00000
+3 150 146 147 0.90196 0.45098 0.00000
+3 147 48 150 0.90196 0.45098 0.00000
+3 48 147 148 0.90196 0.45098 0.00000
+3 121 148 4 0.90196 0.45098 0.00000
+3 151 17 149 0.90196 0.45098 0.00000
+3 149 49 151 0.90196 0.45098 0.00000
+3 49 149 150 0.90196 0.45098 0.00000
+3 120 150 48 0.90196 0.45098 0.00000
+3 6 238 151 0.90196 0.45098 0.00000
+3 118 151 49 0.90196 0.45098 0.00000
+3 152 2 91 0.90196 0.45098 0.00000
+3 34 122 152 0.90196 0.45098 0.00000
+3 152 153 34 0.90196 0.45098 0.00000
+3 153 152 38 0.90196 0.45098 0.00000
+3 38 154 153 0.90196 0.45098 0.00000
+3 154 38 90 0.90196 0.45098 0.00000
+3 155 34 153 0.90196 0.45098 0.00000
+3 153 156 155 0.90196 0.45098 0.00000
+3 156 153 154 0.90196 0.45098 0.00000
+3 154 157 156 0.90196 0.45098 0.00000
+3 157 154 39 0.90196 0.45098 0.00000
+3 39 158 157 0.90196 0.45098 0.00000
+3 158 39 88 0.90196 0.45098 0.00000
+3 35 124 155 0.90196 0.45098 0.00000
+3 155 159 35 0.90196 0.45098 0.00000
+3 159 155 156 0.90196 0.45098 0.00000
+3 156 160 159 0.90196 0.45098 0.00000
+3 160 156 157 0.90196 0.45098 0.00000
+3 157 65 160 0.90196 0.45098 0.00000
+3 65 157 158 0.90196 0.45098 0.00000
+3 268 158 10 0.90196 0.45098 0.00000
+3 161 35 159 0.90196 0.45098 0.00000
+3 159 64 161 0.90196 0.45098 0.00000
+3 64 159 160 0.90196 0.45098 0.00000
+3 264 160 65 0.90196 0.45098 0.00000
+3 7 128 161 0.90196 0.45098 0.00000
+3 262 161 64 0.90196 0.45098 0.00000
+3 162 3 132 0.90196 0.45098 0.00000
+3 42 102 162 0.90196 0.45098 0.00000
+3 162 163 42 0.90196 0.45098 0.00000
+3 163 162 40 0.90196 0.45098 0.00000
+3 40 164 163 0.90196 0.45098 0.00000
+3 164 40 135 0.90196 0.45098 0.00000
+3 165 42 163 0.90196 0.45098 0.00000
+3 163 166 165 0.90196 0.45098 0.00000
+3 166 163 164 0.90196 0.45098 0.00000
+3 164 167 166 0.90196 0.45098 0.00000
+3 167 164 41 0.90196 0.45098 0.00000
+3 41 168 167 0.90196 0.45098 0.00000
+3 168 41 141 0.90196 0.45098 0.00000
+3 43 104 165 0.90196 0.45098 0.00000
+3 165 169 43 0.90196 0.45098 0.00000
+3 169 165 166 0.90196 0.45098 0.00000
+3 166 170 169 0.90196 0.45098 0.00000
+3 170 166 167 0.90196 0.45098 0.00000
+3 167 54 170 0.90196 0.45098 0.00000
+3 54 167 168 0.90196 0.45098 0.00000
+3 131 168 5 0.90196 0.45098 0.00000
+3 171 43 169 0.90196 0.45098 0.00000
+3 169 55 171 0.90196 0.45098 0.00000
+3 55 169 170 0.90196 0.45098 0.00000
+3 130 170 54 0.90196 0.45098 0.00000
+3 7 108 171 0.90196 0.45098 0.00000
+3 128 171 55 0.90196 0.45098 0.00000
+3 172 0 82 0.90196 0.45098 0.00000
+3 18 182 172 0.90196 0.45098 0.00000
+3 172 173 18 0.90196 0.45098 0.00000
+3 173 172 12 0.90196 0.45098 0.00000
+3 12 174 173 0.90196 0.45098 0.00000
+3 174 12 85 0.90196 0.45098 0.00000
+3 175 18 173 0.90196 0.45098 0.00000
+3 173 176 175 0.90196 0.45098 0.00000
+3 176 173 174 0.90196 0.45098 0.00000
+3 174 177 176 0.90196 0.45098 0.00000
+3 177 174 13 0.90196 0.45098 0.00000
+3 13 178 177 0.90196 0.45098 0.00000
+3 178 13 91 0.90196 0.45098 0.00000
+3 19 184 175 0.90196 0.45098 0.00000
+3 175 179 19 0.90196 0.45098 0.00000
+3 179 175 176 0.90196 0.45098 0.00000
+3 176 180 179 0.90196 0.45098 0.00000
+3 180 176 177 0.90196 0.45098 0.00000
+3 177 36 180 0.90196 0.45098 0.00000
+3 36 177 178 0.90196 0.45098 0.00000
+3 92 178 2 0.90196 0.45098 0.00000
+3 181 19 179 0.90196 0.45098 0.00000
+3 179 37 181 0.90196 0.45098 0.00000
+3 37 179 180 0.90196 0.45098 0.00000
+3 95 180 36 0.90196 0.45098 0.00000
+3 8 188 181 0.90196 0.45098 0.00000
+3 101 181 37 0.90196 0.45098 0.00000
+3 182 0 172 0.90196 0.45098 0.00000
+3 14 142 182 0.90196 0.45098 0.00000
+3 182 183 14 0.90196 0.45098 0.00000
+3 183 182 18 0.90196 0.45098 0.00000
+3 18 184 183 0.90196 0.45098 0.00000
+3 184 18 175 0.90196 0.45098 0.00000
+3 185 14 183 0.90196 0.45098 0.00000
+3 183 186 185 0.90196 0.45098 0.00000
+3 186 183 184 0.90196 0.45098 0.00000
+3 184 187 186 0.90196 0.45098 0.00000
+3 187 184 19 0.90196 0.45098 0.00000
+3 19 188 187 0.90196 0.45098 0.00000
+3 188 19 181 0.90196 0.45098 0.00000
+3 15 144 185 0.90196 0.45098 0.00000
+3 185 189 15 0.90196 0.45098 0.00000
+3 189 185 186 0.90196 0.45098 0.00000
+3 186 190 189 0.90196 0.45098 0.00000
+3 190 186 187 0.90196 0.45098 0.00000
+3 187 51 190 0.90196 0.45098 0.00000
+3 51 187 188 0.90196 0.45098 0.00000
+3 228 188 8 0.90196 0.45098 0.00000
+3 191 15 189 0.90196 0.45098 0.00000
+3 189 50 191 0.90196 0.45098 0.00000
+3 50 189 190 0.90196 0.45098 0.00000
+3 224 190 51 0.90196 0.45098 0.00000
+3 4 148 191 0.90196 0.45098 0.00000
+3 222 191 50 0.90196 0.45098 0.00000
+3 192 5 141 0.90196 0.45098 0.00000
+3 56 98 192 0.90196 0.45098 0.00000
+3 192 193 56 0.90196 0.45098 0.00000
+3 193 192 58 0.90196 0.45098 0.00000
+3 58 194 193 0.90196 0.45098 0.00000
+3 194 58 140 0.90196 0.45098 0.00000
+3 195 56 193 0.90196 0.45098 0.00000
+3 193 196 195 0.90196 0.45098 0.00000
+3 196 193 194 0.90196 0.45098 0.00000
+3 194 197 196 0.90196 0.45098 0.00000
+3 197 194 59 0.90196 0.45098 0.00000
+3 59 198 197 0.90196 0.45098 0.00000
+3 198 59 138 0.90196 0.45098 0.00000
+3 57 100 195 0.90196 0.45098 0.00000
+3 195 199 57 0.90196 0.45098 0.00000
+3 199 195 196 0.90196 0.45098 0.00000
+3 196 200 199 0.90196 0.45098 0.00000
+3 200 196 197 0.90196 0.45098 0.00000
+3 197 69 200 0.90196 0.45098 0.00000
+3 69 197 198 0.90196 0.45098 0.00000
+3 231 198 9 0.90196 0.45098 0.00000
+3 201 57 199 0.90196 0.45098 0.00000
+3 199 68 201 0.90196 0.45098 0.00000
+3 68 199 200 0.90196 0.45098 0.00000
+3 230 200 69 0.90196 0.45098 0.00000
+3 8 101 201 0.90196 0.45098 0.00000
+3 228 201 68 0.90196 0.45098 0.00000
+3 202 1 112 0.90196 0.45098 0.00000
+3 28 212 202 0.90196 0.45098 0.00000
+3 202 203 28 0.90196 0.45098 0.00000
+3 203 202 24 0.90196 0.45098 0.00000
+3 24 204 203 0.90196 0.45098 0.00000
+3 204 24 115 0.90196 0.45098 0.00000
+3 205 28 203 0.90196 0.45098 0.00000
+3 203 206 205 0.90196 0.45098 0.00000
+3 206 203 204 0.90196 0.45098 0.00000
+3 204 207 206 0.90196 0.45098 0.00000
+3 207 204 25 0.90196 0.45098 0.00000
+3 25 208 207 0.90196 0.45098 0.00000
+3 208 25 121 0.90196 0.45098 0.00000
+3 29 214 205 0.90196 0.45098 0.00000
+3 205 209 29 0.90196 0.45098 0.00000
+3 209 205 206 0.90196 0.45098 0.00000
+3 206 210 209 0.90196 0.45098 0.00000
+3 210 206 207 0.90196 0.45098 0.00000
+3 207 52 210 0.90196 0.45098 0.00000
+3 52 207 208 0.90196 0.45098 0.00000
+3 222 208 4 0.90196 0.45098 0.00000
+3 211 29 209 0.90196 0.45098 0.00000
+3 209 53 211 0.90196 0.45098 0.00000
+3 53 209 210 0.90196 0.45098 0.00000
+3 225 210 52 0.90196 0.45098 0.00000
+3 9 218 211 0.90196 0.45098 0.00000
+3 231 211 53 0.90196 0.45098 0.00000
+3 212 1 202 0.90196 0.45098 0.00000
+3 22 72 212 0.90196 0.45098 0.00000
+3 212 213 22 0.90196 0.45098 0.00000
+3 213 212 28 0.90196 0.45098 0.00000
+3 28 214 213 0.90196 0.45098 0.00000
+3 214 28 205 0.90196 0.45098 0.00000
+3 215 22 213 0.90196 0.45098 0.00000
+3 213 216 215 0.90196 0.45098 0.00000
+3 216 213 214 0.90196 0.45098 0.00000
+3 214 217 216 0.90196 0.45098 0.00000
+3 217 214 29 0.90196 0.45098 0.00000
+3 29 218 217 0.90196 0.45098 0.00000
+3 218 29 211 0.90196 0.45098 0.00000
+3 23 74 215 0.90196 0.45098 0.00000
+3 215 219 23 0.90196 0.45098 0.00000
+3 219 215 216 0.90196 0.45098 0.00000
+3 216 220 219 0.90196 0.45098 0.00000
+3 220 216 217 0.90196 0.45098 0.00000
+3 217 45 220 0.90196 0.45098 0.00000
+3 45 217 218 0.90196 0.45098 0.00000
+3 138 218 9 0.90196 0.45098 0.00000
+3 221 23 219 0.90196 0.45098 0.00000
+3 219 44 221 0.90196 0.45098 0.00000
+3 44 219 220 0.90196 0.45098 0.00000
+3 134 220 45 0.90196 0.45098 0.00000
+3 3 78 221 0.90196 0.45098 0.00000
+3 132 221 44 0.90196 0.45098 0.00000
+3 222 4 191 0.90196 0.45098 0.00000
+3 52 208 222 0.90196 0.45098 0.00000
+3 222 223 52 0.90196 0.45098 0.00000
+3 223 222 50 0.90196 0.45098 0.00000
+3 50 224 223 0.90196 0.45098 0.00000
+3 224 50 190 0.90196 0.45098 0.00000
+3 225 52 223 0.90196 0.45098 0.00000
+3 223 226 225 0.90196 0.45098 0.00000
+3 226 223 224 0.90196 0.45098 0.00000
+3 224 227 226 0.90196 0.45098 0.00000
+3 227 224 51 0.90196 0.45098 0.00000
+3 51 228 227 0.90196 0.45098 0.00000
+3 228 51 188 0.90196 0.45098 0.00000
+3 53 210 225 0.90196 0.45098 0.00000
+3 225 229 53 0.90196 0.45098 0.00000
+3 229 225 226 0.90196 0.45098 0.00000
+3 226 230 229 0.90196 0.45098 0.00000
+3 230 226 227 0.90196 0.45098 0.00000
+3 227 68 230 0.90196 0.45098 0.00000
+3 68 227 228 0.90196 0.45098 0.00000
+3 201 228 8 0.90196 0.45098 0.00000
+3 231 53 229 0.90196 0.45098 0.00000
+3 229 69 231 0.90196 0.45098 0.00000
+3 69 229 230 0.90196 0.45098 0.00000
+3 200 230 68 0.90196 0.45098 0.00000
+3 9 211 231 0.90196 0.45098 0.00000
+3 198 231 69 0.90196 0.45098 0.00000
+3 232 0 142 0.90196 0.45098 0.00000
+3 20 82 232 0.90196 0.45098 0.00000
+3 232 233 20 0.90196 0.45098 0.00000
+3 233 232 16 0.90196 0.45098 0.00000
+3 16 234 233 0.90196 0.45098 0.00000
+3 234 16 145 0.90196 0.45098 0.00000
+3 235 20 233 0.90196 0.45098 0.00000
+3 233 236 235 0.90196 0.45098 0.00000
+3 236 233 234 0.90196 0.45098 0.00000
+3 234 237 236 0.90196 0.45098 0.00000
+3 237 234 17 0.90196 0.45098 0.00000
+3 17 238 237 0.90196 0.45098 0.00000
+3 238 17 151 0.90196 0.45098 0.00000
+3 21 84 235 0.90196 0.45098 0.00000
+3 235 239 21 0.90196 0.45098 0.00000
+3 239 235 236 0.90196 0.45098 0.00000
+3 236 240 239 0.90196 0.45098 0.00000
+3 240 236 237 0.90196 0.45098 0.00000
+3 237 60 240 0.90196 0.45098 0.00000
+3 60 237 238 0.90196 0.45098 0.00000
+3 242 238 6 0.90196 0.45098 0.00000
+3 241 21 239 0.90196 0.45098 0.00000
+3 239 61 241 0.90196 0.45098 0.00000
+3 61 239 240 0.90196 0.45098 0.00000
+3 245 240 60 0.90196 0.45098 0.00000
+3 10 88 241 0.90196 0.45098 0.00000
+3 251 241 61 0.90196 0.45098 0.00000
+3 242 6 261 0.90196 0.45098 0.00000
+3 60 238 242 0.90196 0.45098 0.00000
+3 242 243 60 0.90196 0.45098 0.00000
+3 243 242 62 0.90196 0.45098 0.00000
+3 62 244 243 0.90196 0.45098 0.00000
+3 244 62 260 0.90196 0.45098 0.00000
+3 245 60 243 0.90196 0.45098 0.00000
+3 243 246 245 0.90196 0.45098 0.00000
+3 246 243 244 0.90196 0.45098 0.00000
+3 244 247 246 0.90196 0.45098 0.00000
+3 247 244 63 0.90196 0.45098 0.00000
+3 63 248 247 0.90196 0.45098 0.00000
+3 248 63 258 0.90196 0.45098 0.00000
+3 61 240 245 0.90196 0.45098 0.00000
+3 245 249 61 0.90196 0.45098 0.00000
+3 249 245 246 0.90196 0.45098 0.00000
+3 246 250 249 0.90196 0.45098 0.00000
+3 250 246 247 0.90196 0.45098 0.00000
+3 247 71 250 0.90196 0.45098 0.00000
+3 71 247 248 0.90196 0.45098 0.00000
+3 271 248 11 0.90196 0.45098 0.00000
+3 251 61 249 0.90196 0.45098 0.00000
+3 249 70 251 0.90196 0.45098 0.00000
+3 70 249 250 0.90196 0.45098 0.00000
+3 270 250 71 0.90196 0.45098 0.00000
+3 10 241 251 0.90196 0.45098 0.00000
+3 268 251 70 0.90196 0.45098 0.00000
+3 252 1 72 0.90196 0.45098 0.00000
+3 26 112 252 0.90196 0.45098 0.00000
+3 252 253 26 0.90196 0.45098 0.00000
+3 253 252 30 0.90196 0.45098 0.00000
+3 30 254 253 0.90196 0.45098 0.00000
+3 254 30 75 0.90196 0.45098 0.00000
+3 255 26 253 0.90196 0.45098 0.00000
+3 253 256 255 0.90196 0.45098 0.00000
+3 256 253 254 0.90196 0.45098 0.00000
+3 254 257 256 0.90196 0.45098 0.00000
+3 257 254 31 0.90196 0.45098 0.00000
+3 31 258 257 0.90196 0.45098 0.00000
+3 258 31 81 0.90196 0.45098 0.00000
+3 27 114 255 0.90196 0.45098 0.00000
+3 255 259 27 0.90196 0.45098 0.00000
+3 259 255 256 0.90196 0.45098 0.00000
+3 256 260 259 0.90196 0.45098 0.00000
+3 260 256 257 0.90196 0.45098 0.00000
+3 257 63 260 0.90196 0.45098 0.00000
+3 63 257 258 0.90196 0.45098 0.00000
+3 248 258 11 0.90196 0.45098 0.00000
+3 261 27 259 0.90196 0.45098 0.00000
+3 259 62 261 0.90196 0.45098 0.00000
+3 62 259 260 0.90196 0.45098 0.00000
+3 244 260 63 0.90196 0.45098 0.00000
+3 6 118 261 0.90196 0.45098 0.00000
+3 242 261 62 0.90196 0.45098 0.00000
+3 262 7 161 0.90196 0.45098 0.00000
+3 66 108 262 0.90196 0.45098 0.00000
+3 262 263 66 0.90196 0.45098 0.00000
+3 263 262 64 0.90196 0.45098 0.00000
+3 64 264 263 0.90196 0.45098 0.00000
+3 264 64 160 0.90196 0.45098 0.00000
+3 265 66 263 0.90196 0.45098 0.00000
+3 263 266 265 0.90196 0.45098 0.00000
+3 266 263 264 0.90196 0.45098 0.00000
+3 264 267 266 0.90196 0.45098 0.00000
+3 267 264 65 0.90196 0.45098 0.00000
+3 65 268 267 0.90196 0.45098 0.00000
+3 268 65 158 0.90196 0.45098 0.00000
+3 67 110 265 0.90196 0.45098 0.00000
+3 265 269 67 0.90196 0.45098 0.00000
+3 269 265 266 0.90196 0.45098 0.00000
+3 266 270 269 0.90196 0.45098 0.00000
+3 270 266 267 0.90196 0.45098 0.00000
+3 267 70 270 0.90196 0.45098 0.00000
+3 70 267 268 0.90196 0.45098 0.00000
+3 251 268 10 0.90196 0.45098 0.00000
+3 271 67 269 0.90196 0.45098 0.00000
+3 269 71 271 0.90196 0.45098 0.00000
+3 71 269 270 0.90196 0.45098 0.00000
+3 250 270 70 0.90196 0.45098 0.00000
+3 11 111 271 0.90196 0.45098 0.00000
+3 248 271 71 0.90196 0.45098 0.00000
+1 0 0.38824 0.60000 0.30196
+1 1 0.38824 0.60000 0.30196
+1 2 0.38824 0.60000 0.30196
+1 3 0.38824 0.60000 0.30196
+1 4 0.38824 0.60000 0.30196
+1 5 0.38824 0.60000 0.30196
+1 6 0.38824 0.60000 0.30196
+1 7 0.38824 0.60000 0.30196
+1 8 0.38824 0.60000 0.30196
+1 9 0.38824 0.60000 0.30196
+1 10 0.38824 0.60000 0.30196
+1 11 0.38824 0.60000 0.30196
diff --git a/pyrate/tests/plan/graph/generate/example_files/geodestic_file_2.off b/pyrate/tests/plan/graph/generate/example_files/geodestic_file_2.off
new file mode 100644
index 0000000..4793303
--- /dev/null
+++ b/pyrate/tests/plan/graph/generate/example_files/geodestic_file_2.off
@@ -0,0 +1,376 @@
+OFF
+122 252 0
+0 0.5257311121191336 0.85065080835204
+0 0.5257311121191336 -0.85065080835204
+0 -0.5257311121191336 0.85065080835204
+0 -0.5257311121191336 -0.85065080835204
+0.5257311121191336 0.85065080835204 0
+0.5257311121191336 -0.85065080835204 0
+-0.5257311121191336 0.85065080835204 0
+-0.5257311121191336 -0.85065080835204 0
+0.85065080835204 0 0.5257311121191336
+0.85065080835204 0 -0.5257311121191336
+-0.85065080835204 0 0.5257311121191336
+-0.85065080835204 0 -0.5257311121191336
+2.175242402100701e-16 -1.643460219210441e-32 1
+0.3090169943749475 0.8090169943749472 0.5000000000000002
+-0.3090169943749475 0.8090169943749472 0.5000000000000002
+0.4999999999999999 0.3090169943749474 0.8090169943749472
+-0.5000000000000001 0.3090169943749475 0.8090169943749472
+2.175242402100701e-16 1.643460219210441e-32 -1
+0.3090169943749475 0.8090169943749472 -0.5000000000000002
+-0.3090169943749475 0.8090169943749472 -0.5000000000000002
+0.5 0.3090169943749473 -0.8090169943749475
+-0.4999999999999999 0.3090169943749474 -0.8090169943749472
+0.3090169943749473 -0.8090169943749475 0.5
+-0.3090169943749475 -0.8090169943749472 0.5000000000000002
+0.5 -0.3090169943749473 0.8090169943749475
+-0.4999999999999999 -0.3090169943749474 0.8090169943749472
+0.3090169943749473 -0.8090169943749475 -0.5
+-0.3090169943749473 -0.8090169943749475 -0.5
+0.5 -0.3090169943749472 -0.8090169943749475
+-0.5000000000000001 -0.3090169943749475 -0.8090169943749472
+0 1 4.350484804201401e-17
+0.8090169943749475 0.5 0.3090169943749472
+0.8090169943749472 0.4999999999999999 -0.3090169943749473
+0 -1 -4.350484804201401e-17
+0.8090169943749472 -0.4999999999999999 0.3090169943749473
+0.8090169943749475 -0.5 -0.3090169943749472
+-0.8090169943749472 0.4999999999999999 0.3090169943749473
+-0.8090169943749472 0.4999999999999999 -0.3090169943749475
+-0.8090169943749475 -0.5 0.3090169943749472
+-0.8090169943749472 -0.4999999999999999 -0.3090169943749473
+1 2.175242402100701e-16 -1.643460219210441e-32
+-1 -2.175242402100701e-16 -1.643460219210441e-32
+-0.1803319730021167 0.289241011911498 -0.9401170227910867
+-0.35682208977309 -3.124513936890529e-17 -0.9341723589627157
+-0.1803319730021166 -0.2892410119114981 -0.9401170227910867
+-0.6483337612153338 -5.436311068297173e-17 -0.7613562464893677
+-0.1803319730021166 0.2892410119114981 0.9401170227910867
+-0.35682208977309 3.09531117213564e-17 0.9341723589627158
+-0.6483337612153338 5.402340711901317e-17 0.7613562464893677
+-0.1803319730021167 -0.289241011911498 0.9401170227910867
+0.291783261575753 -0.5810242734872509 0.7597850497889703
+0.5773502691896258 -0.5773502691896256 0.5773502691896258
+0.5810242734872511 -0.7597850497889701 0.291783261575753
+0.7597850497889702 -0.291783261575753 0.5810242734872511
+-0.291783261575753 -0.5810242734872509 -0.7597850497889703
+-0.5773502691896258 -0.5773502691896256 -0.5773502691896258
+-0.5810242734872511 -0.7597850497889701 -0.291783261575753
+-0.7597850497889702 -0.291783261575753 -0.5810242734872511
+-2.313323858849861e-18 0.7613562464893674 -0.6483337612153339
+3.124513936890529e-17 0.9341723589627158 -0.3568220897730901
+-0.2892410119114981 0.9401170227910867 -0.1803319730021165
+0.2892410119114981 0.9401170227910867 -0.1803319730021165
+-2.313323858849861e-18 -0.7613562464893674 0.6483337612153339
+3.124513936890529e-17 -0.9341723589627158 0.3568220897730901
+-0.2892410119114981 -0.9401170227910867 0.1803319730021165
+0.2892410119114981 -0.9401170227910867 0.1803319730021165
+0.2917832615757529 -0.5810242734872509 -0.7597850497889704
+0.5773502691896258 -0.5773502691896257 -0.5773502691896258
+0.7597850497889701 -0.2917832615757531 -0.5810242734872512
+0.5810242734872511 -0.7597850497889701 -0.291783261575753
+2.313323858849861e-18 0.7613562464893674 0.6483337612153339
+-3.124513936890529e-17 0.9341723589627158 0.3568220897730901
+0.2892410119114981 0.9401170227910867 0.1803319730021165
+-0.2892410119114981 0.9401170227910867 0.1803319730021165
+-0.2917832615757529 -0.5810242734872509 0.7597850497889704
+-0.5773502691896258 -0.5773502691896257 0.5773502691896258
+-0.7597850497889701 -0.2917832615757531 0.5810242734872512
+-0.5810242734872511 -0.7597850497889701 0.291783261575753
+2.313323858849861e-18 -0.7613562464893674 -0.6483337612153339
+-3.124513936890529e-17 -0.9341723589627158 -0.3568220897730901
+0.2892410119114981 -0.9401170227910867 -0.1803319730021165
+-0.2892410119114981 -0.9401170227910867 -0.1803319730021165
+0.1803319730021167 0.289241011911498 0.9401170227910867
+0.35682208977309 -3.124513936890529e-17 0.9341723589627157
+0.1803319730021166 -0.2892410119114981 0.9401170227910867
+0.6483337612153338 -5.436311068297173e-17 0.7613562464893677
+0.2917832615757529 0.5810242734872509 0.7597850497889704
+0.5773502691896258 0.5773502691896257 0.5773502691896258
+0.7597850497889701 0.2917832615757531 0.5810242734872512
+0.5810242734872511 0.7597850497889701 0.291783261575753
+0.7613562464893677 -0.6483337612153338 5.436311068297173e-17
+0.9341723589627157 -0.35682208977309 3.124513936890529e-17
+0.9401170227910867 -0.1803319730021167 -0.289241011911498
+0.9401170227910867 -0.1803319730021166 0.2892410119114981
+0.291783261575753 0.5810242734872509 -0.7597850497889703
+0.5773502691896258 0.5773502691896256 -0.5773502691896258
+0.5810242734872511 0.7597850497889701 -0.291783261575753
+0.7597850497889702 0.291783261575753 -0.5810242734872511
+0.1803319730021166 0.2892410119114981 -0.9401170227910867
+0.35682208977309 3.09531117213564e-17 -0.9341723589627158
+0.6483337612153338 5.402340711901317e-17 -0.7613562464893677
+0.1803319730021167 -0.289241011911498 -0.9401170227910867
+0.7613562464893677 0.6483337612153338 -5.436311068297173e-17
+0.9341723589627157 0.35682208977309 -3.124513936890529e-17
+0.9401170227910867 0.1803319730021167 0.289241011911498
+0.9401170227910867 0.1803319730021166 -0.2892410119114981
+-0.291783261575753 0.5810242734872509 0.7597850497889703
+-0.5773502691896258 0.5773502691896256 0.5773502691896258
+-0.5810242734872511 0.7597850497889701 0.291783261575753
+-0.7597850497889702 0.291783261575753 0.5810242734872511
+-0.7613562464893677 0.6483337612153338 5.436311068297173e-17
+-0.9341723589627157 0.35682208977309 3.124513936890529e-17
+-0.9401170227910867 0.1803319730021167 -0.289241011911498
+-0.9401170227910867 0.1803319730021166 0.2892410119114981
+-0.2917832615757529 0.5810242734872509 -0.7597850497889704
+-0.5773502691896258 0.5773502691896257 -0.5773502691896258
+-0.7597850497889701 0.2917832615757531 -0.5810242734872512
+-0.5810242734872511 0.7597850497889701 -0.291783261575753
+-0.7613562464893677 -0.6483337612153338 -5.436311068297173e-17
+-0.9341723589627157 -0.35682208977309 -3.124513936890529e-17
+-0.9401170227910867 -0.1803319730021167 0.289241011911498
+-0.9401170227910867 -0.1803319730021166 -0.2892410119114981
+3 42 1 98 0.90196 0.45098 0.00000
+3 21 114 42 0.90196 0.45098 0.00000
+3 42 43 21 0.90196 0.45098 0.00000
+3 43 42 17 0.90196 0.45098 0.00000
+3 17 44 43 0.90196 0.45098 0.00000
+3 44 17 101 0.90196 0.45098 0.00000
+3 45 21 43 0.90196 0.45098 0.00000
+3 43 29 45 0.90196 0.45098 0.00000
+3 29 43 44 0.90196 0.45098 0.00000
+3 54 44 3 0.90196 0.45098 0.00000
+3 11 116 45 0.90196 0.45098 0.00000
+3 57 45 29 0.90196 0.45098 0.00000
+3 46 0 106 0.90196 0.45098 0.00000
+3 12 82 46 0.90196 0.45098 0.00000
+3 46 47 12 0.90196 0.45098 0.00000
+3 47 46 16 0.90196 0.45098 0.00000
+3 16 48 47 0.90196 0.45098 0.00000
+3 48 16 109 0.90196 0.45098 0.00000
+3 49 12 47 0.90196 0.45098 0.00000
+3 47 25 49 0.90196 0.45098 0.00000
+3 25 47 48 0.90196 0.45098 0.00000
+3 76 48 10 0.90196 0.45098 0.00000
+3 2 84 49 0.90196 0.45098 0.00000
+3 74 49 25 0.90196 0.45098 0.00000
+3 50 2 62 0.90196 0.45098 0.00000
+3 24 84 50 0.90196 0.45098 0.00000
+3 50 51 24 0.90196 0.45098 0.00000
+3 51 50 22 0.90196 0.45098 0.00000
+3 22 52 51 0.90196 0.45098 0.00000
+3 52 22 65 0.90196 0.45098 0.00000
+3 53 24 51 0.90196 0.45098 0.00000
+3 51 34 53 0.90196 0.45098 0.00000
+3 34 51 52 0.90196 0.45098 0.00000
+3 90 52 5 0.90196 0.45098 0.00000
+3 8 85 53 0.90196 0.45098 0.00000
+3 93 53 34 0.90196 0.45098 0.00000
+3 54 3 78 0.90196 0.45098 0.00000
+3 29 44 54 0.90196 0.45098 0.00000
+3 54 55 29 0.90196 0.45098 0.00000
+3 55 54 27 0.90196 0.45098 0.00000
+3 27 56 55 0.90196 0.45098 0.00000
+3 56 27 81 0.90196 0.45098 0.00000
+3 57 29 55 0.90196 0.45098 0.00000
+3 55 39 57 0.90196 0.45098 0.00000
+3 39 55 56 0.90196 0.45098 0.00000
+3 118 56 7 0.90196 0.45098 0.00000
+3 11 45 57 0.90196 0.45098 0.00000
+3 121 57 39 0.90196 0.45098 0.00000
+3 58 1 114 0.90196 0.45098 0.00000
+3 18 94 58 0.90196 0.45098 0.00000
+3 58 59 18 0.90196 0.45098 0.00000
+3 59 58 19 0.90196 0.45098 0.00000
+3 19 60 59 0.90196 0.45098 0.00000
+3 60 19 117 0.90196 0.45098 0.00000
+3 61 18 59 0.90196 0.45098 0.00000
+3 59 30 61 0.90196 0.45098 0.00000
+3 30 59 60 0.90196 0.45098 0.00000
+3 73 60 6 0.90196 0.45098 0.00000
+3 4 96 61 0.90196 0.45098 0.00000
+3 72 61 30 0.90196 0.45098 0.00000
+3 62 2 74 0.90196 0.45098 0.00000
+3 22 50 62 0.90196 0.45098 0.00000
+3 62 63 22 0.90196 0.45098 0.00000
+3 63 62 23 0.90196 0.45098 0.00000
+3 23 64 63 0.90196 0.45098 0.00000
+3 64 23 77 0.90196 0.45098 0.00000
+3 65 22 63 0.90196 0.45098 0.00000
+3 63 33 65 0.90196 0.45098 0.00000
+3 33 63 64 0.90196 0.45098 0.00000
+3 81 64 7 0.90196 0.45098 0.00000
+3 5 52 65 0.90196 0.45098 0.00000
+3 80 65 33 0.90196 0.45098 0.00000
+3 66 3 101 0.90196 0.45098 0.00000
+3 26 78 66 0.90196 0.45098 0.00000
+3 66 67 26 0.90196 0.45098 0.00000
+3 67 66 28 0.90196 0.45098 0.00000
+3 28 68 67 0.90196 0.45098 0.00000
+3 68 28 100 0.90196 0.45098 0.00000
+3 69 26 67 0.90196 0.45098 0.00000
+3 67 35 69 0.90196 0.45098 0.00000
+3 35 67 68 0.90196 0.45098 0.00000
+3 92 68 9 0.90196 0.45098 0.00000
+3 5 80 69 0.90196 0.45098 0.00000
+3 90 69 35 0.90196 0.45098 0.00000
+3 70 0 86 0.90196 0.45098 0.00000
+3 14 106 70 0.90196 0.45098 0.00000
+3 70 71 14 0.90196 0.45098 0.00000
+3 71 70 13 0.90196 0.45098 0.00000
+3 13 72 71 0.90196 0.45098 0.00000
+3 72 13 89 0.90196 0.45098 0.00000
+3 73 14 71 0.90196 0.45098 0.00000
+3 71 30 73 0.90196 0.45098 0.00000
+3 30 71 72 0.90196 0.45098 0.00000
+3 61 72 4 0.90196 0.45098 0.00000
+3 6 108 73 0.90196 0.45098 0.00000
+3 60 73 30 0.90196 0.45098 0.00000
+3 74 2 49 0.90196 0.45098 0.00000
+3 23 62 74 0.90196 0.45098 0.00000
+3 74 75 23 0.90196 0.45098 0.00000
+3 75 74 25 0.90196 0.45098 0.00000
+3 25 76 75 0.90196 0.45098 0.00000
+3 76 25 48 0.90196 0.45098 0.00000
+3 77 23 75 0.90196 0.45098 0.00000
+3 75 38 77 0.90196 0.45098 0.00000
+3 38 75 76 0.90196 0.45098 0.00000
+3 120 76 10 0.90196 0.45098 0.00000
+3 7 64 77 0.90196 0.45098 0.00000
+3 118 77 38 0.90196 0.45098 0.00000
+3 78 3 66 0.90196 0.45098 0.00000
+3 27 54 78 0.90196 0.45098 0.00000
+3 78 79 27 0.90196 0.45098 0.00000
+3 79 78 26 0.90196 0.45098 0.00000
+3 26 80 79 0.90196 0.45098 0.00000
+3 80 26 69 0.90196 0.45098 0.00000
+3 81 27 79 0.90196 0.45098 0.00000
+3 79 33 81 0.90196 0.45098 0.00000
+3 33 79 80 0.90196 0.45098 0.00000
+3 65 80 5 0.90196 0.45098 0.00000
+3 7 56 81 0.90196 0.45098 0.00000
+3 64 81 33 0.90196 0.45098 0.00000
+3 82 0 46 0.90196 0.45098 0.00000
+3 15 86 82 0.90196 0.45098 0.00000
+3 82 83 15 0.90196 0.45098 0.00000
+3 83 82 12 0.90196 0.45098 0.00000
+3 12 84 83 0.90196 0.45098 0.00000
+3 84 12 49 0.90196 0.45098 0.00000
+3 85 15 83 0.90196 0.45098 0.00000
+3 83 24 85 0.90196 0.45098 0.00000
+3 24 83 84 0.90196 0.45098 0.00000
+3 50 84 2 0.90196 0.45098 0.00000
+3 8 88 85 0.90196 0.45098 0.00000
+3 53 85 24 0.90196 0.45098 0.00000
+3 86 0 82 0.90196 0.45098 0.00000
+3 13 70 86 0.90196 0.45098 0.00000
+3 86 87 13 0.90196 0.45098 0.00000
+3 87 86 15 0.90196 0.45098 0.00000
+3 15 88 87 0.90196 0.45098 0.00000
+3 88 15 85 0.90196 0.45098 0.00000
+3 89 13 87 0.90196 0.45098 0.00000
+3 87 31 89 0.90196 0.45098 0.00000
+3 31 87 88 0.90196 0.45098 0.00000
+3 104 88 8 0.90196 0.45098 0.00000
+3 4 72 89 0.90196 0.45098 0.00000
+3 102 89 31 0.90196 0.45098 0.00000
+3 90 5 69 0.90196 0.45098 0.00000
+3 34 52 90 0.90196 0.45098 0.00000
+3 90 91 34 0.90196 0.45098 0.00000
+3 91 90 35 0.90196 0.45098 0.00000
+3 35 92 91 0.90196 0.45098 0.00000
+3 92 35 68 0.90196 0.45098 0.00000
+3 93 34 91 0.90196 0.45098 0.00000
+3 91 40 93 0.90196 0.45098 0.00000
+3 40 91 92 0.90196 0.45098 0.00000
+3 105 92 9 0.90196 0.45098 0.00000
+3 8 53 93 0.90196 0.45098 0.00000
+3 104 93 40 0.90196 0.45098 0.00000
+3 94 1 58 0.90196 0.45098 0.00000
+3 20 98 94 0.90196 0.45098 0.00000
+3 94 95 20 0.90196 0.45098 0.00000
+3 95 94 18 0.90196 0.45098 0.00000
+3 18 96 95 0.90196 0.45098 0.00000
+3 96 18 61 0.90196 0.45098 0.00000
+3 97 20 95 0.90196 0.45098 0.00000
+3 95 32 97 0.90196 0.45098 0.00000
+3 32 95 96 0.90196 0.45098 0.00000
+3 102 96 4 0.90196 0.45098 0.00000
+3 9 100 97 0.90196 0.45098 0.00000
+3 105 97 32 0.90196 0.45098 0.00000
+3 98 1 94 0.90196 0.45098 0.00000
+3 17 42 98 0.90196 0.45098 0.00000
+3 98 99 17 0.90196 0.45098 0.00000
+3 99 98 20 0.90196 0.45098 0.00000
+3 20 100 99 0.90196 0.45098 0.00000
+3 100 20 97 0.90196 0.45098 0.00000
+3 101 17 99 0.90196 0.45098 0.00000
+3 99 28 101 0.90196 0.45098 0.00000
+3 28 99 100 0.90196 0.45098 0.00000
+3 68 100 9 0.90196 0.45098 0.00000
+3 3 44 101 0.90196 0.45098 0.00000
+3 66 101 28 0.90196 0.45098 0.00000
+3 102 4 89 0.90196 0.45098 0.00000
+3 32 96 102 0.90196 0.45098 0.00000
+3 102 103 32 0.90196 0.45098 0.00000
+3 103 102 31 0.90196 0.45098 0.00000
+3 31 104 103 0.90196 0.45098 0.00000
+3 104 31 88 0.90196 0.45098 0.00000
+3 105 32 103 0.90196 0.45098 0.00000
+3 103 40 105 0.90196 0.45098 0.00000
+3 40 103 104 0.90196 0.45098 0.00000
+3 93 104 8 0.90196 0.45098 0.00000
+3 9 97 105 0.90196 0.45098 0.00000
+3 92 105 40 0.90196 0.45098 0.00000
+3 106 0 70 0.90196 0.45098 0.00000
+3 16 46 106 0.90196 0.45098 0.00000
+3 106 107 16 0.90196 0.45098 0.00000
+3 107 106 14 0.90196 0.45098 0.00000
+3 14 108 107 0.90196 0.45098 0.00000
+3 108 14 73 0.90196 0.45098 0.00000
+3 109 16 107 0.90196 0.45098 0.00000
+3 107 36 109 0.90196 0.45098 0.00000
+3 36 107 108 0.90196 0.45098 0.00000
+3 110 108 6 0.90196 0.45098 0.00000
+3 10 48 109 0.90196 0.45098 0.00000
+3 113 109 36 0.90196 0.45098 0.00000
+3 110 6 117 0.90196 0.45098 0.00000
+3 36 108 110 0.90196 0.45098 0.00000
+3 110 111 36 0.90196 0.45098 0.00000
+3 111 110 37 0.90196 0.45098 0.00000
+3 37 112 111 0.90196 0.45098 0.00000
+3 112 37 116 0.90196 0.45098 0.00000
+3 113 36 111 0.90196 0.45098 0.00000
+3 111 41 113 0.90196 0.45098 0.00000
+3 41 111 112 0.90196 0.45098 0.00000
+3 121 112 11 0.90196 0.45098 0.00000
+3 10 109 113 0.90196 0.45098 0.00000
+3 120 113 41 0.90196 0.45098 0.00000
+3 114 1 42 0.90196 0.45098 0.00000
+3 19 58 114 0.90196 0.45098 0.00000
+3 114 115 19 0.90196 0.45098 0.00000
+3 115 114 21 0.90196 0.45098 0.00000
+3 21 116 115 0.90196 0.45098 0.00000
+3 116 21 45 0.90196 0.45098 0.00000
+3 117 19 115 0.90196 0.45098 0.00000
+3 115 37 117 0.90196 0.45098 0.00000
+3 37 115 116 0.90196 0.45098 0.00000
+3 112 116 11 0.90196 0.45098 0.00000
+3 6 60 117 0.90196 0.45098 0.00000
+3 110 117 37 0.90196 0.45098 0.00000
+3 118 7 77 0.90196 0.45098 0.00000
+3 39 56 118 0.90196 0.45098 0.00000
+3 118 119 39 0.90196 0.45098 0.00000
+3 119 118 38 0.90196 0.45098 0.00000
+3 38 120 119 0.90196 0.45098 0.00000
+3 120 38 76 0.90196 0.45098 0.00000
+3 121 39 119 0.90196 0.45098 0.00000
+3 119 41 121 0.90196 0.45098 0.00000
+3 41 119 120 0.90196 0.45098 0.00000
+3 113 120 10 0.90196 0.45098 0.00000
+3 11 57 121 0.90196 0.45098 0.00000
+3 112 121 41 0.90196 0.45098 0.00000
+1 0 0.38824 0.60000 0.30196
+1 1 0.38824 0.60000 0.30196
+1 2 0.38824 0.60000 0.30196
+1 3 0.38824 0.60000 0.30196
+1 4 0.38824 0.60000 0.30196
+1 5 0.38824 0.60000 0.30196
+1 6 0.38824 0.60000 0.30196
+1 7 0.38824 0.60000 0.30196
+1 8 0.38824 0.60000 0.30196
+1 9 0.38824 0.60000 0.30196
+1 10 0.38824 0.60000 0.30196
+1 11 0.38824 0.60000 0.30196
diff --git a/pyrate/tests/plan/graph/generate/test_graph_generation.py b/pyrate/tests/plan/graph/generate/test_graph_generation.py
new file mode 100644
index 0000000..2e6e5cf
--- /dev/null
+++ b/pyrate/tests/plan/graph/generate/test_graph_generation.py
@@ -0,0 +1,205 @@
+"""Tests the generated graphs are well-formed."""
+
+# Standard Library
+from contextlib import redirect_stdout
+from io import StringIO
+from math import isclose
+
+# Testing
+import unittest
+
+# Typing
+from typing import cast
+
+# Hypothesis testing
+from hypothesis import given
+import hypothesis.strategies as st
+
+# Scientific (testing)
+import numpy as np
+import numpy.testing
+
+# Own geography
+from pyrate.plan.geometry.geospatial import MEAN_EARTH_CIRCUMFERENCE
+from pyrate.plan.geometry.helpers import haversine_numpy
+
+# Module under test
+from pyrate.plan.graph.generate import angular_distance_for
+from pyrate.plan.graph.generate import create_earth_graph
+from pyrate.plan.graph.generate import great_circle_distance_distance_for
+from pyrate.plan.graph.generate import min_required_frequency
+
+
+EXAMPLE_DISTANCES_KILOMETERS = [100000, 100000.0, 5000, 250] # smaller values take too long
+
+
+class TestGridGeneration(unittest.TestCase):
+ """Tests that a grid can be created and pruned."""
+
+ @staticmethod
+ def _calculate_distances(latitudes: np.ndarray, longitudes: np.ndarray, edges: np.ndarray) -> np.ndarray:
+ """Calculates the distance of all edges. The `edges` index into the coordinate arrays."""
+ entries = [
+ (latitudes[node_1], longitudes[node_1], latitudes[node_2], longitudes[node_2])
+ for node_1, node_2 in edges
+ ]
+ return haversine_numpy(*np.transpose(entries))
+
+ def test_create_earth_grid(self) -> None:
+ """Ensures that the generated earth grids are formed correctly."""
+ for distance_km in EXAMPLE_DISTANCES_KILOMETERS:
+ with self.subTest(f"Test with distance {distance_km} km"):
+ distance = distance_km * 1000
+
+ # create a grid
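+                # (assumption: min_required_frequency yields the geodesic subdivision frequency whose
+                # resulting node spacing does not exceed `distance`; see TestHelperMethods below)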
+ graph = create_earth_graph(min_required_frequency(distance, in_meters=True))
+ self.assertIsNotNone(graph.node_radius)
+ actual_distance: float = cast(float, graph.node_radius) * 2
+
+                # the actual_distance must be upper-bounded by the requested distance
+ self.assertLessEqual(actual_distance, distance)
+ self.assertLessEqual(actual_distance, MEAN_EARTH_CIRCUMFERENCE / 2)
+
+ # the shapes of the returned arrays must match
+ self.assertEqual(
+ graph.latitudes_radians.shape,
+ graph.longitudes_radians.shape,
+ "latitude and longitude must have the same shape",
+ )
+ self.assertEqual(
+ graph.latitudes_degrees.shape,
+ graph.longitudes_degrees.shape,
+ "latitude and longitude must have the same shape",
+ )
+ self.assertEqual(
+ graph.latitudes_radians.shape,
+ graph.longitudes_degrees.shape,
+ "radians and degrees must have the same shape",
+ )
+                self.assertGreaterEqual(len(graph), 12)  # as it is based on subdividing an icosahedron
+
+                # the edges must be valid indices into the nodes
+ self.assertTrue(
+ np.all(graph.edges[:, :] >= 0) and np.all(graph.edges[:, :] < len(graph)),
+ "some edges reference non-existent points",
+ )
+
+                # check the actual coordinate values
+ if (
+ np.any(graph.latitudes_radians < -np.pi / 2)
+ or np.any(graph.longitudes_radians < -np.pi)
+ or np.any(graph.latitudes_radians >= +np.pi / 2)
+ or np.any(graph.longitudes_radians >= +np.pi)
+ ):
+ print(
+ "latitude < min / 2:",
+ np.compress(graph.latitudes_radians < -np.pi / 2, graph.latitudes_radians),
+ )
+ print(
+ "longitude < min:",
+ np.compress(graph.longitudes_radians < -np.pi, graph.longitudes_radians),
+ )
+ print(
+ "latitude >= max / 2:",
+ np.compress(graph.latitudes_radians >= +np.pi / 2, graph.latitudes_radians),
+ )
+ print(
+ "longitude >= max:",
+ np.compress(graph.longitudes_radians >= +np.pi, graph.longitudes_radians),
+ )
+ self.fail("some points are outside of the allowed range")
+
+ # check the distances along the edges
+ distances = TestGridGeneration._calculate_distances(
+ graph.latitudes_radians, graph.longitudes_radians, graph.edges
+ )
+
+ numpy.testing.assert_allclose(distances, actual_distance, atol=10, rtol=0.2)
+
+ mean = np.mean(distances)
+ self.assertTrue(isclose(mean, actual_distance, rel_tol=0.1, abs_tol=10.0))
+ standard_deviation = np.std(distances)
+ self.assertLessEqual(standard_deviation / mean, 0.075)
+
+ def test_print_status(self) -> None:
+ """This tests that logging being enabled actually logs something and does not crash."""
+ stdout_logging = StringIO()
+ with redirect_stdout(stdout_logging):
+ create_earth_graph(6, print_status=True)
+ logged_lines = list(stdout_logging.getvalue().splitlines())
+ self.assertEqual(len(logged_lines), 6, "we expect 6 lines of messages")
+
+ def test_find_neighbors(self) -> None:
+ """Tests that result of the neighbor search is correct."""
+ for distance_km in EXAMPLE_DISTANCES_KILOMETERS:
+ with self.subTest(f"Test with distance {distance_km} km"):
+
+ # create a grid & determine neighbors
+ graph = create_earth_graph(min_required_frequency(distance_km * 1000, in_meters=True))
+ neighbors = graph.neighbors
+ count_per_node = np.count_nonzero(neighbors >= 0, axis=1)
+
+ # check the resulting number of entries
+ self.assertEqual(
+ np.sum(count_per_node),
+ graph.edges.shape[0] * 2,
+ "each edge must generate two entries in the neighbor table",
+ )
+ self.assertEqual(
+ np.count_nonzero(count_per_node == 5),
+ 12,
+ "exactly twelve nodes must have exactly five neighbors "
+ "(the corners of the original icosahedron)",
+ )
+ self.assertEqual(
+ np.count_nonzero(count_per_node == 6),
+ len(graph) - 12,
+ "all but twelve nodes must have exactly six neighbors",
+ )
+
+ # check the range of values
+ valid_index = np.logical_and(neighbors >= 0, neighbors < len(graph))
+ self.assertTrue(
+ np.all(np.logical_xor(neighbors == -1, valid_index)),
+ "any value i may either be -1 (=null) or a valid index with 0 <= i < num_nodes",
+ )
+
+
+class TestHelperMethods(unittest.TestCase):
+ """Tests that the helpers (e.g. for computing minimum required frequencies) work correctly."""
+
+ @given(st.floats(min_value=1e-6, allow_infinity=False, allow_nan=False), st.booleans())
+ def test_right_order_of_magnitude(self, desired_distance: float, in_meters: bool) -> None:
+ """Asserts that commuting a frequency and converting it to units is correct w.r.t. to each other."""
+ frequency = min_required_frequency(desired_distance, in_meters)
+
+ if in_meters:
+ actual_distance = great_circle_distance_distance_for(frequency)
+ else:
+ actual_distance = angular_distance_for(frequency)
+ self.assertLessEqual(actual_distance, desired_distance)
+
+ if frequency > 1:
+ if in_meters:
+ actual_distance_one_rougher = great_circle_distance_distance_for(frequency - 1)
+ else:
+ actual_distance_one_rougher = angular_distance_for(frequency - 1)
+ self.assertGreaterEqual(actual_distance_one_rougher, desired_distance)
+
+ def test_specific_values(self) -> None:
+ """Asserts that commuting a frequency works correct for specific hand-chosen values."""
+
+ # Taken from the implementation:
+ # The approximate angle between two edges on an icosahedron, in radians, about 63.4°
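+        # (presumably arccos(1 / sqrt(5)) = arctan(2) ≈ 1.1071487 rad ≈ 63.43°, the central angle
+        # subtended by an icosahedron edge)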
+ alpha = 1.1071487
+
+ # Contains pairs: (angular distance in radians, frequency)
+ table = [
+ (alpha + 1e-6, 1),
+ (alpha - 1e-9, 2),
+ (alpha / 9000.005, 9001),
+ ]
+
+ for desired_angular_distance, desired_frequency in table:
+ computed_frequency = min_required_frequency(desired_angular_distance, in_meters=False)
+ self.assertEqual(desired_frequency, computed_frequency)
diff --git a/pyrate/tests/plan/graph/generate/test_off_handler.py b/pyrate/tests/plan/graph/generate/test_off_handler.py
new file mode 100644
index 0000000..e6cbbf4
--- /dev/null
+++ b/pyrate/tests/plan/graph/generate/test_off_handler.py
@@ -0,0 +1,56 @@
+"""Tests the Aptiprism OFF file handler."""
+
+# Standard library
+import os.path
+
+# Testing
+import unittest
+
+# Scientific
+import numpy as np
+
+# Module under test
+from pyrate.plan.graph.generate import _parse_off_file
+
+
+TEST_FILES_DIR = os.path.realpath(os.path.join(os.path.dirname(__file__), "example_files/"))
+TEST_FILES = [
+ os.path.join(TEST_FILES_DIR, "geodestic_file_1.off"),
+ os.path.join(TEST_FILES_DIR, "geodestic_file_2.off"),
+ os.path.join(TEST_FILES_DIR, "geodesic_-M_s_-c_2_-f_2_ico.off"),
+]
+
+
+class TestOffHandler(unittest.TestCase):
+ """Tests the Aptiprism OFF file handler using some examples."""
+
+ def test_with_example_files(self):
+ """Tests the Aptiprism OFF file handler using three example files."""
+
+ for test_file in TEST_FILES:
+ with self.subTest(f'Test file "{test_file}"'):
+
+ # test that it does not crash
+ with open(test_file, "r", encoding="utf-8") as myfile:
+ source = myfile.read()
+ latitudes, longitudes, edges = _parse_off_file(source)
+
+ if "geodesic_-M_s_-c_2_-f_2_ico" in test_file:
+ self.assertEqual(
+ len(latitudes), 122, f"wrong total number of nodes: {len(latitudes)} instead of 122"
+ )
+ self.assertEqual(
+ edges.shape[0], 360, f"wrong total number of edges: {edges.shape[0]} instead of 360"
+ )
+
+ # the shapes of the returned arrays must match
+ self.assertEqual(
+ latitudes.shape, longitudes.shape, "latitude and longitude must have the same shape"
+ )
+ self.assertGreater(len(latitudes), 0, "no points found")
+
+            # the edges must be valid indices into the nodes
+ self.assertTrue(
+ np.all(edges[:, :] >= 0) and np.all(edges[:, :] < len(latitudes)),
+ "some edges reference non-existent points",
+ )
diff --git a/pyrate/tests/plan/graph/test_geo_graph.py b/pyrate/tests/plan/graph/test_geo_graph.py
new file mode 100644
index 0000000..84aabf6
--- /dev/null
+++ b/pyrate/tests/plan/graph/test_geo_graph.py
@@ -0,0 +1,166 @@
+"""Asserts correct behaviour of the geo-referenced graph navigation.
+
+See Also:
+ tests/common/raster_datasets/test_transformers_concrete.py
+"""
+
+# Standard library
+from copy import deepcopy
+import os.path
+from tempfile import TemporaryDirectory
+from unittest import TestCase
+
+# Scientific
+import numpy
+from numpy import arange
+from numpy import array
+from numpy import empty
+from numpy.testing import assert_array_equal
+from pandas import DataFrame
+
+# Graph generation / Module under test
+from pyrate.common.raster_datasets import transformers_concrete
+from pyrate.plan.graph import create_earth_graph
+from pyrate.plan.graph import GeoNavigationGraph
+from pyrate.plan.graph import min_required_frequency
+
+# CI/Testing helpers
+from ... import _open_test_geo_dataset
+
+
+from .generate.test_graph_generation import EXAMPLE_DISTANCES_KILOMETERS
+
+
+class TestGeoNavigationGraph(TestCase):
+ """Tests properties specific to :class:`pyrate.plan.graph.GeoNavigationGraph`."""
+
+ def test_create_invalid_duplicate_argument_nodes(self) -> None:
+ """Tests supplying nodes to from_coordinates_radians/from_coordinates_degrees raises an Exception."""
+ for function in [
+ GeoNavigationGraph.from_coordinates_degrees,
+ GeoNavigationGraph.from_coordinates_radians,
+ ]:
+ with self.subTest(msg=f"function {str(function)}"):
+ with self.assertRaises(Exception): # noqa: H202
+ function( # type: ignore
+ latitudes=empty((0,)), longitudes=empty((0,)), edges=empty((0, 2)), nodes=DataFrame()
+ )
+
+ def test_node_radius_constructor(self) -> None:
+ """Tests that only invalid inputs to node_radius raise exceptions."""
+ GeoNavigationGraph.from_coordinates_degrees(
+ latitudes=empty((0,)), longitudes=empty((0,)), edges=empty((0, 2)), node_radius=0
+ )
+ GeoNavigationGraph.from_coordinates_degrees(
+ latitudes=empty((0,)), longitudes=empty((0,)), edges=empty((0, 2)), node_radius=100_000
+ )
+
+ with self.assertRaises(Exception): # noqa: H202
+ GeoNavigationGraph.from_coordinates_degrees(
+ latitudes=empty((0,)), longitudes=empty((0,)), edges=empty((0, 2)), node_radius=-1e-9
+ )
+
+ def test_set_node_properties(self) -> None:
+ """Tests that passing ``node_properties`` works."""
+ graph = GeoNavigationGraph.from_coordinates_radians(
+ latitudes=array([42]),
+ longitudes=array([21]),
+ edges=empty((0, 2)),
+ node_radius=100,
+ node_properties=DataFrame(data={"col1": [99], "col2": ["text"]}),
+ )
+ self.assertEqual(graph.node_radius, 100)
+ assert_array_equal(graph.node_properties["col1"], [99])
+ assert_array_equal(graph.node_properties["col2"], ["text"])
+
+ def test_read_write(self) -> None:
+ """Tests that a *geo* navigation graph can be serialized and deserialized again."""
+ latitudes = array([49.8725144])
+ longitudes = array([8.6528707])
+ edges = empty((0, 2))
+
+ # `graph.neighbors` is cached, so we want to try it with and without the cached neighbors being set
+ for set_neighbors in [True, False]:
+ with self.subTest(f"neighbors set = {set_neighbors}"):
+ graph = GeoNavigationGraph.from_coordinates_degrees(
+ latitudes, longitudes, edges=edges, max_neighbors=42, node_radius=1000
+ )
+ if set_neighbors:
+ _ = graph.neighbors
+
+ with TemporaryDirectory() as directory:
+ path = os.path.join(directory, "some_file.hdf5")
+ graph.to_disk(path)
+ new_graph = GeoNavigationGraph.from_disk(path)
+
+ self.assertEqual(graph, new_graph)
+ assert_array_equal(new_graph.neighbors, graph.neighbors)
+
+
+class TestNavigationGraphPruningGeo(TestCase):
+ """Tests that navigation graphs can be pruned by testing it with earth graphs."""
+
+ def test_pruning_artificial(self) -> None:
+ """Tests that pruning half of the points works as expected."""
+
+ for distance_km in EXAMPLE_DISTANCES_KILOMETERS:
+ with self.subTest(f"Test with distance {distance_km} km"):
+ # create a grid
+ graph = create_earth_graph(min_required_frequency(distance_km * 1000, in_meters=True))
+
+                # keep only the nodes at even indices
+ keep_condition = arange(0, len(graph)) % 2 == 0
+ pruned_graph = deepcopy(graph)
+ pruned_graph.prune_nodes(keep_condition)
+
+ self.assertGreater(len(pruned_graph), 0, "some node must remain")
+
+ # test the reduction ratio
+ delta_nodes = len(pruned_graph) / len(graph)
+ delta_edges = pruned_graph.num_edges / graph.num_edges
+ self.assertAlmostEqual(delta_nodes, 0.5, msg="suspicious node count reduction")
+                # only about a fifth of all edges should remain, since an edge survives only if both of
+                # its end nodes are kept
+ self.assertAlmostEqual(delta_edges, 1 / 5, delta=0.15, msg="suspicious edge count reduction")
+
+ # test the values in the edges, since they were rewritten as they point to new indices
+ self.assertTrue(numpy.all(pruned_graph.edges[:, :] >= 0), "indices must be non-negative")
+ self.assertTrue(
+ numpy.all(pruned_graph.edges[:, :] < len(pruned_graph)),
+ "some filtered edges reference (now) non-existent points",
+ )
+
+ def test_pruning_depth(self) -> None:
+ """Supplements :meth`~test_pruning_artificial` by a real-world application.
+
+ Only checks application-specific properties and not, for example, the general shapes of the result.
+ """
+ # create a grid
+ distance_meters = 500_000
+ graph = create_earth_graph(min_required_frequency(distance_meters, in_meters=True))
+
+ # fetch properties
+ mode = transformers_concrete.BathymetricTransformer.Modes.AVERAGE_DEPTH
+ graph.append_property(transformers_concrete.BathymetricTransformer(_open_test_geo_dataset(), [mode]))
+
+ # keep all nodes that are below sea level
+ keep_condition = (graph.node_properties[mode.column_name] < 0.0).to_numpy()
+
+ # Remove the now useless property
+ graph.clear_node_properties()
+
+ # perform pruning
+ pruned_graph = deepcopy(graph)
+ pruned_graph.prune_nodes(keep_condition)
+
+ # test the reduction ratio
+ delta_nodes = len(pruned_graph) / len(graph)
+ delta_edges = pruned_graph.num_edges / graph.num_edges
+ earth_fraction_water = 0.708 # see https://en.wikipedia.org/wiki/World_Ocean
+ # although we go by topography and not water coverage, this should still be fairly correct
+ self.assertAlmostEqual(
+ delta_nodes, earth_fraction_water, delta=0.1, msg="suspicious node count reduction"
+ )
+ self.assertAlmostEqual(
+ delta_edges, earth_fraction_water, delta=0.1, msg="suspicious edge count reduction"
+ )
diff --git a/pyrate/tests/plan/graph/test_graph.py b/pyrate/tests/plan/graph/test_graph.py
new file mode 100644
index 0000000..26f57d9
--- /dev/null
+++ b/pyrate/tests/plan/graph/test_graph.py
@@ -0,0 +1,120 @@
+"""Asserts correct behaviour of the base classes for graph navigation.
+
+See Also:
+ tests/common/raster_datasets/test_transformers_concrete.py
+"""
+
+# Standard library
+from copy import deepcopy
+import os.path
+from tempfile import TemporaryDirectory
+from unittest import TestCase
+
+# Scientific
+from numpy import array
+from numpy import empty
+from numpy import full
+from numpy.testing import assert_array_equal
+from pandas import DataFrame
+from pandas.testing import assert_frame_equal
+
+# Module under test
+from pyrate.plan.graph import NavigationGraph
+
+
+# Some examples:
+_NODES = DataFrame(data={"property_1": [1, 2, 3], "property_2": [10, 20, 30]})
+_EDGES = array([[0, 1], [1, 2]])
+_NEIGHBORS = array([[1, -1], [0, 2], [1, -1]])
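+# Each row of _NEIGHBORS lists the neighbors of the corresponding node; -1 apparently pads rows of
+# nodes that have fewer neighbors than the widest row (here, nodes 0 and 2 have a single neighbor).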
+
+
+class TestNavigationGraph(TestCase):
+ """Tests the very basic functionality like initialization, (de)serialization and finding neighbors."""
+
+ def test_empty(self) -> None:
+ """Tests that a new instance can be created with and without neighbors."""
+ graph = NavigationGraph(DataFrame(), empty((0, 2)))
+ self.assertEqual(len(graph), 0)
+ self.assertEqual(graph.num_edges, 0)
+
+ # check that the correct neighbor table is returned
+ self.assertEqual(graph.neighbors.shape, (0, 0))
+
+ def test_create(self) -> None:
+ """Tests that a new instance can be created with and without neighbors."""
+
+ for given_neighbors in [_NEIGHBORS, None]:
+ with self.subTest(f"neighbors given = {given_neighbors is not None}"):
+ graph = NavigationGraph(_NODES, _EDGES, given_neighbors)
+ assert_array_equal(graph.neighbors, _NEIGHBORS)
+
+ # repeated queries should return the same neighbors
+ assert_array_equal(graph.neighbors, graph.neighbors)
+
+ def test_read_write(self) -> None:
+ """Tests that a navigation graph can be serialized and deserialized again."""
+
+ # `graph.neighbors` is cached, so we want to try it with and without the cached neighbors being set
+ for set_neighbors in [True, False]:
+ with self.subTest(f"neighbors set = {set_neighbors}"):
+ graph = NavigationGraph(_NODES, _EDGES, max_neighbors=42)
+ if set_neighbors:
+ _ = graph.neighbors
+
+ with TemporaryDirectory() as directory:
+ path = os.path.join(directory, "some_file.hdf5")
+ graph.to_disk(path)
+ new_graph = NavigationGraph.from_disk(path)
+
+ self.assertEqual(graph, new_graph)
+ assert_array_equal(new_graph.neighbors, _NEIGHBORS)
+
+ def test_max_neighbors_constructor(self) -> None:
+ """Tests that only invalid inputs to max_neighbors raise exceptions."""
+ NavigationGraph(DataFrame(), empty((0, 2)), max_neighbors=0)
+ NavigationGraph(DataFrame(), empty((0, 2)), max_neighbors=10)
+
+ with self.assertRaises(Exception): # noqa: H202
+ NavigationGraph(DataFrame(), empty((0, 2)), max_neighbors=-2)
+
+
+class TestNavigationGraphPruningArtificial(TestCase):
+ """Tests that simple toy navigation graphs can be pruned."""
+
+ def test_pruning_no_nodes(self) -> None:
+ """Tests that pruning no nodes works."""
+ old_graph = NavigationGraph(_NODES, _EDGES, _NEIGHBORS)
+
+ pruned_graph = deepcopy(old_graph)
+ retain_all = full((len(_NODES),), True)
+ pruned_graph.prune_nodes(retain_all)
+
+ self.assertEqual(old_graph, pruned_graph)
+
+ def test_pruning_all(self) -> None:
+ """Tests that pruning all nodes works."""
+ old_graph = NavigationGraph(_NODES, _EDGES, _NEIGHBORS)
+
+ pruned_graph = deepcopy(old_graph)
+ retain_all = full((len(_NODES),), False)
+ pruned_graph.prune_nodes(retain_all)
+
+ self.assertNotEqual(old_graph, pruned_graph)
+ self.assertEqual(len(pruned_graph.nodes), 0)
+ self.assertEqual(len(pruned_graph.nodes.columns), 2, "the properties must be retained")
+ self.assertEqual(pruned_graph.edges.shape, (0, 2))
+ self.assertEqual(pruned_graph.neighbors.shape, (0, 0))
+
+ def test_pruning_very_simple(self) -> None:
+ """Tests that pruning some nodes works as expected."""
+
+ old_graph = NavigationGraph(_NODES, _EDGES, _NEIGHBORS)
+
+ pruned_graph = deepcopy(old_graph)
+ keep_condition = array([True, True, False]) # only prune the last node
+ pruned_graph.prune_nodes(keep_condition)
+
+ self.assertNotEqual(old_graph, pruned_graph)
+ assert_frame_equal(pruned_graph.nodes, _NODES[:2])
+ assert_array_equal(pruned_graph.edges, _EDGES[:1])
+ assert_array_equal(pruned_graph.neighbors, _NEIGHBORS[:2, :1])
diff --git a/pyrate/tests/sense/__init__.py b/pyrate/tests/sense/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/sense/filters/__init__.py b/pyrate/tests/sense/filters/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/sense/filters/test_kalman.py b/pyrate/tests/sense/filters/test_kalman.py
new file mode 100644
index 0000000..7d8cb73
--- /dev/null
+++ b/pyrate/tests/sense/filters/test_kalman.py
@@ -0,0 +1,194 @@
+"""This module asserts correct runtime behaviour of the pyrate.sense.filter subpackage."""
+
+# Python standard library
+from datetime import timedelta
+from typing import cast
+
+# Test environment
+from unittest import TestCase
+
+# Mathematics
+from numpy import array
+from numpy import eye
+from numpy import Inf
+from numpy.linalg import norm
+from numpy import ndarray
+from numpy import vstack
+
+# Hypothesis testing
+from hypothesis import given
+from hypothesis import settings
+
+# Package under test
+from pyrate.common.math import Gaussian
+from pyrate.sense.filters import ExtendedKalman
+from pyrate.sense.filters import Kalman
+from pyrate.sense.filters import UnscentedKalman
+
+# Helpers
+from pyrate.common.testing.strategies.dynamic_system import linear_model
+from pyrate.common.testing.strategies.dynamic_system import nonlinear_model
+
+# Flags from Pyrate
+from pyrate.common.testing import IS_EXTENDED_TESTING
+
+
+class TestKalman(TestCase):
+
+ """Test for correct runtime behaviour of Kalman filters pyrate.sense.filter."""
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+
+ @given(linear_model())
+ @settings(max_examples=1000 if IS_EXTENDED_TESTING else 10, deadline=timedelta(seconds=1.0))
+ def test_kalman(self, model):
+ """Assert the correct functionality of the standard Kalman filter."""
+
+ # Unpack generated model
+ estimate, F, B, H, Q, R, measurements, inputs = model
+
+ # Initialize filter
+ kalman = Kalman(F=F, estimate=estimate, H=H, Q=Q, R=R, B=B, keep_trace=True)
+
+ # Apply the Kalman filter a few times
+ for i, measurement in enumerate(measurements):
+ kalman.predict(u=inputs[i])
+ kalman.correct(z=measurement)
+
+ # Assert correct tracing with Kalman
+ self.assertIsNotNone(kalman.predictions, "Kalman filter did not keep trace of predictions")
+ self.assertIsNotNone(kalman.estimates, "Kalman filter did not keep trace of estimates")
+ self.assertEqual(
+ len(kalman.predictions.index), len(measurements), "Kalman filter has not traced all predictions"
+ )
+ self.assertEqual(
+ len(kalman.estimates.index), len(measurements), "Kalman filter has not traced all estimates"
+ )
+
+ # Apply the Kalman filter with a callable H
+ callable_H = lambda dummy: H # noqa: E731
+ kalman.H = callable_H
+ kalman.correct(z=measurements[0], dummy=None)
+
+ @given(nonlinear_model())
+ @settings(max_examples=1000 if IS_EXTENDED_TESTING else 10, deadline=timedelta(seconds=1.0))
+ def test_extended_kalman(self, model):
+ """Assert the correct functionality of the extended Kalman filter."""
+
+ # Unpack generated model
+ estimate, f, _, Jf, h, _, Jh, Q, R, measurements = model
+
+ # Initialize filter
+ extended = ExtendedKalman(F=Jf, f=f, estimate=estimate, H=Jh, h=h, Q=Q, R=R, keep_trace=True)
+
+ # Apply the Kalman filter a few times
+ for z in measurements:
+ extended.predict()
+ extended.correct(z)
+
+ # Assert correct tracing with Kalman
+ self.assertIsNotNone(extended.predictions, "Extended Kalman filter did not keep trace of predictions")
+ self.assertIsNotNone(extended.estimates, "Extended Kalman filter did not keep trace of estimates")
+ self.assertEqual(
+ len(extended.predictions.index),
+ len(measurements),
+ "Extended Kalman filter has not traced all predictions",
+ )
+ self.assertEqual(
+ len(extended.estimates.index),
+ len(measurements),
+ "Extended Kalman filter has not traced all estimates",
+ )
+
+ @given(nonlinear_model())
+ @settings(max_examples=1000 if IS_EXTENDED_TESTING else 10, deadline=timedelta(seconds=1.0))
+ def test_unscented_kalman(self, model):
+ """Assert the correct functionality of the extended Kalman filter."""
+
+ # Unpack generated model
+ estimate, f, _, _, h, _, _, Q, R, measurements = model
+
+ # Initialize filter
+ unscented = UnscentedKalman(f=f, estimate=estimate, h=h, Q=Q, R=R, keep_trace=True)
+
+ # Apply the Kalman filter a few times
+ for z in measurements:
+ unscented.predict()
+ unscented.correct(z)
+
+ # Assert correct tracing with Kalman
+ self.assertIsNotNone(
+            unscented.predictions, "Unscented Kalman filter did not keep trace of predictions"
+ )
+        self.assertIsNotNone(unscented.estimates, "Unscented Kalman filter did not keep trace of estimates")
+ self.assertEqual(
+ len(unscented.predictions.index),
+ len(measurements),
+ "Uncented Kalman filter has not traced all predictions",
+ )
+ self.assertEqual(
+ len(unscented.estimates.index),
+ len(measurements),
+ "Uncented Kalman filter has not traced all estimates",
+ )
+
+ @staticmethod
+ def test_estimation():
+ """Assert that the filter estimates tend towards the true state over time."""
+
+ # Define model of a constant value
+ F = H = Q = R = eye(1)
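+        # With F = H = I this is a one-dimensional random walk observed directly:
+        # x_{k+1} = x_k + w_k and z_k = x_k + v_k, with unit process and measurement noise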
+
+ # Initial belief
+ estimate = Gaussian(vstack([0.0]), eye(1))
+
+ # Initialize filters
+ kalman = Kalman(F=F, estimate=estimate, H=H, Q=Q, R=R)
+ extended = ExtendedKalman(
+ F=lambda _: F,
+ f=lambda x: cast(ndarray, x),
+ estimate=estimate,
+ H=lambda _: H,
+ h=lambda x: cast(ndarray, x),
+ Q=Q,
+ R=R,
+ )
+ unscented = UnscentedKalman(
+ f=lambda x: cast(ndarray, x),
+ estimate=estimate,
+ h=lambda x: cast(ndarray, x),
+ Q=Q,
+ R=R,
+ )
+
+ # Apply the Kalman filter a few times
+ true_state = array([20.0])
+ previous_kalman_error = Inf
+ previous_extended_error = Inf
+ previous_unscented_error = Inf
+ for _ in range(10):
+ # Check error going down for Kalman
+ kalman.predict()
+ kalman.correct(z=true_state)
+ error = norm(kalman.estimate.x - true_state).item() # Convert from numpy scalar to Python float
+ assert (
+ error < previous_kalman_error or error == 0
+ ), "Kalman estimate did not get better over time."
+ previous_kalman_error = error
+
+ # Check error going down for EKF
+ extended.predict()
+ extended.correct(z=true_state)
+ error = norm(extended.estimate.x - true_state).item()
+ assert error < previous_extended_error or error == 0, "EKF estimate did not get better over time."
+ previous_extended_error = error
+
+ # Check error going down for UKF
+ unscented.predict()
+ unscented.correct(z=true_state)
+ error = norm(unscented.estimate.x - true_state).item()
+ assert (
+ error < previous_unscented_error or error == 0
+ ), "UKF estimate did not get better over time."
+ previous_unscented_error = error
diff --git a/pyrate/tests/sense/filters/test_phd.py b/pyrate/tests/sense/filters/test_phd.py
new file mode 100644
index 0000000..8f08c36
--- /dev/null
+++ b/pyrate/tests/sense/filters/test_phd.py
@@ -0,0 +1,145 @@
+"""This module asserts correct runtime behaviour of the pyrate.sense.filter subpackage."""
+
+# Python standard library
+from datetime import timedelta
+
+# Test environment
+from unittest import TestCase
+
+# Hypothesis testing
+from hypothesis import given
+from hypothesis import settings
+
+# Package under test
+from pyrate.sense.filters import ExtendedGaussianMixturePHD
+from pyrate.sense.filters import GaussianMixturePHD
+
+# Helpers
+from pyrate.common.testing.strategies.dynamic_system import linear_model
+from pyrate.common.testing.strategies.dynamic_system import nonlinear_model
+
+# Flags from Pyrate
+from pyrate.common.testing import IS_EXTENDED_TESTING
+
+
+class TestPHD(TestCase):
+
+ """Test for correct runtime behaviour in pyrate.sense.filter."""
+
+ # In this context, we reproduce a common filter notation
+ # pylint: disable=invalid-name
+
+ def setUp(self) -> None:
+ """Setup the linear motion model for the filter tests."""
+
+ # Survival and detection rate of targets
+ self.survival_rate = 0.99
+ self.detection_rate = 0.99
+
+ # Clutter intensity
+ self.intensity = 0.01
+
+ # PHD pruning
+ self.threshold = 0.1
+ self.merge_distance = 0.5
+ self.max_components = 0.5
+
+ @given(linear_model())
+ @settings(max_examples=1000 if IS_EXTENDED_TESTING else 10, deadline=timedelta(seconds=1.0))
+ def test_gmphd(self, model):
+ """Assert the correct functionality of the gaussian mixture PHD filter."""
+
+ # Unpack generated model
+ estimate, F, _, H, Q, R, measurements, _ = model
+
+ # Initialize filter
+ gmphd = GaussianMixturePHD(
+ birth_belief=[estimate],
+ survival_rate=self.survival_rate,
+ detection_rate=self.detection_rate,
+ intensity=self.intensity,
+ F=F,
+ H=H,
+ Q=Q,
+ R=R,
+ )
+
+        self.assertEqual(len(gmphd.gmm), 0, "Mixture model should be initially empty")
+
+ # Predict with a callable F and check number of components
+ callable_F = lambda dummy: F # noqa: E731
+ gmphd.F = callable_F
+ gmphd.predict(dummy=None)
+ gmphd.F = F
+ self.assertEqual(
+ len(gmphd.gmm), len(gmphd.birth_belief), "Mixture model is not the right size after prediction"
+ )
+
+ # Apply the PHD filter with a callable H and check number of components
+ callable_H = lambda dummy: H # noqa: E731
+ gmphd.H = callable_H
+ gmphd.correct(measurements=measurements, dummy=None)
+ gmphd.H = H
+ self.assertEqual(
+ len(gmphd.gmm), len(measurements) + 1, "Mixture model is not the right size after correction"
+ )
+
+ # Removing all components
+ gmphd.prune(self.threshold, self.merge_distance, 0)
+ self.assertEqual(len(gmphd.extract()), 0, "Pruning did not remove all components")
+
+ # Apply the filter multiple times
+        for _ in measurements:
+ gmphd.predict()
+ gmphd.correct(measurements=measurements)
+
+ # Extract states
+ gmphd.extract(self.threshold)
+
+ @given(nonlinear_model())
+ @settings(max_examples=1000 if IS_EXTENDED_TESTING else 10, deadline=timedelta(seconds=1.0))
+ def test_extended_gmphd(self, model):
+ """Assert the correct functionality of the gaussian mixture PHD filter."""
+
+ # Unpack generated model
+ estimate, f, _, Jf, h, _, Jh, Q, R, measurements = model
+
+ # Initialize filter
+ gmphd = ExtendedGaussianMixturePHD(
+ birth_belief=[estimate],
+ survival_rate=self.survival_rate,
+ detection_rate=self.detection_rate,
+ intensity=self.intensity,
+ F=Jf,
+ f=f,
+ H=Jh,
+ h=h,
+ Q=Q,
+ R=R,
+ )
+
+        self.assertEqual(len(gmphd.gmm), 0, "Mixture model should be initially empty")
+
+ # Predict and check number of components
+ gmphd.predict()
+ self.assertEqual(
+ len(gmphd.gmm), len(gmphd.birth_belief), "Mixture model is not the right size after prediction"
+ )
+
+ # Apply the PHD filter and check number of components
+ gmphd.correct(measurements=measurements)
+ self.assertEqual(
+ len(gmphd.gmm), len(measurements) + 1, "Mixture model is not the right size after correction"
+ )
+
+ # Removing all components
+ gmphd.prune(self.threshold, self.merge_distance, 0)
+ self.assertEqual(len(gmphd.extract()), 0, "Pruning did not remove all components")
+
+ # Apply the filter multiple times
+        for _ in measurements:
+ gmphd.predict()
+ gmphd.correct(measurements=measurements)
+
+ # Extract states
+ gmphd.extract(self.threshold)
diff --git a/pyrate/tests/sense/vision/__init__.py b/pyrate/tests/sense/vision/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_no_horizon/testims/Preprocessed_test_0.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_no_horizon/testims/Preprocessed_test_0.jpg
new file mode 100644
index 0000000..ad463f7
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_no_horizon/testims/Preprocessed_test_0.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_1.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_1.txt
new file mode 100644
index 0000000..912da94
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_1.txt
@@ -0,0 +1,3 @@
+19,237
+432,242
+0.69
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_10.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_10.txt
new file mode 100644
index 0000000..762f819
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_10.txt
@@ -0,0 +1,3 @@
+17,262
+630,273
+1.03
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_13.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_13.txt
new file mode 100644
index 0000000..9ad47a0
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_13.txt
@@ -0,0 +1,3 @@
+14,277
+389,279
+0.31
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_16.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_16.txt
new file mode 100644
index 0000000..08fde39
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_16.txt
@@ -0,0 +1,3 @@
+153,286
+638,291
+0.59
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_18.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_18.txt
new file mode 100644
index 0000000..14966ef
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_18.txt
@@ -0,0 +1,3 @@
+160,286
+623,290
+0.49
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_19.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_19.txt
new file mode 100644
index 0000000..1569084
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_19.txt
@@ -0,0 +1,3 @@
+48,271
+411,273
+0.32
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_3.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_3.txt
new file mode 100644
index 0000000..5262999
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_3.txt
@@ -0,0 +1,3 @@
+21,253
+598,261
+0.79
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_4.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_4.txt
new file mode 100644
index 0000000..a5027bc
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_4.txt
@@ -0,0 +1,3 @@
+67,266
+415,274
+1.32
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_7.txt b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_7.txt
new file mode 100644
index 0000000..e8ad6f5
--- /dev/null
+++ b/pyrate/tests/sense/vision/resources/testing_dataset_successful/annotations/Preprocessed_test_7.txt
@@ -0,0 +1,3 @@
+7,257
+242,262
+1.22
\ No newline at end of file
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_1.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_1.jpg
new file mode 100644
index 0000000..3584500
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_1.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_10.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_10.jpg
new file mode 100644
index 0000000..e91be5e
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_10.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_13.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_13.jpg
new file mode 100644
index 0000000..ce7cd8a
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_13.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_16.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_16.jpg
new file mode 100644
index 0000000..5c519e3
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_16.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_18.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_18.jpg
new file mode 100644
index 0000000..f814428
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_18.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_19.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_19.jpg
new file mode 100644
index 0000000..bd41879
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_19.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_3.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_3.jpg
new file mode 100644
index 0000000..58d25a0
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_3.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_4.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_4.jpg
new file mode 100644
index 0000000..fb1e793
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_4.jpg differ
diff --git a/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_7.jpg b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_7.jpg
new file mode 100644
index 0000000..bb47c95
Binary files /dev/null and b/pyrate/tests/sense/vision/resources/testing_dataset_successful/testims/Preprocessed_test_7.jpg differ
diff --git a/pyrate/tests/sense/vision/test_image_line.py b/pyrate/tests/sense/vision/test_image_line.py
new file mode 100644
index 0000000..6d339d1
--- /dev/null
+++ b/pyrate/tests/sense/vision/test_image_line.py
@@ -0,0 +1,61 @@
+"""This test suite runs additional tests for ``ImageLine`` that are not covered in ``TestObstacleLocator``"""
+
+# Testing
+from unittest import TestCase
+
+# Hypothesis
+from hypothesis import given
+from hypothesis.strategies import composite
+from hypothesis.strategies import floats
+from hypothesis.strategies import integers
+from hypothesis.strategies import just
+from hypothesis.strategies import tuples
+
+# Scientific
+from numpy import pi
+
+# Module under test
+from pyrate.sense.vision.image_line import ImageLine
+
+
+@composite
+def image_dimensions_and_points(draw):
+ """Generate image dimensions and points left and right on that image"""
+
+ image_dims = draw(tuples(integers(1, 10000), integers(1, 10000)))
+ point_a = draw(tuples(just(0), integers(0, image_dims[1] - 1)))
+ point_b = draw(tuples(just(image_dims[0] - 1), integers(0, image_dims[1] - 1)))
+
+ return image_dims, point_a, point_b
+
+
+class TestImageLine(TestCase):
+
+ """Tests the remaining methods of ``ImageLine`` not covered by testing ``ObstacleLocator``"""
+
+ @given(floats(1, 10000), floats(1, 10000), floats(-5000, 5000), floats(0, 2 * pi))
+ def test_from_height_angle(self, image_width, image_height, height, angle):
+ """Test that creates (from height and angle) and tests ``ImageLine``s"""
+
+ image_line = ImageLine.from_height_angle((image_width, image_height), height, angle)
+
+ self.assertTrue(image_line.image_width == image_width and image_line.image_height == image_height)
+ self.assertTrue(image_line.angle == angle)
+ self.assertAlmostEqual(image_line.height, int(height + image_height / 2))
+
+ end_points = image_line.end_points
+ self.assertTrue(
+ end_points[0][0] == 0 and end_points[1][0] == image_width,
+ msg=f"x1={end_points[0][0]} x2={end_points[1][0]}",
+ )
+
+ @given(test_input=image_dimensions_and_points())
+ def test_indices(self, test_input):
+ """Test that tests the ``indices`` property of ``ImageLine``"""
+
+ image_dims, point1, point2 = test_input
+
+ image_line = ImageLine.from_points(image_dims, (point1, point2))
+ x_coords, y_coords = image_line.indices
+        # the computed indices must lie within the image bounds
+        self.assertTrue(((0 <= x_coords) & (x_coords < image_dims[0])).all())
+        self.assertTrue(((0 <= y_coords) & (y_coords < image_dims[1])).all())
diff --git a/pyrate/tests/sense/vision/test_image_rectangle.py b/pyrate/tests/sense/vision/test_image_rectangle.py
new file mode 100644
index 0000000..81284b7
--- /dev/null
+++ b/pyrate/tests/sense/vision/test_image_rectangle.py
@@ -0,0 +1,102 @@
+"""This test suite evaluates and tests behavior of the ``ImageRectangle`` class"""
+
+# Testing
+from unittest import TestCase
+
+# Hypothesis
+from hypothesis import given
+from hypothesis.strategies import integers
+
+# Module under test
+from pyrate.sense.vision.image_rectangle import ImageRectangle
+
+
+class TestImageRectangle(TestCase):
+
+ """Tests functionality of the ``ImageRectangle`` class"""
+
+ @given(
+ integers(0, 10000),
+ integers(0, 10000),
+ integers(0, 10000),
+ integers(0, 10000),
+ integers(0, 10000),
+ integers(0, 10000),
+ )
+ # pylint: disable=too-many-arguments
+ def test_bottom_center(self, position_x, position_y, width, height, offset_x, offset_y):
+ """Parametrized test that tests correct functionality of the bottom_center property
+
+ Args:
+ position_x: x position of the rectangle
+ position_y: y position of the rectangle
+ width: width of the rectangle
+ height: height of the rectangle
+ offset_x: x component of the offset
+ offset_y: y component of the offset
+ """
+
+ offset = (offset_x, offset_y)
+
+ rectangle_without_offset = ImageRectangle((position_x, position_y, width, height))
+ self.assertTupleEqual(rectangle_without_offset.offset, (0, 0))
+ self.assertAlmostEqual(rectangle_without_offset.bottom_center[0], position_x + (width / 2), delta=0.5)
+ self.assertAlmostEqual(rectangle_without_offset.bottom_center[1], position_y + height)
+
+ rectangle_with_offset = ImageRectangle((position_x, position_y, width, height), offset=offset)
+ self.assertTupleEqual(rectangle_with_offset.offset, offset)
+ self.assertAlmostEqual(
+ rectangle_with_offset.bottom_center[0], position_x + offset_x + (width / 2), delta=0.5
+ )
+ self.assertAlmostEqual(rectangle_with_offset.bottom_center[1], position_y + offset_y + height)
+
+ @given(
+ integers(0, 10000),
+ integers(0, 10000),
+ integers(0, 10000),
+ integers(0, 10000),
+ integers(0, 10000),
+ integers(0, 10000),
+ )
+ # pylint: disable=too-many-arguments
+ def test_rectangle_to_corner(self, position_x, position_y, width, height, offset_x, offset_y):
+ """Parametrized test that tests correct functionality of the rectangle_to_corner method
+
+ Args:
+ position_x: x position of the rectangle
+ position_y: y position of the rectangle
+ width: width of the rectangle
+ height: height of the rectangle
+ offset_x: x component of the offset
+ offset_y: y component of the offset
+ """
+
+ offset = (offset_x, offset_y)
+
+ # rectangle without offset
+ rectangle_without_offset = ImageRectangle((position_x, position_y, width, height))
+ self.assertTupleEqual(rectangle_without_offset.offset, (0, 0))
+ cornerlu, cornerrb = rectangle_without_offset.rectangle_to_corner(offset=False)
+
+ self.assertTrue(
+ cornerlu[0] == position_x and cornerlu[1] == position_y,
+ msg=f"Left upper corner: {cornerlu}",
+ )
+ self.assertTrue(
+ cornerrb[0] == position_x + width and cornerrb[1] == position_y + height,
+ msg=f"Right bottom corner: {cornerrb}",
+ )
+
+ # rectangle with offset
+ rectangle_with_offset = ImageRectangle((position_x, position_y, width, height), offset)
+ self.assertTupleEqual(rectangle_with_offset.offset, offset)
+ cornerlu, cornerrb = rectangle_with_offset.rectangle_to_corner(offset=True)
+
+ self.assertTrue(
+ cornerlu[0] == position_x + offset_x and cornerlu[1] == position_y + offset_y,
+ msg=f"Left upper corner: {cornerlu}",
+ )
+ self.assertTrue(
+ cornerrb[0] == position_x + offset_x + width and cornerrb[1] == position_y + offset_y + height,
+ msg=f"Right bottom corner: {cornerrb}",
+ )
diff --git a/pyrate/tests/sense/vision/test_obstacle_locator.py b/pyrate/tests/sense/vision/test_obstacle_locator.py
new file mode 100644
index 0000000..cfdcb77
--- /dev/null
+++ b/pyrate/tests/sense/vision/test_obstacle_locator.py
@@ -0,0 +1,103 @@
+"""This test suite evaluates and tests behavior of the ``ObstacleLocator`` class"""
+
+# Standard library
+from csv import reader
+from math import radians
+from pathlib import Path
+
+# Typing
+from typing import cast
+from typing import Tuple
+
+# Testing
+from unittest import TestCase
+
+# Scientific
+from cv2 import imread
+
+# Module under test
+from pyrate.sense.vision.image_line import ImageLine
+from pyrate.sense.vision.obstacle_locator import ObstacleLocator
+
+
+PATH_TO_DATASET = Path(__file__).parent / "resources" / "testing_dataset_successful"
+DATASET_IMAGES_PATHS = sorted(list((PATH_TO_DATASET / "testims").glob("*.jpg")))
+DATASET_ANNOTATIONS_PATHS = sorted(list((PATH_TO_DATASET / "annotations").glob("*.txt")))
+
+PATH_TO_FAILING = (
+ Path(__file__).parent / "resources" / "testing_dataset_no_horizon" / "testims" / "Preprocessed_test_0.jpg"
+)
+IMAGE_HEIGHT, IMAGE_WIDTH = imread(PATH_TO_FAILING.as_posix()).shape[:2]
+
+
+class TestObstacleLocator(TestCase):
+
+ """Test for correct predictions made by ``ObstacleLocator``"""
+
+ @staticmethod
+ def parse_annotation(file_path: str, obstacle_locator: ObstacleLocator) -> Tuple[ImageLine, float]:
+ """Helper function to parse the ground truth labels from the dataset.
+
+ Args:
+ file_path: Label file path
+            obstacle_locator: the ObstacleLocator whose detections will later be compared against the
+                parsed line; its image dimensions are used to construct the returned ImageLine
+
+ Returns:
+ ImageLine as described in the annotation, angle read from annotation
+ (for testing correct angle calculation)
+ """
+
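+        # Each annotation file (see the resources above) holds three comma-separated lines: the two
+        # "x,y" end points of the horizon line, followed by the horizon angle (converted via
+        # radians(), so presumably stored in degrees)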
+ with open(file_path, "rt", encoding="UTF-8") as label_file:
+ content = label_file.read().split("\n")
+
+ csvreader = reader(content, delimiter=",")
+ point_a = cast(Tuple[int, int], tuple(int(x) for x in next(csvreader)))
+ point_b = cast(Tuple[int, int], tuple(int(x) for x in next(csvreader)))
+ label_angle = radians(float(next(csvreader)[0]))
+
+ line = ImageLine.from_points(
+ image_shape=(obstacle_locator.image_width, obstacle_locator.image_height),
+ points=(point_a, point_b),
+ )
+
+ return line, label_angle
+
+ def test_horizon_angle(self):
+ """Compares ``ObstacleLocator`` horizon estimates to ground truth annotations"""
+
+ uut_ol = ObstacleLocator(image_width=IMAGE_WIDTH, image_height=IMAGE_HEIGHT) # unit/module under test
+
+ for image_path, label_path in zip(DATASET_IMAGES_PATHS, DATASET_ANNOTATIONS_PATHS):
+ with self.subTest(image=image_path.name):
+ # Assert that we have the correct label for the test image
+ self.assertEqual(
+ image_path.name.split(".")[0],
+ label_path.name.split(".")[0],
+ msg="That isn't the right label for the image. This shouldn't happen.",
+ )
+
+ image = imread(image_path.as_posix())
+
+ # read annotation and test if ImageLine calculates the line's angle correctly
+ label_image_line, label_angle = self.parse_annotation(label_path.as_posix(), uut_ol)
+ self.assertAlmostEqual(label_angle, label_image_line.angle, places=2)
+
+ result = uut_ol.detect_horizon(image)
+ horizons = result[0]
+
+ # Test that a) a horizon is detected and b) it has the correct angle
+ self.assertTrue(len(horizons) > 0, msg="No horizon was detected.")
+ self.assertAlmostEqual(
+ horizons[0].angle, label_image_line.angle, places=1, msg="Horizon angle mismatch."
+ )
+
+ def test_missing_lines(self):
+ """Tests the branch when no horizon line is detected in the image"""
+
+ uut_ol = ObstacleLocator(image_width=IMAGE_WIDTH, image_height=IMAGE_HEIGHT) # unit/module under test
+
+ image = imread(PATH_TO_FAILING.as_posix())
+ # ObstacleLocator does not find a horizon line in this image
+ result = uut_ol.detect_horizon(image)
+ self.assertFalse(result[0])
diff --git a/pyrate/tests/test_the_scripts.py b/pyrate/tests/test_the_scripts.py
new file mode 100644
index 0000000..a01f191
--- /dev/null
+++ b/pyrate/tests/test_the_scripts.py
@@ -0,0 +1,33 @@
+"""Tests the bundled scripts."""
+
+# Standard library
+from pathlib import Path
+from subprocess import run
+import sys
+
+# Typing
+from typing import Set
+
+# Generic testing
+from unittest import TestCase
+
+
+class TestScripts(TestCase):
+ """Tests ``pyrate/scripts/*``."""
+
+ BASE_DIR = Path(__file__).parent.parent / "scripts"
+ EXCLUDE: Set[Path] = set()
+
+ def test_invoke_help(self) -> None:
+ """Tests that the help can be invoked, which makes sure that at least all imports work."""
+
+ for script in set(TestScripts.BASE_DIR.iterdir()) - TestScripts.EXCLUDE:
+ if not script.name.startswith(".") and not script.name.startswith("__"):
+ with self.subTest(script.name):
+ run(
+ [sys.executable, str(script), "--help"],
+ capture_output=True,
+ timeout=30, # Sometimes caches are generated, so this is a long timeout
+ text=True,
+ check=True,
+ )