Removed the subdir.

This commit is contained in:
Philipp Horstenkamp 2022-07-20 15:01:07 +02:00
parent aee5e8bbd9
commit e03f1c8be8
Signed by: Philipp
GPG Key ID: DD53EAC36AFB61B4
234 changed files with 21562 additions and 456 deletions

1
.gitignore vendored

@ -204,3 +204,4 @@ dmypy.json
# Cython debug symbols
cython_debug/
log_dir/

File diff suppressed because one or more lines are too long

BIN model.png Normal file (new image, 63 KiB; binary content not shown)

1
pyrate

@ -1 +0,0 @@
Subproject commit 814b6dd028e67ceb55ac6babac2344f7c1f7c01a

164
pyrate/.dockerignore Normal file

@ -0,0 +1,164 @@
### TortoiseGit template
# Project-level settings
/.tgitconfig
### JupyterNotebooks template
# gitignore template for Jupyter Notebooks
# website: http://jupyter.org/
.ipynb_checkpoints
*/.ipynb_checkpoints/*
# IPython
profile_default/
ipython_config.py
# Remove previous ipynb_checkpoints
# git rm -r .ipynb_checkpoints/
### Python template
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
.gitignore
.gitlab-ci.yml
Experiments.ipynb
.gitlab
.dockerignore
*.dockerfile
.git

140
pyrate/.gitignore vendored Normal file

@ -0,0 +1,140 @@
.idea
.vscode
.test-artifacts.junit.xml
.pth
/stda-env/
# For mutation testing
.hammett-db
.mutmut-cache
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
coverage.json
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
.python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/

196
pyrate/.gitlab-ci.yml Normal file

@ -0,0 +1,196 @@
# ~~~~~~~~~~~~~~~~~~~~~ Base image
image: ubuntu:22.04
# ~~~~~~~~~~~~~~~~~~~~~ Caches & Environment variables
variables:
PIP_CACHE_DIR: "$CI_PROJECT_DIR/.cache/pip"
cache:
# Share cache among commits on the same branch
key: ${CI_COMMIT_REF_SLUG}
paths:
- .cache/pip
- .cache/apt
# cache these such that subsequent invocations test at least the previous examples
- .hypothesis/examples
# mypy works incrementally
- .mypy_cache
# ~~~~~~~~~~~~~~~~~~~~~ Config
stages:
- Static Code Analysis
- Automatic Testing
- Manual Testing
- Documentation
- Deploy
default:
timeout: 15 minutes
# ~~~~~~~~~~~~~~~~~~~~~ Static Code Analysis
Pylint:
image: python:3.10
stage: Static Code Analysis
needs: []
before_script:
- pip install "pylint~=2.13.4"
script:
- pylint -j 0 pyrate tests scripts | awk '!seen[$0]++'
Flake8 Linter:
image: python:3.10
stage: Static Code Analysis
needs: []
before_script:
- pip install hacking # This also installs flake8, mccabe and others
script:
- flake8 | awk '!seen[$0]++'
Mypy Static Type Checker:
stage: Static Code Analysis
needs: []
before_script:
# (This section is partly copied from the Pytest job)
# Setup APT cache based on
# https://gitlab.com/gitlab-org/gitlab-runner/issues/991#note_126864314
- rm -f /etc/apt/apt.conf.d/docker-clean
- mkdir -p .cache/apt && mkdir /var/cache/apt/archives && mount --bind .cache/apt /var/cache/apt/archives/
# Install Pip & additional requirements that cannot be fulfilled with pip
- apt-get update -qq
- apt-get install -qqy python3-pip g++ python3-dev python3-gdal libgdal-dev
# Print version information and install Pyrate
- python3 --version
- pip3 install .
script:
- mypy pyrate tests scripts
Black Linter:
image: python:3.10
stage: Static Code Analysis
needs: []
before_script:
- pip install black
script:
- black --check --diff .
# Black is allowed to fail since we do not strictly enforce its advice
allow_failure: true
# ~~~~~~~~~~~~~~~~~~~~~ Automatic Testing
Pytest:
stage: Automatic Testing
needs: []
timeout: 3 hours
# ~~~~~~~~~~~~~~~~~~~~~ Setup & Installation
before_script:
# Setup APT cache based on
# https://gitlab.com/gitlab-org/gitlab-runner/issues/991#note_126864314
- rm -f /etc/apt/apt.conf.d/docker-clean
- mkdir -p .cache/apt && mkdir /var/cache/apt/archives && mount --bind .cache/apt /var/cache/apt/archives/
# Install Pip & additional requirements that cannot be fulfilled with pip
- apt-get update -qq
- apt-get install software-properties-common -qqy
- add-apt-repository ppa:antiprism/ppa -y
- apt-get install -qqy python3-pip g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite antiprism
# Print version information and install Pyrate
- python3 --version
- pip3 install .
script:
# Run the tests and collect coverage
- pytest --junitxml=.test-artifacts.junit.xml
# Convert the coverage report extra (instead of with --cov-report xml:coverage.xml) to correct the source paths
# This is for Gitlab to automatically parse it
- python3 -m coverage xml
# Make sure to crash at less than 100% statement coverage
- python3 -m coverage json
- echo Checking for 100.00% statement coverage ...
- >
cat coverage.json | python3 -c $'import json, sys; miss = bool(json.load(sys.stdin)["totals"]["missing_lines"])\nif miss: sys.exit("\033[91m\033[01mERROR: Statement Coverage is <100%.\033[0m");'
coverage: '/^TOTAL\s+\d+\s+\d+\s+\d+\s+\d+\s+(\d+.\d+\%)/'
artifacts:
reports:
junit: .test-artifacts.junit.xml
cobertura: coverage.xml
Mutation Testing:
extends: Pytest
stage: Manual Testing
needs: [Pytest, Mypy Static Type Checker] # Always make sure that the normal tests run fine!
timeout: 3 days # This can take very long, since the test suite might get run many times
when: manual
script:
- pip3 install mutmut
- mutmut run --no-progress # Update less regularly
- mutmut results
- mutmut html
artifacts:
reports: {}
paths:
- html/
expire_in: 500 days
# ~~~~~~~~~~~~~~~~~~~~~ Documentation
Build Documentation:
stage: Documentation
needs: []
retry: # Needed due to random "std::bad_alloc" occurrences
max: 2
when: script_failure
before_script:
# Installation as in the "Pytest" job
# Setup APT cache based on
# https://gitlab.com/gitlab-org/gitlab-runner/issues/991#note_126864314
- rm -f /etc/apt/apt.conf.d/docker-clean
- mkdir -p .cache/apt && mkdir /var/cache/apt/archives && mount --bind .cache/apt /var/cache/apt/archives/
# Setup pip
- apt-get update -qq
- apt-get install -qqy python3-pip
# Install additional requirements that cannot be fulfilled with pip
- apt-get install -qqy g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite
# Print version information and install Pyrate
- python3 --version
- pip3 install .[docs]
# Install additional requirements specifically for the documentation
- apt-get -qqy install graphviz
script:
- cd doc
- make html # only build HTML as it supports all features, is fast and required for the Gitlab pages website
artifacts:
paths:
- doc/build/html/
# ~~~~~~~~~~~~~~~~~~~~~ Deploy
# This job is separate from "Build Documentation" job since this one shall only run on the master branch while the other
# should run as part of all pipelines
pages:
image: alpine:latest
stage: Deploy
needs: [Build Documentation]
only:
- master
script:
- mkdir public
- cp -R doc/build/html/* public/
artifacts:
paths:
- public
# This job triggers the pipeline of the ros-nodes repository on changes of the master branch
Trigger Downstream Pipelines:
# "needs: []" is deliberately not set: although this job does not really require other jobs, it shall only run at the end if all others succeeded
stage: Deploy
only:
- master
trigger: informatik/ros-nodes

36
pyrate/CHANGELOG.md Normal file

@ -0,0 +1,36 @@
Version 22.04
=============
For this release, the following major features were implemented:
- `pyrate.sense`: A foundation for vision based obstacle detection has been merged, including a horizon line detector and base classes for simple image regions.
- `pyrate.plan`: The classes based on *Shapely* have been prepared for a version bump to shapely 2.0 in the future.
- `scripts`: A script has been added to quickly create test databases from GeoJSON.
Additionally, bugfixes and enhancements to the CI pipeline have been implemented.
As an example, reporting on missing statement coverage is now explicit in the respective job.
The [Wiki](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/wikis) has now been moved into a Sailing Team wide repository.
Here, information from all teams can be gathered in a single Wiki to increase the exchange of information and improve coordination.
Version 21.10
=============
For this release, the following major features were implemented:
- `pyrate.common`: Strategies for generating test cases for hypothesis are now available within Pyrate and can be
used by other projects too.
- `pyrate.plan`: The database can now handle additional types of chart objects (points and line strings).
In addition, there were many bug fixes and QA improvements: We now reach 100% statement coverage and also collect
branch coverage. The latter is consistently in the 95-99% range, and we also enforce at least 90% in every contribution.
While not strictly part of Pyrate, it is worth mentioning that the [Wiki](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/wikis)
grew substantially. In particular, large articles on the hardware of our boats and the components were written.
Most pieces of equipment should now be straightforward to set up just by following the instructions there.
Version 21.04
=============
Versioning of *Pyrate* starts with this version as the current version covers all intended major areas of application.
A lot of early quirks have now been ironed out, and we hope that changes from now on will be less disrupting.
Prior to this, there were no labeled versions but simply an ongoing development effort.

69
pyrate/README.md Normal file

@ -0,0 +1,69 @@
# Pyrate ⛵🛥️🗺
[![pipeline status](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/badges/master/pipeline.svg)](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/commits/master)
[![statement coverage report](https://img.shields.io/badge/Statement%20Coverage-100%25-success)](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/commits/master)
[![branch coverage report](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/badges/master/coverage.svg?key_text=Branch%20Coverage&key_width=110)](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/commits/master)
[![link to docs for `master`](https://img.shields.io/badge/docs-master%20branch-blue)](http://informatik.pages.sailingteam.hg.tu-darmstadt.de/pyrate/)
[![python version](https://img.shields.io/badge/python-3.6+-green)](https://devguide.python.org/#status-of-python-branches)
This project offers algorithms targeted towards autonomous surface vehicles implemented in the Python 3.6+ programming language.
The functionality included in the Pyrate Python package enables the perception and processing of environmental features, planning of strategies and trajectories as well as action taking.
Pyrate is therefore divided into multiple distinct subpackages *sense*, *plan* and *act*, as well as the *common* package for additional features such as file I/O and mathematical abstractions.
The sense subpackage includes algorithms for computer vision, single and multi target state estimation, data smoothing and more.
In the plan subpackage, the perceived state of the agent and its environment is processed to compute trajectories for long- and short-term navigation as well as strategic decisions.
This also includes gradient-based and gradient-free optimization methods.
Finally, the act subpackage contains state controllers to carry out planned behavior.
<img src="resources/project_structure.png" width="1080">
This project aims at providing the algorithmic backend for the ROS driven on-board systems and simulations.
Models of the robot's mechanics, electronics and its environment shall be developed within their own respective repository.
Furthermore, technical specifications, maps and so on go into a separate project repository as well.
This ensures a clean separation of distinct requirements for quality assurance and responsibilities that distinguish these projects.
## Features
These are the currently supported features:
- Sense:
  - Filters:
    - Kalman filters for linear and non-linear state estimation
    - A Gaussian mixture PHD filter for multi target tracking
  - Smoothers:
    - Rauch-Tung-Striebel based smoothing of time series data
    - Extended and unscented Kalman filter extensions to the RTS smoothing approach for non-linear models
  - Vision:
    - Base classes for simple image regions, e.g. lines and boxes
    - Horizon line detection as a basis for later obstacle localization
- Plan:
  - Geometric primitives for both cartesian and polar coordinate systems
    - Locations, polygons and routes
    - Transformations, unary and binary geometric operations based on [shapely](https://shapely.readthedocs.io/en/latest/project.html)
  - Graphs for use in navigation
    - Base classes and common abstractions like (de)serialization and pruning
    - Generation of graphs covering the entire globe
- Act:
  - Controllers:
    - PID & LQR implementations
    - Optional anti-windup
- Common:
  - Chart IO:
    - Discovery and loading of obstacles, e.g. landmasses, from S-57 chart files
    - Writing and querying of a [spatialite database](https://www.gaia-gis.it/fossil/libspatialite) containing obstacles/polygons
  - Raster dataset IO
  - Math helpers
- Other:
  - [Hypothesis](https://hypothesis.readthedocs.io/en/latest/) driven testing environment (including generators for simple testing of geometric calculations)
  - Documentation generated by [Sphinx](https://www.sphinx-doc.org/en/master/)
  - Continuous integration (CI) pipeline with linting, type checking, auto formatting, testing and documentation generation
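As a small taste of the geometry API listed above (a sketch only; constructor arguments are elided here, see the documentation for the exact signatures):

```python
from pyrate.plan.geometry import PolarPolygon

# Construct a polygon in polar (latitude/longitude) coordinates ...
area = PolarPolygon(...)  # constructor arguments elided, see the docs

# ... and serialize it to GeoJSON, e.g. for quick visualization on geojson.io
print(area.to_geo_json())
```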
**Documentation**: For a complete overview over the project, how to set things up and contribute,
please visit [our documentation](http://informatik.pages.sailingteam.hg.tu-darmstadt.de/pyrate/).
It can also be reached via the "External Wiki" link in the left sidebar of the project overview.
The CI pipeline ensures that its content is always up to date with the current `master` branch.
**New Features**: Upcoming or required features and existing problems can be found, added and discussed in the
[Gitlab issues](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/boards)
and [merge requests](https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/merge_requests).

20
pyrate/doc/Makefile Normal file

@ -0,0 +1,20 @@
# Minimal makefile for Sphinx documentation
#
# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS ?= -W --keep-going
SPHINXBUILD ?= sphinx-build
SOURCEDIR = source
BUILDDIR = build
# Put it first so that "make" without argument is like "make help".
help:
@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
.PHONY: help Makefile
# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%: Makefile
@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

35
pyrate/doc/make.bat Normal file

@ -0,0 +1,35 @@
@ECHO OFF
pushd %~dp0
REM Command file for Sphinx documentation
if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=source
set BUILDDIR=build
if "%1" == "" goto help
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
echo.
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
echo.installed, then set the SPHINXBUILD environment variable to point
echo.to the full path of the 'sphinx-build' executable. Alternatively you
echo.may add the Sphinx directory to PATH.
echo.
echo.If you don't have Sphinx installed, grab it from
echo.http://sphinx-doc.org/
exit /b 1
)
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end
:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
:end
popd


@ -0,0 +1,10 @@
Act
---
.. automodule:: pyrate.act
.. toctree::
:maxdepth: 2
:caption: Subpackages:
control/control


@ -0,0 +1,8 @@
Anti windup LQR
---------------
.. automodule:: pyrate.act.control.anti_windup_lqr
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Anti windup PID
---------------
.. automodule:: pyrate.act.control.anti_windup_pid
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,13 @@
Control
-------
.. automodule:: pyrate.act.control
.. toctree::
:maxdepth: 2
:caption: Modules:
pid
anti_windup_pid
lqr
anti_windup_lqr


@ -0,0 +1,8 @@
LQR
---
.. automodule:: pyrate.act.control.lqr
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
PID
---
.. automodule:: pyrate.act.control.pid
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,11 @@
Charts
------
The charts package enables users to read and write charts based on different file formats and databases.
.. toctree::
:maxdepth: 2
:caption: Modules:
database
s57_files


@ -0,0 +1,7 @@
Database
--------
.. automodule:: pyrate.common.charts.db
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Raw Files
---------
.. automodule:: pyrate.common.charts.s57_files
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,13 @@
Common
------
.. automodule:: pyrate.common
.. toctree::
:maxdepth: 2
:caption: Subpackages:
charts/charts
math/math
raster_datasets/raster_datasets
testing/testing


@ -0,0 +1,7 @@
Gaussian
--------
.. automodule:: pyrate.common.math.gaussian
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,10 @@
Math
----
The math package contains useful abstractions for common mathematical objects.
.. toctree::
:maxdepth: 2
:caption: Modules:
gaussian


@ -0,0 +1,7 @@
Geographical Datasets
---------------------
.. automodule:: pyrate.common.raster_datasets.geo_datasets
:members:
:undoc-members:
:show-inheritance:

Binary file not shown (new image, 2.1 MiB).

Binary file not shown (new image, 954 KiB).


@ -0,0 +1,12 @@
Raster Datasets
---------------
.. automodule:: pyrate.common.raster_datasets
.. toctree::
:maxdepth: 2
:caption: Modules:
geo_datasets
transformer_base
transformers_concrete


@ -0,0 +1,7 @@
Transformer Base Classes
------------------------
.. automodule:: pyrate.common.raster_datasets.transformer_base
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Concrete Transformers
---------------------
.. automodule:: pyrate.common.raster_datasets.transformers_concrete
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Dynamic System
--------------
.. automodule:: pyrate.common.testing.strategies.dynamic_system
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Geometry
--------
.. automodule:: pyrate.common.testing.strategies.geometry
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,11 @@
Strategies
----------
.. automodule:: pyrate.common.testing.strategies
.. toctree::
:maxdepth: 2
:caption: Modules:
dynamic_system
geometry


@ -0,0 +1,13 @@
Testing
-------
.. automodule:: pyrate.common.testing
:members:
:undoc-members:
:show-inheritance:
.. toctree::
:maxdepth: 2
:caption: Modules:
strategies

122
pyrate/doc/source/conf.py Normal file

@ -0,0 +1,122 @@
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
from os.path import abspath
from os.path import dirname
from os.path import join
import sys
sys.path.insert(0, abspath(join(dirname(__file__), "../../"))) # for scripts/
import pyrate # noqa: E402
# -- Project information -----------------------------------------------------
project = "Pyrate"
copyright = pyrate.__author__
author = pyrate.__author__
# The version info for the project, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = pyrate.__version__.split("-")[0]
# The full version, including alpha/beta/rc tags
release = pyrate.__version__
# -- General configuration ---------------------------------------------------
primary_domain = "py"
# If this is True, todo and todolist produce output, else they produce nothing.
# The default is False.
todo_include_todos = True
language = "en"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
"sphinx_markdown_builder",
"sphinx.ext.autodoc",
"sphinx.ext.intersphinx",
"sphinx.ext.mathjax",
"sphinx.ext.viewcode",
"sphinx.ext.napoleon",
"sphinx.ext.autosummary",
"sphinx.ext.doctest",
"sphinx.ext.inheritance_diagram",
"sphinx_rtd_theme",
"sphinxcontrib.programoutput",
]
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []
source_suffix = [".rst"]
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
"shapely": ("https://shapely.readthedocs.io/en/stable", None),
"numpy": ("https://numpy.org/doc/stable", None),
"scipy": ("https://docs.scipy.org/doc/scipy", None),
"matplotlib": ("https://matplotlib.org/stable", None),
"pandas": ("https://pandas.pydata.org/pandas-docs/stable", None),
"h5py": ("https://docs.h5py.org/en/stable", None),
"tables": ("https://www.pytables.org", None),
"pyproj": ("https://pyproj4.github.io/pyproj/stable", None),
"rasterio": ("https://rasterio.readthedocs.io/en/stable", None),
"geopy": ("https://geopy.readthedocs.io/en/stable", None),
"cartopy": ("https://scitools.org.uk/cartopy/docs/latest", None),
"pytest": ("https://docs.pytest.org/en/stable", None),
"pytest-cov": ("https://pytest-cov.readthedocs.io/en/stable", None),
"hypothesis": ("https://hypothesis.readthedocs.io/en/latest", None),
}
nitpicky = False
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes or:
# - https://sphinx-themes.org/
# - https://www.writethedocs.org/guide/tools/sphinx-themes/
html_theme = "sphinx_rtd_theme"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = [] # '_static'
html_favicon = "../../resources/logo.svg"
html_logo = "../../resources/logo.svg"
html_sidebars = {"**": ["globaltoc.html", "relations.html", "sourcelink.html", "searchbox.html"]}
# -- Options for LaTeX output ---------------------------------------------
latex_engine = "pdflatex"
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
"papersize": "a4paper",
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [("index", "pyrate.tex", f"{project} Documentation", author, "manual")]


@ -0,0 +1,12 @@
Contribution
============
A long-term goal of this project is to provide a stable foundation of algorithms and data structures to enable a reliable and autonomous operation of a surface vehicle.
To achieve this goal, every addition or change to the software needs to be reviewed by the team and follow a set of rules as defined by our CI pipeline.
This obviously includes writing tests and documentation as needed.
To contribute to the project, feel free to create a new branch with a meaningful name and start your work from there.
If you feel that your work is ready to be merged into the master branch of the project, it is time to open a merge request.
If you are new to the Python programming language, you can find a good overview of Pythonic idioms `here <http://web.archive.org/web/20170316131252id_/http://python.net/~goodger/projects/pycon/2007/idiomatic/handout.html>`_.
We use `Google-style <https://www.sphinx-doc.org/en/master/usage/extensions/napoleon.html>`_ docstrings within the code and `Sphinx <https://www.sphinx-doc.org/en/master/>`_ to generate the documentation you are currently reading.


@ -0,0 +1,30 @@
Welcome to Pyrate!
==================
This project offers algorithms targeted towards autonomous surface vehicles implemented in the Python 3.6+ programming language.
The functionality included in the Pyrate Python package enables the perception and processing of environmental features, planning of strategies and trajectories as well as action taking.
Pyrate is therefore divided into multiple distinct subpackages *sense*, *plan* and *act*, as well as the *common* package for additional features such as file I/O and mathematical abstractions.
The sense subpackage includes algorithms for computer vision, single and multi target state estimation, data smoothing and more.
In the plan subpackage, the perceived state of the agent and its environment is processed to compute trajectories for long- and short-term navigation as well as strategic decisions.
This also includes gradient-based and gradient-free optimization methods.
Finally, the act subpackage contains state controllers to carry out planned behavior.
.. image:: ../../resources/project_structure.png
This project aims at providing the algorithmic backend for the ROS driven on-board systems and simulations.
Models of the robot's mechanics, electronics and its environment shall be developed within their own respective repository.
Furthermore, technical specifications, maps and so on go into a separate project repository as well.
This ensures a clean separation of distinct requirements for quality assurance and responsibilities that distinguish these projects.
.. toctree::
:hidden:
installation
sense/sense
plan/plan
act/act
common/common
scripts/scripts
contribution
quality_assurance


@ -0,0 +1,93 @@
Installation
============
This section leads you through the process of setting up your machine to be able to contribute to and use the Pyrate package.
Creating a virtualenv
---------------------
.. note:: This step is not necessary but recommended
To encapsulate Pyrate, its specific dependencies and the rest of the STDA Python workspace, it can be a good decision to first set up a *virtualenv*.
A *virtualenv* behaves in a similar way to the Python installation your distribution probably already ships with, but keeps all the installed packages in a separate space.
This way you do not run into incompatibilities if you participate in multiple projects at the same time.
A new *virtualenv* named *stda-env* is created with the following command within your current working directory.
.. code-block:: bash
python -m venv stda-env
To actually use this newly created environment, you need to activate the environment.
To do this you can switch into the project directory and execute :code:`source PathToEnvironment/stda-env/bin/activate`.
This causes your current shell to switch from the system wide Python installation to the *virtualenv*.
For ease of use, you can append one of the following lines to your `~/.bashrc`.
.. code-block:: bash
# Always activate the environment when a new shell is created
source PathToEnvironment/stda-env/bin/activate
# Create a new command that activates the environment by hand
alias activate-stda="source PathToEnvironment/stda-env/bin/activate"
To return from the virtualenv to using the system's Python interpreter, simply use the command :code:`deactivate`.
Setting up Pyrate
-----------------
To install Pyrate and its dependencies to your Linux-based Python 3.10+ environment, simply do the following.
Afterwards, Pyrate's functionality will be available to you either globally or, if you followed the instructions above, with your *virtualenv* activated.
.. code-block:: bash
# Dependencies that are not installed via pip
sudo add-apt-repository ppa:antiprism/ppa
sudo apt install python3-pip g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite antiprism
pip install wheel
# Clone the repository and change into the created directory
git clone git@gitlab.sailingteam.hg.tu-darmstadt.de:informatik/pyrate.git
cd pyrate
# To install, choose either option A (static) or B (editable)
# A: Install a static version of Pyrate
pip install .
# B: If you want to contribute to Pyrate, you need to install in editable mode
pip install -e .
You can check that everything worked by executing :code:`python3 -c "import pyrate"`.
If no :code:`ImportError` or the like pops up, the installation has succeeded.
Building the docs
-----------------
The documentation you are currently reading can easily be built for the locally installed branch using `Sphinx <https://www.sphinx-doc.org/>`__.
The most recent version from the ``master`` branch is also available `online <https://informatik.pages.sailingteam.hg.tu-darmstadt.de/pyrate/>`__.
Three formats are currently supported.
After you have built any of them with the below instructions, open these files to view the documentation:
- HTML (multiple linked pages): open ``doc/build/html/index.html`` in a web browser
- PDF (single document): open ``doc/build/latex/pyrate.pdf`` in a PDF viewer
- Markdown (multiple linked pages, limited functionality): open ``doc/build/markdown/index.md``
.. code-block:: bash
# install the extra Python dependencies
pip install .[docs]
# install the `dot` program to render inheritance diagrams (else they will appear as gibberish text)
sudo apt install graphviz
# change into the documentation directory
cd doc
# compile the docs into one or more of the below formats
make html
make latexpdf # requires pdflatex
make markdown
On Windows, `make.bat` can be used in place of `make` (untested, possibly requires installing additional dependencies).


@ -0,0 +1,215 @@
Geometry
========
The geometry package provides a foundation for planning methods by implementing
several commonly used geometric objects, e.g. locations, polygons, and routes.
Each of them comes in a polar coordinate (i.e. latitude & longitude) variant and a
cartesian coordinate (i.e. local x- & y-axis on a tangent plane) variant.
The cartesian ones are based on `Shapely <https://shapely.readthedocs.io/en/stable/project.html>`_
and the polar ones try to mimic their interface and functionality.
All coordinates are referenced to the widely used
`world geodetic system (WGS84) <https://de.wikipedia.org/wiki/World_Geodetic_System_1984>`__.
In this inheritance diagram, :class:`~shapely.BaseGeometry` as well as classes inheriting directly from it
are provided by *Shapely*.
It shows that all geometric objects of *Pyrate* inherit from :class:`~pyrate.plan.geometry.geospatial.Geospatial`:
.. inheritance-diagram::
pyrate.plan.geometry.location.CartesianLocation
pyrate.plan.geometry.location.PolarLocation
pyrate.plan.geometry.polygon.CartesianPolygon
pyrate.plan.geometry.polygon.PolarPolygon
pyrate.plan.geometry.route.CartesianRoute
pyrate.plan.geometry.route.PolarRoute
:parts: 1
:top-classes: pyrate.plan.geometry.geospatial.Geospatial
See :ref:`geometry-plotting` on how to easily plot geometries like points, polygons and routes.
See :ref:`design-decisions-local-projections` on how the implementation of the projections
between local and global coordinate systems has developed.
.. toctree::
:maxdepth: 2
:caption: Modules:
geospatial
location
polygon
route
helpers
.. _geometry-plotting:
Geometry Plotting
-----------------
There are many possibilities to visualize geometries with Python. For simplicity, we chose to not provide
direct visualization methods, but support `GeoJSON <https://geojson.org>`_. This format can be read very
easily by many programs, including the website `geojson.io <https://geojson.io>`_. You can simply
copy-paste it into there or use the convenient command-line tool `geojsonio <https://github.com/mapbox/geojsonio-cli>`_.
However, when objects become very large, other tools like `QGIS Desktop <https://www.qgis.org>`_ may be more appropriate.
The code below gives an example of how the
*GeoJSON* representation can be obtained. After that, a few interesting references are given.
Also, see :meth:`~pyrate.plan.geometry.geospatial.Geospatial.to_geo_json`.
.. code-block:: python
from geojson import dumps, Feature
from pyrate.plan.geometry import PolarPolygon
# create a geometry object
some_geometry = PolarPolygon(...)
# then simply dump it to standard out
print(some_geometry.to_geo_json())
# or more general
print(dumps(Feature(geometry=some_geometry)))
.. code-block:: bash
echo '{"type": "Point", "coordinates": [30, 10]}' | geojsonio
geojsonio some_geometry.json
# see https://github.com/mapbox/geojsonio-cli#examples for more examples
This works for
- :class:`~pyrate.plan.geometry.location.PolarLocation`,
- :class:`~pyrate.plan.geometry.location.CartesianLocation`,
- :class:`~pyrate.plan.geometry.polygon.PolarPolygon`,
- :class:`~pyrate.plan.geometry.polygon.CartesianPolygon`,
- :class:`~pyrate.plan.geometry.route.PolarRoute`,
- :class:`~pyrate.plan.geometry.route.CartesianRoute`,
- and any object that provides a ``__geo_interface__`` attribute/property.
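The last point means that objects from entirely unrelated libraries can be serialized as well.
A minimal sketch, assuming only the ``__geo_interface__`` protocol and the ``geojson`` package (the class is purely illustrative):

.. code-block:: python

    import geojson

    class MyPoint:
        # A toy geometry that only implements the GeoJSON protocol
        def __init__(self, longitude: float, latitude: float) -> None:
            self.longitude = longitude
            self.latitude = latitude

        @property
        def __geo_interface__(self) -> dict:
            return {"type": "Point", "coordinates": (self.longitude, self.latitude)}

    # Prints a GeoJSON "Point" with the given coordinates
    print(geojson.dumps(MyPoint(8.65, 49.87)))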
Further References
~~~~~~~~~~~~~~~~~~
- The original `Gitlab issue #54 <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/issues/54>`_ that collected initial ideas
- `Interaktive Visualisierung von Geodaten in Jupyter Notebooks (Lightning Talk, FOSSGIS 2017) <https://tib.flowcenter.de/mfc/medialink/3/de387967965b98c17bd5dd552ac86e899179084e8c1b5aa6d578f5ad72c5eea5ea/Interaktive_Visualisierung_von_Geodaten_in_Jupyter_Notebooks_Lightning_Talk_2.pdf>`_
- Examples in the *Folium* library: `Quickstart - GeoJSON/TopoJSON Overlays <https://python-visualization.github.io/folium/quickstart.html#GeoJSON/TopoJSON-Overlays>`_
.. _design-decisions-local-projections:
Design decisions on the local projections
-----------------------------------------
This section documents our arguments for and against `Universal Transverse Mercator (UTM) <https://en.wikipedia.org/wiki/Universal_Transverse_Mercator_coordinate_system>`_ vs
`local tangent plane coordinates <https://en.wikipedia.org/wiki/Local_tangent_plane_coordinates>`_ based on freely chosen reference points,
as means of `horizontal position representation <https://en.wikipedia.org/wiki/Horizontal_position_representation>`_.
A third approach would be to provide both.
This discussion was copied and adapted from `issue #40 <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/issues/40>`_, where it was initially collected.
Overview of the arguments
~~~~~~~~~~~~~~~~~~~~~~~~~
Firstly, the three approaches are presented with the arguments for using them.
Pro UTM
.......
1. A worldwide standard for navigation
2. Data easy to import/export to other teams/projects (can be important e.g. for the WRSC competition). However, WGS84 coordinates will probably suffice.
3. UTM locations can be pre-computed while arbitrary projections constantly change. Example from DB (pseudo-SQL): ``SELECT obstacle WHERE obstacle.zone IN {boat_zone, boat_zone + 1, boat_zone - 1, ...}``. Compare this to *local*, where each PolarLocation is transformed into local coordinates, its distance is computed, and only then is it decided whether to use or drop it.
4. UTM errors are guaranteed to be +-1m per 1km within a single zone, see for reference e.g. `here <https://www.e-education.psu.edu/natureofgeoinfo/c2_p22.html>`_.
5. UTM makes tiling the map easy. This might help to choose which obstacles to include while planning. However, a single UTM zone is also quite large.
6. Slicing can be done once, offline.
Pro local
.........
1. Better precision around boat position/obstacles close to the boat. If we also use the Transverse Mercator projection like UTM, we might even get better resolution. However, this might come at some increased computational cost since it cannot be easily done offline/beforehand.
2. No tiling needed: select obstacles that are within a range of the boat and clip the non-relevant parts (already implemented in the *spatialite* database with polar coordinates)
3. No special cases due to UTM zones not being entirely uniform
4. Could, in theory, allow for different projections for different needs (preserve the visual shape, preserve the area, etc.), though it might be too complicated and not worth the effort
5. Works exactly the same, no matter where on the globe something is
Pro for both and therefore neutral
..................................
1. Tested and documented packages for UTM (`utm <https://pypi.org/project/utm/>`_) and for arbitrary local transformations exist (`pyproj <https://pypi.org/project/pyproj/>`_)
2. Slicing polygons is provided by shapely (either ``island.intersection(Point(x, y).buffer(radius))`` or ``island.intersection(Polygon([(0, 0), (max, 0), (max, max), (0, max)]))``; see the snippet after this list)
3. Both approaches would provide sufficiently precise approximations of the earth surface for our needs
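As a minimal, self-contained illustration of the slicing mentioned in point 2 (plain *Shapely*, independent of *Pyrate*):

.. code-block:: python

    from shapely.geometry import Point, Polygon

    # A square "island" ...
    island = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)])

    # ... clipped to the part within 2.5 units of a boat at the origin
    relevant_part = island.intersection(Point(0, 0).buffer(2.5))
    print(relevant_part.area)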
About implementing both
.......................
1. Would have the best of both worlds
2. How would this complicate the implementation? (Too much, and it would spark discussions and incompatibilities.)
Decision
~~~~~~~~
In the end, the main argument against UTM zones was the handling of the cases near zone borders and that there are some irregularities in the UTM zones that might complicate things.
However, using local projections was feared to have a huge performance impact on embedded computers, so we performed a benchmark of a basic implementation.
The results when benchmarking in the scenario tested below confirmed that using local projections was feasible on our embedded computers.
Thus, the local transformation approach was selected.
.. _benchmarking-db-and-local-projections:
Benchmarking results of the custom local transformation approach
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
The performance was initially tested on a *Raspberry Pi 4B* with 2GB RAM and a *SanDisk Extreme 64GB (Class 3)*.
A *Raspberry Pi* was chosen as it will likely be the actual computer being used in many challenges.
The OS was *Raspberry Pi OS (32-bit)* with version *May 2020* and the variant "with desktop and recommended software".
The overall performance was concluded to be very acceptable.
The benchmarking was performed with the chart database from the `data repository <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/data>`__
on `commit 0abe9269026de87b7265f664d10a0b9599314313 <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/data/-/commit/0abe9269026de87b7265f664d10a0b9599314313>`__.
It contained the entirety of North America as was available from the (US) NOAA.
The benchmark script (and *Pyrate* code) was from `commit 0ae4c33e361369321b10d677067deeb07ed27493 <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/commit/0ae4c33e361369321b10d677067deeb07ed27493>`__.
See :ref:`script-benchmark_db_and_projections` for details on what is actually tested.
The following tests were carried out on an Intel(R) Core(TM) i5-6300U with a SATA SSD and plenty of RAM.
Results with realistic parameters: radius 100km
...............................................
.. code-block:: bash
user@ubuntu:~/sailing/pyrate $ python scripts/benchmark_db_and_projections.py ../data/charts/noaa_vector/all_combined_simplified_25m.sqlite --iterations 10 --radius 100
Information on the setting:
number of rows/polygons in database: 648828
sum of vertices of all rows/polygons of in database: 13727653
extracted number of polygons: 6266
extracted total number of vertices: 120179
Executed "query_database" 10 times:
average: 2.977373 seconds
std dev: 0.042802 seconds
variance: 0.001832 seconds
Executed "project_to_cartesian_and_back" 10 times:
average: 1.465923 seconds
std dev: 0.033850 seconds
variance: 0.001146 seconds
Results with stress testing parameters: radius 999km
....................................................
.. code-block:: bash
user@ubuntu:~/sailing/pyrate $ python scripts/benchmark_db_and_projections.py ../data/charts/noaa_vector/all_combined_simplified_25m.sqlite --iterations 10 --radius 999
Information on the setting:
number of rows/polygons in database: 648828
sum of vertices of all rows/polygons of in database: 13727653
extracted number of polygons: 90539
extracted total number of vertices: 2131078
Executed "query_database" 10 times:
average: 34.120787 seconds
std dev: 0.499919 seconds
variance: 0.249919 seconds
Executed "project_to_cartesian_and_back" 10 times:
average: 23.383787 seconds
std dev: 0.224816 seconds
variance: 0.050542 seconds
Notes and conclusions
.....................
Comparing the results with radius 100km and 999km, we can see that ``_project_to_cartesian_and_back()`` grows very linearly, as expected: 12 μs/vertex (100km) vs. 11 μs/vertex (999km).
The ``_query_database()`` benchmark runs even better (sub linear in the number of vertices): 24 μs/vertex (100km) vs. 16 μs/vertex (999km).
Also note that having a lot of polygons outside of the relevant area seems to be non-problematic.
Here, the spatial index really shines, as ``_query_database()`` took *a lot* longer before its introduction.
About 66% of the time when projecting is spent reassembling the polygon after it was converted, so that's probably something we can improve if we eventually need to.
Also, one could reduce the fidelity of the features by using stronger simplification or reduce the query radius.
Memory seems to not be a problem either. No precise measurements were made though.
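For reference, the per-vertex figures quoted above can be re-derived from the raw benchmark output, for example:

.. code-block:: python

    # Average runtimes (s) and vertex counts from the two benchmark runs above
    runs = [
        ("project_to_cartesian_and_back, 100 km", 1.465923, 120_179),
        ("project_to_cartesian_and_back, 999 km", 23.383787, 2_131_078),
        ("query_database, 100 km", 2.977373, 120_179),
        ("query_database, 999 km", 34.120787, 2_131_078),
    ]
    for name, seconds, vertices in runs:
        print(f"{name}: {seconds / vertices * 1e6:.1f} μs/vertex")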


@ -0,0 +1,7 @@
Geospatial
----------
.. automodule:: pyrate.plan.geometry.geospatial
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Geometry Helpers
----------------
.. automodule:: pyrate.plan.geometry.helpers
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Location
--------
.. automodule:: pyrate.plan.geometry.location
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Polygon
-------
.. automodule:: pyrate.plan.geometry.polygon
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Route
-----
.. automodule:: pyrate.plan.geometry.route
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Earth Graph Generation
----------------------
.. automodule:: pyrate.plan.graph.generate
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Geo-referenced Graph Implementation
-----------------------------------
.. automodule:: pyrate.plan.graph.geo_graph
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Graph Implementation
--------------------
.. automodule:: pyrate.plan.graph.graph
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,16 @@
Graph
=====
.. automodule:: pyrate.plan.graph
.. inheritance-diagram:: pyrate.plan.graph.graph.NavigationGraph pyrate.plan.graph.geo_graph.GeoNavigationGraph
:parts: 1
:top-classes: pyrate.plan.graph.graph.NavigationGraph
.. toctree::
:maxdepth: 2
:caption: Modules:
graph
geo_graph
generate

Binary file not shown (new image, 768 KiB).

Binary file not shown (new image, 129 KiB).


@ -0,0 +1,11 @@
Plan
----
.. automodule:: pyrate.plan
.. toctree::
:maxdepth: 2
:caption: Subpackages:
geometry/geometry
graph/graph_overview


@ -0,0 +1,62 @@
Quality Assurance
=================
This section shows you how to test the software locally on your machine and ensure that your contributions follow our common coding standards.
Note that with every contribution pushed to the Gitlab server the routines that are described here are automatically executed for you.
The results are then visible in `the repository's CI/CD section <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/pipelines>`__ and in the specific merge request view.
`The Software section of the Wiki <https://gitlab.sailingteam.hg.tu-darmstadt.de/team/wiki/-/wikis/Software/Home%20%F0%9F%92%BB>`__ also holds further information on our `styleguide <https://gitlab.sailingteam.hg.tu-darmstadt.de/team/wiki/-/wikis/Software/Quality-Assurance/Styleguide>`__, `documentation <https://gitlab.sailingteam.hg.tu-darmstadt.de/team/wiki/-/wikis/Software/Quality-Assurance/Documentation>`__ and `testing <https://gitlab.sailingteam.hg.tu-darmstadt.de/team/wiki/-/wikis/Software/Quality-Assurance/Testing>`__.
Coding Style
------------
A common style when collaborating on a big project is crucial to keep everything maintainable and easy to understand in the long run.
To make sure you are following the rules, we employ a number of programs that help us analyse the source automatically.
Since Python is an interpreted language, we do not have a compiler that ensures that everything we write will make sense at runtime.
Linting and enforcing coding conventions therefore play a very important role in keeping our software reliable.
To get reports on the code you have written, you can use the following commands in the package's root directory.
.. code-block:: bash
flake8
mypy pyrate tests scripts
pylint -j 0 pyrate tests scripts
Testing
-------
Tests can be run by simply executing :code:`pytest` within the repository's root directory.
This will also result in coverage statistics being printed, which are a good indicator of whether your tests are covering all the lines of your code.
Again, since Python is not compiled, this can be very useful.
Nevertheless, make sure that your tests ideally go beyond mere execution of code and assert its correctness.
Besides statement coverage, we also collect branch coverage (see `here <https://coverage.readthedocs.io/en/stable/branch.html>`__).
We require that all contributions achieve 100% statement coverage and 90% branch coverage.
Two parts of the library are special when it comes to testing:
Both the tests around :code:`pyrate.common.charts.SpatialiteDatabase` and around :code:`pyrate.common.charts.S57ChartHandler` are only exercised if the required dependencies are met and if not running on a CI server.
If the dependencies cannot be found, the tests will be marked as skipped.
This allows for easier development as fewer dependencies are actually required for running the tests, but the CI server will still check these modules for you.
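A sketch of how such dependency-gated skipping can look with plain *pytest* (the module name here is an assumption for illustration; the actual tests may use a different mechanism):

.. code-block:: python

    import pytest

    # Skip this whole test module if GDAL (needed for the S-57 handling) is missing
    gdal = pytest.importorskip("osgeo.gdal")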
Hypothesis Testing and Speed
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Many tests use the `Hypothesis property based testing framework <https://hypothesis.readthedocs.io/en/latest/>`__.
Everyone is encouraged to do so too, and there are already some handy example generators in ``tests/strategies/``.
The settings (along with some more general setup) are declared in ``tests/__init__.py``.
However, a few tests take a lot of time to process, like for example the tests that use the very slow cartesian route example generation.
Therefore, it might be required to reduce the number of tests, as is done in ``tests/plan/geometry/primitives/test_polygons.py`` using the ``@settings()`` decorator.
Timings for (all) individual tests can be obtained by running pytest with ``--durations=0`` (see `the pytest docs <https://docs.pytest.org/en/stable/usage.html#profiling-test-execution-duration>`__).
You may want to temporarily add this argument to the ``addopts`` option in the section ``[tool.pytest.ini_options]`` in ``pyproject.toml``.
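For illustration, a minimal *Hypothesis* test with a reduced example count might look as follows (the concrete number and strategy are placeholders, not the project defaults):

.. code-block:: python

    from hypothesis import given, settings, strategies as st

    @settings(max_examples=25)  # run fewer examples than the project-wide default
    @given(st.floats(allow_nan=False, allow_infinity=False))
    def test_negation_is_involutive(value: float) -> None:
        assert -(-value) == value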
Downstream CI Pipeline Triggers
-------------------------------
Other projects like *ros-nodes* depend on *Pyrate*: They are *downstream* projects, as changes in *Pyrate* flow down the "stream of dependencies" to them.
To ensure that changes here in the upstream *Pyrate* project do not break such downstream projects (or just to remind us to fix stuff over there too),
the pipeline of this repository triggers the ones of downstream projects.
This is configured in a special ``Deploy``-stage job called ``Trigger Downstream Pipelines`` at the bottom of the ``.gitlab-ci.yml`` file (in this upstream project!).
The capabilities are documented in `the official GitLab docs on "Multi-project pipelines" <https://docs.gitlab.com/ee/ci/multi_project_pipelines.html>`__.


@ -0,0 +1,52 @@
.. _scripts-reference:
API Reference
-------------
``s57_charts_to_db.py``
~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: scripts.s57_charts_to_db
:members:
:undoc-members:
``benchmark_db_and_projections.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: scripts.benchmark_db_and_projections
:members:
:undoc-members:
``create_earth_graph.py``
~~~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: scripts.create_earth_graph
:members:
:undoc-members:
``earth_graph_frequency_statistics.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: scripts.earth_graph_frequency_statistics
:members:
:undoc-members:
``visualize_earth_graph.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: scripts.visualize_earth_graph
:members:
:undoc-members:
``benchmark_graph_neighbor_search.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. automodule:: scripts.benchmark_graph_neighbor_search
:members:
:undoc-members:


@ -0,0 +1,81 @@
Scripts
=======
*Pyrate* contains a few scripts in the directory `pyrate/scripts/`.
They are mainly meant for applying the algorithms to real-world data, and also serve as examples for the library code.
Requirements
------------
Most script documentation assumes the typical Sailing Team directory layout as described in
`the installation guide <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/pyrate/-/wikis/Installation>`_.
To execute these programs, you need the datasets that you want to work with (if any).
Therefore, you will probably want to download the data repository as described
`here <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/data/-/blob/master/README.md#installation>`_,
if you haven't already.
Usage
-----
This section just lists the parameters of the scripts. See :ref:`scripts-reference` for more complete explanations.
.. _script-s57_charts_to_db:
``s57_charts_to_db.py``
~~~~~~~~~~~~~~~~~~~~~~~
.. command-output:: ../../scripts/s57_charts_to_db.py --help
.. _script-benchmark_db_and_projections:
``benchmark_db_and_projections.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. command-output:: ../../scripts/benchmark_db_and_projections.py --help
.. _script-create_earth_graph:
``create_earth_graph.py``
~~~~~~~~~~~~~~~~~~~~~~~~~
.. command-output:: ../../scripts/create_earth_graph.py --help
.. _script-earth_graph_frequency_statistics:
``earth_graph_frequency_statistics.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. command-output:: ../../scripts/earth_graph_frequency_statistics.py --help
.. _script-visualize_earth_graph:
``visualize_earth_graph.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. command-output:: ../../scripts/visualize_earth_graph.py --help
.. _script-benchmark_graph_neighbor_search:
``benchmark_graph_neighbor_search.py``
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
.. command-output:: ../../scripts/benchmark_graph_neighbor_search.py --help
Reference
---------
The section above just lists the usage of the scripts.
The complete API reference can be found below:
.. toctree::
:maxdepth: 2
reference


@ -0,0 +1,7 @@
Extended Kalman Filter
----------------------
.. automodule:: pyrate.sense.filters.extended
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Extended Gaussian Mixture PHD Filter
------------------------------------
.. automodule:: pyrate.sense.filters.extended_gmphd
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,16 @@
Filters
-------
The filters package provides single and multi target tracking capabilities based on Bayesian methods.
A prime example of such a filter is the so-called Kalman filter and its derivatives for nonlinear estimation.
Additionally, the Gaussian mixture probability hypothesis density (PHD) filter is provided.
.. toctree::
:maxdepth: 2
:caption: Modules:
kalman
extended
unscented
gmphd
extended_gmphd


@ -0,0 +1,7 @@
Gaussian Mixture PHD Filter
---------------------------
.. automodule:: pyrate.sense.filters.gmphd
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Kalman Filter
-------------
.. automodule:: pyrate.sense.filters.kalman
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
Unscented Kalman Filter
-----------------------
.. automodule:: pyrate.sense.filters.unscented
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,9 @@
Sense
-----
.. toctree::
:maxdepth: 2
:caption: Subpackages:
filters/filters
smoothers/smoothers


@ -0,0 +1,7 @@
Extended RTS Smoother
---------------------
.. automodule:: pyrate.sense.smoothers.extended
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,7 @@
RTS Smoother
------------
.. automodule:: pyrate.sense.smoothers.rts
:members:
:undoc-members:
:show-inheritance:


@ -0,0 +1,13 @@
Smoothers
---------
Smoothing is the state estimation problem in which not only previous measurements but also future observations contribute to a single estimate.
A popular example of a smoother is the so-called Rauch-Tung-Striebel (RTS) smoother, which is based on the Kalman filter and its derivatives for nonlinear estimation.
.. toctree::
:maxdepth: 2
:caption: Modules:
rts
extended
unscented
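
To illustrate the idea, the following is a minimal, self-contained sketch of a scalar RTS
backward pass over already-filtered estimates. It is purely illustrative and does not use
the ``pyrate.sense.smoothers`` API; see the modules above for the actual interfaces.

.. code-block:: python

    # Filtered means and variances from a forward (Kalman) pass; the values are made up
    a, q = 1.0, 0.01         # state transition and process noise variance
    xf = [0.90, 1.02, 1.00]  # filtered means
    pf = [0.50, 0.40, 0.35]  # filtered variances

    xs, ps = xf[:], pf[:]    # smoothed estimates, initialized at the last time step
    for k in range(len(xf) - 2, -1, -1):
        x_pred = a * xf[k]   # one-step prediction from time k
        p_pred = a * pf[k] * a + q
        g = pf[k] * a / p_pred  # smoother gain
        xs[k] = xf[k] + g * (xs[k + 1] - x_pred)
        ps[k] = pf[k] + g * g * (ps[k + 1] - p_pred)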

View File

@ -0,0 +1,7 @@
Unscented RTS Smoother
----------------------
.. automodule:: pyrate.sense.smoothers.unscented
:members:
:undoc-members:
:show-inheritance:

93
pyrate/experiments.py Normal file
View File

@ -0,0 +1,93 @@
from typing import Dict, List, Optional, Tuple

import numpy as np
from shapely.geometry import Polygon, Point
from pyrate.plan.geometry.polygon import CartesianPolygon
from pyrate.plan.nearplanner.cost_functions import *
from pyrate.plan.nearplanner.evaluated_timing_frame import EvaluatedTimingFrame
from pyrate.plan.nearplanner.holders import EstimationParameters
from pyrate.plan.nearplanner.holders import OptimizationParameters
from pyrate.plan.nearplanner.obstacle import Obstacle
from pyrate.plan.nearplanner.optimizer import Optimizer
estimation_param = EstimationParameters()
optimization_param = OptimizationParameters(estimation_param)
optimization_param.verbose = False
def create_context(
position: Point,
goal: Point,
wind: Tuple[float, float],
obstacles: Dict[str, Polygon],
costfunction_obstacles_width: float = 40,
costfunction_obstacles_scale: float = 0.02,
) -> Optimizer:
fct = CostFunctionExp(scale=costfunction_obstacles_scale, safety_dist=costfunction_obstacles_width)
cartesian_polys: Dict[str, CartesianPolygon] = {
i: CartesianPolygon.from_shapely(k) for i, k in obstacles.items()
}
obstacle_dict: Dict[str, Obstacle] = {
i: Obstacle(poly, np.array([0, 0]), cost_function=fct) for i, poly in cartesian_polys.items()
}
context = Optimizer(wind_information=(0, 0), obstacles={}, position=Point(0, 0))
context.wind_speed, context.wind_angle = wind[0], np.deg2rad(wind[1])
context.position = position
context.goal = goal
context.on_reset_obstacles(obstacle_dict)
return context
# south to north -> wind (25 m/s, 0)
def generate_route(
position: Point,
goal: Point,
wind: Tuple[float, float],
obstacles: Dict[str, Polygon],
costfunction_obstacles_width: float = 40,
costfunction_obstacles_scale: float = 0.02,
) -> Tuple[Optional[EvaluatedTimingFrame], Optional[List[EvaluatedTimingFrame]]]:
"""Function that generates a route in the first of the tuple return value.
Second value contains a list of timing frames created during optimization.
"""
context = create_context(
position=position,
goal=goal,
wind=wind,
obstacles=obstacles,
costfunction_obstacles_width=costfunction_obstacles_width,
costfunction_obstacles_scale=costfunction_obstacles_scale,
)
return context.optimize(goal=goal, optimization_parameters=optimization_param)
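
# A hypothetical usage sketch (the obstacle geometry and wind values below are made up
# purely for illustration; ``generate_route`` returns the best evaluated frame plus the
# frames produced during optimization):
#
#     obstacles = {"wall": Polygon([(-50, 70), (50, 70), (50, 80), (-50, 80)])}
#     best, history = generate_route(
#         position=Point(0, 0), goal=Point(0, 100), wind=(25.0, 0.0), obstacles=obstacles
#     )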
# poly = Polygon([(-2, 1), (-0.1, 3), (3, 3), (2,1)])
# poly = Polygon([(-80, 10), (-50, 30), (30, 30), (20,20), (0,20)])
# poly_ = Polygon([(1000,1000),(1010,1010),(1000,1020)])
# poly2 = Polygon([(-50, 70), (50, 70), (50, 80), (-50,80)])
# ob_set = {"0": poly2, "1": poly_, "2": poly}
# ob_set_2 = {"0": poly_}
# print("START")
# route = [Point(0,0), Point(0,1), Point(0,2), Point(2,2), Point(2,1), Point(-2,2), Point(0,10)]
# _ = np.array([shapely_point_to_ndarray(p) for p in route])
# print(_)
# frame = TimingFrame(CartesianRoute.from_numpy(_))
# print(frame)
# frame2 = frame.remove_single_cycles()
# print(frame2)
# print("_"*10)
# print(frame.remove_single_cycles())

549
pyrate/pyproject.toml Normal file
View File

@ -0,0 +1,549 @@
# see MR !31 for why this only uses the legacy version
[build-system]
requires = ["setuptools>=40.8.0", "wheel"]
build-backend = "setuptools.build_meta:__legacy__"
[tool.black]
line-length = 110
target-version = [
"py310",
]
[tool.pytest.ini_options]
addopts = "-v --color=yes --cov=pyrate --doctest-modules"
junit_family = "xunit2"
testpaths = [
# for the doctests:
"pyrate",
# for the actual tests:
"tests"
]
doctest_optionflags = [
"IGNORE_EXCEPTION_DETAIL",
"DONT_ACCEPT_TRUE_FOR_1"
]
filterwarnings = [
"error",
"error::DeprecationWarning",
"error::PendingDeprecationWarning",
"ignore:The height, width, and precision:rasterio.errors.RasterioDeprecationWarning", # See https://github.com/rasterio/rasterio/issues/2466
]
[tool.coverage.run]
concurrency = ["multiprocessing"]
branch = true
[tool.coverage.report]
fail_under = 95.00
precision = 2
show_missing = true
exclude_lines = [
# Regexes for lines to exclude from consideration
# Have to re-enable the standard pragma
"pragma: no cover",
# Don't complain about missing debug-only code:
"def __repr__",
# Don't complain if tests don't hit defensive assertion code:
"raise AssertionError",
"raise NotImplementedError",
# Don't complain if non-runnable code isn't run:
"if __name__ == .__main__.:",
# It's okay to not cover unimplemented comparison methods
"return NotImplemented"
]
[tool.pylint.master]
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loaded into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=["cv2"]
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=["CVS"]
# Add files or directories matching the regex patterns to the blacklist. The
# regex matches against base names, not paths.
ignore-patterns=[]
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Use multiple processes to speed up Pylint.
jobs=0
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=[]
# Pickle collected data for later comparisons.
persistent="yes"
# Specify a configuration file.
#rcfile=
# When enabled, pylint would attempt to guess common misconfiguration and emit
# user-friendly hints instead of false-positive error messages
suggestion-mode="yes"
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension="no"
[tool.pylint.messages_control]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=[]
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=[
"locally-disabled",
"locally-enabled",
"bad-continuation", # conflicts with flake8's rule W504 that we adopted over W503
"no-value-for-parameter", # the last one does not work with hypothesis, cf. https://github.com/HypothesisWorks/hypothesis/issues/1654
"import-error", # to lint code without the need to install all dependencies
"too-many-instance-attributes",
"duplicate-code", # too many false positives
]
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifiers separated by comma (,) or put this option
# multiple times (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
enable= [
"c-extension-no-member",
"useless-suppression",
]
[tool.pylint.reports]
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation="10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)"
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
# Set the output format. Available formats are text, parseable, colorized, json
# and msvs (visual studio).You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format="colorized"
# Tells whether to display a full report or only the messages
reports="no"
# Activate the evaluation score.
score="yes"
[tool.pylint.refactoring]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
# Complete name of functions that never returns. When checking for
# inconsistent-return-statements if a never returning function is called then
# it will be considered as an explicit return statement and no message will be
# printed.
never-returning-functions=["optparse.Values", "sys.exit", "exit"]
[tool.pylint.variables]
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=[]
# Tells whether unused global variables should be treated as a violation.
allow-global-unused-variables="yes"
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=[]
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx="_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_"
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names="_.*|^ignored_|^unused_"
# Tells whether we should check for unused import in __init__ files.
init-import="no"
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=[]
[tool.pylint.logging]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=["logging"]
[tool.pylint.miscellaneous]
# List of note tags to take in consideration, separated by a comma.
notes=[
"FIXME",
"TODO"
]
[tool.pylint.format]
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=""
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines="^\\s*(# )?<?https?://\\S+>?$"
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Maximum number of characters on a single line.
max-line-length=110
# Maximum number of lines in a module
max-module-lines=1000
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=[
"trailing-comma",
"dict-separator"
]
# Allow the body of a class to be on the same line as the declaration if body
# contains single statement.
single-line-class-stmt="no"
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt="no"
[tool.pylint.basic]
# Naming style matching correct argument names
argument-naming-style="snake_case"
# Regular expression matching correct argument names. Overrides argument-
# naming-style
#argument-rgx=
# Naming style matching correct attribute names
attr-naming-style="snake_case"
# Regular expression matching correct attribute names. Overrides attr-naming-
# style
#attr-rgx=
# Bad variable names which should always be refused, separated by a comma
bad-names=[
"foo",
"bar",
"baz"
]
# Naming style matching correct class attribute names
class-attribute-naming-style="any"
# Regular expression matching correct class attribute names. Overrides class-
# attribute-naming-style
#class-attribute-rgx=
# Naming style matching correct class names
class-naming-style="PascalCase"
# Regular expression matching correct class names. Overrides class-naming-style
#class-rgx=
# Naming style matching correct constant names
const-naming-style="UPPER_CASE"
# Regular expression matching correct constant names. Overrides const-naming-
# style
#const-rgx=
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
# Naming style matching correct function names
function-naming-style="snake_case"
# Regular expression matching correct function names. Overrides function-
# naming-style
#function-rgx=
# Good variable names which should always be accepted, separated by a comma
good-names=[
"i",
"j",
"k",
"ex",
"Run",
"_",
"up",
"x",
"y",
"z"
]
# Include a hint for the correct naming format with invalid-name
include-naming-hint="yes"
# Naming style matching correct inline iteration names
inlinevar-naming-style="any"
# Regular expression matching correct inline iteration names. Overrides
# inlinevar-naming-style
#inlinevar-rgx=
# Naming style matching correct method names
method-naming-style="snake_case"
# Regular expression matching correct method names. Overrides method-naming-
# style
#method-rgx=
# Naming style matching correct module names
module-naming-style="snake_case"
# Regular expression matching correct module names. Overrides module-naming-
# style
#module-rgx=
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=[]
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx="^_"
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=["abc.abstractproperty"]
# Naming style matching correct variable names
variable-naming-style="snake_case"
# Regular expression matching correct variable names. Overrides variable-
# naming-style
#variable-rgx=
[tool.pylint.similarities]
# Ignore comments when computing similarities.
ignore-comments="yes"
# Ignore docstrings when computing similarities.
ignore-docstrings="yes"
# Ignore imports when computing similarities.
ignore-imports="no"
# Minimum lines number of a similarity.
min-similarity-lines=4
[tool.pylint.typecheck]
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=["contextlib.contextmanager"]
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=[]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members="yes"
# This flag controls whether pylint should warn about no-member and similar
# checks whenever an opaque object is returned when inferring. The inference
# can return multiple potential results while evaluating a Python object, but
# some branches might not be evaluated, which results in partial inference. In
# that case, it might be useful to still emit no-member and other checks for
# the rest of the inferred objects.
ignore-on-opaque-inference="yes"
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=["optparse.Values","thread._local","_thread._local"]
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=["cv2"]
# Show a hint with possible names when a member name was not found. The aspect
# of finding the hint is based on edit distance.
missing-member-hint="yes"
# The minimum edit distance a name should have in order to be considered a
# similar match for a missing member name.
missing-member-hint-distance=1
# The total number of similar names that should be taken in consideration when
# showing a hint for a missing member.
missing-member-max-choices=1
[tool.pylint.spelling]
# Limits count of emitted suggestions for spelling mistakes
max-spelling-suggestions=4
# Spelling dictionary name. Available dictionaries: none. To make it work,
# install the python-enchant package.
spelling-dict=""
# List of comma separated words that should not be checked.
spelling-ignore-words=[]
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=""
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words="no"
[tool.pylint.design]
# Maximum number of arguments for function / method
max-args=5
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of statements in function / method body
max-statements=50
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
[tool.pylint.classes]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods= [
"__init__",
"__new__",
"setUp"
]
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected= [
"_asdict",
"_fields",
"_replace",
"_source",
"_make"
]
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg="cls"
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg="mcs"
[tool.pylint.imports]
# Allow wildcard imports from modules that define __all__.
allow-wildcard-with-all="no"
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks="no"
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=[]
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=""
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=""
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=""
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=[]
# Force import order to recognize a module as part of a third party library.
known-third-party=[]
[tool.pylint.exception]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions="Exception"

View File

@ -0,0 +1,27 @@
FROM ubuntu:22.04
# Resolve APT dependencies
RUN apt-get update -qq
RUN apt-get install apt-utils software-properties-common -qq
RUN add-apt-repository ppa:antiprism/ppa -y
RUN apt-get install python3-pip g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite antiprism git -qq
# Install additional requirements
RUN pip install git+https://github.com/SciTools/cartopy.git
# Clone and install Pyrate
WORKDIR pyrate
COPY . .
RUN pip3 install .
WORKDIR /pyrate/experiments
WORKDIR /pyrate/data
WORKDIR /pyrate
ARG save_frequency=50
ARG seed_start=0
ARG continues=0
CMD python3 -m route_generator

33
pyrate/pyrate.dockerfile Normal file
View File

@ -0,0 +1,33 @@
FROM ubuntu:22.04
# Resolve APT dependencies
ENV PYRATE=True
RUN apt-get update -qq
RUN apt-get install apt-utils software-properties-common -qq
RUN add-apt-repository ppa:antiprism/ppa -y
RUN apt-get install python3-pip g++ python3-dev python3-gdal libgdal-dev libsqlite3-mod-spatialite antiprism git -qq
RUN apt install graphviz -yqq
# Install additional requirements
RUN pip3 install jupyter tensorflow tensorboard keras-tuner black[jupyter] mapply humanize jupyter-resource-usage
RUN pip3 install tensorflow-addons
RUN pip install jupyter_contrib_nbextensions
RUN jupyter contrib nbextension install --sys-prefix
RUN jupyter nbextension enable scratchpad/main --sys-prefix
RUN pip install git+https://github.com/SciTools/cartopy.git
RUN apt install vim nano -qqy
# Clone and install Pyrate
WORKDIR /pyrate
COPY setup.py .
COPY setup.cfg .
COPY pyrate/__init__.py pyrate/
COPY README.md .
RUN pip3 install .
COPY . .
WORKDIR /pyrate
ARG TOKEN=72336c3d3c88e6f060bf3a27b8ea74007c0f0c14a747d55a
CMD jupyter notebook --ip 0.0.0.0 --port 8888 --no-browser --allow-root --NotebookApp.token=${TOKEN}

View File

@ -0,0 +1,4 @@
"""The Pyrate package for autonomous surface vehicles."""
__version__ = "22.04"
__author__ = "Sailing Team Darmstadt e. V. members and affiliates"

View File

@ -0,0 +1,6 @@
"""The act package provides tools to use the employed actuators of the robot to execute planned actions.
Usually, this includes the computation of required motor positions to minimize
the error between desired and actual states.
In the ``control`` package, classes for controlling actuators such that deviations
from desired and measured states are driven towards zero are implemented."""

View File

@ -0,0 +1,9 @@
"""This package provides controllers that compute motor inputs, e.g.. angles or
voltages, such that a desired state can be reached and held."""
from .anti_windup_lqr import AntiWindupLqr
from .anti_windup_pid import AntiWindupPid
from .lqr import Lqr
from .pid import Pid
__all__ = ["AntiWindupLqr", "AntiWindupPid", "Lqr", "Pid"]

View File

@ -0,0 +1,119 @@
"""This module implements the Linear Quadratic Regulator with integral part and anti-windup."""
# Mathematics
from numpy import clip
from numpy import hstack
from numpy import ndarray
from numpy import vstack
from numpy import zeros
# LQR control
from .lqr import Lqr
class AntiWindupLqr(Lqr):
"""The anti-windup LQR controller, including an integration state for zero stationary error.
This controller resembles the LQR with added clipping of the control signal to a user-set
maximum value. Furthermore, the integral of the error over time is pruned (anti-windup).
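Concretely, the raw LQR signal is clipped to ``[-|max_control|, +|max_control|]``; the
clipped-off excess, scaled by ``1 / K[0, 0]``, is fed back into the error integral
(back-calculation).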
Examples:
First, import some helper functions from numpy.
>>> from numpy import array
>>> from numpy import eye
>>> from numpy import vstack
We then set up the LQR controller with some control constants.
>>> controller = AntiWindupLqr(
... array([[0, 1], [0, 0]]),
... array([0, 1])[:, None],
... array([1, 0])[None, :],
... eye(3),
... array([[1.0]]),
... array([1.0]),
... 0.5,
... )
We then specify an initial and desired state.
>>> initial = vstack([1.0, 0.0])
>>> desired = vstack([0.0])
Finally, we retrieve a control signal from the Lqr based on the values we just set.
>>> signal = controller.control(desired, initial)
Args:
A: System matrix (continuous time) ``(n, n)``
B: Input matrix ``(n, 1)``
C: Output matrix ``(1, n)``
Q: State cost matrix (pos. semi definite, symmetric) ``(n+1, n+1)``
R: Control cost matrix (pos. definite, symmetric) ``(1, 1)``
max_control: Limit of control signal
dt: Time between measurements
keep_trace: Whether to store a trace of control signals, states, etc.
"""
# In this context, we reproduce a common LQR notation
# pylint: disable=invalid-name, too-many-arguments
def __init__(
self,
A: ndarray,
B: ndarray,
C: ndarray,
Q: ndarray,
R: ndarray,
max_control: ndarray,
dt: float,
keep_trace: bool = False,
) -> None: # noqa: E741
# Controller specification for augmented state
n = A.shape[0] + 1
A_i = zeros((n, n))
A_i[1:, 1:] = A
A_i[0, 1:] = -C
B_i = vstack([zeros((1, 1)), B])
C_i = hstack([zeros((1, 1)), C])
# Setup internal LQR controller and own attributes
super().__init__(A_i, B_i, C_i, Q, R, dt, keep_trace, calculate_feed_forward=False)
self.V *= (self.C * self.K).sum()
self.max_control = max_control
self.summed_error = 0.0
def control(self, desired: ndarray, state: ndarray) -> ndarray:
"""Compute the control signal based on LQR controller.
Args:
desired: The desired output
state: The current state
Returns:
The control signal
"""
# Prepend summed error to state vector
state_i = vstack([self.summed_error, state])
# Compute errors
error = desired - self.C @ state_i
self.summed_error += self.dt * error
# Get the basic PID control signal and clip to specified boundary
lqr_signal = super().control(desired, state_i)
control_signal: ndarray = clip(lqr_signal, -abs(self.max_control), abs(self.max_control))
# Prune the integral part, i.e. apply anti-windup (back-calculation)
self.summed_error += (lqr_signal - control_signal) / self.K[0, 0]
return control_signal
def reset(self) -> None:
"""Resets the filter's memory, i.e. set the error integral to zero and empty the process trace."""
super().reset()
self.summed_error = 0.0

View File

@ -0,0 +1,87 @@
"""This module implements the PID (proportional integral derivative) controller."""
# Mathematics
from numpy import clip
from numpy import ndarray
# PID controller
from .pid import Pid
class AntiWindupPid(Pid):
"""The PID controller with anti-windup, i.e. a limited control output and integral part.
This controller resembles the PID with added clipping of the control signal to a user-set
maximum value. Furthermore, the integral of the error over time is pruned (anti-windup).
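Concretely, the raw PID signal is clipped to ``[-|max_control|, +|max_control|]``; the
clipped-off excess, scaled by ``1 / I``, is subtracted from the error integral
(back-calculation).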
Examples:
First, import some helper functions from numpy.
>>> from numpy import array
We then set up the Pid controller with some control constants.
>>> controller = AntiWindupPid(
... array([0.5]),
... array([0.1]),
... array([0.0]),
... 5.0,
... 0.1,
... )
We then specify an initial and desired state as well as the current state derivative.
>>> initial = array([5.0])
>>> desired = array([0.0])
>>> derivative = array([0.0])
Finally, we retrieve a control signal from the Pid based on the values we just set.
>>> signal = controller.control(desired, initial, derivative)
Args:
P: Proportional control constant ``(n,)``
I: Integral control constant ``(n,)``
D: Derivative control constant ``(n,)``
max_control: Limit of control signal
dt: Time between measurements
keep_trace: Whether to store a trace of control signals, states, etc.
"""
# In this context, we reproduce a common PID notation
# pylint: disable=too-many-arguments
def __init__(
self,
P: ndarray,
I: ndarray, # noqa: E741
D: ndarray,
max_control: float,
dt: float,
keep_trace: bool = False,
) -> None:
# Setup internal PID controller and own attributes
super().__init__(P, I, D, dt, keep_trace)
self.max_control = max_control
def control(self, desired: ndarray, state: ndarray, state_derivative: ndarray) -> ndarray:
"""Compute the control signal based on proportional, integral and derivative terms.
Args:
desired: The desired state
state: The current state
state_derivative: The current state derivative
Returns:
The control signal
"""
# Get the basic PID control signal and clip to specified boundary
pid_signal = super().control(desired, state, state_derivative)
control_signal: ndarray = clip(pid_signal, -abs(self.max_control), abs(self.max_control))
# Prune the integral part, i.e. apply anti-windup (back-calculation)
self.summed_error -= (pid_signal - control_signal) / self.I
return control_signal

View File

@ -0,0 +1,147 @@
"""This module implements the Linear Quadratic Regulator."""
# Mathematics
from numpy.linalg import inv
from numpy import ndarray
from numpy import ones
from scipy.linalg import solve
from scipy.linalg import solve_continuous_are
# Data modelling
from pandas import concat
from pandas import DataFrame
class Lqr:
"""The LQR controller.
The linear-quadratic-regulator (LQR) is a feedback controller that solves linear-quadratic
problems at minimum cost. Such problems are defined by linear differential equations and
quadratic cost functions.
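The feedback gain is computed as ``K = R^-1 B^T P``, where ``P`` solves the
continuous-time algebraic Riccati equation; the resulting control law is
``u = -K x + V r`` with the static feed-forward ``V``.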
Examples:
First, import some helper functions from numpy.
>>> from numpy import array
>>> from numpy import eye
>>> from numpy import vstack
We then set up the Lqr controller with some control constants.
>>> controller = Lqr(
... array([[0, 1], [0, 0]]),
... array([0, 1])[:, None],
... array([1, 0])[None, :],
... eye(2),
... array([[1.0]]),
... 0.5,
... )
We then specify an initial and desired state.
>>> initial = vstack([0.0, 0.0])
>>> desired = vstack([0.0])
Finally, we retrieve a control signal from the Lqr based on the values we just set.
>>> signal = controller.control(desired, initial)
Args:
A: System matrix (continuous time) ``(n, n)``
B: Input matrix ``(n, 1)``
C: Output matrix ``(1, n)``
Q: State cost matrix (pos. semi definite, symmetric) ``(n, n)``
R: Control cost matrix (pos. definite, symmetric) ``(1, 1)``
dt: Time between measurements
keep_trace: Whether to store a trace of control signals, states, etc.
calculate_feed_forward: Whether to compute a feed forward part
References:
- https://en.wikipedia.org/wiki/Linear%E2%80%93quadratic_regulator
"""
# In this context, we reproduce a common LQR notation
# pylint: disable=invalid-name, too-many-arguments
def __init__(
self,
A: ndarray,
B: ndarray,
C: ndarray,
Q: ndarray,
R: ndarray,
dt: float,
keep_trace: bool = False,
calculate_feed_forward: bool = True,
) -> None: # noqa: E741
# Dimensionality checks
assert len(A.shape) == 2 and A.shape[0] == A.shape[1], "Matrix A is not square!"
assert B.shape[0] == A.shape[0], "Wrong shape for input matrix B!"
assert C.shape[1] == A.shape[0], "Wrong shape for output matrix C!"
# Controller specification
self.A = A
self.B = B
self.C = C
self.Q = Q
self.R = R
self.dt = dt
# Compute controller gain
# For reference, see here: https://en.wikipedia.org/wiki/Linear%E2%80%93quadratic_regulator
self.P = solve_continuous_are(self.A, self.B, self.Q, self.R)
self.K = solve(self.R, self.B.T @ self.P)
# Calculate static feed forward
if calculate_feed_forward:
self.V = inv(-self.C @ inv(self.A - self.B @ self.K) @ self.B)
else:
self.V = ones([1, 1])
# Objects for process tracing
self.keep_trace = keep_trace
self.process = DataFrame(
columns=[
"desired",
"state",
"error",
"control_signal",
]
)
def control(self, desired: ndarray, state: ndarray) -> ndarray:
"""Compute the control signal based on LQR controller.
Args:
desired: The desired output
state: The current state
Returns:
The control signal
"""
# Compute errors
error = desired - self.C @ state
# Compute feedback and feed forward values
control_signal: ndarray = -self.K @ state + self.V @ desired
# Append control step to process trace
if self.keep_trace:
new = DataFrame(
{
"desired": (desired.copy(),),
"state": (state.copy(),),
"error": (error.copy(),),
"control_signal": (control_signal.copy(),),
}
)
self.process = concat([self.process, new], ignore_index=True)
# Return result
return control_signal
def reset(self) -> None:
"""Resets the filter's memory, i.e. set the error integral to zero and empty the process trace."""
self.process = self.process[0:0]

View File

@ -0,0 +1,133 @@
"""This module implements the PID (proportional integral derivative) controller."""
# Mathematics
from numpy import dot
from numpy import ndarray
from numpy import zeros_like
# Data modelling
from pandas import concat
from pandas import DataFrame
class Pid:
"""The PID controller.
The proportional-integral-derivative controller (PID) is an industry-standard feedback control loop.
This controller responds proportionally to the error, i.e. deviation of the desired state,
its time derivative and integral.
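In discrete time, the implemented control law is ``u_k = P e_k + I sum_j(e_j dt) - D dx_k``,
where ``e_k`` is the current error and ``dx_k`` the measured state derivative.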
Examples:
First, import some helper functions from numpy.
>>> from numpy import array
We then set up the Pid controller with some control constants.
>>> controller = Pid(
... array([0.5]),
... array([0.0]),
... array([0.0]),
... 0.1,
... )
We then specify an initial and desired state as well as the current state derivative.
>>> initial = array([5.0])
>>> desired = array([0.0])
>>> derivative = array([0.0])
Finally, we retrieve a control signal from the Pid based on the values we just set.
>>> signal = controller.control(desired, initial, derivative)
Args:
P: Proportional control constant ``(n,)``
I: Integral control constant ``(n,)``
D: Derivative control constant ``(n,)``
dt: Time between measurements
keep_trace: Whether to store a trace of control signals, states, etc.
References:
- https://en.wikipedia.org/wiki/PID_controller
"""
# In this context, we reproduce a common PID notation
# pylint: disable=invalid-name, too-many-arguments
def __init__(self, P: ndarray, I: ndarray, D: ndarray, dt: float, keep_trace: bool = False): # noqa: E741
# Controller specification
self.P = P
self.I = I # noqa: E741
self.D = D
self.dt = dt
# Error summation field
self.summed_error = zeros_like(P).transpose()
# Objects for process tracing
self.keep_trace = keep_trace
self.process = DataFrame(
columns=[
"desired",
"state",
"state_derivative",
"error",
"summed_error",
"proportional",
"integral",
"derivative",
"control_signal",
]
)
def control(self, desired: ndarray, state: ndarray, state_derivative: ndarray) -> ndarray:
"""Compute the control signal based on proportional, integral and derivative terms.
Args:
desired: The desired state
state: The current state
state_derivative: The current state's derivative
Returns:
The control signal
"""
# Compute errors
error = desired - state
self.summed_error += self.dt * error
# Compute PID values
proportional = dot(self.P, error)
integral = dot(self.I, self.summed_error)
derivative = dot(self.D, state_derivative)
# Compute control signal
control_signal: ndarray = proportional + integral - derivative
# Append control step to process trace
if self.keep_trace:
new = DataFrame(
{
"desired": (desired.copy(),),
"state": (state.copy(),),
"state_derivative": (state_derivative.copy(),),
"error": (error.copy(),),
"summed_error": (self.summed_error.copy(),),
"proportional": (proportional.copy(),),
"integral": (integral.copy(),),
"derivative": (derivative.copy(),),
"control_signal": (control_signal.copy(),),
},
)
self.process = concat([self.process, new], ignore_index=True)
# Return result
return control_signal
def reset(self) -> None:
"""Resets the filter's memory, i.e. set the error integral to zero and empty the process trace."""
self.summed_error = zeros_like(self.P).transpose()
self.process = self.process[0:0]

View File

@ -0,0 +1 @@
"""Contains generic helper functionality like file IO, mathematics and testing helpers."""

View File

@ -0,0 +1,7 @@
"""Enables handling of nautical charts and storage of obstacles in a spatial database."""
from .db import SpatialiteDatabase
from .s57_files import ChartFileHandler
from .s57_files import S57ChartHandler
__all__ = ["SpatialiteDatabase", "ChartFileHandler", "S57ChartHandler"]

View File

@ -0,0 +1,681 @@
"""This module adds support for a Spatialite database (SQLite DB with extra modules).
This module requires the *libsqlite3-mod-spatialite* system dependency.
The database allows for storage and efficient retrieval via spatial indexing of elements.
References:
- `General information <https://www.gaia-gis.it/gaia-sins/spatialite-manual-2.3.1.html>`__
- `The website of Spatialite <https://www.gaia-gis.it/fossil/libspatialite/index/>`__
- `The website of Spatialite and friends <https://www.gaia-gis.it/gaia-sins/>`__
- `Cookbook, Chapter "Spatial Indexing support"
<https://www.gaia-gis.it/gaia-sins/spatialite-cookbook-5/cookbook_topics.06.html#topic_Spatial_Indexing_support>`__
- `Cookbook, Chapter "Creating a well designed DB"
<https://www.gaia-gis.it/gaia-sins/spatialite-cookbook-5/cookbook_topics.03.html#topic_Creating_a_well_designed_DB>`__
- `SQL functions reference list <https://www.gaia-gis.it/gaia-sins/spatialite-sql-5.0.0.html>`__
"""
# Python standard
from contextlib import closing
from contextlib import contextmanager
from math import degrees
import random
import string
from warnings import warn
# Database interface
import sqlite3
from sqlite3 import Connection
# Typing
from typing import cast
from typing import Generator
from typing import Iterable
from typing import Iterator
from typing import Optional
# Shapely for internal abstraction
from shapely.geometry import LineString
from shapely.geometry import Point
from shapely.geometry import Polygon
import shapely.wkb
# Planning primitives
from pyrate.plan.geometry import Direction
from pyrate.plan.geometry import LocationType
from pyrate.plan.geometry import PolarGeometry
from pyrate.plan.geometry import PolarLocation
from pyrate.plan.geometry import PolarPolygon
from pyrate.plan.geometry import PolarRoute
# Geospatial helpers
from pyrate.plan.geometry.helpers import difference_latitude
from pyrate.plan.geometry.helpers import difference_longitude
from pyrate.plan.geometry.helpers import meters2rad
# Import this to enable GDAL/libgeos exceptions if it has not already happened
from . import s57_files as tmp_import
del tmp_import
class SpatialiteDatabase:
"""Allows for IO with the *Spatialite* SQLite database containing obstacles.
Reading of entries from the database is implemented using generators, i.e. the elements are retrieved
one by one as they are consumed by the caller. While this allows for the processing of large amounts of
data in constant memory, it also keeps the cursor to the database open until all elements have been
consumed. To consume all of the rows at once simply wrap it into the list constructor like this:
``all_as_list = list(database.read_all())``. Note that only the parsing to Pyrate primitives is
done lazily, while the actual database reading happens eagerly.
Internally, a spatial index is used for fast *retrieval* of obstacles given geometric constraints.
For example, this makes queries for all obstacles in a given bounding box take time roughly
proportional to the size of the result set, and not to the total size of the database.
Some real-world benchmarks can be obtained from the script :ref:`script-benchmark_db_and_projections`
and are discussed in :ref:`design-decisions-local-projections`.
See :meth:`~SpatialiteDatabase.create_tables` for details on the structure of the
database. The longitude is always the first/X component of the two-dimensional geometries.
*QGIS* can natively open the created databases for visual inspection. It's very efficient too.
A single polygon in the database might get split into multiple ones in a query due to clipping. A unique
index is maintained on a best-effort basis by adding subsequent numbers to the other slices of the same polygon.
This assumes that identifiers are somewhat uniformly distributed and not sequential numbers.
Examples:
First, let us create some polygons to be stored (PolarPoint and PolarRoute would also work):
>>> from pyrate.plan.geometry import PolarLocation, PolarPolygon, LocationType
>>> locations = [PolarLocation(50, 50), PolarLocation(50, 51), PolarLocation(51, 51)]
>>> polygon_1 = PolarPolygon(locations=locations, name="A Polygon, YaY!")
>>> polygon_2 = PolarPolygon(locations=locations, name="Some Name", identifier=42,
... location_type=LocationType.LAND)
>>> polygons = [polygon_1, polygon_2]
>>> polygons #doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
[PolarPolygon(locations=[...], name="A Polygon, YaY!"),
PolarPolygon(locations=[...], location_type=LocationType.LAND, name="Some Name", identifier=42)]
Then, you can simply store and then retrieve some polygons.
Note, that you have to call :meth:`SpatialiteDatabase.close` after using it or use it as a context
manager, as shown here.
>>> from pyrate.common.charts import SpatialiteDatabase
>>> with SpatialiteDatabase(":memory:") as database:
... print(len(database))
... database.write_geometries(polygons)
... # We need to wrap it into a call to `list()` to evaluate the generator returned by
... # `read_geometries_around` while the database is still open
... read = list(database.read_geometries_around(locations[0], radius=200_000)) # 200 km
... assert len(database) == len(read)
... print(len(database))
0
2
>>> # The database does not guarantee an order of the result set
>>> sort_by_name = lambda geometry: geometry.name
>>> read = list(sorted(read, key=sort_by_name))
>>> read #doctest: +NORMALIZE_WHITESPACE +ELLIPSIS
[PolarPolygon(locations=[...], name="A Polygon, YaY!", identifier=1),
PolarPolygon(locations=[...], location_type=LocationType.LAND, name="Some Name", identifier=42)]
>>> read == polygons
False
>>> # This is due to the first polygon now being given a new unused identifier
>>> read[0].identifier
1
>>> # So we reset it here for the comparison to succeed
>>> read[0].identifier = None
>>> # Now, they should be almost equal (except for floating-point approximations)
>>> polygon_1.equals_almost_congruent(read[0])
True
>>> polygon_2.equals_almost_congruent(read[1])
True
A full example application can be found in the script :ref:`script-benchmark_db_and_projections`.
Possible extensions:
- Allow for retrieving from arbitrary PolarPolygon/bounding box. See ``_read_geometries_clipped``.
- The method :meth:`~read_geometries_around` could be easily extended/complemented to support ellipses.
However, only rotation of that ellipse would make this useful, and adding that appears to be tricky.
Note:
Use with ``storage_path=":memory:"`` (see example above) to open an ephemeral database that resides in
RAM. This works when only a single database user (like a :class:`~SpatialiteDatabase` instance) will
access it.
Otherwise, passing ``"file::memory:?cache=shared"`` as a file will allow the same database to be
accessed by multiple different users within the same process. Both are useful for (unit-)testing too.
When passing extra options like ``"file::memory:?cache=shared"``,
you will also have to pass ``uri=True`` to :class:`~SpatialiteDatabase`
such that the parameters do not get mistaken for being part of an actual file name:
>>> with SpatialiteDatabase("file::memory:?cache=shared", uri=True) as database:
... print(len(database))
0
Args:
storage_path: the path where to look for the database file, usually ending with ``.sqlite``
issue_create_statement: tries to create the table(s) and indices if not yet existing:
this can safely be left enabled, as existing tables make this a no-op
kwargs: additional parameters to be passed to the database creation, see :class:`sqlite3.Connection`
Raises:
IOError: When the database file cannot be accessed
RuntimeError: If the Spatialite extension (*libsqlite3-mod-spatialite*) cannot be loaded
"""
#: The Spatial Reference System Identifier used for storing objects
#: this is the widely used WGS 84; see: https://spatialreference.org/ref/epsg/4326/
_SRID: int = 4326
def __init__(self, storage_path: str, issue_create_statement: bool = True, **kwargs) -> None:
# This may raise an IOError:
self._connection: Connection = sqlite3.connect(storage_path, **kwargs)
try:
# load the spatialite module
self._connection.load_extension("mod_spatialite.so")
except sqlite3.OperationalError as error: # pragma: no cover
raise RuntimeError(
"Cannot load the spatialite extension. Is it installed (see installation instructions)? "
f"Error was: {error}"
) from error
if issue_create_statement:
self.create_tables()
def create_tables(self, table_name: str = "Obstacles") -> None:
"""This creates the table(s) and indices in the database (if they do not exist yet).
See the module documentation of :mod:`pyrate.common.charts.db` for more information.
Args:
table_name: The name of the table to initialize
"""
# Check if the table "Obstacles" is present
check = f"SELECT COUNT(1) FROM SQLITE_MASTER WHERE name = '{table_name}'"
with closing(self._connection.execute(check)) as cursor:
count = cast(int, cursor.fetchone()[0]) # This needs to be cast as the result is untyped
if count == 0:
# It is not present, so we initialize the database here
statement = f"""
CREATE TABLE IF NOT EXISTS '{table_name}' (
id INTEGER PRIMARY KEY NOT NULL,
location_type TINYINT unsigned NOT NULL DEFAULT 0,
name VARCHAR CHARACTER DEFAULT NULL,
CHECK (location_type <= {LocationType.max_value()})
);
CREATE INDEX IF NOT EXISTS by_location_type ON '{table_name}' (location_type);
SELECT InitSpatialMetaDataFull();
SELECT AddGeometryColumn(
'{table_name}',
'geometry',
{SpatialiteDatabase._SRID},
'GEOMETRY', -- just as fast as e.g. POLYGON but more flexible
'XY',
TRUE -- set to NOT NULL
);
SELECT CreateSpatialIndex(
'{table_name}',
'geometry'
);
-- can only be done after column is added; but is not supported by SQLite
-- ALTER TABLE '{table_name}' ADD CHECK (IsValid(geometry));
"""
with self._connection: # auto-commits at the end
with self.disable_synchronization():
self._connection.executescript(statement).close()
@contextmanager
def disable_synchronization(self) -> Iterator[None]:
"""Temporarily disables file system synchronization for consistency to increase write performance.
To quote the `documentation of SQLite <https://www.sqlite.org/pragma.html#pragma_synchronous>`__:
"With synchronous OFF (0), SQLite continues without syncing as soon as it has handed data off to
the operating system. If the application running SQLite crashes, the data will be safe, but the
database might become corrupted if the operating system crashes or the computer loses power before
that data has been written to the disk surface. On the other hand, commits can be orders of
magnitude faster with synchronous OFF."
"""
self._connection.execute("PRAGMA synchronous=OFF").close()
yield
self._connection.execute("PRAGMA synchronous=ON").close()
def copy_contents_to_database(self, file_path: str, update: bool = False, **kwargs) -> None:
"""Dumps the content of this obstacle database to a new one.
This can be useful in cases where one wants to first create an extremely fast in-memory database and
later copy it to a file on disk.
Args:
file_path: the path of the other database
update: if set to ``True``, update/replace on conflicting identifiers; else raise an error in that
case
kwargs: additional parameters to be passed to the database creation, see attribute ``kwargs`` of
:class:`~SpatialiteDatabase`
Raises:
sqlite3.IntegrityError: If a duplicate key should have been inserted and ``update`` was set to
``False``
"""
# init the other database
with SpatialiteDatabase(file_path, **kwargs):
pass
database_name = _random_name()
command = "REPLACE" if update else "INSERT"
statements = f"""
ATTACH '{file_path}' AS {database_name};
{command} INTO {database_name}.Obstacles SELECT * FROM main.Obstacles;
DETACH {database_name};
"""
with self._connection: # auto-commits at the end
self._connection.executescript(statements).close()
def simplify_contents(self, simplify_tolerance: float) -> None:
"""Simplifies all geometries within the database. Always runs :meth:`~vacuum` afterwards.
Args:
simplify_tolerance: the tolerance within all new points shall lie wrt. to the old ones, in meters,
non-negative. Set to zero to disable.
Further ideas:
- Keep topology between objects, not just within them, e.g. see
`this blog post <https://trac.osgeo.org/postgis/wiki/UsersWikiSimplifyPreserveTopology>`__.
"""
assert simplify_tolerance >= 0, "tolerance must be non-negative"
if simplify_tolerance > 0:
tolerance_degrees = degrees(meters2rad(simplify_tolerance))
statement = (
f"UPDATE Obstacles SET geometry = ST_SimplifyPreserveTopology(geometry, {tolerance_degrees})"
)
with self._connection: # auto-commits at the end
self._connection.execute(statement).close()
self.vacuum()
def vacuum(self) -> None:
"""Defragments the database. This is useful after deleting or shrinking many entries."""
with self._connection: # auto-commits at the end
self._connection.execute("VACUUM").close()
def write_geometry(
self, geometry: PolarGeometry, update: bool = False, raise_on_failure: bool = True
) -> None:
"""Alias for ``write_obstacles([obstacle], update)``. See :meth:`~write_obstacles`.
Args:
geometry: the geometry to place into the database, identified by
its :attr:`~pyrate.plan.geometry.Geospatial.identifier`
update: see :meth:`~write_geometries`
raise_on_failure: see :meth:`~write_geometries`
"""
return self.write_geometries([geometry], update=update, raise_on_failure=raise_on_failure)
def write_geometries(
self, geometries: Iterable[PolarGeometry], update: bool = False, raise_on_failure: bool = True
) -> None:
"""Writes geometries into the database.
All geometries are only identified by their identifier as a primary key.
If ``update is True``, any existing geometries with the same IDs will be updated/replaced.
If ``update is False``, an exception is raised if duplicate keys are to be inserted.
Args:
geometries:
The geometries to place into the database, identified by their
:attr:`~pyrate.plan.geometry.Geospatial.identifier`.
update:
If set to ``True``, update/replace on conflicting identifiers;
else raise an error in that case.
If set to ``True``, no guarantees about inserts can be made (see :class:`ValueError` below).
raise_on_failure:
If set to ``False`` suppress the :class:`ValueError` below and instead print a warning.
Raises:
sqlite3.IntegrityError: If a duplicate key should have been inserted and ``update`` was set to
``False``
sqlite3.IntegrityError: If a value was not within the constraints; should never happen if all
:class:`~pyrate.plan.geometry.polygon.PolarPolygon` were created properly
ValueError:
If the provided geometries are not valid according to *spatialite* and could not be repaired.
However, any valid geometries will have been inserted by then.
Also, this is only possible to be checked if ``update`` is set to ``False``.
Else, incomplete inserts will simply be ignored.
Only very basic cleanup is attempted.
Suppressed if ``raise_on_failure is False``.
"""
count_before = self.count_geometries()
# Build query
command = "REPLACE" if update else "INSERT"
statement = f"""
WITH _insert_temp(id,location_type,name,geometry)
AS (VALUES (?,?,?,SanitizeGeometry(GeomFromWKB(?,{SpatialiteDatabase._SRID}))))
{command} INTO Obstacles
SELECT * FROM _insert_temp WHERE IsValid(geometry)
"""
# Convert data
data = [(g.identifier, g.location_type, g.name, to_wkb(g)) for g in geometries]
# Execute statement
with self._connection: # auto-commits at the end
self._connection.executemany(statement, data).close()
# TODO(Felix.Divo):
# We want to notify the user if the insert was incomplete, i.e. if a geometry was invalid
# (1) `cursor.rowcount` from executemany() does not work since it returns -1
# (2) `cursor.lastrowid` contains only the last ID, so we cannot use that
# (3) Appending `RETURNING id` causes exceptions when used with executemany():
# "sqlite3.ProgrammingError: executemany() can only execute DML statements"
# (4) So we do the stupid thing: Count before and after. Does not work with ``update=True``!
# This could also cause problems with concurrency.
# (5) One could also repair by Buffer(), though that might not be what is desired,
# cf. https://shapely.readthedocs.io/en/stable/manual.html#object.buffer.
# Make sure that all rows were valid
missing = count_before + len(data) - self.count_geometries()
if not update and missing > 0:
message = f"{missing} of the {len(data)} geometries were invalid"
if raise_on_failure:
raise ValueError(message)
warn(message)
def read_all(
self, only_location_type: Optional[LocationType] = None
) -> Generator[PolarGeometry, None, None]:
"""Read all stored geometries, optionally filtered by a type.
Args:
only_location_type: get only geometries of that type, if not set to ``None``
Yields:
The geometries as read from the database.
"""
yield from self._read_geometries(
geometry_column="geometry", # no transformation, just select the column
only_location_type=only_location_type,
)
def read_geometries_around(
self,
around: PolarLocation,
radius: float = 10_000.0,
only_location_type: Optional[LocationType] = None,
) -> Generator[PolarGeometry, None, None]:
"""Reads and clips geometries in a given radius around some location.
The geometries are clipped to the circle given by the location ``around`` and ``radius``. This means
that any parts stretching beyond the circle are not returned and the geometry approximately follows
the outer circle in such cases. If the ellipse is deformed at (very) high/low latitudes, the clipping
area is selected such that at least all geometries in the defined area are included, and possibly some
more.
Note:
This method internally uses an ellipse as the clipping area even for circular clipping areas in
order to account for some distortions of polar coordinates at high latitudes. Also, keep in mind
that clipping/selecting for inclusion in the result set is not a perfect operation, as the
clipping area is internally discretized to a geometry. It thus has corners, and is not smooth
like an ellipse in the mathematical sense.
Args:
around: The point around which obstacles are to be extracted; assumed to be in degrees
radius: The radius around *around* to retrieve items from in meters; default: 10 000 m.
The behaviour is unspecified if it is very large, like more than 1 000 000 m.
It must be at least zero.
only_location_type: Get only obstacles of that type, if not set to ``None``
Yields:
The geometries as read from the database.
"""
# Safety assertions on given radius
assert radius >= 0.0, "the radius must be non-negative"
assert (
radius <= 1_000_000
), "radius > 1_000_000; see docs; this is not a fundamental restriction but it is untested"
# The distance in polar coordinates is always equal when going either west or east
east_west, _ = around.translate(Direction.East, radius)
longitudal_radius = difference_longitude(around.longitude, east_west.longitude)
# The above might be different when going north or south
# To disambiguate, we take the larger of the two distances as the radius
north, _ = around.translate(Direction.North, radius)
south, _ = around.translate(Direction.South, radius)
latitudal_radius = max(
difference_latitude(around.latitude, north.latitude),
difference_latitude(around.latitude, south.latitude),
)
# Place a corner of the discretized ellipse every 24 degrees,
# i.e. turn it into a polygon with 15 corners
every_degrees = 24
# MakeEllipse(...) takes the two radii in lat/long direction in degrees and returns a line string
clipping_area = (
f"MakePolygon(MakeEllipse({around.longitude}, {around.latitude}, "
f"{longitudal_radius}, {latitudal_radius}, {SpatialiteDatabase._SRID}, {every_degrees}))"
)
yield from self._read_geometries_clipped(clipping_area, only_location_type)
def _read_geometries_clipped(
self, clipping_area: str, only_location_type: Optional[LocationType]
) -> Generator[PolarGeometry, None, None]:
"""Internal helper for querying for clipped geometries.
Args:
clipping_area: The area to clip to
only_location_type: The type of the read location
Yields:
The geometries clipped to the given area.
"""
yield from self._read_geometries(
geometry_column=f"Intersection(geometry, {clipping_area})",
only_location_type=only_location_type,
)
def _read_geometries(
self, geometry_column: str, only_location_type: Optional[LocationType]
) -> Generator[PolarGeometry, None, None]:
"""Factors out the common parts of assembling SQL statements for the ``read_*`` methods.
Args:
geometry_column:
A SQL "column name" that can be used in a SELECT clause and which returns a Spatialite
geometry. Examples are ``"geometry"`` to simply return the geometries unmodified or something
like ``"Reverse(geometry) as ignored_column_name"`` to perform some modification.
The name does not matter.
only_location_type: Get only obstacles of that type, if not set to ``None``
Yields:
The geometries as read from the database.
"""
if only_location_type is None:
additional_where_constraint = ""
else:
additional_where_constraint = f"AND location_type = {only_location_type.value}"
# `IsValid(wrapped_geometry)` excludes empty geometries which can sometimes occur
yield from self._read_from_sql(
f"""
WITH temptable AS (
SELECT id, location_type, name, ({geometry_column}) as wrapped_geometry
FROM Obstacles
WHERE wrapped_geometry IS NOT NULL AND IsValid(wrapped_geometry)
{additional_where_constraint}
)
SELECT id, location_type, name, AsBinary(wrapped_geometry) as geometry
FROM temptable
"""
)
def _read_from_sql(self, sql_statement: str) -> Generator[PolarGeometry, None, None]: # noqa: C901
"""Reads geometries for a given complete SQL query.
Supports reading these geometry types and maps them to instances of
:attr:`pyrate.plan.geometry.PolarGeometry`:
- ``Point``
- ``LineString`` and ``MultiLineString``
- ``Polygon`` and ``MultiPolygon``
Args:
sql_statement: The SQL statement to query with and read geometries from
Yields:
The geometries as read from the database.
"""
with closing(self._connection.execute(sql_statement)) as cursor:
# This could in theory be parallelized, but that has not been required so far
# Keep in mind that `cursor.fetchall()` returns a list, not a generator
for (identifier, location_type, name, binary_geometry) in cursor.fetchall():
parsed_geometry = shapely.wkb.loads(binary_geometry)
geometry_type = parsed_geometry.type
# The database contains only the geometry types Point, LineString and Polygon.
# However, depending on the performed operation, some entries might be cut into
# MultiLineString or MultiPolygon, so we need to be able to decode them too.
# MultiPoint currently cannot occur.
def to_polygon(
polygon: Polygon, unique_identifier: int, name=name, location_type=location_type
) -> PolarPolygon:
locations = [PolarLocation(y, x) for (x, y) in polygon.exterior.coords]
return PolarPolygon(locations, LocationType(location_type), name, unique_identifier)
def to_route(
line_string: LineString, unique_identifier: int, name=name, location_type=location_type
) -> PolarRoute:
locations = [PolarLocation(y, x) for (x, y) in line_string.coords]
return PolarRoute(locations, LocationType(location_type), name, unique_identifier)
if geometry_type == "Point":
point = cast(Point, parsed_geometry)
yield PolarLocation(
latitude=point.y,
longitude=point.x,
location_type=LocationType(location_type),
name=name,
identifier=identifier,
)
elif geometry_type == "LineString":
yield to_route(cast(LineString, parsed_geometry), identifier)
elif geometry_type == "MultiLineString":
for index, route in enumerate(parsed_geometry.geoms):
# Make identifier unique by best effort
yield to_route(cast(LineString, route), unique_identifier=identifier + index)
elif geometry_type == "Polygon":
yield to_polygon(cast(Polygon, parsed_geometry), identifier)
elif geometry_type == "MultiPolygon":
for index, polygon in enumerate(parsed_geometry.geoms):
# Make identifier unique by best effort
yield to_polygon(cast(Polygon, polygon), unique_identifier=identifier + index)
else: # pragma: no cover
# This should never happen in a well-formatted database
raise RuntimeError(f'illegal geometry type "{geometry_type}" returned')
def clear(self) -> None:
"""Deletes all obstacles from the database, but does not touch the table structure or indices."""
with self._connection: # auto-commits at the end
self._connection.execute("DELETE FROM Obstacles").close()
def count_geometries(self) -> int:
"""Counts all obstacles in the database."""
with closing(self._connection.execute("SELECT COUNT(*) FROM Obstacles")) as cursor:
result = cursor.fetchone()
return cast(int, result[0]) # needs to be cast as the result is untyped
def count_vertices(self) -> int:
"""Counts all vertices of all obstacles in the database."""
statement = "SELECT SUM(ST_NPoints(geometry)) FROM Obstacles"
with closing(self._connection.execute(statement)) as cursor:
result = cursor.fetchone()
count = cast(Optional[int], result[0]) # needs to be cast as the result is untyped
if count is None:
# this can happen if the database is empty since `SUM` will return `NULL` in that case
return 0
return count
def __len__(self) -> int:
return self.count_geometries()
def close(self) -> None:
"""Closes the connection to the database and releases all associated resources.
Although not clearly documented in the standard library, :meth:`sqlite3.Connection.close` and thus
this method can apparently be called multiple times.
"""
# The `_connection` is unset if an exception has been thrown, but
# `close()` might still be called when the database was used as a
# context manager
if hasattr(self, "_connection"):
self._connection.close() # pragma: no cover
def __enter__(self) -> "SpatialiteDatabase":
return self
def __exit__(self, exc_type, exc_value, traceback) -> None:
self.close()
# Makes sure the connection is closed when this object ceases to exist
def __del__(self) -> None:
self.close()
def _random_name() -> str:
"""Returns a probably unique random name consisting only of latin letters."""
return "".join(random.choices(string.ascii_letters, k=32))
def to_wkb(geometry: PolarGeometry) -> bytes:
"""Converts the given geometries into well-known binary (WKB) bytes.
Args:
geometry: The polar geometry to be converted
Returns:
The WKB representation of the geometry
Raises:
NotImplementedError:
If the geometry type cannot be converted to bytes.
This will never occur when the type signature is obeyed.
"""
if isinstance(geometry, PolarLocation):
return cast(bytes, shapely.wkb.dumps(Point(geometry.longitude, geometry.latitude)))
if isinstance(geometry, PolarRoute):
return cast(bytes, shapely.wkb.dumps(LineString(geometry.to_numpy())))
if isinstance(geometry, PolarPolygon):
return cast(bytes, shapely.wkb.dumps(Polygon(geometry.to_numpy())))
# Can never occur if the type signature was obeyed but better be explicit here
raise NotImplementedError(f"unknown geometry type: {type(geometry).__name__}")
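# A hedged round-trip sketch: the output of `to_wkb` can be parsed back with
# `shapely.wkb.loads`, which is also what `_read_from_sql` above does:
#
#     point = shapely.wkb.loads(to_wkb(PolarLocation(53.5, 8.1)))
#     assert (point.x, point.y) == (8.1, 53.5)  # WKB stores (longitude, latitude)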

View File

@ -0,0 +1,319 @@
"""Allows to find and read nautical charts. Currently, this only supports IHO S-57 charts.
Examples:
This shows how to recursively read all obstacles/relevant chart objects from a given directory:
>>> from pyrate.common.charts import ChartFileHandler, S57ChartHandler
>>> path_to_charts = "stda/data/charts/noaa_vector/data"
>>> # Nothing about this is is specific to the `S57ChartHandler`, so cast it to `ChartFileHandler`
>>> handler: ChartFileHandler = S57ChartHandler()
>>> polygons = [ #doctest: +SKIP
... handler.read_chart_file(chart_file)
... for chart_file in handler.find_chart_files(path_to_charts)
... ]
Ideas:
- Maybe use `Fiona <https://pypi.org/project/Fiona/>`__ as an alternative?
Resources:
- Documentation on the S-57 file format and the relevant parts of GDAL:
- https://gdal.org/python/osgeo.ogr-module.html
- https://gdal.org/drivers/vector/s57.html
- https://www.teledynecaris.com/s-57/frames/S57catalog.htm (the entire object catalogue!)
- https://gdal.org/api/python_gotchas.html (!)
- Examples and Cookbooks:
- https://pcjericks.github.io/py-gdalogr-cookbook/vector_layers.html
- and more general: https://pcjericks.github.io/py-gdalogr-cookbook/index.html
- https://lists.osgeo.org/pipermail/gdal-dev/2008-April/016767.html
- Helpers:
- The program QGIS is very helpful because it can open S-57 files visually.
"""
# Python standard
from abc import ABC
from abc import abstractmethod
from hashlib import sha1
import os
import os.path
from pathlib import Path
import sys
from warnings import catch_warnings
from warnings import simplefilter
from warnings import warn
# Typing
from typing import Generator
from typing import Mapping
from typing import Optional
from typing import Tuple
from typing import Union
# Planning primitives
from pyrate.plan.geometry import LocationType
from pyrate.plan.geometry import PolarLocation
from pyrate.plan.geometry import PolarPolygon
# Allow osgeo to be missing
# Set to True if the osgeo package is available, or False if not
_OSGEO_PRESENT: bool
try:
# This emits warnings (at least on Python 3.8)
with catch_warnings():
simplefilter("ignore", DeprecationWarning, lineno=8)
from osgeo import gdal
from osgeo import ogr
except ImportError as _error: # pragma: no cover
_OSGEO_PRESENT = False
warn(
"Could not import package osgeo. Please install it as described in the README. "
f"Error was: {_error}"
)
del _error
else:
_OSGEO_PRESENT = True
ogr.UseExceptions()
#: Currently there are only locations and polygons, see :meth:`S57ChartHandler._create_obstacle`
PolarChartGeometry = Union[PolarLocation, PolarPolygon]
class ChartFileHandler(ABC):
"""This is a generic class for handling chart files, that defines a common interface."""
@staticmethod
@abstractmethod
def find_chart_files(search_path: Union[str, "os.PathLike[str]"]) -> Generator[Path, None, None]:
"""Recursively find all files that can be handled by this handler.
Args:
search_path: The path to search in recursively. Follows symlinks.
Yields:
Path: One path per file found
"""
@abstractmethod
def read_chart_file(
self, path: Union[str, "os.PathLike[str]"]
) -> Generator[PolarChartGeometry, None, None]:
"""Reads a chart file and converts the relevant layers/features into ChartObstacles.
Args:
path: The path to a chart file of the right format
"""
class S57ChartHandler(ChartFileHandler):
"""Reads IHO S-57 chart files. The returned geometries are *not* checked for validity.
These chart objects are extracted from the source files:
- Landmasses (from S-57 object type ``LNDARE``)
- Depth values (from S-57 object type ``DEPARE``, via attribute ``DRVAL2``, assumed to be in meters)
- Buoys (from S-57 object type ``BOY*``, e.g. ``BOYCAR``)
- Possibly more in the future
The identifiers of the created objects are created deterministically from the chart name and the already
contained identifiers. They are supposed to be unique across all charts. They are created by first
assembling a string that is guaranteed to be a globally unique identifier from the chart file name and the
``LNAM`` field. Then, the string is hashed and truncated to form a 63-bit identifier.
The names of the objects are created like this:
``{chart file name}#{chart-unique alphanumeric identifier} ({human-readable type}): "{common name}"``.
All objects are associated with the applicable :class:`pyrate.plan.geometry.LocationType`.
Raises:
ImportError: If the :mod:`osgeo` package is missing
"""
def __init__(self):
if not _OSGEO_PRESENT: # pragma: no cover
raise ImportError('the "osgeo" package must be installed for this handler to function')
#: This maps layer names to the corresponding parameters for S57ChartHandler._create_obstacle(...)
#: These are not all objects but merely the ones which are trivial to map.
_SIMPLE_MAPPINGS: Mapping[str, Tuple[LocationType, str]] = {
"LNDARE": (LocationType.LAND, "Landmass"),
"BOYCAR": (LocationType.OBSTRUCTION, "Buoy (BOYCAR)"),
"BOYINB": (LocationType.OBSTRUCTION, "Buoy (BOYINB)"),
"BOYISD": (LocationType.OBSTRUCTION, "Buoy (BOYISD)"),
"BOYLAT": (LocationType.OBSTRUCTION, "Buoy (BOYLAT)"),
"BOYSAW": (LocationType.OBSTRUCTION, "Buoy (BOYSAW)"),
"BOYSPP": (LocationType.OBSTRUCTION, "Buoy (BOYSPP)"),
# TODO(Felix): Should be included later on; See #19
# "OBSTRN": (LocationType.OBSTRUCTION, "Obstruction"),
# "OFSPLF": (LocationType.OBSTRUCTION, "Platform"),
# "OSPARE": (LocationType.OBSTRUCTION, "Production Area/Wind farm"),
# "PILPNT": (LocationType.OBSTRUCTION, "Post"),
# "MIPARE": (LocationType.OBSTRUCTION, "Military Exercise Area"),
# "DMPGRD": (LocationType.OBSTRUCTION, "Dumping Ground"),
# TODO(Felix): maybe later add anchorage and water sport; See #19
}
@staticmethod
def find_chart_files(search_path: Union[str, "os.PathLike[str]"]) -> Generator[Path, None, None]:
for root, _, files in os.walk(str(search_path), followlinks=True):
for file in files:
if file.endswith(".000"):
# assume it is an IHO S-57 file
yield Path(root) / file
# else: ignore the file
def read_chart_file(
self, path: Union[str, "os.PathLike[str]"]
) -> Generator[PolarChartGeometry, None, None]:
"""Reads a chart file and converts the relevant layers/features into ChartObstacles.
Args:
path: The path to the S-57 chart file (e.g. ``something.000``)
Yields:
All relevant obstacles with globally unique and deterministic names
Raises:
FileNotFoundError: If the database file(s) is/are missing
IOError: If the database file(s) cannot be opened for another reason
"""
file_path = str(path)
if not os.path.exists(file_path):
raise FileNotFoundError(f"cannot open dataset: {file_path}")
# open database
dataset = ogr.Open(file_path, gdal.GA_ReadOnly)
if not dataset:
raise IOError(f"cannot open dataset (invalid file): {file_path}")
file_name = os.path.splitext(os.path.basename(file_path))[0]
file_name_bytes = file_name.encode()
# read contents
for i in range(int(dataset.GetLayerCount())):
layer = dataset.GetLayerByIndex(i)
for geometry, feature_id in S57ChartHandler._convert_layer_to_obstacles(layer):
# prepend the name of the file to make it unique and ease lookup of objects in the source
# this is also required because the LNAM field is not guaranteed to be unique across files
geometry.name = f"{file_name}#{geometry.name}"
# hash a combination of file name and feature identifier as that together is globally unique
hashed_id = sha1(file_name_bytes + feature_id.encode()).digest()
# truncate to 64 bit and create an int from it
identifier = int.from_bytes(hashed_id[-8:], sys.byteorder, signed=True)
# cut off the most-significant bit to arrive at 63 bits
geometry.identifier = identifier & 0x7F_FF_FF_FF_FF_FF_FF_FF
yield geometry
@staticmethod
def _convert_layer_to_obstacles(
layer: ogr.Layer,
) -> Generator[Tuple[PolarChartGeometry, str], None, None]:
"""Converts the relevant obstacles of a layer into :attr:`s57_files.PolarChartGeometry`.
Args:
layer: The layer to search in
Yields:
For each relevant feature in the layer: a geometry (point or polygon) and its feature ID
"""
layer_name = layer.GetName()
# We first handle the more complicated cases and then convert using S57ChartHandler._SIMPLE_MAPPINGS
if layer_name == "DEPARE": # "depth area"
for feature in layer:
# Warning: we assume these depths are given in meters, which could be wrong in some cases but
# worked in our tests
depth_max = feature["DRVAL2"]
if depth_max <= 5:
yield from S57ChartHandler._create_obstacle(
feature, "Depth <= 5m", LocationType.SHALLOW_WATER
)
elif depth_max <= 10:
yield from S57ChartHandler._create_obstacle(
feature, "Depth <= 10m", LocationType.SHALLOW_WATER
)
elif depth_max <= 20:
yield from S57ChartHandler._create_obstacle(
feature, "Depth <= 20m", LocationType.SHALLOW_WATER
)
elif depth_max <= 50:
yield from S57ChartHandler._create_obstacle(
feature, "Depth <= 50m", LocationType.SHALLOW_WATER
)
else:
if layer_name in S57ChartHandler._SIMPLE_MAPPINGS:
location_type, human_readable_type = S57ChartHandler._SIMPLE_MAPPINGS[layer_name]
for feature in layer:
yield from S57ChartHandler._create_obstacle(feature, human_readable_type, location_type)
@staticmethod
def _create_obstacle(
feature: ogr.Feature,
human_readable_type: str,
location_type: LocationType,
) -> Generator[Tuple[PolarChartGeometry, str], None, None]:
"""Creates a point or area obstacle from a given feature.
Args:
feature: The feature to transform
human_readable_type: A human-readable string describing what this is, like ``"landmass"``
location_type: The location type to be used
Yields:
(1) A location or polygon that represents an obstacle
(2) A feature ID for that obstacle; not globally unique, but unique within the chart file
"""
# This ID is guaranteed to be unique within the chart file and composed of AGEN, FIDN, and FIDS
feature_id: str = feature["LNAM"]
assert feature_id is not None, "the LNAM field is mandatory for all objects"
# Remark: feature.IsFieldSetAndNotNull("OBJNAM") seems to work but logs tons of errors to stderr
# It is not mandatory for all types of chart objects
object_name: Optional[str]
try:
object_name = feature["OBJNAM"] # might be None
except (ValueError, KeyError):
object_name = None
if object_name is None:
object_name = "---"
else:
# Replace broken unicode text (surrogates)
object_name = object_name.encode("utf-8", "replace").decode("utf-8")
# Construct the obstacle's name
name = f'{feature_id} ({human_readable_type}): "{object_name}"'
# Extract the geometries (as the feature may or may not contain a geometry collection)
geometry = feature.GetGeometryRef()
geometry_type = geometry.GetGeometryType()
if geometry_type == ogr.wkbPoint:
point = PolarLocation(
latitude=geometry.GetY(), longitude=geometry.GetX(), name=name, location_type=location_type
)
yield point, feature_id
elif geometry_type == ogr.wkbLineString:
# Ignore this feature as there are currently no features being extracted that are
# LineStrings and relevant for navigation
# TODO(Someone): One should verify that this is okay; See #125
warn(f"Ignoring LineString geometry in chart: {name}")
elif geometry_type == ogr.wkbPolygon:
# TODO(Felix): We throw away the inner rings (i.e. the holes); See #106
outer_ring = geometry.GetGeometryRef(0)
points = [PolarLocation(latitude=lat, longitude=lon) for lon, lat in outer_ring.GetPoints()]
yield PolarPolygon(points, name=name, location_type=location_type), feature_id
else:
# Apparently, no other geometries appear in charts
raise NotImplementedError(f"Cannot handle geometry type {ogr.GeometryTypeToName(geometry_type)}")

View File

@ -0,0 +1,5 @@
"""Provides mathematical classes that are useful throughout Pyrate's codebase."""
from .gaussian import Gaussian
__all__ = ["Gaussian"]

View File

@ -0,0 +1,132 @@
"""This module includes an abstraction of gaussian distributions."""
# Typing
from typing import cast
# Mathematics
from numpy import ndarray
from scipy.stats import multivariate_normal
class Gaussian:
"""A weighted multivariate gaussian distribution.
Examples:
A Gaussian can simply be created from a mean vector and a covariance matrix (and an optional weight):
>>> from numpy import array
>>> from numpy import vstack
>>> mean = vstack([0.0, 0.0])
>>> covariance = array([[1.0, 0.0], [0.0, 1.0]])
>>> N = Gaussian(mean, covariance, weight=1.0)
>>> N(vstack([0.0, 0.0])) # doctest: +ELLIPSIS
0.159...
Two Gaussians are equal if and only if all attributes are equal:
>>> N == N
True
>>> other_covariance = array([[99.0, 0.0], [0.0, 99.0]])
>>> other_N = Gaussian(mean, other_covariance, weight=1.0)
>>> other_N(vstack([10.0, 10.0])) # doctest: +ELLIPSIS
0.000585...
>>> N == other_N
False
Args:
mean: The mean of the distribution as column vector, of dimension ``(n, 1)``
covariance: The covariance matrix of the distribution, of dimension ``(n, n)``
weight: The weight of the distribution, e.g. within a mixture model
References:
- https://en.wikipedia.org/wiki/Multivariate_normal_distribution
"""
def __init__(self, mean: ndarray, covariance: ndarray, weight: float = 1.0) -> None:
# Sanity checks on given parameters
assert len(mean.shape) == 2 and mean.shape[1] == 1, "Mean needs to be a column vector!"
assert len(covariance.shape) == 2, "Covariance needs to be a 2D matrix!"
assert covariance.shape[0] == covariance.shape[1], "Covariance needs to be a square matrix!"
assert covariance.shape[0] == mean.shape[0], "Dimensions of mean and covariance don't fit!"
# Assign values
self.mean = mean
self.covariance = covariance
self.weight = weight
# ######################################
# Properties following a common filter notation
# pylint: disable=invalid-name
@property
def x(self) -> ndarray:
"""A shorthand for the distribution's mean.
Returns:
The mean, of dimension ``(n, 1)``
"""
return self.mean
@x.setter
def x(self, value: ndarray) -> None:
self.mean = value
@property
def P(self) -> ndarray:
"""A shorthand for the distribution's covariance matrix.
Returns:
The covariance, of dimension ``(n, n)``
"""
return self.covariance
@P.setter
def P(self, value: ndarray) -> None:
self.covariance = value
@property
def w(self) -> float:
"""A shorthand for the distribution's weight.
Returns:
The weight of this distribution
"""
return self.weight
@w.setter
def w(self, value: float):
self.weight = value
def __call__(self, value: ndarray) -> float:
"""Evaluate the gaussian at the given location.
Args:
value: Where to evaluate the gaussian, of dimension ``(n, 1)``
Returns:
The probability density at the given location
"""
# Compute weighted probability density function
distribution = multivariate_normal(mean=self.mean.T[0], cov=self.covariance)
return self.weight * cast(float, distribution.pdf(value.T[0]))
def __eq__(self, other) -> bool:
"""Checks if two multivariate normal distributions are equal.
Args:
other: The distribution to compare with
Returns:
Whether the two distributions are the same
"""
return (
cast(bool, (self.mean == other.mean).all())
and cast(bool, (self.covariance == other.covariance).all())
and self.weight == other.weight
)
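# A hedged example of what the `weight` attribute enables, e.g. evaluating a simple
# two-component mixture model as the sum of the weighted densities:
#
#     from numpy import array, vstack
#
#     components = [
#         Gaussian(vstack([0.0, 0.0]), array([[1.0, 0.0], [0.0, 1.0]]), weight=0.7),
#         Gaussian(vstack([5.0, 5.0]), array([[2.0, 0.0], [0.0, 2.0]]), weight=0.3),
#     ]
#     density = sum(component(vstack([1.0, 1.0])) for component in components)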

View File

@ -0,0 +1,39 @@
"""The module contains methods to access raster data sets (as opposed to vector data sets).
The :class:`~pyrate.common.raster_datasets.geo_datasets.DataSetAccess` allows reading data arrays from
raster datasets using query windows. It also computes such windows for a given point and radius.
However, client code will often want to use some transformed properties of these datasets.
To that end, a concrete :class:`~pyrate.common.raster_datasets.transformer_base.BaseTransformer` can be used,
either implemented in the :mod:`~pyrate.common.raster_datasets.transformers_concrete` module
or in some client code.
Transformers query some data source for given nodes and radii of influence and then return some
- well, transformed - property vectors for the query nodes.
By the way: Instances of :class:`~pyrate.plan.graph.geo_graph.GeoNavigationGraph`
directly accept instances of :class:`~pyrate.common.raster_datasets.transformer_base.BaseTransformer`
to generate properties for nodes in the graph.
.. inheritance-diagram::
pyrate.common.raster_datasets.transformers_concrete.ConstantTransformer
pyrate.common.raster_datasets.transformers_concrete.BathymetricTransformer
:parts: 2
:top-classes: pyrate.common.raster_datasets.transformer_base.BaseTransformer
You might want to set some options for the underlying *rasterio*/*GDAL* drivers, like
`GDAL_CACHEMAX <https://trac.osgeo.org/gdal/wiki/ConfigOptions#GDAL_CACHEMAX>`_
("If its value is small (less than 100000), it is assumed to be measured in megabytes, otherwise in bytes."):
.. code-block:: python
with rasterio.Env(GDAL_CACHEMAX=1024):
with DataSetAccess(...) as data_set:
pass # do cool stuff
"""
from .geo_datasets import DataSetAccess
from .transformer_base import BaseDatasetTransformer
from .transformer_base import BaseTransformer
# Don't directly expose transformers_concrete here to keep it simple
__all__ = ["BaseTransformer", "BaseDatasetTransformer", "DataSetAccess"]

View File

@ -0,0 +1,415 @@
"""
This module provides an abstraction over geographical data sets which can be used to efficiently retrieve
properties for many nodes on a possibly irregular grid.
The implementation is currently single threaded but should be straightforward to parallelize.
Many commonly used datasets require a couple of gigabytes of memory, so make sure you do not open too many at
once.
"""
# Standard library
import math
# Typing
from typing import Any
from typing import cast
from typing import ContextManager
from typing import Optional
from typing import Tuple
from typing import Union
# Scientific
from numpy import allclose
from numpy import clip
from numpy import hstack
from numpy import linspace
from numpy import meshgrid
from numpy import ndarray
# Raster data set library
import rasterio
import rasterio.coords
import rasterio.io
import rasterio.windows
from rasterio.windows import Window
# Geographic helpers
from pyrate.plan.geometry.geospatial import MAXIMUM_EARTH_CIRCUMFERENCE
from pyrate.plan.geometry.helpers import meters2rad
class DataSetAccess(ContextManager["DataSetAccess"]):
"""Represents a global raster geo dataset that can be efficiently queried for a set of nodes.
See :meth:`~.get_bounding_windows_around` for why two bounding boxes/windows are supported by some
methods.
Notes:
The type of the data that is read depends on the dataset that is used.
Warning:
This class shall only be used as a context manager (using the `with`-syntax) in order to initialize
and clean up any resources that are required and possibly several gigabytes large. The behaviour of
this class is undefined after the context has been left, as the internal data array is deleted to free up
memory. Also, the data is only guaranteed to be available once the context has been entered.
Warning:
There are many subtle pitfalls with processing geographical datasets. They are only alleviated by
software abstractions like *GDAL*, *rasterio* and also *Pyrate* to some degree and are often badly
documented. For instance: All raster datasets define a grid (well, the *raster*). The creator(s) of
the dataset define whether the data at each entry refers to the center of the cell or the grid line of
the raster [1]. However, in a lot of software handling these datasets (like *GDAL* and *rasterio*),
it is not always clear what interpretation is used. *rasterio* seems to always assume grid-centered
values, and so does Pyrate. This could lead to small discrepancies between the data extracted from the
dataset and its real structure. On large-scale queries spanning many cells, this will not be a
problem.
While **many** libraries like *rasterio* exist for querying raster datasets at grid-like points or even
irregular points, none seems to allow for getting all raw data points of some datasets around some point
with some radius [2]. This was, however, required since we wanted to calculate statistics like "number of
points below sea level" for all data points around a given center point and radius. If we interpolated the
dataset to the center point, we would only get some local information and not cover the entire area within
the radius. The following libraries were investigated but did not provide the needed functionality:
- `GDALRasterBand tool from GDAL <https://gdal.org/api/gdalrasterband_cpp.html>`_
- `grdtrack tool from GMT <https://docs.generic-mapping-tools.org/latest/grdtrack.html>`_
or `its Python version <https://www.pygmt.org/latest/api/generated/pygmt.grdtrack.html>`_
- `geonum's topodata module <https://geonum.readthedocs.io/en/latest/api.html#module-geonum.topodata>`_
Args:
dataset: A :mod:`rasterio` raster dataset to read from or path to the file to open.
It must cover the entire planet with ``(0, 0)`` degrees being in the center.
raster_band_index: the index of the band (the "layer") of the raster dataset (*GDAL*/*rasterio*
terminology)
Attributes:
dataset: the underlying :mod:`rasterio` dataset; read-only
raster_band_index: the index of the band (the "layer") of the raster dataset (*GDAL*/*rasterio*
terminology); read-only
References:
- [1] This concept is also called grid- vs cell-registration. See also
`this Earth Science post <https://earthscience.stackexchange.com/q/4868/10692>`__.
- [2] Some `kind answers <https://lists.osgeo.org/pipermail/gdal-dev/2020-December/053125.html>`__
on the *gdal-dev* mailing list that did not help.
"""
#: The bounding box that any dataset wrapped by this class must match
_DATASET_BOUNDING_BOX = rasterio.coords.BoundingBox(left=-180.0, bottom=-90.0, right=180.0, top=90.0)
def __init__(self, dataset: Union[str, rasterio.DatasetReader], raster_band_index: int = 1) -> None:
self.dataset = rasterio.open(dataset) if isinstance(dataset, str) else dataset
self.raster_band_index = raster_band_index
self._data_array: ndarray
self._dataset_window = Window.from_slices(
rows=(0, self.dataset.height),
cols=(0, self.dataset.width),
)
assert allclose(DataSetAccess._DATASET_BOUNDING_BOX, self.dataset.bounds, atol=1e-12), (
"the dataset needs to cover the entire planet with (0, 0) degrees being in the center but was "
+ repr(self.dataset.bounds)
)
def get_bounding_windows_around(
self, center_latitude: float, center_longitude: float, radius: float
) -> Tuple[Window, Optional[Window]]:
"""Computes a bounding boxes/windows around the given center containing all points within the radius.
This method will return one bounding box per coordinate pair for *most* locations on earth.
However, near longitude +180°/-180° the dataset wraps around at the side, and as such, a bounding box
used for querying the dataset contains one or two such bounding boxes. Due to the internal design of
numpy, such a "wrapping" query cannot be expressed in a single slice [1] [2]. Thus, this method
might return one or two windows.
Correspondingly, :meth:`~.lat_lon_meshgrid_for` and :meth:`~.data_for` take one or two windows each.
Args:
center_latitude: The latitude of the center of the box, in radians
center_longitude: The longitude of the center of the box, in radians
radius: The radius around the center that the box should include. Strictly positive, in meters.
Returns:
One or two integer bounding windows as a tuple; might be slightly overestimated (by design)
References:
- [1] Some `answers <https://lists.osgeo.org/pipermail/gdal-dev/2020-December/053132.html>`_
on the *gdal-dev* mailing list that did not help
- [2] Numpy docs on
`Basic Slicing and Indexing
<https://numpy.org/doc/stable/reference/arrays.indexing.html#basic-slicing-and-indexing>`_
"""
# pylint: disable=too-many-locals
assert radius > 0.0, "radius must be strictly positive"
center_longitude = math.degrees(center_longitude)
center_latitude = math.degrees(center_latitude)
delta_lat = math.degrees(meters2rad(radius)) # uniform across the globe
assert delta_lat > 0.0, "is the input in radians?"
# Better slightly overestimate it by using the earth circumference at the equator
# That is about 1% larger than mean circumference
earth_circumference_at_lat = math.cos(math.radians(center_latitude)) * MAXIMUM_EARTH_CIRCUMFERENCE
delta_lon = radius / earth_circumference_at_lat * 360.0
assert delta_lon > 0.0, "is the input in radians?"
# Top & bottom are simple: just clip the latitude at the poles
# Left & right are a bit trickier since that possibly requires the creation of two slices
# These four coordinates determine the primary window
left = center_longitude - delta_lon
bottom = max(center_latitude - delta_lat, -90.0)
right = center_longitude + delta_lon
top = min(center_latitude + delta_lat, +90.0)
# `additional_window` determines the extra query if wrapping near longitude (+/-) 180° occurs
# `window` is geographically more west-ward than `additional_window`, if the latter exists
# Keep in mind though, that the common border might lie on +/- 180° longitude and thus on the
# border of the dataset/data array
window: Window
additional_window: Optional[Window]
# Handle the horizontal overflow of the window
# This also handles the case where 2*radius is larger than the width of the dataset
if left < -180.0: # Overflow on the left
overshoot = clip(-(left + 180), 0.0, 360.0)
left_wrapped = +180 - overshoot
# It might be the case that it also overflows on the right if the overall radius was so large
# that the window(s) would wrap around the world more than once. This can especially happen
# at high latitudes, where horizontally wrapping around the globe can happen at arbitrarily small
# radii/window sizes near the poles. We thus clip it to (in sum) only cover the world once.
right = clip(right, -180.0, left_wrapped)
# If the bounds overflow on the left, make the wrapped (i.e. the non-clipped) one the primary
# window, as it is geographically more west-ward
window = self.dataset.window(left_wrapped, bottom, +180.0, top)
additional_window = self.dataset.window(
-180.0, bottom, right, top
) # Possibly a window with zero width
elif right > +180.0: # Overflow on the right
overshoot = clip(right - 180, 0.0, 360.0)
right_wrapped = -180 + overshoot
# See the previous case "Overflow on the left" for an analogous explanation
left = clip(left, right_wrapped, +180.0)
# If the bounds overflow on the right, make the clipped (i.e. the non-wrapped) one the primary
# window, as it is geographically more west-ward
window = self.dataset.window(left, bottom, +180.0, top)
# `right_wrapped == -180` similar to above cannot occur here since then we must have landed in the
# `left < -180.0` branch instead
assert right_wrapped > -180, "The window would extend zero meters from east to west"
additional_window = self.dataset.window(-180.0, bottom, right_wrapped, top)
else: # No overflow at the bounds occurred, so we only need one `window`
window = self.dataset.window(left, bottom, right, top)
additional_window = None
# Jointly round the window(s) to integers and return the result
return self._round_windows_ceil(window, additional_window)
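# For illustration (hedged): with an opened `data_set` around, a 100 km query close
# to the antimeridian is split into two windows:
#
#     windows = data_set.get_bounding_windows_around(
#         center_latitude=0.0, center_longitude=math.radians(179.9), radius=100_000.0
#     )
#     window, additional_window = windows  # here, additional_window is not None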
def _round_windows_ceil(
self, window_1: Window, window_2: Optional[Window]
) -> Tuple[Window, Optional[Window]]:
"""Rounds one or two windows to integer types and avoids an overlap to be created.
Always rounds to the larger windows if non-integer bounds are given. This guarantees that at least all
points initially given as the window(s) are also included in the resulting windows.
The actual rounding is done in :func:`rasterio.windows.window_index`.
Args:
window_1: The left window
window_2: An optional right window (geographically touches the eastern/right side of
``window_1``). Keep in mind though, that the common border might lie on +/- 180°
longitude and thus on the border of the dataset/data array.
Returns:
One or two windows, rounded to :class:`int` values.
Due to rounding, this method may return only a single window even if two were initially provided.
"""
(_, (_, w1_old_right)) = window_1.toranges()
# Round the first window
# The actual rounding is done in :func:`rasterio.windows.window_index`
window_1 = Window.from_slices(*cast(Tuple[slice, slice], rasterio.windows.window_index(window_1)))
# The rounding may move it beyond the bounds of the dataset, so clip it at the array borders
window_1 = window_1.intersection(self._dataset_window)
if window_2 is not None:
# Adjust `window_2` in the correct direction for a possibly created overlap
# Afterward, round it too
# Unpack the bounds that we will work with
((w1_top, w1_bottom), (_, w1_right)) = window_1.toranges()
(_, (left, right)) = window_2.toranges()
# Correct for the horizontal change that was induced by enlarging the `window_1`
# This will make `window_2` smaller if their common boundary was not already on a cell border
left += w1_right - w1_old_right
# Round away from the existing `window_1`, i.e. to the right/geographically west-ward
left = int(math.ceil(left))
right = int(math.ceil(right))
# There is a 1-cell overlap between the windows that was created by rounding, i.e.
# ``right == w1_left``. Therefore, we cut one index off.
right -= 1
# The case ``left == w1_right`` cannot occur since the second window is always guaranteed to be to
# the right of the first. We still check that though:
assert (
left - w1_right
) % self.dataset.width == 0, "this can never happen if the second window is truly to the right"
# Make sure that the extra window is non-empty and if not, just discard it
if right - left <= 0:
window_2 = None
else:
# We simply adopt the top and bottom bounds as they are the same in both windows
window_2 = Window.from_slices(rows=(w1_top, w1_bottom), cols=(left, right))
# May become obsolete if https://github.com/mapbox/rasterio/pull/2090 gets accepted
def to_int(win: Window) -> Window:
((float_top, float_bottom), (float_left, float_right)) = win.toranges()
return Window.from_slices(
rows=(int(float_top), int(float_bottom)), cols=(int(float_left), int(float_right))
)
return to_int(window_1), window_2 if window_2 is None else to_int(window_2)
def _lat_lon_meshgrid_single(self, window: Window, radians: bool) -> Tuple[ndarray, ndarray]:
"""Creates a meshgrid with all coordinates the data set has entries for in the given window.
Args:
window: as returned by :meth:`~get_bounding_windows_around`
radians: if ``True`` return in radians, else in degrees
Returns:
A latitude, longitude meshgrid matching the data returned by :meth:`~_data_single`
"""
# These values are in degrees
longitude_left, latitude_up = self.dataset.xy(window.row_off, window.col_off)
longitude_right, latitude_down = self.dataset.xy(
window.row_off + window.height, window.col_off + window.width
)
if radians:
longitude_left = math.radians(longitude_left)
latitude_up = math.radians(latitude_up)
longitude_right = math.radians(longitude_right)
latitude_down = math.radians(latitude_down)
coords_lat = linspace(latitude_up, latitude_down, window.height)
coords_lon = linspace(longitude_left, longitude_right, window.width)
coords_lat, coords_lon = meshgrid(coords_lat, coords_lon, indexing="ij")
assert coords_lat.shape == coords_lon.shape
return coords_lat, coords_lon
def lat_lon_meshgrid_for(
self,
window: Window,
additional_window: Optional[Window],
radians: bool,
) -> Tuple[ndarray, ndarray]:
"""Creates a meshgrid with all coordinates the data set has entries for in the given windows.
Args:
window: as returned by :meth:`~get_bounding_windows_around`
additional_window: as returned by :meth:`~get_bounding_windows_around`
radians: if ``True`` return in radians, else in degrees
Returns:
A single latitude, longitude meshgrid matching the data returned by :meth:`~data_for`
"""
coords_lat, coords_lon = self._lat_lon_meshgrid_single(window, radians)
# append additional window (only if required)
if additional_window is not None:
coords_lat_additional, coords_lon_additional = self._lat_lon_meshgrid_single(
additional_window, radians
)
coords_lat = hstack((coords_lat, coords_lat_additional))
coords_lon = hstack((coords_lon, coords_lon_additional))
return coords_lat, coords_lon
def _data_single(self, window: Window) -> ndarray:
"""Get all data points within the given window.
Notes:
The type of the data that is read depends on the dataset that is used.
See ``self.dataset.dtypes``.
Warnings:
Never modify the data returned by this method directly! It is only a view into the raw data.
Args:
window: A window as returned by :meth:`~get_bounding_windows_around`
Returns:
The 2D data array matching the coordinates returned by :meth:`~_lat_lon_meshgrid_single`
"""
assert hasattr(self, "_data_array"), "DataSetAccess must be used as a context manager and be open"
# One could read the data via:
# self.dataset.read(self.raster_band_index, window=window)
# However, this does not map the file into memory and is thus about 10x slower than directly using a
# numpy array.
# Keep in mind that the slice will create a view into the raw data, not a copy;
# this is intentional to make the data access fast.
data: ndarray = self._data_array[window.toslices()]
assert data.shape == (window.height, window.width)
return data
def data_for(self, window: Window, additional_window: Optional[Window]) -> ndarray:
"""Get all data points within the given windows as a single array.
Notes:
The type of the data that is read depends on the dataset that is used.
See ``self.dataset.dtypes``.
Warnings:
Never modify the data returned by this method directly! It is only a view into the raw data.
Args:
window: as returned by :meth:`~get_bounding_windows_around`
additional_window: as returned by :meth:`~get_bounding_windows_around`
Returns:
The single 2D data array matching the coordinates returned by :meth:`~lat_lon_meshgrid_for`
"""
result = self._data_single(window)
# append additional window (only if required)
if additional_window is not None:
additional_result = self._data_single(additional_window)
result = hstack((result, additional_result))
return result
def __enter__(self) -> "DataSetAccess":
self.dataset.__enter__()
self._data_array = self.dataset.read(self.raster_band_index)
self._data_array.flags.writeable = False # make this read-only to prevent accidents
return self
def __exit__(self, exc_type, exc_val, exc_tb) -> Any:
del self._data_array
return self.dataset.__exit__(exc_type, exc_val, exc_tb)
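# A hedged end-to-end sketch (the dataset path is illustrative): read all raw data
# points within 50 km of a position, together with their coordinates:
#
#     import math
#
#     with DataSetAccess("topography/earth2014.tif") as data_set:
#         windows = data_set.get_bounding_windows_around(
#             center_latitude=math.radians(54.0),
#             center_longitude=math.radians(7.5),
#             radius=50_000.0,
#         )
#         latitudes, longitudes = data_set.lat_lon_meshgrid_for(*windows, radians=True)
#         data = data_set.data_for(*windows)  # same shape as the meshgrid arrays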

View File

@ -0,0 +1,147 @@
"""
This module provides the base classes for the transformers, which extract property vectors for given query
points form (usually) a geographical dataset.
"""
# Standard library
from abc import ABC
from abc import abstractmethod
from itertools import repeat
# Typing
from typing import Any
from typing import cast
from typing import ContextManager
from typing import Iterable
from typing import Sequence
from typing import Tuple
from typing import Union
# Scientific
from numpy import array
from numpy import empty
from numpy import ndarray
from numpy.typing import DTypeLike
from pandas import DataFrame
# Progress bar
from tqdm import tqdm
# Typing helpers
from .geo_datasets import DataSetAccess
class BaseTransformer(ContextManager["BaseTransformer"], ABC):
"""This class allows to query some data source for a property at each node.
Subclasses will usually override :meth:`_get_transformed_at` in order to return the data vector for some
specific node with given latitude and longitude. Note that the result of calling
:meth:`~get_transformed_at_nodes` is a :class:`pandas.DataFrame`, in order to allow a single transformer
to return multiple values for each vector if this simplifies or speeds up calculations.
Querying all nodes can optionally print a progress bar to the command line (see ``show_progress``).
Warning:
This class (and any subclasses) shall only be used as a context manager. See :class:`~DataSetAccess`
for the reasons for this.
Args:
structured_dtype: For each column in the query result, a tuple consisting of a human-readable name and
the (numpy) data type of the property. This follows the syntax of NumPy's
`"Structured Datatypes" <https://numpy.org/doc/stable/user/basics.rec.html#structured-datatypes>`_.
See Also:
BaseDatasetTransformer
"""
def __init__(self, structured_dtype: Sequence[Tuple[str, DTypeLike]]) -> None:
super().__init__()
self.structured_dtype = cast(DTypeLike, structured_dtype)
@abstractmethod
def _get_transformed_at(self, latitude: float, longitude: float, radius: float) -> Tuple:
"""Get the property at some specific node, given by its geographical location.
Args:
latitude: The geographical location of the node, in radians
longitude: The geographical location of the node, in radians
radius: The radius of the area that this node shall represent, in meters
Returns:
A single property vector for the given node.
"""
def get_transformed_at_nodes(
self,
latitudes: ndarray,
longitudes: ndarray,
radius: Union[float, ndarray],
show_progress: bool = False,
) -> DataFrame:
"""Computes the property for each individual node. Prints a progress bar by default.
Args:
latitudes: latitude values of all nodes, in radians
longitudes: longitude values of all nodes, in radians
radius: the radius around each node that it should represent, in meters; may be an array of shape
`(num_nodes, )` or a single scalar if the radius is uniform
show_progress: whether to print a nice and simple progress bar
Returns:
A :class:`pandas.DataFrame` with the values generated for each (latitude, longitude) node:
one row per node and one column per property
"""
assert latitudes.shape == longitudes.shape
radii = repeat(radius) if isinstance(radius, float) else cast(Iterable[float], radius)
if len(latitudes) > 0:
result = [
self._get_transformed_at(latitude, longitude, rad)
for latitude, longitude, rad in tqdm(
zip(latitudes, longitudes, radii),
unit=" nodes",
unit_scale=True,
colour="white",
total=len(latitudes),
disable=not show_progress,
)
]
else:
result = empty((0, len(self.structured_dtype))) # type: ignore
assert len(result) == latitudes.shape[0]
# this also ensures that all property vectors have the same length
structured_array = array(result, dtype=self.structured_dtype)
return DataFrame.from_records(structured_array)
def __enter__(self) -> "BaseTransformer":
return self
def __exit__(self, exc_type, exc_val, exc_tb) -> Any:
return False
class BaseDatasetTransformer(BaseTransformer, ABC):
"""A specialized dataset transformer which extracts properties from a geographical dataset.
Args:
structured_dtype: see constructor argument ``structured_dtype`` in :class:`BaseTransformer`
dataset: A dataset to read from.
It is automatically managed when this class is used as a context manager.
"""
def __init__(self, structured_dtype: Sequence[Tuple[str, DTypeLike]], dataset: DataSetAccess) -> None:
super().__init__(structured_dtype)
self.dataset = dataset
def __enter__(self) -> "BaseDatasetTransformer":
self.dataset.__enter__()
return self
def __exit__(self, exc_type, exc_val, exc_tb) -> Any:
super().__exit__(exc_type, exc_val, exc_tb)
return self.dataset.__exit__(exc_type, exc_val, exc_tb)

View File

@ -0,0 +1,163 @@
"""
This module exposes specific property transformers.
See the `data repository <https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/data>`_ for details on
the actual data sets referenced and used here.
"""
# Standard library
from enum import auto
from enum import Enum
# Typing
from typing import Any
from typing import List
from typing import Optional
from typing import Sequence
from typing import Tuple
# Scientific
import numpy
from numpy import clip
from numpy import count_nonzero
from numpy import extract
from numpy import float32
from numpy.typing import DTypeLike
# Helpers and own typing
from ...plan.geometry.helpers import haversine_numpy
from .geo_datasets import DataSetAccess
from .transformer_base import BaseDatasetTransformer
from .transformer_base import BaseTransformer
class ConstantTransformer(BaseTransformer):
"""A very simple transformer class to fill a property with a constant value.
Args:
value: The constant value to be added to each node
dtype: The numpy data type of the resulting property vector field
name: The name of the property. If set to ``None``, a reasonable default name will be used.
"""
# pylint: disable=too-few-public-methods
# idea: could make this class explicitly generic in the type signatures
def __init__(self, value: Any, dtype: DTypeLike, name: Optional[str] = None) -> None:
name = f"constant value of {value}" if name is None else name
super().__init__([(name, dtype)])
self.value = value
def _get_transformed_at(self, latitude: float, longitude: float, radius: float) -> Tuple[Any]:
return (self.value,) # return a tuple
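# A hedged usage sketch: transformers are context managers and querying returns a
# :class:`pandas.DataFrame` with one row per node and one column per property:
#
#     from numpy import array, float32
#
#     latitudes = array([0.0, 0.1])   # in radians
#     longitudes = array([0.0, 0.2])  # in radians
#     with ConstantTransformer(42.0, float32) as transformer:
#         frame = transformer.get_transformed_at_nodes(latitudes, longitudes, radius=1000.0)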
class BathymetricTransformer(BaseDatasetTransformer):
"""Extracts values from a given bathymetric datasets, i.e. depth information.
The datatype for all modes is ``np.float32``. Raises a :class:`ValueError` if no data is found for any
given query ``(latitude, longitude, radius)``.
Args:
dataset: the data set to be used
modes: a sequence of modes of the values to be extracted; see :class:`~BathymetricTransformer.Modes`
"""
# pylint: disable=too-few-public-methods
#: The depth/elevation below which a point is considered navigable, in meters;
#: positive values are above sea level, negative ones below
NAVIGABLE_BELOW: float = -5.0
class Modes(Enum):
"""The different modes how depth values can be extracted."""
AVERAGE_DEPTH = auto()
"""The average depth, weighted with more emphasis on the area near the query point, in meters.
This is a visualisation of the mode when applied to the `Earth 2014
<https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/data/-/tree/master/topography/earth2014>`_
topographic/bathymetric dataset:
.. image:: plot_global_bathymetry_depth.png
:alt: AVERAGE_DEPTH mode when applied to the Earth 2014 topographic/bathymetric dataset
"""
FRACTION_NAVIGABLE = auto()
"""The fraction of data points at which a boat can navigate, as a scalar in :math:`[0, 1]`.
See :attr:`BathymetricTransformer.NAVIGABLE_BELOW` for what is considered navigable.
Only the topographic/bathymetric height/depth value is used for determining navigability as an
approximation, and not actual water coverage.
To not count the Netherlands as navigable, the threshold is set to a value a little bit below zero,
where zero means sea level.
This is a visualisation of the mode when applied to the `Earth 2014
<https://gitlab.sailingteam.hg.tu-darmstadt.de/informatik/data/-/tree/master/topography/earth2014>`_
topographic/bathymetric dataset:
.. image:: plot_global_bathymetry_fraction_navigable.png
:alt: FRACTION_NAVIGABLE mode when applied to the Earth 2014 topographic/bathymetric dataset
"""
@property
def column_name(self) -> str:
"""Returns the name that is used for the dataframe column."""
return f"bathymetric data ({self.name})"
def __init__(self, dataset: DataSetAccess, modes: Sequence[Modes] = tuple(Modes)) -> None:
structured_dtype = [(mode.column_name, float32) for mode in modes]
super().__init__(structured_dtype, dataset)
self.modes = modes
def _get_transformed_at(self, latitude: float, longitude: float, radius: float) -> Tuple:
# pylint: disable=too-many-locals
assert radius > 0, "the radius must be positive"
windows = self.dataset.get_bounding_windows_around(
center_latitude=latitude, center_longitude=longitude, radius=radius
)
lats, lons = self.dataset.lat_lon_meshgrid_for(*windows, radians=True)
assert lats.shape == lons.shape
# the depth data is negative below sea level and positive for landmasses
depth = self.dataset.data_for(*windows) # as int16 !
assert depth.shape == lats.shape
# get the distances to all points
distances = haversine_numpy(lats, lons, latitude, longitude)
result: List[float] = [] # will be transformed to float32 later
for mode in self.modes: # respect the ordering
if mode is BathymetricTransformer.Modes.AVERAGE_DEPTH:
# we use a weight that falls off linearly with distance for the weighted arithmetic mean
weights = clip(1.0 - (distances / radius), 0.0, 1.0)
try:
average = numpy.average(depth.astype(float32), weights=weights)
except ZeroDivisionError as error:
raise ValueError(
f"no points in radius {radius} m around (lat, lon) = {(latitude, longitude)} rad"
) from error
result.append(average)
elif mode is BathymetricTransformer.Modes.FRACTION_NAVIGABLE:
# we use the distance to cut off unwanted entries
depth_within_radius = extract(distances <= radius, depth) # also flattens
if len(depth_within_radius) == 0:
raise ValueError(
f"no points in radius {radius} m around (lat, lon) = {(latitude, longitude)} radians"
)
number_of_navigable = count_nonzero(
depth_within_radius <= BathymetricTransformer.NAVIGABLE_BELOW
)
fraction = number_of_navigable / len(depth_within_radius)
result.append(fraction)
else: # pragma: no branch
raise ValueError(f"invalid mode {mode}") # pragma: no cover
return tuple(result)
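# A hedged sketch of combining the pieces (the dataset path is illustrative):
#
#     from numpy import array
#
#     dataset = DataSetAccess("topography/earth2014.tif")
#     modes = (BathymetricTransformer.Modes.FRACTION_NAVIGABLE,)
#     with BathymetricTransformer(dataset, modes) as transformer:
#         frame = transformer.get_transformed_at_nodes(
#             array([0.95]), array([0.15]), radius=10_000.0  # radians and meters
#         )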

View File

@ -0,0 +1,39 @@
"""This module contains helpers for writing and running tests.
In particular, it contains flags for detecting specific scenarios (like the test running on a CI platform)
and a variety of custom strategies for *Hypothesis*.
"""
# Standard library
from os import environ
# Spatialite for environment setup
from pyrate.common.charts import SpatialiteDatabase
def env(name: str) -> bool:
"""Checks if an environment variable exists and its value in lower case is ``{yes, true, t, 1}``.
Args:
name: The name of the environment variable to check
"""
return environ.get(name, "").lower() in ("yes", "true", "t", "1")
#: Set to ``True`` if running on a CI server (best effort)
IS_CI: bool = env("CI") or env("CONTINUOUS_INTEGRATION")
#: Whether to intensify tests at the expense of more time, e.g. with more example data for hypothesis
IS_EXTENDED_TESTING: bool = env("EXTENDED_TESTING")
#: True iff the Spatialite SQLite extension is installed & can be used
SPATIALITE_AVAILABLE: bool
try:
with SpatialiteDatabase(":memory:"):
pass # make sure it is properly closed
except RuntimeError: # pragma: no cover
SPATIALITE_AVAILABLE = False
else:
SPATIALITE_AVAILABLE = True
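# A hedged example of how these flags are typically consumed in a test suite:
#
#     import pytest
#
#     @pytest.mark.skipif(not SPATIALITE_AVAILABLE, reason="Spatialite is not installed")
#     def test_empty_database() -> None:
#         with SpatialiteDatabase(":memory:") as database:
#             assert len(database) == 0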

View File

@ -0,0 +1,18 @@
"""This module provides testing helpers like hypothesis strategies.
Some typecasts of custom strategies using :func:`hypothesis.strategies.composite` are actually wrong but
required to paint over some Mypy shortcomings
(see `hypothesis#2748 <https://github.com/HypothesisWorks/hypothesis/issues/2748>`_).
"""
# Typing
from typing import Callable
from typing import TypeVar
# Hypothesis typing
from hypothesis.strategies import SearchStrategy
T = TypeVar("T")
#: The type of the draw parameter to :func:`hypothesis.strategies.composite` strategies
DrawType = Callable[[SearchStrategy[T]], T]

View File

@ -0,0 +1,141 @@
"""Contains helpers based on hypothesis test data generators."""
# Typing
from typing import cast
from typing import Tuple
# Hypothesis testing
from hypothesis.extra.numpy import arrays
from hypothesis.strategies import composite
from hypothesis.strategies import floats
from hypothesis.strategies import lists
from hypothesis.strategies import SearchStrategy
# Mathematics
from numpy import eye
from numpy import float64
# Gaussian representation
from pyrate.common.math import Gaussian
# Own typing
from . import DrawType # pylint: disable=unused-import
# In this context, we reproduce a common filter notation
# pylint: disable=invalid-name, too-many-locals, unused-argument
@composite
def linear_model(
draw: DrawType, state_dim: int = 2, input_dim: int = 1, sensor_dim: int = 2
) -> SearchStrategy[Tuple]:
"""Generate a linear state space model.
Args:
draw: see :func:`hypothesis.strategies.composite`
state_dim: Number of state variables
input_dim: Number of input variables
sensor_dim: Number of measurement variables
"""
# Strategies
float_strategy = floats(0, 5)
# Transition model
F = draw(arrays(float64, (state_dim, state_dim), elements=float_strategy))
# Input model
B = draw(arrays(float64, (state_dim, input_dim), elements=float_strategy))
# Measurement model
H = draw(arrays(float64, (sensor_dim, state_dim), elements=float_strategy))
# Symmetric, positive definite process noise
q = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
Q = q @ q.T + eye(state_dim)
# Symmetric, positive definite sensor noise
r = draw(arrays(float64, (sensor_dim, 1), elements=float_strategy))
R = r @ r.T + eye(sensor_dim)
# Initial belief
x = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
p = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
P = p @ p.T + eye(state_dim)
estimate = Gaussian(x, P)
# Measurements and inputs
measurements = draw(
lists(arrays(float64, (sensor_dim, 1), elements=float_strategy), min_size=2, max_size=4)
)
inputs = draw(
lists(
arrays(float64, (input_dim, 1), elements=float_strategy),
min_size=len(measurements),
max_size=len(measurements),
)
)
# Return model
result = estimate, F, B, H, Q, R, measurements, inputs
return cast(SearchStrategy[Tuple], result)
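# A hedged sketch of consuming this strategy in a test (analogous for
# `nonlinear_model` below):
#
#     from hypothesis import given
#
#     @given(model=linear_model())
#     def test_linear_model_shapes(model) -> None:
#         estimate, F, B, H, Q, R, measurements, inputs = model
#         assert F.shape == Q.shape
#         assert len(measurements) == len(inputs)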
@composite
def nonlinear_model(draw: DrawType, state_dim: int = 2, sensor_dim: int = 2) -> SearchStrategy[Tuple]:
"""Generate a nonlinear state space model.
Args:
draw: see :func:`hypothesis.strategies.composite`
state_dim: Number of state variables
sensor_dim: Number of measurement variables
"""
# Strategies
float_strategy = floats(0, 5)
# Transition model
F = draw(arrays(float64, (state_dim, state_dim), elements=float_strategy))
def f(x):
return F @ x
# Jacobian of f with respect to the state
def Jf(x):
return F
# Measurement model
H = draw(arrays(float64, (sensor_dim, state_dim), elements=float_strategy))
def h(x):
return H @ x
# Jacobian of h with respect to the state
def Jh(state):
return H
# Symmetric, positive definite process noise
q = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
Q = q @ q.T + eye(state_dim)
# Symmetric, positive definite sensor noise
r = draw(arrays(float64, (sensor_dim, 1), elements=float_strategy))
R = r @ r.T + eye(sensor_dim)
# Initial belief
x = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
p = draw(arrays(float64, (state_dim, 1), elements=float_strategy))
P = p @ p.T + eye(state_dim)
estimate = Gaussian(x, P)
# Measurements
measurements = draw(
lists(arrays(float64, (sensor_dim, 1), elements=float_strategy), min_size=2, max_size=4)
)
# Return model
result = estimate, f, F, Jf, h, H, Jh, Q, R, measurements
return cast(SearchStrategy[Tuple], result)

View File

@ -0,0 +1,448 @@
"""Contains helpers like hypothesis test data generators."""
# Typing
from typing import Any
from typing import cast
from typing import Dict
from typing import Optional
from typing import Union
# Hypothesis testing
from hypothesis import assume
import hypothesis.extra.numpy as numpy_st
import hypothesis.strategies as st
from hypothesis.strategies import SearchStrategy
# Scientific stack
import numpy
from scipy.spatial import Voronoi
# Planning primitives
from shapely.geometry import box
from shapely.geometry import MultiLineString
from shapely.geometry import Polygon
from shapely.ops import polygonize
from shapely.ops import unary_union
# Geospatial objects
from pyrate.plan.geometry import CartesianGeometry
from pyrate.plan.geometry import CartesianLocation
from pyrate.plan.geometry import CartesianPolygon
from pyrate.plan.geometry import CartesianRoute
from pyrate.plan.geometry import Geospatial
from pyrate.plan.geometry import LocationType
from pyrate.plan.geometry import PolarGeometry
from pyrate.plan.geometry import PolarLocation
from pyrate.plan.geometry import PolarPolygon
from pyrate.plan.geometry import PolarRoute
# Own typing
from . import DrawType # pylint: disable=unused-import
@st.composite
def geo_bearings(draw: DrawType) -> SearchStrategy[float]:
"""Returns a direction/bearing/azimuth/yaw for navigation in degrees in :math:`[0, 360)`.
Args:
draw: see :func:`hypothesis.strategies.composite`
"""
bearing = draw(st.floats(min_value=0.0, max_value=360.0, exclude_max=True))
return cast(SearchStrategy[float], bearing)
@st.composite
def geospatial_identifiers(draw: DrawType) -> SearchStrategy[Optional[int]]:
"""Returns identifiers for subclasses of :class:`pyrate.plan.geometry.Geospatial`.
Args:
draw: see :func:`hypothesis.strategies.composite`
"""
identifier = draw(st.one_of(st.none(), st.integers(min_value=0, max_value=(2**63) - 1)))
return cast(SearchStrategy[Optional[int]], identifier)
@st.composite
def location_types(draw: DrawType) -> SearchStrategy[LocationType]:
"""Returns location types.
Args:
draw: see :func:`hypothesis.strategies.composite`
"""
location_type = draw(st.sampled_from(LocationType))
return cast(SearchStrategy[LocationType], location_type)
@st.composite
def geospatial_attributes(draw: DrawType) -> SearchStrategy[Dict[str, Any]]:
"""Returns the common attributes for subclasses of :class:`pyrate.plan.geometry.Geospatial`.
Args:
draw: see :func:`hypothesis.strategies.composite`
"""
attributes = {
"location_type": draw(location_types()),
"name": draw(st.text()),
"identifier": draw(geospatial_identifiers()),
}
return cast(SearchStrategy[Dict[str, Any]], attributes)
@st.composite
def polar_locations(draw: DrawType) -> SearchStrategy[PolarLocation]:
"""Returns a polar location.
Args:
draw: see :func:`hypothesis.strategies.composite`
"""
location = PolarLocation(
draw(st.floats(min_value=-90.0, max_value=+90.0)),
draw(st.floats(min_value=-180.0, max_value=+180.0, exclude_max=True)),
**draw(geospatial_attributes()),
)
return cast(SearchStrategy[PolarLocation], location)
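# Hedged usage sketch (not part of the original module): every strategy in this file
# plugs directly into hypothesis' @given decorator; the test name is a hypothetical
# placeholder.
from hypothesis import given

@given(polar_locations())
def _example_test_polar_location_is_normalized(location: PolarLocation) -> None:
    # These bounds are guaranteed by the strategy and by PolarLocation's normalization.
    assert -90.0 <= location.latitude <= 90.0
    assert -180.0 <= location.longitude < 180.0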
@st.composite
def cartesian_locations(
draw: DrawType,
origin: SearchStrategy[Union[PolarLocation, None]] = st.one_of(st.none(), polar_locations()),
) -> SearchStrategy[CartesianLocation]:
"""Returns a cartesian location.
Args:
draw: see :func:`hypothesis.strategies.composite`
origin: an optional strategy for specifying origins, defaults to providing both ``None`` and real
locations
"""
location = CartesianLocation(
draw(st.floats(min_value=-10_000.0, max_value=+10_000.0)),
draw(st.floats(min_value=-10_000.0, max_value=+10_000.0)),
origin=draw(origin),
**draw(geospatial_attributes()),
)
return cast(SearchStrategy[CartesianLocation], location)
@st.composite
def polar_objects(
draw: DrawType, stable: bool = False, non_repeating: bool = False
) -> SearchStrategy[PolarGeometry]:
"""Returns polar geometries.
The concrete type is sampled randomly from all three polar geometries.
Args:
draw: see :func:`hypothesis.strategies.composite`
stable: see :func:`~polar_routes_stable`
non_repeating: if ``True``, the strategy will not produce routes with duplicate locations.
Ignored if ``stable`` is given.
"""
possible_sources = st.one_of(
[
polar_locations(),
polar_routes_stable() if stable else polar_routes(non_repeating=non_repeating),
polar_polygons(),
]
)
geospatial: PolarGeometry = draw(possible_sources)
return cast(SearchStrategy[PolarGeometry], geospatial)
@st.composite
def cartesian_objects(draw: DrawType) -> SearchStrategy[CartesianGeometry]:
"""Returns cartesian geometries.
The concrete type is sampled randomly from all three cartesian geometries.
Args:
draw: see :func:`hypothesis.strategies.composite`
"""
possible_sources = st.one_of(
[
cartesian_locations(),
cartesian_routes(),
cartesian_polygons(),
]
)
geospatial: CartesianGeometry = draw(possible_sources)
return cast(SearchStrategy[CartesianGeometry], geospatial)
@st.composite
def geospatial_objects(draw: DrawType, stable: bool = False) -> SearchStrategy[Geospatial]:
"""Returns instances of the abstract class :class:`pyrate.plan.geometry.Geospatial`.
The concrete type is sampled randomly from all six cartesian and polar geometries.
Args:
draw: see :func:`hypothesis.strategies.composite`
stable: see :func:`~polar_routes_stable`
"""
geospatial: Geospatial = draw(st.one_of([polar_objects(stable=stable), cartesian_objects()]))
return cast(SearchStrategy[Geospatial], geospatial)
@st.composite
def cartesian_polygons( # pylint: disable=too-many-arguments,too-many-locals
draw: DrawType,
min_vertices: int = 5,
max_vertices: int = 15,
scale: float = 100_000,
center_x: float = 0.0,
center_y: float = 0.0,
origin: SearchStrategy[Union[PolarLocation, None]] = st.one_of(st.none(), polar_locations()),
) -> SearchStrategy[CartesianPolygon]:
"""Returns non-empty valid cartesian polygons around the origin of the coordinate system.
Inspired `by testing code from the spatialpandas
<https://github.com/holoviz/spatialpandas/blob/efdabe5c736db8103a4bfedca55a414a365b754a/tests/geometry/strategies.py#L141>`_
library.
Args:
draw: see :func:`hypothesis.strategies.composite`
min_vertices: the minimum number of locations that shall form this polygon; needs to be at least ``5``
such that the generation algorithm works reliably
max_vertices: the maximum number of locations that shall form this polygon; needs to be at least
``min_vertices``; this should not be very large as it will make example generation
extremely slow
scale: the maximum that a single coordinate value may be away from the center (in meters)
center_x: the east-west center (in meters)
center_y: the north-south center (in meters)
origin: an optional strategy for specifying origins, defaults to providing both ``None`` and real
locations
Raises:
ValueError: if polygon generation fails
"""
assert min_vertices >= 5, "min_vertices needs to be at least 5"
assert max_vertices >= min_vertices, "max_vertices needs to be at least min_vertices"
assert scale >= 0.0, "scale must be non-negative"
count = draw(st.integers(min_value=min_vertices, max_value=max_vertices))
# very often, this only takes a single try
# it is highly unlikely that it will take more than 50
tries = 50
for _ in range(tries): # pragma: no branch
# create points in [-0.5, +0.5]
points = numpy.random.rand(count, 2) - 0.5
# scale them to the desired size
points *= scale * 2
voronoi = Voronoi(points)
multi_line_string = MultiLineString(
[voronoi.vertices[s] for s in voronoi.ridge_vertices if all(numpy.array(s) >= 0)]
)
poly = unary_union(list(polygonize(multi_line_string)))
poly = poly.intersection(box(-scale, -scale, scale, scale))
if ( # pragma: no branch
isinstance(poly, Polygon) and not poly.is_empty and poly.is_simple and poly.is_valid
):
coordinates = numpy.array(poly.exterior.coords)
# move them to the desired center
coordinates[:, 0] += center_x
coordinates[:, 1] += center_y
polygon = CartesianPolygon.from_numpy(
coordinates, origin=draw(origin), **draw(geospatial_attributes())
)
return cast(SearchStrategy[CartesianPolygon], polygon)
# This should practically never occur (the probability is very, very low)
raise ValueError("Failed to construct polygon") # pragma: no cover
@st.composite
def polar_polygons(
draw: DrawType,
min_vertices: int = 5,
max_vertices: int = 15,
scale: float = 100_000,
center: Optional[PolarLocation] = None,
) -> SearchStrategy[PolarPolygon]:
"""Returns non-empty valid polar polygons.
Args:
draw: see :func:`hypothesis.strategies.composite`
min_vertices: the minimum number of locations that shall form this polygon; needs to be at least ``5``
such that the generation algorithm works reliably
max_vertices: the maximum number of locations that shall form this polygon; needs to be at least
``min_vertices``; this should not be very large as it will make example generation
extremely slow
scale: the maximum that a single coordinate value may be away from the center (in meters)
center: the center of the polygon, or ``None`` to randomly select one
Raises:
ValueError: if polygon generation fails
"""
cartesian = draw(
cartesian_polygons(
min_vertices=min_vertices,
max_vertices=max_vertices,
scale=scale,
origin=polar_locations() if center is None else st.just(center), # type: ignore
)
)
return cast(SearchStrategy[PolarPolygon], cartesian.to_polar())
@st.composite
def cartesian_routes( # pylint: disable=too-many-arguments
draw: DrawType,
min_vertices: int = 2,
max_vertices: int = 10,
scale: float = 100_000,
center_x: float = 0.0,
center_y: float = 0.0,
origin: SearchStrategy[Union[PolarLocation, None]] = st.one_of(st.none(), polar_locations()),
non_repeating: bool = True,
) -> SearchStrategy[CartesianRoute]:
"""Returns a cartesian route.
Args:
draw: see :func:`hypothesis.strategies.composite`
min_vertices: the minimum number of locations that shall form this route, must be ``2`` or greater
max_vertices: the maximum number of locations that shall form this route
scale: the maximum that a single coordinate value may be away from the center (in meters); strictly
positive
center_x: the east-west center (in meters)
center_y: the north-south center (in meters)
origin: an optional strategy for specifying origins, defaults to providing both ``None`` and real
locations
non_repeating: if ``True``, the route will not contain any duplicate locations
"""
assert min_vertices >= 2, "min_vertices may not be less than 2"
assert (
max_vertices is None or max_vertices >= min_vertices
), "max_vertices may not be less than min_vertices"
assert scale > 0.0, "scale must be strictly positive"
# define the actual values in the coordinate arrays
elements_x = st.floats(min_value=center_x - scale, max_value=center_x + scale)
elements_y = st.floats(min_value=center_y - scale, max_value=center_y + scale)
# define the number of coordinates; we must draw directly and not pass the strategy such that x and y have
# the same number of elements
length = draw(st.integers(min_value=min_vertices, max_value=max_vertices))
# create the actual coordinates and ensure that all are different from each other
coordinates_x = draw(numpy_st.arrays(dtype="float64", shape=length, elements=elements_x, unique=True))
coordinates_y = draw(numpy_st.arrays(dtype="float64", shape=length, elements=elements_y, unique=True))
coordinates = numpy.vstack((coordinates_x, coordinates_y)).T
# make sure that the route has non-zero length
if numpy.abs(numpy.diff(coordinates, axis=1)).sum() < 1: # one meter in the typical interpretation
coordinates[0, 0] += 1 # add an arbitrary value # pragma: no cover
assume(not non_repeating or (numpy.abs(numpy.diff(coordinates, axis=1)) > 1).all()) # Difficult to handle
# create the route with the other parameters of geospatial objects
route = CartesianRoute.from_numpy(coordinates, origin=draw(origin), **draw(geospatial_attributes()))
return cast(SearchStrategy[CartesianRoute], route)
@st.composite
def polar_routes(
draw: DrawType,
min_vertices: int = 2,
max_vertices: int = 10,
non_repeating: bool = True,
) -> SearchStrategy[PolarRoute]:
"""Returns a polar route.
Args:
draw: see :func:`hypothesis.strategies.composite`
min_vertices: the minimum number of locations that shall form this route, must be ``2`` or greater
max_vertices: the maximum number of locations that shall form this route or ``None`` to let
*hypothesis* decide
non_repeating: if ``True``, the route will not contain any duplicate locations
"""
assert min_vertices >= 2, "min_vertices may not be less than 2"
assert (
max_vertices is None or max_vertices >= min_vertices
), "max_vertices may not be less than min_vertices"
# define the actual values in the coordinate arrays
elements_latitude = st.floats(min_value=-90, max_value=90)
elements_longitude = st.floats(min_value=-180, max_value=180, exclude_max=True)
# define the number of coordinates; we must draw directly and not pass the strategy such that x and y have
# the same number of elements
length = draw(st.integers(min_value=min_vertices, max_value=max_vertices))
# create the actual coordinates
coordinates_latitude = draw(
numpy_st.arrays(dtype="float64", shape=length, elements=elements_latitude, unique=True)
)
coordinates_longitude = draw(
numpy_st.arrays(dtype="float64", shape=length, elements=elements_longitude, unique=True)
)
coordinates = numpy.vstack((coordinates_latitude, coordinates_longitude)).T
# make sure that the route has non-zero length
# there is no single correct value for the threshold near the poles, but 1e-4 appears to work fine
if numpy.abs(numpy.diff(coordinates, axis=1)).sum() < 1e-4:
coordinates[0, 0] += 1e-4 # add an arbitrary value # pragma: no cover
assume(
not non_repeating or (numpy.abs(numpy.diff(coordinates, axis=1)) > 1e-4).all()
) # Difficult to handle
# create the route with the other parameters of geospatial objects
try:
route = PolarRoute.from_numpy(coordinates, **draw(geospatial_attributes()))
except ValueError:
# This can still happen if the duplicate entries check above does not catch it
assume(False) # pragma: no cover
# Make sure we only generate routes that can be projected
try:
route.to_cartesian(route.locations[0])
except AssertionError:
assume(False) # pragma: no cover
return cast(SearchStrategy[PolarRoute], route)
@st.composite
def polar_routes_stable(
draw: DrawType,
min_vertices: int = 5, # polar_polygons() requires at least 5
max_vertices: int = 10,
) -> SearchStrategy[PolarRoute]:
"""Returns a polar route where the vertices are not too far apart.
It is therefore numerically more stable when projecting to a cartesian plane.
Args:
draw: see :func:`hypothesis.strategies.composite`
min_vertices: the minimum number of locations that shall form this route, must be ``5`` or greater
max_vertices: the maximum number of locations that shall form this route or ``None`` to let
*hypothesis* decide
"""
assert min_vertices >= 5, "min_vertices may not be less than 5"
assert (
max_vertices is None or max_vertices >= min_vertices
), "max_vertices may not be less than min_vertices"
# We create a polygon since that is known to cause fewer numerical issues when projecting
# since we generate them in a way that guarantees similar locality of the vertices
polygon: PolarPolygon = draw(polar_polygons(min_vertices=min_vertices, max_vertices=max_vertices))
return cast(
SearchStrategy[PolarRoute],
PolarRoute.from_numpy(
polygon.to_numpy(),
location_type=polygon.location_type,
name=polygon.name,
identifier=polygon.identifier,
),
)
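# Hedged interactive sketch (not part of the original module): a single value can be
# drawn from any strategy during debugging via hypothesis' .example() helper.
def _example_sample_stable_route() -> PolarRoute:
    return polar_routes_stable().example()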

View File

@ -0,0 +1,11 @@
"""The plan package provides tools to plan actions that can aid a roboter in reaching its goals.
One important goal is reaching certain locations despite physically constrained
movement abilities of a sailing vessel.
Thus, a big part of this package deals with navigation strategies.
In the ``geometry`` package, general geometrical objects and transformations are
provided in cartesian (local 2D world) and spherical (latitude/longitude) coordinates.
The ``graph`` module provides navigation tools where the world is modeled as a graph.
This includes generating a graph, assigning properties to nodes of the graph and
finding good paths on the graph."""

View File

@ -0,0 +1,49 @@
"""This package provides geometric abstractions for action planning algorithms.
Warning:
This module docstring is not included in the *Sphinx* documentation.
"""
import typing # Will be removed from the namespace after being used below
from .geospatial import Direction
from .geospatial import Geospatial
from .geospatial import LocationType
from .location import CartesianLocation
from .location import PolarLocation
from .polygon import CartesianPolygon
from .polygon import PolarPolygon
from .route import CartesianRoute
from .route import PolarRoute
# provide useful aliases
#: Any of :class:`pyrate.plan.geometry.PolarLocation`, :class:`pyrate.plan.geometry.PolarRoute` and
#: :class:`pyrate.plan.geometry.PolarPolygon`.
PolarGeometry = typing.Union[PolarLocation, PolarRoute, PolarPolygon]
#: Any of :class:`pyrate.plan.geometry.CartesianLocation`, :class:`pyrate.plan.geometry.CartesianRoute` and
#: :class:`pyrate.plan.geometry.CartesianPolygon`.
CartesianGeometry = typing.Union[CartesianLocation, CartesianRoute, CartesianPolygon]
del typing
# don't expose .helpers here as it will rarely be used directly
__all__ = [
"CartesianLocation",
"CartesianPolygon",
"CartesianRoute",
"CartesianGeometry",
"Direction",
"Geospatial",
"LocationType",
"PolarLocation",
"PolarPolygon",
"PolarRoute",
"PolarGeometry",
]
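# Hedged usage sketch (not part of the original module): the union aliases let
# downstream code accept any polar geometry with a single annotation.
def _example_describe(geometry: PolarGeometry) -> str:
    return f"{type(geometry).__name__} named {geometry.name!r}"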

View File

@ -0,0 +1,221 @@
"""This module contains common base classes for the geospatial objects like polygons, routes and points."""
# Standard library
from abc import ABC
from abc import abstractmethod
from enum import Enum
from enum import IntEnum
from math import pi
# Typing
from typing import Any
from typing import cast
from typing import Dict
from typing import Optional
from typing import Union
# Geospatial
from geojson import dumps
from geojson import Feature
#: The mean earth radius at the equator in meters (taken from
#: `Earth radius (Wikipedia) <https://en.wikipedia.org/wiki/Earth_radius#Mean_radius>`__).
MEAN_EARTH_RADIUS = 6371_008.8
#: The mean earth circumference in meters (derived from :attr:`~MEAN_EARTH_RADIUS`).
MEAN_EARTH_CIRCUMFERENCE = MEAN_EARTH_RADIUS * 2.0 * pi
#: The maximal earth circumference in meters (i.e. at the equator; taken from
#: `Earth's circumference (Wikipedia) <https://en.wikipedia.org/wiki/Earth%27s_circumference>`__).
MAXIMUM_EARTH_CIRCUMFERENCE = 40_075_017.0
class LocationType(IntEnum):
"""Represents what type a location is of.
Notes:
The values are set to fixed values such that they can be serialized.
New members may therefore only be added below, with strictly ascending numbers.
"""
#: An object of unknown type.
UNKNOWN = 0
#: An abstract thing that is used for testing purposes and may not have a correspondence in the real
#: world. Ships should usually avoid such obstacles too, as they could represent things like
#: `icebergs <https://de.wikipedia.org/wiki/RMS_Titanic>`__.
TESTING = 1
#: A generic obstruction like a buoy, oil rig or special area extracted from a nautical chart.
OBSTRUCTION = 2
#: A land mass like an island or continent.
LAND = 3
#: Water area that might be considered not sufficiently deep for navigation depending on the context.
SHALLOW_WATER = 4
#: Some object representing special weather conditions, like strong winds or just precipitation.
WEATHER = 5
#: An object representing other vessels 🚢.
#: It might have been detected via
#: `AIS <https://en.wikipedia.org/wiki/Automatic_identification_system>`__.
SHIP = 6
@classmethod
def max_value(cls) -> int:
"""Get the maximum value of all members of this enum."""
return max(cls)
class Direction(float, Enum):
"""A simple collection of named "compass" bearings 🧭 in degrees for self-documenting code."""
# pylint: disable=invalid-name
North = 0.0
East = 90.0
South = 180.0
West = 270.0
class Geospatial(ABC):
"""The common abstract base class for both polar and cartesian geospatial objects.
See :meth:`~Geospatial.to_geo_json` for hints on how this class can be used for visualizing geometries.
Args:
location_type: The type of this object
name: An optional name of this object
identifier: An optional unique identifier for this object, in :math:`[0, 2^{63})`, i.e. it fits into a signed 64-bit integer
"""
def __init__(self, location_type: LocationType, name: Optional[str], identifier: Optional[int]) -> None:
self.location_type = location_type
self.name = name
self.identifier = identifier
super().__init__()
@property
def identifier(self) -> Optional[int]:
"""The numerical identifier of this object.
Must be `None` or in :math:`[0, 2^{63})`, i.e. fit into a signed 64-bit integer.
"""
return self._identifier
@identifier.setter
def identifier(self, value: Optional[int]) -> None:
assert value is None or 0 <= value < 2**63, "Identifiers must be in [0, 2**63) or None"
self._identifier = value
def to_geo_json(self, indent: Optional[Union[int, str]] = None, **kwargs) -> str:
"""Returns the GeoJSON representation of the geometry embedded into a feature.
Args:
indent: the number of levels to indent or ``None`` for compactness (see :func:`json.dumps`)
kwargs: much like indent, any keyword argument that can be passed to :func:`json.dumps`,
like ``allow_nan``, ``sort_keys``, and more
Returns:
The GeoJSON representation as a string
Examples:
See also: :ref:`geometry-plotting`.
GeoJSON is a widely used format that can be interpreted by a variety of GIS programs (geo
information systems). Among them is, for example, the very simple website
`geojson.io <https://geojson.io/>`__.
However, sometimes the geometries are too large to be handled by the web browser.
Then there are other programs available, like the free open-source tool
`QGIS (Desktop) <https://www.qgis.org/de/site/>`__. Its even available in the usual Ubuntu
repositories, so just run ``[sudo] apt install qgis``. later, you can simply copy-pasta it into
the tool.
The GeoJSON representation can be obtained like this (using a
:class:`~pyrate.plan.geometry.location.PolarLocation` just as an example):
>>> from pyrate.plan.geometry import PolarLocation
>>> team_room = PolarLocation(latitude=49.878091, longitude=8.654052)
>>> print(team_room.to_geo_json(indent=4))
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
8.654052,
49.878091
]
},
"properties": {}
}
See also:
- `GeoJSON on Wikipedia <https://en.wikipedia.org/wiki/GeoJSON>`__
- `geojson.io <https://geojson.io/>`__
- `QGIS (Desktop) <https://www.qgis.org/de/site/>`__
"""
# this relies on the inheriting instance to provide __geo_interface__ property/attribute
return cast(str, dumps(Feature(geometry=self), indent=indent, **kwargs))
@property
@abstractmethod
def __geo_interface__(self) -> Dict[str, Any]:
raise NotImplementedError()
@abstractmethod
def __eq__(self, other: Any) -> bool:
return (
isinstance(other, Geospatial)
and self.location_type == other.location_type
and self.name == other.name
and self.identifier == other.identifier
)
@property
def _repr_extras(self) -> str:
"""Create a string representation of the three extra attributes for use in :meth:`~__repr__`.
Examples:
The output is suited to be directly included before the final closing bracket of a typical
implementation of ``__repr__()``:
>>> from pyrate.plan.geometry import PolarLocation
>>> PolarLocation(0, 0)._repr_extras
''
>>> PolarLocation(0, 0, location_type=LocationType.UNKNOWN, name=None)._repr_extras
''
>>> PolarLocation(0, 0, name="")._repr_extras
', name=""'
>>> PolarLocation(0, 0, location_type=LocationType.SHIP, identifier=12)._repr_extras
', location_type=LocationType.SHIP, identifier=12'
The class :class:`pyrate.plan.geometry.location.PolarLocation` was only chosen as an example.
Returns:
The extra arguments in the syntax of keyword arguments, as is common for :meth:`~__repr__`.
"""
result = ""
if self.location_type != LocationType.UNKNOWN:
result += f", location_type=LocationType.{self.location_type.name}"
if self.name is not None:
result += f', name="{self.name}"'
if self.identifier is not None:
result += f", identifier={self.identifier}"
return result
@abstractmethod
def __repr__(self) -> str:
raise NotImplementedError()

View File

@ -0,0 +1,729 @@
"""Contains helpers for dealing with distances and normalization of spherical coordinates and compass
directions. Also allows for translating (collections of) points in polar coordinates.
Maybe we should use `geopandas <https://geopandas.org/reference.html#geopandas.GeoSeries.distance>`__.
References:
- Introduction on `Wikipedia <https://en.wikipedia.org/wiki/Great-circle_distance>`__
- Simple discussion on `StackOverflow <https://stackoverflow.com/q/38248046/3753684>`__
- Charles F. F. Karney (2013): Algorithms for geodesics.
`Paper as PDF <https://link.springer.com/content/pdf/10.1007%2Fs00190-012-0578-z.pdf>`__.
- `Walter Bislin's Blog <https://walter.bislins.ch/bloge/index.asp?page=Distances+on+Globe+and+Flat+Earth>`__
"""
# Python standard library
from math import atan2
from math import pi
from math import tau
# Typing
from typing import cast
from typing import Tuple
from typing import TypeVar
from typing import Union
from warnings import warn
# Scientific
import numpy
from numpy import absolute
from numpy import arccos
from numpy import arcsin
from numpy import arctan2
from numpy import array
from numpy import choose
from numpy import clip
from numpy import cos
from numpy import full
from numpy import hypot
from numpy import isfinite
from numpy import isscalar
from numpy import ndarray
from numpy import sin
from numpy import sqrt
from numpy import square
# Geospatial
from pyproj import Geod
# Own constants
from .geospatial import MEAN_EARTH_CIRCUMFERENCE
from .geospatial import MEAN_EARTH_RADIUS
# Constants -------------------------------------------------------------------
#: A scalar or a numpy array
ScalarOrArray = TypeVar("ScalarOrArray", float, ndarray)
#: The pyproj WGS84 object used as the basis for all polar representations and coordinate projections
WGS84_PYPROJ_GEOD = Geod("+ellps=WGS84 +units=m")
# Normalize -------------------------------------------------------------------
def _normalize_circular_range(value: ScalarOrArray, minimum: float, maximum: float) -> ScalarOrArray:
"""Normalizes the value to reside in :math:`[minimum, maximum[` by wrapping around.
Used by the other normalization functions in this package.
Args:
value: the value to be normalized
minimum: the minimum of the desired bounds
maximum: the maximum of the desired bounds, assumed to be truly larger than *minimum*
Returns:
The normalized value
"""
# general approach: remove offset -> normalize with span -> add offset
span = maximum - minimum
# the second `% span` is required due to floating point issues: `-1e-15 % 360` -> `360.0`,
# but not less than `360.0` as required
return ((value - minimum) % span) % span + minimum
def normalize_latitude(value: ScalarOrArray) -> ScalarOrArray:
"""Normalizes a latitudal value to the usual bounds by wrapping around.
Note:
This is already done automatically by
:attr:`pyrate.plan.geometry.location.PolarLocation.latitude`.
Examples:
>>> normalize_latitude(20.0)
20.0
>>> normalize_latitude(-90.0)
-90.0
>>> normalize_latitude(90.0)
90.0
It is also possible to wrap over the pole coordinates.
>>> normalize_latitude(91.0)
89.0
>>> normalize_latitude(185.0)
-5.0
Take care: this will also normalize rubbish values.
>>> normalize_latitude(3229764.25)
-24.25
Args:
value: the raw latitudal value in degrees
Returns:
the normalized value in :math:`[-90, +90]` degrees
"""
# touch_point_*: the latitudes would meet at this point if values outside [-90, +90] would be allowed
# pole_*: the actual bounds of the latitude values; they describe the south and north poles
touch_point_min, touch_point_max = -180.0, +180.0
pole_down, pole_up = -90.0, +90.0
# map into [-180.0, +180.0] by modulo exactly as with the longitude
value = _normalize_circular_range(value, touch_point_min, touch_point_max)
# map into [-90.0, +90.0] by mirroring, since `100°` would be `180° - 100° = 80°` and not
# `100° mod 90° = 10°` (as an example)
try:
if value > pole_up:
return touch_point_max - value
if value < pole_down:
return touch_point_min - value
return value
except ValueError:
clipped_below = choose(value < pole_down, (value, touch_point_min - value))
clipped_above = choose(value > pole_up, (clipped_below, touch_point_max - value))
return cast(ScalarOrArray, clipped_above)
def normalize_longitude(value: ScalarOrArray) -> ScalarOrArray:
"""Normalizes a longitudal value to the usual bounds by wrapping.
Note:
This is already done automatically by
:attr:`pyrate.plan.geometry.location.PolarLocation.longitude`.
Examples:
>>> normalize_longitude(136.0)
136.0
>>> normalize_longitude(-86.0)
-86.0
>>> normalize_longitude(-180.0)
-180.0
You can also get rid of redundant values, e.g. at 180.0°,
as well as wrap around the boundaries.
>>> normalize_longitude(+180.0)
-180.0
>>> normalize_longitude(185.0)
-175.0
Take care: this will also normalize rubbish values.
>>> normalize_longitude(3229764.25)
-155.75
Args:
value: the raw longitudal value in degrees
Returns:
the normalized value in :math:`[-180, +180)` degrees
"""
return _normalize_circular_range(value, -180.0, +180.0)
def normalize_direction(value: ScalarOrArray) -> ScalarOrArray:
"""Normalizes a direction (azimuth/yaw) value to the usual 360° compass values.
Examples:
>>> normalize_direction(45.0)
45.0
>>> normalize_direction(250.0)
250.0
>>> normalize_direction(-6.0)
354.0
>>> normalize_direction(360.0)
0.0
>>> normalize_direction(450.0)
90.0
Take care: this will also normalize rubbish values.
>>> normalize_longitude(3229764.25)
-155.75
Args:
value: the raw value in degrees
Returns:
the normalized value in :math:`[0, 360)` degrees
"""
return _normalize_circular_range(value, 0.0, 360.0)
# Difference ------------------------------------------------------------------
def _difference_circular_range(
value_a: ScalarOrArray, value_b: ScalarOrArray, minimum: float, maximum: float
) -> ScalarOrArray:
"""Calculates differences on a circular number line, where minimum and maximum meet.
The values do not need to be normalized.
If the difference between ``value_a`` and ``value_b`` is not finite (i.e. ``NaN``, ``+Inf`` or ``-Inf``) a
warning is printed and ``NaN`` is returned. Both other values are assumed to be finite.
Args:
value_a: the first value
value_b: the second value
minimum: the minimum of the desired bounds
maximum: the maximum of the desired bounds, assumed to be strictly larger than ``minimum``
Returns:
the difference between the values in :math:`[0, (maximum - minimum)/2]`
"""
raw_difference = value_a - value_b
if not isfinite(raw_difference).all():
warn(
"_difference_circular_range(): "
f"difference between {value_a} and {value_b} was not a valid number: {raw_difference}",
UserWarning,
)
span = maximum - minimum
difference: ScalarOrArray = raw_difference % span
# take the smaller one of the two possible distances, i.e. the smaller path around the circular range
try:
# Try the case where we have floats, not arrays
if difference > span / 2.0:
return span - difference
return difference
except ValueError:
return cast(ScalarOrArray, choose(difference > span / 2.0, (difference, span - difference)))
def difference_latitude(value_a: ScalarOrArray, value_b: ScalarOrArray) -> ScalarOrArray:
"""Calculates the difference between two latitudal values.
The values do not need to be normalized.
If the difference between ``value_a`` and ``value_b`` is not finite (i.e. ``NaN``, ``+Inf`` or ``-Inf``) a
warning is printed and ``NaN`` is returned.
Examples:
>>> difference_latitude(-45.0, +50.0)
95.0
>>> difference_latitude(-90.0, -90.0)
0.0
>>> difference_latitude(-90.0, +90.0) # the maximum distance
180.0
>>> difference_latitude(-90.0, +190.0)
80.0
Take care: this will also calculate distances for rubbish values.
>>> difference_latitude(95324.0, 3224.25)
60.25
Args:
value_a: the first latitude in degrees
value_b: the second latitude in degrees
Returns:
The difference between the two values in degrees in :math:`[0, 180]`
"""
# normalization is required because the distance between +80° and +100° shall be 0° and not 20°
value_a = normalize_latitude(value_a)
value_b = normalize_latitude(value_b)
# mathematically, there is no need to calculate in modulo `span` like in #difference_circular_range, since
# both values are already guaranteed to be in [-90.0, +90.0] and their absolute difference already gets
# what we need
difference: ScalarOrArray = numpy.abs(value_a - value_b)
if not isfinite(difference).all():
warn(
"difference_latitude(): "
f"difference between {value_a} and {value_b} was not a valid number: {difference}",
UserWarning,
)
return difference
def difference_longitude(value_a: ScalarOrArray, value_b: ScalarOrArray) -> ScalarOrArray:
"""Calculates the difference between two longitudal values.
The values do not need to be normalized.
If the difference between ``value_a`` and ``value_b`` is not finite (i.e. ``NaN``, ``+Inf`` or ``-Inf``) a
warning is printed and ``NaN`` is returned.
Examples:
>>> difference_longitude(-145.0, +150.0)
65.0
>>> difference_longitude(-90.0, -90.0)
0.0
>>> difference_longitude(-90.0, +90.0) # the maximum distance
180.0
>>> difference_longitude(-180.0, +190.0)
10.0
Take care: this will also calculate distances for rubbish values.
>>> difference_longitude(95324.0, 3224.25)
60.25
Args:
value_a: the first longitude in degrees
value_b: the second longitude in degrees
Returns:
The difference between the two values in degrees in :math:`[0, 180]`
"""
return _difference_circular_range(value_a, value_b, -180.0, +180.0)
def difference_direction(value_a: ScalarOrArray, value_b: ScalarOrArray) -> ScalarOrArray:
"""Calculates the difference between two directional (azimuthal/yaw) values.
The values do not need to be normalized.
If the difference between ``value_a`` and ``value_b`` is not finite (i.e. ``NaN``, ``+Inf`` or ``-Inf``) a
warning is printed and ``NaN`` is returned.
Examples:
>>> difference_direction(145.0, 165.0)
20.0
>>> difference_direction(42.0, 42.0)
0.0
>>> difference_direction(350.0, 334.5)
15.5
>>> difference_direction(270.0, 90.0) # the maximum distance
180.0
>>> difference_direction(365.0, 1.0)
4.0
>>> difference_direction(370.0, -20.0)
30.0
Take care: this will also calculate distances for rubbish values.
>>> difference_direction(95324.0, 3224.25)
60.25
Args:
value_a: the first direction in degrees
value_b: the second direction in degrees
Returns:
The difference between the two values in degrees in :math:`[0, 180]`
"""
return _difference_circular_range(value_a, value_b, 0.0, +360.0)
# Translation -----------------------------------------------------------------
def translate_floats(
longitude: float, latitude: float, direction: float, distance: float
) -> Tuple[Tuple[float, float], float]:
"""Simply a convenience method for calling :func:`~.translate_numpy` with a single point.
Args:
longitude: the original longitude in degrees
latitude: the original latitude in degrees
direction: the direction to translate into in degrees
distance: the distance to translate by in meters
Returns:
a pair ``(longitude, latitude)`` with the new coordinates and the back azimuth
"""
# just use the numpy variant as it would be converted to an array in pyproj internally anyhow
coordinates_array = array([[longitude, latitude]])
result, back = translate_numpy(coordinates_array, direction, distance)
new_coordinates = (result[0, 0], result[0, 1])
return new_coordinates, back[0]
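# Hedged sanity-check sketch (not part of the original module): translating a point and
# then translating the result along the returned back azimuth restores the original
# coordinates up to round-off.
def _example_translate_round_trip() -> None:
    (lon, lat), back = translate_floats(longitude=8.65, latitude=49.87, direction=90.0, distance=1_000.0)
    (lon2, lat2), _ = translate_floats(lon, lat, direction=back, distance=1_000.0)
    assert abs(lon2 - 8.65) < 1e-6 and abs(lat2 - 49.87) < 1e-6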
def translate_numpy(
coordinates: ndarray,
direction: Union[float, ndarray],
distance: Union[float, ndarray],
) -> Tuple[ndarray, ndarray]:
"""Translates the given point(s) by a given distance and direction/azimuth.
Everything is assumed to be in degrees.
Furthermore, this method returns the back azimuth as documented below.
Under the hood, this uses :meth:`pyproj.Geod.fwd`, which computes the *forward transformation* or
*forward azimuth*. This walks the given distance on the great circle arc given by the direction/
azimuth. It uses the direction to define the initial azimuth, as the real azimuth will probably change
along the great circle path (unless going exactly north/south or east/west).
See also `this website <https://www.movable-type.co.uk/scripts/latlong.html>`__, sections "Bearing"
and "Midpoint".
Note:
See the underlying geographiclib library, <geodesic.h>, *geod_direct()* for details on the
behaviour at the poles and other special cases. It's rather strange. Also keep in mind that this
method suffers from numerical issues like pretty much anything involving floating point
computations.
Note:
This is already provided in an object-oriented fashion by
- :meth:`pyrate.plan.geometry.location.PolarLocation.translate`
- :meth:`pyrate.plan.geometry.polygon.PolarPolygon.translate`
- :meth:`pyrate.plan.geometry.route.PolarRoute.translate`
Args:
coordinates: the coordinates as a numpy array with dimensions ``(number of points, 2)``,
where the first component describes the longitude and the second one the latitude
direction: The direction/azimuth to head to in degrees in :math:`[0, 360]` (0° is north, 90° is east).
If it is a scalar, a single value is assumed for all points.
If it is an array, it must be of shape ``(number of points, )``.
distance: The distance to translate by in meters; should not be very close to zero if the
backwards azimuth shall be used due to numerical stability.
If it is a scalar, a single value is assumed for all points.
If it is an array, it must be of shape ``(number of points, )``.
Returns:
(1) The new coordinates in the same format as the input
(2) The backwards azimuth in :math:`[0, 360)`, i.e. the direction which could be used to travel
from the modified location back to the original one by translating given that ``direction`` and
the same ``distance``.
"""
# Convert from [0, 360[ to [-180, +180]
if isscalar(direction):
direction = cast(float, direction) # The cast is needed until isscalar() narrows the type correctly
if direction > 180:
direction -= 360
azimuth = full((coordinates.shape[0],), direction)
else:
# The cast is needed until isscalar() narrows the type correctly
azimuth = cast(ndarray, direction).copy()
azimuth[azimuth > 180] -= 360
# Make sure that dist is an array
dist = full((coordinates.shape[0],), distance) if isscalar(distance) else distance
# If any input to fwd() is an array/sequence, then all must be
coordinates[:, 0], coordinates[:, 1], back_azimuth = WGS84_PYPROJ_GEOD.fwd(
lons=coordinates[:, 0],
lats=coordinates[:, 1],
az=azimuth,
dist=dist,
radians=False,
)
# back azimuth is in [-180, +180], so we need to convert to [0, 360[
# see the underlying *geographiclib* library, <geodesic.h>, `geod_direct()`:
# https://geographiclib.sourceforge.io/1.49/C/geodesic_8h.html#a676f59f07987ddd3dd4109fcfeccdb9d
back_azimuth[back_azimuth < 0] += 360
back_azimuth[back_azimuth == 360.0] = 0.0
return coordinates, back_azimuth
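# Hedged vectorized sketch (not part of the original module): a scalar direction and
# distance are broadcast over all points; note that ``coordinates`` is modified in place
# as well as returned.
def _example_translate_numpy() -> None:
    points = array([[8.65, 49.87], [8.66, 49.88]])  # one (longitude, latitude) pair per row
    moved, back_azimuth = translate_numpy(points, direction=0.0, distance=500.0)
    assert moved.shape == (2, 2) and back_azimuth.shape == (2,)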
# Distance --------------------------------------------------------------------
def fast_distance_geo(
latitudes: ScalarOrArray, longitudes: ScalarOrArray, center_latitude: float, center_longitude: float
) -> ScalarOrArray:
"""Approximates the great circle distance of all points to the center.
Warnings:
All coordinates are assumed to be within about 250 km of the center to provide reasonable accuracy.
Under that assumption, it was determined experimentally that the error compared to the great-circle
distance was always below 5%.
This was done by setting ``@hypothesis.settings(max_examples=50000)`` on the test case
``TestDistanceCalculation.test_fast_distance_geo`` and observing that it did not fail.
Depending on the latitude **of the center**, the *equirectangular approximation*
or the *polar coordinate flat-earth formula* are used. Both assume a spherical world and then flatten it
onto a plane.
Args:
latitudes: the latitude values, in radians in range :math:`[-\\frac{π}{2}, +\\frac{π}{2}]`
longitudes: the longitude values, in radians in range :math:`[-π, +π]`
center_latitude: the latitude of the center, in radians in range
:math:`[-\\frac{π}{2}, +\\frac{π}{2}]`
center_longitude: the longitude of the center, in radians in range :math:`[-π, +π]`
See Also:
:func:`~haversine_numpy`: about three times slower but more precise
References:
- Based on
`Movable Type Scripts: Calculate distance, bearing and more between Latitude/Longitude points
<https://www.movable-type.co.uk/scripts/latlong.html>`__
(as of Dec. 2020), Section "Equirectangular approximation".
In that source: ``phi = latitude``, ``lambda = longitude``, ``theta = co-latitude`` and
``R = (mean) earth radius``.
"""
delta_lambda = _difference_circular_range(longitudes, center_longitude, -pi, +pi) # type: ignore
# The border value of 80.0° latitude (``radians(80.0)`` below) was determined by eye-balling from some Tissot's indicatrixes
if abs(center_latitude) > 1.3962634015954636:
# move all locations to the northern hemisphere first if required
if center_latitude < 0:
center_latitude = -center_latitude
latitudes = -latitudes
del longitudes, center_longitude # they are now wrong
# use the "polar coordinate flat-earth formula"
theta_1 = (pi / 2) - latitudes
theta_2 = (pi / 2) - center_latitude
summed = square(theta_1) + square(theta_2) - 2 * theta_1 * theta_2 * cos(delta_lambda) # type: ignore
summed = clip(summed, 0.0, None) # for numerical stability as above sum may be slightly negative
return cast(ScalarOrArray, sqrt(summed) * MEAN_EARTH_RADIUS)
# use the "equirectangular approximation"
d_lat = _difference_circular_range(latitudes, center_latitude, -pi / 2, +pi / 2) # type: ignore
d_lon = delta_lambda * cos(center_latitude)
dist_degrees = hypot(d_lat, d_lon) # type: ignore
return cast(ScalarOrArray, dist_degrees * MEAN_EARTH_RADIUS)
def haversine_numpy(
latitudes: ScalarOrArray, longitudes: ScalarOrArray, center_latitude: float, center_longitude: float
) -> ScalarOrArray:
"""Calculate the great circle distance between each point to the center in meters.
Note:
"The min() function protects against possible roundoff errors that could
sabotage computation of the arcsine if the two points are very nearly
antipodal (that is, on opposite sides of the Earth). Under these conditions,
the Haversine Formula is ill-conditioned (see the discussion below), but
the error, perhaps as large as 2 km [...], is in the context of a
distance near 20,000 km [...]."
(Source: `Movable Type Scripts: GIS FAQ Q5.1: Great circle distance between 2 points
<https://www.movable-type.co.uk/scripts/gis-faq-5.1.html>`__)
Args:
latitudes: the latitude values, in radians in range :math:`[-\\frac{π}{2}, +\\frac{π}{2}]`
longitudes: the longitude values, in radians in range :math:`[-π, +π]`
center_latitude: the latitude of the center, in radians in range
:math:`[-\\frac{π}{2}, +\\frac{π}{2}]`
center_longitude: the longitude of the center, in radians in range :math:`[-π, +π]`
See Also:
:func:`~fast_distance_geo`: an approximation that is about three times faster
Returns:
The great circle distance between each point to the center in meters.
References:
- `Wikipedia: Haversine formula <https://en.wikipedia.org/wiki/Haversine_formula>`__
"""
d_lat = latitudes - center_latitude
d_lon = longitudes - center_longitude
summed = sin(d_lat / 2) ** 2 + cos(latitudes) * cos(center_latitude) * sin(d_lon / 2) ** 2
# the intermediate result c is the great circle distance in radians
d_rad = 2 * arcsin(numpy.minimum(sqrt(summed), 1.0))
# the great circle distance will be in the same units as MEAN_EARTH_RADIUS
return cast(ScalarOrArray, d_rad * MEAN_EARTH_RADIUS)
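# Hedged comparison sketch (not part of the original module): for points well within
# 250 km of the center, the fast approximation stays close to the haversine result.
def _example_compare_distances() -> None:
    lat, lon = numpy.radians(49.9), numpy.radians(8.7)
    center_lat, center_lon = numpy.radians(49.8), numpy.radians(8.6)
    fast = fast_distance_geo(lat, lon, center_lat, center_lon)
    exact = haversine_numpy(lat, lon, center_lat, center_lon)
    assert abs(fast - exact) / exact < 0.05  # the documented 5% error bound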
# Conversion between meters and radians ---------------------------------------
def meters2rad(meters: ScalarOrArray) -> ScalarOrArray:
"""Meters to radians (latitude or longitude) at the equator."""
return (meters / MEAN_EARTH_CIRCUMFERENCE) * (2.0 * pi)
def rad2meters(rad: ScalarOrArray) -> ScalarOrArray:
"""Radians (latitude or longitude) at the equator to meters."""
return (rad / (2.0 * pi)) * MEAN_EARTH_CIRCUMFERENCE
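# Hedged sanity check (not part of the original module): the two conversions above are
# inverses of each other up to floating point error.
def _example_meters_radians_round_trip() -> None:
    assert abs(rad2meters(meters2rad(1_000.0)) - 1_000.0) < 1e-6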
# Cartesian to Spherical ------------------------------------------------------
def cartesian_to_spherical(xyz: ndarray) -> Tuple[ndarray, ndarray]:
"""Converts cartesian coordinates on a unit sphere to spherical coordinates.
Args:
xyz: The cartesian coordinates, expected as an array where each line contains three coordinates for
a point.
Returns:
The coordinates as latitude and longitude in radians,
such that :math:`-\\frac{π}{2} ≤ φ ≤ +\\frac{π}{2}` is the latitude and :math:`-π ≤ θ < +π` is the
longitude.
Raises:
:class:`AssertionError`: if not all points lie on the unit sphere, as then the altitude would be
relevant, but it is not considered by this conversion
References:
- `Movable Type Scripts: Vector-based geodesy
<https://www.movable-type.co.uk/scripts/latlong-vectors.html>`__
- `The relevant Wikipedia article
<https://en.wikipedia.org/wiki/Spherical_coordinate_system#Cartesian_coordinates>`__.
Note: In these formulas, mathematicians' coordinates are used, where :math:`0 ≤ φ ≤ π` is the
latitude coming down from the pole and :math:`0 ≤ θ < 2π` is the longitude,
with the prime meridian being at :math:`π`.
We convert these to the usual coordinate conventions of the geographic community within this method.
- The `nvector library <https://github.com/pbrod/nvector/>`__ provides a possible alternative
implementation (see section "Example 3: 'ECEF-vector to geodetic latitude'").
"""
# elevation / r:
elevation = sqrt(xyz[:, 0] ** 2 + xyz[:, 1] ** 2 + xyz[:, 2] ** 2)
assert not numpy.any(absolute(elevation - 1.0) > 1e-9), "not all points lie on the unit sphere"
# also normalize because the floating point representation of the cartesian coordinates might have
# slightly messed with it; this value moves the borders of the clipping slightly inwards
# in other words: it makes the clipped values lie *strict* within the bounds, and never
# with equality
move_in = 1e-14 # empirically worked well
# latitude / theta:
# we know that the elevation is very close to 1, so we do not need to divide by it
latitudes = arccos(xyz[:, 2])
latitudes = clip(latitudes, move_in, pi - move_in) # clip at the poles
latitudes -= pi / 2 # convert from mathematical to geographic convention
# longitude / phi
longitudes = arctan2(xyz[:, 1], xyz[:, 0])
# we also clip here although wrapping using modulo 2*pi would be more appropriate
# however, this introduced new numerical problems which are avoided by clipping
# This also guarantees that each longitude is strictly less than 180°
longitudes = clip(longitudes, -pi + move_in, +pi - move_in)
return latitudes, longitudes
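# Hedged sketch (not part of the original module): a point on the x-axis of the unit
# sphere maps to the equator at the prime meridian (latitude 0, longitude 0).
def _example_cartesian_to_spherical() -> None:
    latitudes, longitudes = cartesian_to_spherical(array([[1.0, 0.0, 0.0]]))
    assert abs(latitudes[0]) < 1e-9 and abs(longitudes[0]) < 1e-9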
# Mean computation on angles and coordinates ----------------------------------
def mean_coordinate(latitudes: ndarray, longitudes: ndarray) -> Tuple[float, float]:
"""Computes a reasonable mean coordinate if possible.
Args:
latitudes: The array of latitude values to compute the mean of, in degrees. Will be flattened.
longitudes: The array of longitude values to compute the mean of, in degrees. Will be flattened.
Must be of the same length as ``latitudes``.
Returns:
The mean coordinate of the given ones, in degrees as ``(latitude, longitude)``.
Raises:
ValueError: If no meaningful mean (of the longitudes) can be computed. See :func:`~mean_angle`.
See Also:
- :func:`~mean_angle`
"""
assert len(latitudes) == len(longitudes), "Both coordinate arrays must have the same length"
# In case of the latitude values, the "ambiguous" case of antipodal angles/points can be solved by
# observing that only latitude values between -90° and +90° are allowed. Therefore, +/- 0° is a reasonable
# result in this case.
try:
latitude = mean_angle(numpy.radians(latitudes))
except ValueError:
latitude = 0.0
# In the case of longitudes, simply let the ValueError raise as there is nothing we can do here
longitude = mean_angle(numpy.radians(longitudes))
return numpy.degrees(latitude), numpy.degrees(longitude)
def mean_angle(radians: ndarray, tolerance: float = 1e-6) -> float:
"""Computes a reasonable mean value if possible.
Args:
radians: The array of angles to compute the mean of, in radians. Will be flattened.
tolerance: If both components of the cartesian intermediate representation are less than this value,
a ``ValueError`` with a descriptive error message will be raised.
Returns:
The mean angle of the given ones
References:
- `Mean of circular quantities (section Mean of angles) on Wikipedia
<https://en.wikipedia.org/wiki/Mean_of_circular_quantities#Mean_of_angles>`
Raises:
ValueError: If no meaningful mean can be computed. This is the case when two antipodal angles are
given or the sum of multiple ones is "antipodal".
See Also:
- :func:`~mean_coordinate`
"""
x: float = sin(radians).sum()
y: float = cos(radians).sum()
if abs(x) < tolerance and abs(y) < tolerance:
raise ValueError(
"The mean angle of nearly antipodal is ambiguous. "
"If this arises while computing mean points on polygons and routes, "
"the geometry likely is just so large that many approximations will not work anymore. "
"Consider splitting them up into smaller ones."
)
return atan2(x, y) % tau
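# Hedged sketch (not part of the original module): the circular mean of 350° and 10° is
# 0° (north), where a naive arithmetic mean would yield 180°.
def _example_mean_angle() -> None:
    mean = mean_angle(numpy.radians(array([350.0, 10.0])))
    assert abs(mean) < 1e-9 or abs(mean - tau) < 1e-9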

View File

@ -0,0 +1,491 @@
"""This module implements abstractions for timestamped geospatial locations in WGS84 and local coordinates.
Two locations are ``==`` if and only if they are equal according to ``equals_exact()``.
"""
# Standard library
from copy import deepcopy
from math import cos
from math import radians
from math import sin
# Typing
from typing import Any
from typing import cast
from typing import Dict
from typing import Optional
from typing import Tuple
# Mathematics
from geopy.distance import GeodesicDistance
from geopy.distance import GreatCircleDistance
from numpy import array
from numpy import ndarray
from pyproj import Proj
from shapely.affinity import translate
from shapely.geometry import Point
# Basis
from .geospatial import Geospatial
from .geospatial import LocationType
# Helpers
from .helpers import normalize_latitude
from .helpers import normalize_longitude
from .helpers import translate_floats
class PolarLocation(Geospatial):
"""A geospatial location representing a spatial object on earth.
See `here <http://www.movable-type.co.uk/scripts/latlong.html>`__ for a nice collection of formulas and
explanations on geographic transformations and calculations. This is the *Rome* for geographic calculation
questions on *Stack Overflow*: All roads seem to eventually lead here.
Examples:
First import some packages
>>> from math import isclose
>>> from pyrate.plan.geometry import PolarLocation, Direction
Then create two example coordinates to work with:
>>> team_room = PolarLocation(latitude=49.878091, longitude=8.654052)
>>> frankfurt = PolarLocation(latitude=50.113709, longitude=8.656561)
Translate the team room 27 km north, which is towards *Frankfurt*:
>>> team_room, direction_going_back = team_room.translate(direction=Direction.North, distance=27_000)
>>> assert isclose(direction_going_back, Direction.South)
The variable ``team_room`` now represents a location in/near *Frankfurt*,
only a couple hundred meters away from the location ``frankfurt``:
>>> print(team_room.distance(frankfurt)) # doctest: +ELLIPSIS
812.512...
Coordinates can also be projected onto a local tangent plane and back.
The ``origin`` defines the point where the plane touches the sphere.
>>> frankfurt == frankfurt.to_cartesian(origin=frankfurt).to_polar()
True
Args:
latitude: The latitude in degrees (will be normalized)
longitude: The longitude in degrees (will be normalized)
location_type: The type of this location
name: An optional name of this location
identifier: An optional unique identifier for this object, in :math:`[0, 2^{63})`, i.e. it fits into a signed 64-bit integer
"""
def __init__( # pylint: disable=too-many-arguments
self,
latitude: float,
longitude: float,
location_type: LocationType = LocationType.UNKNOWN,
name: Optional[str] = None,
identifier: Optional[int] = None,
) -> None:
# Type hints
self._latitude: float
self._longitude: float
self._projection: Optional[Proj]
# Attributes setup
self.latitude = latitude
self.longitude = longitude
# self._projection = None # already set by the property accesses before
super().__init__(location_type=location_type, name=name, identifier=identifier)
@property
def latitude(self) -> float:
"""The latitude of this location in degrees in :math:`[-90, +90]`.
The value is always disambiguated/normalized.
"""
return self._latitude
@latitude.setter
def latitude(self, latitude: float) -> None:
self._projection = None
self._latitude = normalize_latitude(latitude)
@property
def longitude(self) -> float:
The longitude of this location in degrees in :math:`[-180, +180)`.
The value is always disambiguated/normalized.
"""
return self._longitude
@longitude.setter
def longitude(self, longitude: float) -> None:
self._projection = None
self._longitude = normalize_longitude(longitude)
@property
def projection(self) -> Proj:
"""Derive a :class:`pyproj.Proj` instance for projecting points.
This instance is cached for performance reasons, since its creation is relatively time consuming. The
cache is appropriately invalidated when setting a new :attr:`~latitude` or :attr:`~longitude`.
"""
if self._projection is None:
self._projection = Proj(
proj="tmerc",
ellps="WGS84",
units="m",
lon_0=self.longitude,
lat_0=self.latitude,
)
return self._projection
def to_cartesian(self, origin: "PolarLocation") -> "CartesianLocation":
"""Projects this point to a cartesian representation according to the given global reference.
Args:
origin: The reference by which to project onto the local tangent plane
Returns:
The cartesian representation of this point with the given reference point being set
"""
# convert to cartesian
east, north = origin.projection(self.longitude, self.latitude)
return CartesianLocation(
east,
north,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
origin=origin,
)
def translate(self, direction: float, distance: float) -> Tuple["PolarLocation", float]:
"""Translates this location and returns the new location and back-azimuth.
See :func:`pyrate.plan.geometry.helpers.translate_floats` for details.
"""
back_azimuth: float # this is required for mypy, don't know why that is
(longitude, latitude), back_azimuth = translate_floats(
self.longitude, self.latitude, direction, distance
)
new_location = PolarLocation(
longitude=longitude,
latitude=latitude,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
)
return new_location, back_azimuth
def distance(self, other: "PolarLocation", approximate: bool = False) -> float:
"""Calculate the horizontal geodesic distance to another location in meters, assumes degrees.
This assumes an ellipsoidal earth and converges for any pair of points on earth.
It is accurate to round-off and uses *geographiclib* (https://pypi.org/project/geographiclib/)
via *geopy* (https://pypi.org/project/geopy/).
The faster *great-circle distance* can also be used by setting *approximate=True*.
It assumes only a spherical earth and is guaranteed to give a result for any pair of points.
Its error is at most about 0.5%, and it is also based on *geopy*. It is advised to use the exact solution unless you
know what you are doing.
See also:
- https://en.wikipedia.org/wiki/Geodesics_on_an_ellipsoid
- https://en.wikipedia.org/wiki/Great-circle_distance
- https://en.wikipedia.org/wiki/Geographical_distance
Args:
other: The location to measure the distance to in degrees
approximate: Whether to use a faster approximation or not (default: ``False``)
Returns:
The distance to the other point in meters
"""
# input as latitude, longitude
this = (self.latitude, self.longitude)
that = (other.latitude, other.longitude)
if approximate:
distance = GreatCircleDistance(this, that).meters
else:
distance = GeodesicDistance(this, that).meters
# Geopy is not typed as of now
return cast(float, distance)
@property
def __geo_interface__(self) -> Dict[str, Any]:
return {"type": "Point", "coordinates": (self.longitude, self.latitude)}
def __eq__(self, other: Any) -> bool:
return self.equals_exact(other, tolerance=0.0)
def equals(self, other: Any) -> bool:
"""Determines whether the given ``other`` object exactly equals this one.
This function mimics :meth:`shapely.geometry.base.BaseGeometry.equals`:
"Refers to point-set equality (or topological equality), and is equivalent to
``self.within(other) and self.contains(other)``."
Args:
other: The object to compare to
Returns:
Whether this and the other object are exactly equal
"""
# The above docstring is also copied by PolarPolygon and PolarRoute
return self.equals_exact(other, 0.0)
def equals_exact(self, other: Any, tolerance: float) -> bool:
"""Determines whether the given ``other`` object equals this one.
This function mimics :meth:`shapely.geometry.base.BaseGeometry.equals_exact`:
"Refers to coordinate equality, which requires coordinates to be equal
and in the same order for all components of a geometry."
Args:
other: The object to compare to
tolerance: The absolute deviation in meters that is tolerated on the latitude and longitude values
Returns:
Whether this and the ``other`` object are (nearly) equal
"""
# The above docstring is also copied by PolarPolygon and PolarRoute
return (
isinstance(other, PolarLocation)
and Geospatial.__eq__(self, other)
and self.distance(other) <= tolerance
)
def __repr__(self) -> str:
# we leave out self._projection due to performance reasons and because it is redundant
return f"PolarLocation(latitude={self.latitude}, longitude={self.longitude}{self._repr_extras})"
class CartesianLocation(Geospatial, Point):
"""A point in the cartesian plane based on local coordinates with an optional global reference.
Examples:
You can simply create a cartesian location like this, where coordinates are in meters:
>>> location_a = CartesianLocation(east=10, north=-20)
>>> location_b = CartesianLocation(east=-30, north=0)
>>> distance = location_a.distance(location_b)
>>> distance # doctest: +ELLIPSIS
44.721...
Keep in mind that locations (like all other cartesian geometries) are immutable due to the underlying
Shapely library:
>>> location_a.x = 5.0
Traceback (most recent call last):
...
AttributeError: can't set attribute
The attributes ``east`` and ``north`` are provided as aliases for ``x`` and ``y``:
>>> assert location_a.x == location_a.east
>>> assert location_a.y == location_a.north
You can also project them to a polar coordinate.
To do this, one must only provide a reference point ``origin`` either when constructing the location
or when calling :meth:`~to_polar`:
>>> reference = PolarLocation(latitude=50, longitude=30)
>>> location_a.origin = reference
>>> location_b.origin = reference
>>> location_a.to_polar().distance(location_b.to_polar()) # doctest: +ELLIPSIS
44.721...
As any :class:`~CartesianLocation` also inherits from :class:`shapely.geometry.Point`,
we can also use :mod:`shapely` methods
(see `the Shapely docs <https://shapely.readthedocs.io/en/stable/manual.html>`__).
For example, we can inflate the point using ``buffer()``.
Mind though, that this will return a :mod:`shapely` geometry and not a :mod:`pyrate.plan.geometry`
object.
>>> buffered = location_a.buffer(10)
>>> buffered.geometryType()
'Polygon'
Thus, we need to convert it back to a pyrate object like so (keep in mind that we now need a polygon):
>>> from pyrate.plan.geometry.polygon import CartesianPolygon
>>> buffered_pyrate = CartesianPolygon.from_shapely(buffered)
>>> buffered.equals(buffered_pyrate)
True
Args:
east: The easting of the location in meters
north: The northing of the location in meters
origin: A reference that can be used to project this cartesian representation (back)
into a polar one
location_type: The type of this location
name: An optional name of this location
identifier: An optional unique identifier for this object, in :math:`[0, 2**63)`, i.e. 64 signed bits
"""
def __init__( # pylint: disable=too-many-arguments
self,
east: float,
north: float,
origin: Optional["PolarLocation"] = None,
location_type: LocationType = LocationType.UNKNOWN,
name: Optional[str] = None,
identifier: Optional[int] = None,
) -> None:
# Set attribute
self.origin = origin
# Typing hints (actually defined by shapely)
self.x: float
self.y: float
# Initialize the super classes
Point.__init__(self, east, north)
Geospatial.__init__(self, location_type=location_type, name=name, identifier=identifier)
#: Named access to the internal shapely point ``x``. Readonly.
east: float = Point.x
#: Named access to the internal shapely point ``y``. Readonly.
north: float = Point.y
def to_polar(self, origin: Optional["PolarLocation"] = None) -> PolarLocation:
"""Computes the polar representation of this point.
Args:
origin: The global reference to be used for back-projection, must be set if and only if
:attr:`~pyrate.plan.geometry.CartesianLocation.origin` is ``None``
Returns:
The global, polar representation of this point
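Example (an illustrative sketch of the origin precedence; giving both origins raises an error):
>>> located = CartesianLocation(east=0, north=0, origin=PolarLocation(latitude=50, longitude=30))
>>> located.to_polar(origin=PolarLocation(latitude=0, longitude=0))
Traceback (most recent call last):
...
ValueError: provided an explicit origin while the instance already has one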
"""
if origin is None:
if self.origin is None:
raise ValueError("need to give an explicit origin when the instance does not have one")
origin = self.origin
elif self.origin is not None:
raise ValueError("provided an explicit origin while the instance already has one")
# convert back to polar coordinates
longitude, latitude = origin.projection(self.east, self.north, inverse=True)
return PolarLocation(
longitude=longitude,
latitude=latitude,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
)
@classmethod
def from_shapely(cls, point: Point, *args, **kwargs) -> "CartesianLocation":
"""Create a cartesian location from a shapely point.
Args:
point: A shapely point
*args: Positional arguments to be passed to :class:`~CartesianLocation`
**kwargs: Keyword arguments to be passed to :class:`~CartesianLocation`
Returns:
The cartesian location created from the given geometry and other parameters
"""
return cls(point.x, point.y, *args, **kwargs)
def translate(self, direction: float, distance: float) -> Tuple["CartesianLocation", ndarray]:
"""Translates this location.
Args:
direction: The direction angle in degrees (``0`` is north, clockwise)
distance: The distance to translate in meters
Returns:
The translated location and the translation vector ``(x_offset, y_offset)`` in meters
that can be used to reconstruct the original location
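Example (a minimal sketch; ``direction=90`` is due east):
>>> moved, offset = CartesianLocation(east=0, north=0).translate(direction=90.0, distance=10.0)
>>> round(moved.east, 6), round(moved.north, 6)
(10.0, 0.0)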
"""
x_offset = sin(radians(direction)) * distance
y_offset = cos(radians(direction)) * distance
return (
CartesianLocation.from_shapely(
translate(Point(self.east, self.north), xoff=x_offset, yoff=y_offset),
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
origin=self.origin,
),
array([-x_offset, -y_offset]),
)
@property
def __geo_interface__(self) -> Dict[str, Any]:
return {"type": "Point", "coordinates": (self.east, self.north)}
def __copy__(self) -> "CartesianLocation":
return CartesianLocation(
east=self.east,
north=self.north,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
origin=self.origin,
)
def __deepcopy__(self, memodict: Dict) -> "CartesianLocation":
return CartesianLocation(
east=deepcopy(self.east, memodict),
north=deepcopy(self.north, memodict),
location_type=deepcopy(self.location_type, memodict),
name=deepcopy(self.name, memodict),
identifier=deepcopy(self.identifier, memodict),
origin=deepcopy(self.origin, memodict),
)
def __eq__(self, other: Any) -> bool:
return self.equals_exact(other, tolerance=0.0)
# Inherits the docstring
def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
return (
isinstance(other, CartesianLocation)
and Point.equals(self, other)
and Geospatial.__eq__(self, other)
and self.origin == other.origin
)
# Inherits the docstring
def equals_exact( # pylint: disable=missing-function-docstring
self, other: Any, tolerance: float
) -> bool:
return (
isinstance(other, CartesianLocation)
and Point.equals_exact(self, other, tolerance)
and Geospatial.__eq__(self, other)
and self.origin == other.origin
)
def __repr__(self) -> str:
origin = f", origin={self.origin}" if self.origin is not None else ""
return f"CartesianLocation(east={self.east}, north={self.north}{origin}{self._repr_extras})"
def __str__(self) -> str:
# this is required to override shapely.geometry.Point.__str__()
return self.__repr__()

View File

@ -0,0 +1,624 @@
"""This module implements abstractions for geospatial, polygonal shapes in WGS84 and local cartesian
coordinates using shapely.
Two polygons are ``==`` if and only if they are equal according to ``equals_exact()``.
"""
# Python standard library
from copy import deepcopy
from math import cos
from math import radians
from math import sin
# Typing
from typing import Any
from typing import cast
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
# Data modelling
from numpy import array
from numpy import isfinite
from numpy import ndarray
from shapely.affinity import translate
from shapely.geometry import Polygon
# Geospatial basis
from .geospatial import Geospatial
from .geospatial import LocationType
# Geospatial helpers
from .helpers import mean_coordinate
from .helpers import translate_numpy
from .helpers import WGS84_PYPROJ_GEOD
# Location representation
from .location import CartesianLocation
from .location import PolarLocation
class PolarPolygon(Geospatial):
"""A polygon based on WGS84 coordinates.
An object consisting of only a single point may be represented by a polygon that contains the same location three times.
Args:
locations: The points that make up this polygon; see :attr:`~.locations`
location_type: The type of this polygon
name: An optional name of this polygon
identifier: The polygon's optional unique identifier, in :math:`[0, 2**63)`, i.e. 64 signed bits
"""
def __init__(
self,
locations: List[PolarLocation],
location_type: LocationType = LocationType.UNKNOWN,
name: Optional[str] = None,
identifier: Optional[int] = None,
) -> None:
# Type hints
self._locations: List[PolarLocation]
# Attributes setup
self.locations = locations
self._mean_location: Optional[PolarLocation] = None
super().__init__(location_type=location_type, name=name, identifier=identifier)
@property
def locations(self) -> List[PolarLocation]:
"""The points that make up this polygon.
Getter:
At least three points are returned.
Setter:
The list is closed if it is not already, such that the first and last points in the list always
match exactly. Raises an :class:`AssertionError` if fewer than three points are given.
"""
return self._locations
@locations.setter
def locations(self, locations: List[PolarLocation]) -> None:
assert len(locations) >= 3, "a polygon must contain at least three points"
# close the ring as shapely would do it
# comparison is done by exact comparison
if (
locations[0].latitude != locations[-1].latitude
or locations[0].longitude != locations[-1].longitude
):
locations.append(locations[0])
self._locations = locations
self._mean_location = None
def to_cartesian(self, origin: PolarLocation) -> "CartesianPolygon":
"""Projects this polygon to a cartesian representation according to the given global reference.
Args:
origin: The reference point by which to project onto the local tangent plane
Returns:
The cartesian representation of this polygon with the given reference point being set
"""
# convert to cartesian
coordinates = self.to_numpy()
coordinates[:, 0], coordinates[:, 1] = origin.projection(coordinates[:, 0], coordinates[:, 1])
return CartesianPolygon.from_numpy(
coordinates,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
origin=origin,
)
def distance_to_vertices(self, location: PolarLocation, approximate: bool = False) -> float:
"""Computes the distance of the given location to the nearest vertex of this polygon.
Args:
location: The location to compute the distance from
approximate: Whether to use a less precise, faster method or not
Returns:
The distance to the nearest vertex of this polygon, in meters
"""
return min([location.distance(loc, approximate) for loc in self.locations])
@property
def area(self) -> float:
"""Returns the area of the polygon in :math:`meters^2`.
Only simple polygons are supported, i.e. not self-intersecting ones.
See :meth:`pyproj.Geod.polygon_area_perimeter` for the implementation.
The returned value is always non-negative.
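Example (a rough sanity check, assuming WGS84: a 0.1° × 0.1° quadrilateral at the equator spans
about 11.13 km × 11.06 km):
>>> from numpy import array
>>> quad = PolarPolygon.from_numpy(array([[0.0, 0.0], [0.1, 0.0], [0.1, 0.1], [0.0, 0.1]]))
>>> 1.2e8 < quad.area < 1.25e8
True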
"""
_, area = WGS84_PYPROJ_GEOD.polygon_area_perimeter(
lons=[location.longitude for location in self.locations],
lats=[location.latitude for location in self.locations],
radians=False,
)
# pyproj is not typed as of now
# the returned area is signed with the ordering of the points
return abs(area)
@property
def is_valid(self) -> bool:
"""Whether this geometry is valid according to :mod:`shapely`. Quite expensive, not cached.
Invalid geometries might cross themselves or have zero area. Other tools, like *GEOS*, might still refuse a valid one.
"""
return cast(bool, self.to_cartesian(self.mean).is_valid)
def simplify(self, tolerance: float, preserve_topology: bool = True) -> "PolarPolygon":
"""Creates a simplified copy analogous to :meth:`shapely.geometry.Polygon.simplify`.
The simplification is achieved by reducing its number of vertices in a way that least deforms the
shape.
Args:
tolerance: This is passed to :meth:`shapely.geometry.Polygon.simplify`:
"All points in the simplified object will be within the tolerance distance of the
original geometry."
preserve_topology: This is passed to :meth:`shapely.geometry.Polygon.simplify`:
"By default a slower algorithm is used that preserves topology."
Returns:
A simplified version of the polygon with the same other attributes
"""
projection_center = self.mean
cartesian = self.to_cartesian(projection_center)
simplified = cartesian.simplify(tolerance, preserve_topology)
coords = array(simplified.exterior.xy).T # this is the fastest known method
result_cartesian = CartesianPolygon.from_numpy(
coords, location_type=self.location_type, name=self.name, identifier=self.identifier
)
return result_cartesian.to_polar(projection_center)
def translate(self, direction: float, distance: float) -> Tuple["PolarPolygon", ndarray]:
"""Translates this location and returns the new polygon and back-azimuth.
See :func:`pyrate.plan.geometry.helpers.translate_floats` for details.
"""
new_coordinates, back_azimuth_array = translate_numpy(self.to_numpy(), direction, distance)
new_polygon = PolarPolygon.from_numpy(
new_coordinates,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
)
return new_polygon, back_azimuth_array
def to_numpy(self) -> ndarray:
"""Converts the coordinates defining this polygon into a :class:`numpy.ndarray`.
Returns:
An array with shape ``(number of locations, 2)``, where each location is represented by a pair of
``(longitude, latitude)``, each in degrees.
See Also:
:meth:`~from_numpy`
"""
return array(
[(location.longitude, location.latitude) for location in self.locations],
dtype="float64",
order="C",
)
@classmethod
def from_numpy(cls, data: ndarray, *args, **kwargs) -> "PolarPolygon":
"""Create a polar polygon from a numpy representation.
Args:
data: An array with shape ``(number of locations, 2)``, where each location is represented by a
pair of ``(longitude, latitude)``, each in degrees.
args: positional arguments to be passed to :class:`~PolarPolygon`
kwargs: keyword arguments to be passed to :class:`~PolarPolygon`
Returns:
The polar polygon created from the given coordinates and other parameters
Raises:
AssertionError: If the shape of ``data`` is invalid or contains non-finite values
See Also:
:meth:`~to_numpy`
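Example (a small sketch; note that the ring is closed automatically by appending the first point):
>>> from numpy import array
>>> polygon = PolarPolygon.from_numpy(array([[30.0, 50.0], [30.1, 50.0], [30.1, 50.1]]))
>>> len(polygon.locations)
4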
"""
assert len(data.shape) == 2
assert data.shape[1] == 2
assert isfinite(data).all(), "Invalid values in PolarPolygon.from_numpy()"
return cls([PolarLocation(latitude=lat, longitude=lon) for (lon, lat) in data], *args, **kwargs)
@property
def mean(self) -> PolarLocation:
"""Computes a reasonable mean location of the polygon, if possible. The result is cached.
Raises:
ValueError: If no meaningful mean (of the longitudes) can be computed.
See :func:`pyrate.plan.geometry.helpers.mean_angle`.
"""
if self._mean_location is None:
coordinates = self.to_numpy()
latitude, longitude = mean_coordinate(latitudes=coordinates[:, 1], longitudes=coordinates[:, 0])
name = f"{self.name} - mean" if self.name else "mean"
self._mean_location = PolarLocation(latitude, longitude, name=name)
return self._mean_location
@property
def __geo_interface__(self) -> Dict[str, Any]:
return {
"type": "Polygon",
"coordinates": [
# the inner array is only the exterior ring,
# and we don't have an interior one
[(location.longitude, location.latitude) for location in self.locations]
],
}
def __eq__(self, other: Any) -> bool:
return self.equals_exact(other, tolerance=0.0)
def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
return (
isinstance(other, PolarPolygon)
and self.to_cartesian(self.mean).equals(other.to_cartesian(self.mean))
and Geospatial.__eq__(self, other)
)
equals.__doc__ = PolarLocation.equals.__doc__
def equals_exact(self, other: Any, tolerance: float) -> bool:
# pylint: disable=missing-function-docstring
return (
isinstance(other, PolarPolygon)
and self.to_cartesian(self.mean).equals_exact(other.to_cartesian(self.mean), tolerance)
and Geospatial.__eq__(self, other)
)
equals_exact.__doc__ = PolarLocation.equals_exact.__doc__
def equals_almost_congruent(
self, other: Any, rel_tolerance: float = 1e-6, abs_tolerance: float = 1e-6
) -> bool:
"""Returns whether two objects are approximately congruent and their attributes equal exactly.
See :meth:`~almost_congruent` for details on the specific definition of congruence and the tolerances.
Args:
other: The object to compare with
rel_tolerance: The relative tolerance (relative to the larger area)
abs_tolerance: The absolute area of tolerance in square meters
Returns:
Whether this and the ``other`` polygon are approximately congruent and all attributes are equal.
Returns ``False`` if ``other`` is not a :class:`~PolarPolygon`.
"""
return (
isinstance(other, PolarPolygon)
and self.almost_congruent(other, rel_tolerance=rel_tolerance, abs_tolerance=abs_tolerance)
and Geospatial.__eq__(self, other)
)
def almost_congruent(
self, other: "PolarPolygon", rel_tolerance: float = 1e-6, abs_tolerance: float = 1e-6
) -> bool:
"""Returns whether two polygons are approximately congruent while allowing for small differences.
This function is not directly part of shapely and is somewhat costly to compute. It has to:
- Project both polygons to cartesian coordinates (to continue with shapely calculations).
- Calculate the area of the symmetric difference between this and the other polygon.
- Calculate the area of both individual polygons.
The arguments follow the style of :func:`math.isclose`.
Args:
other: The polygon to compare with
rel_tolerance: The relative tolerance (relative to the larger area)
abs_tolerance: The absolute area of tolerance in square meters
Returns:
Whether this and the other polygon are approximately congruent. The larger one of the relative
and absolute tolerance is used.
"""
return self.to_cartesian(self.mean).almost_congruent(
other.to_cartesian(self.mean), rel_tolerance=rel_tolerance, abs_tolerance=abs_tolerance
)
def __repr__(self) -> str:
locations = ", ".join(str(loc) for loc in self.locations)
return f"PolarPolygon(locations=[{locations}]{self._repr_extras})"
class CartesianPolygon(Geospatial, Polygon):
"""A cartesian polygon based on local coordinates with an optional global reference.
Note:
For the sake of simplicity and performance, this class does not store the given
:class:`~pyrate.plan.geometry.location.CartesianLocation` instances directly,
but only their coordinates.
Thus, attributes like ``origin``, ``name``, etc. of the returned locations are derived
from the polygon instance rather than from the individual locations.
Args:
locations: The list of locations that this shape consists of; see :attr:`~.locations`
location_type: The type of this polygon
name: The name of this polygon
identifier: The polygon's optional unique identifier, in :math:`[0, 2**63)`, i.e. 64 signed bits
origin: A reference that can be used to project this cartesian representation (back)
into a polar one
"""
def __init__( # pylint: disable=too-many-arguments
self,
locations: List[CartesianLocation],
location_type: LocationType = LocationType.UNKNOWN,
name: Optional[str] = None,
identifier: Optional[int] = None,
origin: Optional[PolarLocation] = None,
) -> None:
self.origin = origin
if isinstance(locations, list):
Polygon.__init__(self, [location.coords[0] for location in locations])
else:
# this is required for an efficient implementation of CartesianPolygon.from_numpy
# we do not add this possibility to the type signature to make people use from_numpy().
Polygon.__init__(self, locations)
Geospatial.__init__(self, location_type=location_type, name=name, identifier=identifier)
@property
def locations(self) -> List[CartesianLocation]:
"""Get the locations of this polygon. See the class description for caveats."""
return [
CartesianLocation(
x,
y,
origin=self.origin,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
)
for (x, y) in self.exterior.coords
]
def to_polar(self, origin: Optional[PolarLocation] = None) -> PolarPolygon:
"""Computes the polar representation of this shape.
Args:
origin: The global reference to be used for back-projection, must be set if and only if
:attr:`~pyrate.plan.geometry.CartesianPolygon.origin` is ``None``
Returns:
The global, polar representation of this geometry
"""
if origin is None:
if self.origin is None:
raise ValueError("need to give an explicit origin when the instance does not have one")
origin = self.origin
elif self.origin is not None:
raise ValueError("provided an explicit origin while the instance already has one")
# convert back to polar coordinates
coordinates = self.to_numpy()
coordinates[:, 0], coordinates[:, 1] = origin.projection(
coordinates[:, 0], coordinates[:, 1], inverse=True
)
return PolarPolygon.from_numpy(
coordinates, location_type=self.location_type, name=self.name, identifier=self.identifier
)
def to_numpy(self) -> ndarray:
"""Converts the coordinates defining this polygon into a :class:`numpy.ndarray`.
Returns:
An array with shape ``(number of locations, 2)``, where each location is represented by a pair of
``(east, north)``, each in meters.
See Also:
:meth:`~from_numpy`
"""
return array(self.exterior.coords, dtype="float64", order="C")
@classmethod
def from_numpy(cls, data: ndarray, *args, **kwargs) -> "CartesianPolygon":
"""Create a cartesian polygon from a numpy representation.
Args:
data: An array with shape ``(number of locations, 2)``, where each location is represented by a
pair of ``(east, north)``, each in meters.
*args: Positional arguments to be passed to :class:`~CartesianPolygon`
**kwargs: Keyword arguments to be passed to :class:`~CartesianPolygon`
Returns:
The cartesian polygon created from the given coordinates and other parameters
Raises:
AssertionError: If the shape of ``data`` is invalid or contains non-finite values
See Also:
:meth:`~to_numpy`
"""
assert len(data.shape) == 2
assert data.shape[1] == 2
assert isfinite(data).all(), "Invalid values in CartesianPolygon.from_numpy()"
return cls(data, *args, **kwargs) # type: ignore
@classmethod
def from_shapely(cls, polygon: Polygon, *args, **kwargs) -> "CartesianPolygon":
"""Create a cartesian polygon from a shapely polygon.
Args:
polygon: A shapely polygon
*args: Positional arguments to be passed to :class:`~CartesianPolygon`
**kwargs: Keyword arguments to be passed to :class:`~CartesianPolygon`
Returns:
The cartesian polygon created from the given geometry and other parameters
"""
return cls.from_numpy(array(polygon.exterior.xy).T, *args, **kwargs)
def translate(self, direction: float, distance: float) -> Tuple["CartesianPolygon", ndarray]:
"""Translates this polygon.
Args:
direction: The direction angle in degrees (``0`` is north, clockwise)
distance: The distance to translate in meters
Returns:
The translated polygon and the translation vector ``(x_offset, y_offset)`` in meters
that can be used to reconstruct the original polygon
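Example (a minimal sketch; ``direction=0`` is due north, so only the northing changes):
>>> from numpy import array
>>> square = CartesianPolygon.from_numpy(array([[0.0, 0.0], [10.0, 0.0], [10.0, 10.0], [0.0, 10.0]]))
>>> moved, offset = square.translate(direction=0.0, distance=5.0)
>>> moved.bounds
(0.0, 5.0, 10.0, 15.0)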
"""
x_offset = sin(radians(direction)) * distance
y_offset = cos(radians(direction)) * distance
return (
CartesianPolygon.from_shapely(
translate(Polygon(self.to_numpy()), xoff=x_offset, yoff=y_offset),
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
origin=self.origin,
),
array([-x_offset, -y_offset]),
)
@property
def __geo_interface__(self) -> Dict[str, Any]:
return {
"type": "Polygon",
"coordinates": [
# the inner array is only the exterior ring,
# and we don't have an interior one
list(self.exterior.coords),
],
}
def __copy__(self) -> "CartesianPolygon":
return CartesianPolygon(
locations=self.locations,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
origin=self.origin,
)
def __deepcopy__(self, memodict: Dict) -> "CartesianPolygon":
return CartesianPolygon(
locations=deepcopy(self.locations, memodict),
location_type=deepcopy(self.location_type, memodict),
name=deepcopy(self.name, memodict),
identifier=deepcopy(self.identifier, memodict),
origin=deepcopy(self.origin, memodict),
)
def __eq__(self, other: Any) -> bool:
return self.equals_exact(other, tolerance=0.0)
# Inherits the docstring
def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
return (
isinstance(other, CartesianPolygon)
and Polygon.equals(self, other)
and Geospatial.__eq__(self, other)
and self.origin == other.origin
)
# Inherits the docstring
def equals_exact( # pylint: disable=missing-function-docstring
self, other: Any, tolerance: float
) -> bool:
return (
isinstance(other, CartesianPolygon)
and Polygon.equals_exact(self, other, tolerance)
and Geospatial.__eq__(self, other)
and self.origin == other.origin
)
def equals_almost_congruent(
self, other: Any, rel_tolerance: float = 1e-6, abs_tolerance: float = 1e-6
) -> bool:
"""Returns whether two objects are approximately congruent and their attributes equal exactly.
See :meth:`~almost_congruent` for details on the specific definition of congruence and the tolerances.
Args:
other: The object to compare with
rel_tolerance: The relative tolerance (relative to the larger area)
abs_tolerance: The absolute area of tolerance in square meters
Returns:
Whether this and the ``other`` polygon are approximately congruent and all attributes are equal.
Returns ``False`` if ``other`` is not a :class:`~CartesianPolygon`.
"""
return (
isinstance(other, CartesianPolygon)
and self.almost_congruent(other, rel_tolerance=rel_tolerance, abs_tolerance=abs_tolerance)
and Geospatial.__eq__(self, other)
and self.origin == other.origin
)
def almost_congruent(
self, other: "CartesianPolygon", rel_tolerance: float = 1e-6, abs_tolerance: float = 1e-6
) -> bool:
"""Returns whether two polygons are approximately congruent while allowing for small differences.
This function is not directly part of shapely and is somewhat costly to compute. It has to:
- Calculate the area of the symmetric difference between this and the ``other`` polygon.
- Calculate the area of both individual polygons.
The arguments follow the style of :func:`math.isclose`.
Args:
other: The polygon to compare with
rel_tolerance: The relative tolerance (relative to the larger area)
abs_tolerance: The absolute area of tolerance in square meters
Returns:
Whether this and the ``other`` polygon are approximately congruent. The larger one of the relative
and absolute tolerance is used.
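Example (an illustrative sketch: two 10 m × 10 m squares that differ by a 0.01 m² sliver):
>>> from numpy import array
>>> square = CartesianPolygon.from_numpy(array([[0.0, 0.0], [10.0, 0.0], [10.0, 10.0], [0.0, 10.0]]))
>>> other = CartesianPolygon.from_numpy(array([[0.0, 0.0], [10.001, 0.0], [10.001, 10.0], [0.0, 10.0]]))
>>> square.almost_congruent(other, abs_tolerance=0.1)
True
>>> square.almost_congruent(other, rel_tolerance=1e-9, abs_tolerance=1e-9)
False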
"""
rel_tolerance_as_abs: float = max(self.area, other.area) * rel_tolerance
tolerance: float = max(rel_tolerance_as_abs, abs_tolerance)
difference: float = self.symmetric_difference(other).area
return difference <= tolerance
def __repr__(self) -> str:
origin = f", origin={self.origin}" if self.origin is not None else ""
locations = ", ".join(f"({x}, {y})" for x, y in self.exterior.coords)
return f"CartesianPolygon(locations=[{locations}]{origin}{self._repr_extras})"
def __str__(self) -> str:
# this is required to override shapely.geometry.Polygon.__str__()
return self.__repr__()

View File

@ -0,0 +1,466 @@
"""This module implements abstractions for geospatial routes (line strings) in WGS84 and local coordinate
frames.
Two routes are ``==`` if and only if they are equal according to ``equals_exact()``.
"""
# Python standard library
from copy import deepcopy
from math import cos
from math import radians
from math import sin
# Typing
from typing import Any
from typing import Dict
from typing import List
from typing import Optional
from typing import Tuple
# Data modelling
from numpy import array
from numpy import isfinite
from numpy import ndarray
from shapely.affinity import translate
from shapely.geometry import LineString
# Geospatial basis
from .geospatial import Geospatial
from .geospatial import LocationType
# Geospatial helpers
from .helpers import mean_coordinate
from .helpers import translate_numpy
# Location representation
from .location import CartesianLocation
from .location import PolarLocation
class PolarRoute(Geospatial):
"""A route (line string) based on WGS84 coordinates.
Note:
This class does not yet support simplification as it was not required so far.
Args:
locations: The two or more points that make up this route; see :attr:`~.locations`
location_type: The type of this route
name: An optional name of this route
identifier: The route's optional unique identifier, in :math:`[0, 2**63)`, i.e. 64 signed bits
"""
def __init__(
self,
locations: List[PolarLocation],
location_type: LocationType = LocationType.UNKNOWN,
name: Optional[str] = None,
identifier: Optional[int] = None,
) -> None:
# Type hints
self._locations: List[PolarLocation]
# Attributes setup
self.locations = locations
self._mean_location: Optional[PolarLocation] = None
super().__init__(location_type=location_type, name=name, identifier=identifier)
# See Shapely issue
if self.length(approximate=True) < 1e-9:
raise ValueError(f"(Nearly) zero-length line strings are not allowed by Shapely; got {locations}")
@property
def locations(self) -> List[PolarLocation]:
"""The points that make up this route.
Getter:
At least two points are returned.
Setter:
Raises an :class:`AssertionError` if fewer than two points are given.
"""
return self._locations
@locations.setter
def locations(self, locations: List[PolarLocation]) -> None:
assert len(locations) >= 2, "a route must contain at least two points"
self._locations = locations
self._mean_location = None
def distance_to_vertices(self, location: PolarLocation, approximate: bool = False) -> float:
"""Computes the distance of the given ``location`` to the nearest vertex of this route.
Args:
location: The location to compute the distance from
approximate: Whether to use a less precise, faster method or not
Returns:
The distance to the nearest vertex of this route, in meters
"""
return min([location.distance(loc, approximate) for loc in self.locations])
def length(self, approximate: bool = False) -> float:
"""Compute the length of this route from start to end.
Args:
approximate: Whether to use a less precise, faster method or not
Returns:
The length of this route, in meters
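Example (a rough sanity check: one degree of longitude along the equator is roughly 111.3 km):
>>> route = PolarRoute([PolarLocation(latitude=0.0, longitude=0.0), PolarLocation(latitude=0.0, longitude=1.0)])
>>> 111_000 < route.length() < 112_000
True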
"""
return sum([a.distance(b, approximate) for (a, b) in zip(self.locations[:-1], self.locations[1:])])
def to_cartesian(self, origin: PolarLocation) -> "CartesianRoute":
"""Projects this route to a cartesian representation according to the given global reference.
Args:
origin: The reference by which to project onto the local tangent plane
Returns:
The cartesian representation of this route with the given reference point being set
"""
# convert to cartesian
coordinates = self.to_numpy()
coordinates[:, 0], coordinates[:, 1] = origin.projection(coordinates[:, 0], coordinates[:, 1])
return CartesianRoute.from_numpy(
coordinates,
origin=origin,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
)
def to_numpy(self) -> ndarray:
"""Converts the coordinates defining this route into a :class:`numpy.ndarray`.
Returns:
An array with shape ``(number of locations, 2)``, where each location is represented by a pair of
``(longitude, latitude)``, each in degrees.
See Also:
:meth:`~from_numpy`
"""
return array(
[(location.longitude, location.latitude) for location in self.locations],
dtype="float64",
order="C",
)
@classmethod
def from_numpy(cls, data: ndarray, *args, **kwargs) -> "PolarRoute":
"""Create a polar route from a numpy representation.
Args:
data: An array with shape ``(number of locations, 2)``, where each location is represented by a
pair of ``(longitude, latitude)``, each in degrees.
*args: Positional arguments to be passed to :class:`~PolarRoute`
**kwargs: Keyword arguments to be passed to :class:`~PolarRoute`
Returns:
The polar route created from the given coordinates and other parameters
Raises:
AssertionError: If the shape of ``data`` is invalid or contains non-finite values
See Also:
:meth:`~to_numpy`
"""
assert len(data.shape) == 2
assert data.shape[1] == 2
assert isfinite(data).all(), "Invalid values in PolarRoute.from_numpy()"
return cls([PolarLocation(latitude=lat, longitude=lon) for (lon, lat) in data], *args, **kwargs)
def translate(self, direction: float, distance: float) -> Tuple["PolarRoute", ndarray]:
"""Translates this location and returns the new route and back-azimuth.
See :func:`pyrate.plan.geometry.helpers.translate_floats` for details.
"""
new_coordinates, back_azimuth_array = translate_numpy(self.to_numpy(), direction, distance)
new_route = PolarRoute.from_numpy(
new_coordinates,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
)
return new_route, back_azimuth_array
@property
def mean(self) -> PolarLocation:
"""Computes a reasonable mean location of the route, if possible. The result is cached.
Raises:
ValueError: If no meaningful mean (of the longitudes) can be computed.
See :func:`pyrate.plan.geometry.helpers.mean_angle`.
"""
if self._mean_location is None:
coordinates = self.to_numpy()
latitude, longitude = mean_coordinate(latitudes=coordinates[:, 1], longitudes=coordinates[:, 0])
name = f"{self.name} - mean" if self.name else "mean"
self._mean_location = PolarLocation(latitude, longitude, name=name)
return self._mean_location
@property
def __geo_interface__(self) -> Dict[str, Any]:
return {
"type": "LineString",
"coordinates": [(location.longitude, location.latitude) for location in self.locations],
}
def __eq__(self, other: Any) -> bool:
return self.equals_exact(other, tolerance=0.0)
def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
return (
isinstance(other, PolarRoute)
and self.to_cartesian(self.mean).equals(other.to_cartesian(self.mean))
and Geospatial.__eq__(self, other)
)
equals.__doc__ = PolarLocation.equals.__doc__
def equals_exact(self, other: Any, tolerance: float) -> bool:
# pylint: disable=missing-function-docstring
return (
isinstance(other, PolarRoute)
and self.to_cartesian(self.mean).equals_exact(other.to_cartesian(self.mean), tolerance)
and Geospatial.__eq__(self, other)
)
equals_exact.__doc__ = PolarLocation.equals_exact.__doc__
def __repr__(self) -> str:
locations = ", ".join(str(loc) for loc in self.locations)
return f"PolarRoute(locations=[{locations}]{self._repr_extras})"
class CartesianRoute(Geospatial, LineString):
"""A cartesian route (line string) in local coordinates, optionally with a global reference point.
Note:
For the sake of simplicity and performance, this class does not store the given
:class:`~pyrate.plan.geometry.location.CartesianLocation` instances directly,
but only their coordinates.
Thus, attributes like ``origin``, ``name``, etc. of the returned locations are derived
from the route instance rather than from the individual locations.
Args:
locations: The list of two or more locations that this shape consists of; see :attr:`~locations`
location_type: The type of this route
name: The name of this route
identifier: The route's optional unique identifier, in :math:`[0, 2**63)`, i.e. 64 signed bits
origin: A reference that can be used to project this cartesian representation (back)
into a polar one
"""
def __init__( # pylint: disable=too-many-arguments
self,
locations: List[CartesianLocation],
location_type: LocationType = LocationType.UNKNOWN,
name: Optional[str] = None,
identifier: Optional[int] = None,
origin: Optional[PolarLocation] = None,
) -> None:
# Store attributes
self.origin = origin
if isinstance(locations, list):
LineString.__init__(self, [location.coords[0] for location in locations])
else:
# this is required for an efficient implementation of CartesianRoute.from_numpy
# we do not add this possibility to the type signature to make people use from_numpy().
LineString.__init__(self, locations)
Geospatial.__init__(self, location_type=location_type, name=name, identifier=identifier)
# See Shapely issue
if self.length < 1e-9:
raise ValueError(f"(Nearly) zero-length line strings are not allowed by Shapely; got {locations}")
@property
def locations(self) -> List[CartesianLocation]:
"""Get the locations of this route. See the class description for caveats."""
return [
CartesianLocation(
x,
y,
origin=self.origin,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
)
for (x, y) in self.coords
]
def to_polar(self, origin: Optional[PolarLocation] = None) -> PolarRoute:
"""Computes the polar representation of this route.
Args:
origin: The global reference to be used for back-projection, must be set if and only if
:attr:`~origin` is ``None``
Returns:
The global, polar representation of this route
"""
if origin is None:
if self.origin is None:
raise ValueError("need to give an explicit origin when the instance does not have one")
origin = self.origin
elif self.origin is not None:
raise ValueError("provided an explicit origin while the instance already has one")
# convert back to polar coordinates
coordinates = self.to_numpy()
coordinates[:, 0], coordinates[:, 1] = origin.projection(
coordinates[:, 0], coordinates[:, 1], inverse=True
)
return PolarRoute.from_numpy(
coordinates, location_type=self.location_type, name=self.name, identifier=self.identifier
)
def to_numpy(self) -> ndarray:
"""Converts the coordinates defining this route into a :class:`numpy.ndarray`.
Returns:
An array with shape ``(number of locations, 2)``, where each location is represented by a pair of
``(east, north)``, each in meters.
See Also:
:meth:`~from_numpy`
"""
return array(self.coords, dtype="float64", order="C")
@classmethod
def from_numpy(cls, data: ndarray, *args, **kwargs) -> "CartesianRoute":
"""Create a cartesian route from a numpy representation.
Args:
data: An array with shape ``(number of locations, 2)``, where each location is represented by a
pair of ``(east, north)``, each in meters.
*args: positional arguments to be passed to :class:`~CartesianRoute`
**kwargs: keyword arguments to be passed to :class:`~CartesianRoute`
Returns:
The cartesian route created from the given coordinates and other parameters
Raises:
AssertionError: If the shape of ``data`` is invalid or contains non-finite values
See Also:
:meth:`~to_numpy`
"""
assert len(data.shape) == 2
assert data.shape[1] == 2
assert isfinite(data).all(), "Invalid values in CartesianRoute.from_numpy()"
return cls(data, *args, **kwargs) # type: ignore
@classmethod
def from_shapely(cls, line_string: LineString, *args, **kwargs) -> "CartesianRoute":
"""Create a cartesian route from a shapely line string.
Args:
line_string: A shapely line_string
*args: Positional arguments to be passed to :class:`~CartesianRoute`
**kwargs: Keyword arguments to be passed to :class:`~CartesianRoute`
Returns:
The cartesian route created from the given geometry and other parameters
"""
return cls.from_numpy(array(line_string.xy).T, *args, **kwargs)
def translate(self, direction: float, distance: float) -> Tuple["CartesianRoute", ndarray]:
"""Translates this route.
Args:
direction: The direction angle in degrees (``0`` is north, clockwise)
distance: The distance to translate in meters
Returns:
The translated route and the translation vector ``(x_offset, y_offset)`` in meters
that can be used to reconstruct the original route
"""
x_offset = sin(radians(direction)) * distance
y_offset = cos(radians(direction)) * distance
return (
CartesianRoute.from_shapely(
translate(LineString(self.to_numpy()), xoff=x_offset, yoff=y_offset),
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
origin=self.origin,
),
array([-x_offset, -y_offset]),
)
@property
def __geo_interface__(self) -> Dict[str, Any]:
return {"type": "LineString", "coordinates": self.coords}
def __copy__(self) -> "CartesianRoute":
return CartesianRoute(
locations=self.locations,
location_type=self.location_type,
name=self.name,
identifier=self.identifier,
origin=self.origin,
)
def __deepcopy__(self, memodict: Dict) -> "CartesianRoute":
return CartesianRoute(
locations=deepcopy(self.locations, memodict),
location_type=deepcopy(self.location_type, memodict),
name=deepcopy(self.name, memodict),
identifier=deepcopy(self.identifier, memodict),
origin=deepcopy(self.origin, memodict),
)
def __eq__(self, other: Any) -> bool:
return self.equals_exact(other, tolerance=0.0)
# Inherits the docstring
def equals(self, other: Any) -> bool: # pylint: disable=missing-function-docstring
return (
isinstance(other, CartesianRoute)
and LineString.equals(self, other)
and Geospatial.__eq__(self, other)
and self.origin == other.origin
)
# Inherits the docstring
def equals_exact( # pylint: disable=missing-function-docstring
self, other: Any, tolerance: float
) -> bool:
return (
isinstance(other, CartesianRoute)
and LineString.equals_exact(self, other, tolerance)
and Geospatial.__eq__(self, other)
and self.origin == other.origin
)
def __repr__(self) -> str:
origin = f", origin={self.origin}" if self.origin is not None else ""
locations = ", ".join(f"({x}, {y})" for x, y in self.coords)
return f"CartesianRoute(locations=[{locations}]{origin}{self._repr_extras})"
def __str__(self) -> str:
# this is required to override shapely.geometry.LineString.__str__()
return self.__repr__()

View File

@ -0,0 +1,26 @@
"""
The ``graph`` module provides navigation tools where the world is modeled as a graph.
This includes generating a graph, assigning properties to nodes of the graph and finding good paths on it.
Two graph models are provided:
:class:`~pyrate.plan.graph.graph.NavigationGraph` is a generic implementation and in
:class:`~pyrate.plan.graph.geo_graph.GeoNavigationGraph`, nodes are referenced to geographical locations.
"""
from .graph import NavigationGraph
from .geo_graph import GeoNavigationGraph
from .generate import angular_distance_for
from .generate import create_earth_graph
from .generate import great_circle_distance_distance_for
from .generate import min_required_frequency
__all__ = [
"GeoNavigationGraph",
"NavigationGraph",
"angular_distance_for",
"create_earth_graph",
"great_circle_distance_distance_for",
"min_required_frequency",
]

View File

@ -0,0 +1,350 @@
"""
Creates a grid on a globe with vertices and edges. Assumes that the earth is a sphere.
Examples:
The usual approach is to first determine the maximum allowed distance between two nodes on the graph.
Note that small distances (e.g. of less than 100 km) might take a while to compute and can create very
large graphs. See the script
:ref:`earth_graph_frequency_statistics.py <script-earth_graph_frequency_statistics-example>`
for details on the performance and size of the output.
>>> maximum_node_distance = 100_000 # in meters
Then, the minimum required ``frequency`` can be computed from that, which is an integer value determining
the granularity of the graph.
Higher frequencies result in finer graphs.
>>> frequency = min_required_frequency(maximum_node_distance, in_meters=True)
We could have also passed the angular distance in radians (by setting ``in_meters=False``).
Alternatively, we can now compute the actual angular distance and great-circle distance in meters from the
frequency that we now have.
It is in general less than (or equal to) the ``maximum_node_distance``, as the ``frequency`` only allows for
integer steps in the granularity.
>>> angular_distance_for(frequency) # doctest: +ELLIPSIS
0.01559...
>>> actual_node_distance = great_circle_distance_distance_for(frequency) # again, in meters
>>> actual_node_distance # doctest: +ELLIPSIS
99347.242...
>>> actual_node_distance <= maximum_node_distance
True
Now, we can finally generate the :class:`~pyrate.plan.graph.geo_graph.GeoNavigationGraph`.
If we wanted to have some progress messages printed, we would pass ``print_status=True``.
>>> graph = create_earth_graph(frequency)
>>> len(graph) # the number of nodes
50412
>>> graph.num_edges
151230
Furthermore, the ``graph`` has some specific attributes set, which result from the icosahedron subdivision
approach of the algorithm.
These allow for certain optimizations and more convenience when applying algorithms as they do not have
to be passed explicitly to other functions.
>>> graph.node_radius * 2 == actual_node_distance
True
>>> graph.max_neighbors == 6
True
Visualization
-------------
The following visualization shows how the vertices of an earth graph are spread when plotted using the
`mercator projection <https://en.wikipedia.org/wiki/Mercator_projection>`_.
The vertices and their area near the equator are very evenly spaced.
However, their positions and shapes get very distorted at high latitudes (i.e. near the north and south
poles) `due to the projection <https://en.wikipedia.org/wiki/Mercator_projection#Distortion_of_sizes>`_.
.. image:: vertices_distribution_mercator.png
:alt: visualization of the vertices of an earth graph, distorted by the mercator projection
The following plot illustrates the area of influence/responsibility around each vertex.
Again, notice how the quite evenly spaced vertices are very distorted in this projection at high latitudes.
The visualization was obtained by computing the fraction of navigable area within the nodes' vicinity.
Navigability was simply determined by the land being below sea level as a rough approximation.
.. image:: vertices_area_of_influence.png
:alt: visualization of the area of influence/responsibility of all vertices of an earth graph,
distorted by the mercator projection;
obtained by computing the fraction of navigable area within the nodes' vicinity
Note:
Some methods require the `Antiprism <https://www.antiprism.com>`_ software package (version 0.26+).
There is a PPA for it available `here <https://launchpad.net/~antiprism/+archive/ubuntu/ppa>`_.
Use ``add-apt-repository ppa:antiprism/ppa && apt install antiprism`` on Ubuntu.
"""
# Standard library
from math import ceil
from math import degrees
import subprocess
from warnings import warn
# Typing
from typing import Tuple
# Scientific
from numpy import compress
from numpy import empty
from numpy import empty_like
from numpy import float64
from numpy import genfromtxt
from numpy import maximum
from numpy import minimum
from numpy import ndarray
from numpy import uint32
from numpy import unique
# Geometry helpers
from pyrate.plan.geometry.helpers import cartesian_to_spherical
from pyrate.plan.geometry.helpers import meters2rad
from pyrate.plan.geometry.helpers import rad2meters
# Graph implementation
from pyrate.plan.graph import GeoNavigationGraph
def create_earth_graph(frequency: int, print_status: bool = False) -> GeoNavigationGraph:
"""Returns roughly equally spaced points on the earth, creating an *icoshpere* 🌐.
This basically works by constructing a geodesic polyhedron based on an icosahedron as a starting point,
dividing it (with *Class I*) as much as required by the desired distance and then projecting it onto a
sphere. The following image visualizes the process for the case ``frequency = 6``:
.. image:: https://upload.wikimedia.org/wikipedia/commons/f/ff/Geodesic_icosahedral_polyhedron_example.png
:alt:
Illustration of icosahedron subdivision: (1) create a regular icosahedron, (2) perform 6-frequency
subdivision of all faces and (3) project all vertices on a sphere;
Licensed under *CC BY-SA 4.0*: Created by *Tomruen* and provided on
`Wikipedia <https://commons.wikimedia.org/wiki/File:Geodesic_icosahedral_polyhedron_example.png>`_.
The implementation is mostly single-threaded. See the script
:ref:`earth_graph_frequency_statistics.py <script-earth_graph_frequency_statistics-example>`
for details on the performance and size of the output.
*Class I* determines the way that the icosahedron is sliced (see
`Wikipedia: Geodesic notation <https://en.wikipedia.org/wiki/Geodesic_polyhedron#Geodesic_notation>`_).
It was chosen since it is apparently used quite often to create regular grids
(according to `Wikipedia <https://en.wikipedia.org/wiki/Geodesic_grid#Construction>`_)
and since it appears to be very regular (see
`this <https://www.antiprism.com/examples/200_programs/650_geodesic/imagelist.html>`_ page in the
Antiprism documentation for a visualization).
References:
- https://en.wikipedia.org/wiki/Geodesic_grid#Construction
- https://en.wikipedia.org/wiki/Geodesic_polyhedron
- https://people.sc.fsu.edu/~jburkardt/presentations/sphere_grid_2014_fsu.pdf
Further ideas:
- One could also use `Goldberg polyhedra <https://en.wikipedia.org/wiki/Goldberg_polyhedron>`_,
as those are the `duals <https://en.wikipedia.org/wiki/Dual_polygon>`_ to the geodesic spheres used
in this implementation and should also work.
- Alternatively, one could also use the already implemented
`Quaternary Triangular Meshes <https://github.com/paulojraposo/QTM>`_
Args:
frequency: The number of subdivisions per icosahedron edge.
Keep in mind that high frequencies could be computationally costly.
print_status: If set to ``True``, print human-readable status messages about the progress.
Returns:
A graph covering the entire earth
"""
angular_distance = angular_distance_for(frequency)
distance_meters = great_circle_distance_distance_for(frequency)
if print_status:
print(f"creating an earth grid with a point distance of at most {distance_meters / 1000:.3f} km")
print(f"dividing each edge of the base icosahedron into {frequency} parts")
print(f"the angular distance of vertices will be ~{degrees(angular_distance):.6f}°")
if 10_000 < distance_meters < 25_000: # pragma: no cover
warn("this might take a while", ResourceWarning)
elif distance_meters <= 10_000: # pragma: no cover
warn("this might take *very* long", ResourceWarning)
# this defines how to slice the edges/triangles
polyhedron_class = "1"
if print_status:
print('calling Antiprism\'s "geodesic"')
# check the geodesic/Antiprism version
_assert_antiprism_is_installed()
command = f"geodesic -M s -c {polyhedron_class} -f {frequency} ico"
# check_output raises an Error on a non-zero exit code
# use ASCII encoding since the resulting file will not contain any Unicode text
output = subprocess.check_output(command.split(), encoding="ascii")
if print_status:
print("parsing the resulting OFF file")
latitudes, longitudes, edges = _parse_off_file(output)
if print_status:
print("finished earth grid generation")
return GeoNavigationGraph.from_coordinates_radians(
latitudes=latitudes,
longitudes=longitudes,
edges=edges,
max_neighbors=6,
node_radius=distance_meters / 2,
)
#: The approximate angle between two neighboring vertices of an icosahedron, in radians, about 63.4°
_ALPHA = 1.1071487177940905030170654601785
# calculation:
# (note: latitude is in [-pi/2, +pi/2], longitude is in [-pi, +pi])
# take two neighboring vertices in spherical coordinates,
# see https://en.wikipedia.org/wiki/Regular_icosahedron#Spherical_coordinates
# (in the link, other coordinates are used!)
# we choose A=(lat_a, lon_a)=(pi/2, 0) and
# B=(lat_b, lon_b)=(arctan(1/2), 0) for simplicity
# then the angle between them is given by
# alpha = lat_a - lat_b = pi/2 - arctan(1/2)
# result: https://www.wolframalpha.com/input/?i=pi%2F2+-+arctan%281%2F2%29
def min_required_frequency(desired_distance: float, in_meters: bool) -> int:
"""Compute the minimum frequency to reach the ``desired_distance`` by icosahedron subdivision.
Here, the frequency is the number of cuts to make on an edge of a polyhedron.
Higher frequencies result in finer graphs.
Args:
desired_distance: The maximum distance that two neighboring nodes may be apart.
Must be a strictly positive number.
If ``in_meters`` is ``True``, this is interpreted as the great-circle distance in meters,
otherwise as the angular distance in radians.
in_meters: Interpret ``desired_distance`` as meters instead of as radians
Returns:
The minimum frequency to reach the ``desired_distance``, at least ``1``
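Example (a minimal sketch using angular distances; ``_ALPHA`` is the module-internal icosahedron edge angle):
>>> min_required_frequency(_ALPHA, in_meters=False)
1
>>> min_required_frequency(_ALPHA / 2.5, in_meters=False)
3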
"""
assert desired_distance > 0, "the desired_distance must be positive"
if in_meters:
desired_angular_distance = meters2rad(desired_distance)
else:
desired_angular_distance = desired_distance
# calculate the number of slices per edge (=the frequency) by simple division:
frequency = _ALPHA / desired_angular_distance
# if the distance is too big, we simply do not divide the edges at all
frequency = max(frequency, 1.0)
# then we need to round: we round up since we would rather have
# more edges than too few of them
return int(ceil(frequency))
def great_circle_distance_distance_for(frequency: int) -> float:
"""The great-circle distance that subdivision with the frequency will result in.
Args:
frequency: The frequency of the subdivision, at least ``1``
Returns:
The great-circle distance that the frequency will result in, in meters
"""
return rad2meters(angular_distance_for(frequency))
def angular_distance_for(frequency: int) -> float:
"""The angular distance that subdivision with the frequency will result in.
Args:
frequency: The frequency of the subdivision, at least ``1``
Returns:
The angular distance that the frequency will result in, in radians
"""
assert frequency >= 1, "the frequency must be at least one"
return _ALPHA / frequency
#: The minimum required Antiprism version
_ANTIPRISM_REQUIRED_VERSION = (0, 26)
def _assert_antiprism_is_installed() -> None:
"""Raises an exception if *Antiprism* (with the geodesic tool) in not installed in the required version.
Raises:
:class:`AssertionError`: If the *Antiprism* version is insufficient
"""
try:
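# the version number is taken from the third whitespace-separated token of the tool's output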
version = subprocess.check_output(["geodesic", "--version"], encoding="utf8").split(" ", 3)[2]
except FileNotFoundError as error: # pragma: no cover
raise AssertionError(
'Could not call tool "geodesic" from Antiprism, is it installed? (See installation instructions.)'
) from error
assert tuple(int(v) for v in version.split(".")) >= _ANTIPRISM_REQUIRED_VERSION, (
f'tool "geodesic" from Antiprism version >= {_ANTIPRISM_REQUIRED_VERSION} is required, '
f"but you have version {version}!"
)
def _parse_off_file(source_text: str) -> Tuple[ndarray, ndarray, ndarray]:
"""Parses an Antiprism OFF file and return the result in spherical coordinates.
Warnings:
Assumes that the point :math:`(0, 0, 0)` is not present and that all faces
are triangles or "polygons" with fewer vertices.
Warnings:
This is only meant to parse OFF files produced by *Antiprism*.
These are not standard OFF files as described
`here <https://www.antiprism.com/programs/off_format.html>`_!
Args:
source_text: The raw file content to be parsed
Returns:
All vertices' points (as returned by :func:`~pyrate.plan.geometry.helpers.cartesian_to_spherical`)
as well as a list of all edges, each consisting of the zero-based indices
of its two endpoints into the vertex arrays.
"""
# split
source = source_text.splitlines()
assert len(source) >= 2, "OFF file must have at least two lines"
# check header
assert source[0] == "OFF", 'file does not start with "OFF"'
# get size of file
# note: num_edges is usually not set to a correct value, so we ignore the last value
num_vertices, num_faces, _ = map(int, source[1].split())
# get the vertices
points = genfromtxt(source[2:], max_rows=num_vertices, dtype=float64)
latitudes, longitudes = cartesian_to_spherical(points)
# get faces
faces = genfromtxt(source[2 + num_vertices :], max_rows=num_faces, usecols=(0, 1, 2, 3), dtype=uint32)
triangles = compress(faces[:, 0] == 3, faces[:, 1:4], axis=0)
del faces # free this memory
count = len(triangles)
# now we want to transform each triangle into three edges
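# for example, the triangle (0, 1, 2) contributes the edges (0, 1), (1, 2) and (0, 2)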
edges = empty([count * 3, 2], dtype=uint32)
edges[0:count, :] = triangles[:, (0, 1)]
edges[count : 2 * count, :] = triangles[:, (1, 2)]
edges[2 * count : 3 * count, :] = triangles[:, (0, 2)]
# then we filter out duplicates or wrong values
# sort the IDs in each row in ascending order to find duplicates, since the graph is undirected
# one could also use `np.sort`
sorted_edges = empty_like(edges)
sorted_edges[:, 0] = minimum(edges[:, 0], edges[:, 1])
sorted_edges[:, 1] = maximum(edges[:, 0], edges[:, 1])
edges = unique(sorted_edges, axis=0)
return latitudes, longitudes, edges

View File

@ -0,0 +1,248 @@
"""This module provides geo-referenced navigation graphs."""
# Typing
from typing import Any
from typing import List
from typing import Optional
from typing import Sequence
from typing import Union
# Scientific
import numpy
from numpy import degrees
from numpy import ndarray
from numpy import radians
# Scientific
from pandas import DataFrame
from pandas import Series
# Progress bars
from tqdm import tqdm
# Own typing
from pyrate.common.raster_datasets import BaseTransformer
from pyrate.plan.graph.graph import NavigationGraph
class GeoNavigationGraph(NavigationGraph):
"""An undirected navigation graph specifically for geo-referenced graphs.
It is similar to the more generic :class:`~pyrate.plan.graph.graph.NavigationGraph` but ensures that the
property dataframe always contains columns `Latitude (radians)` and `Longitude (radians)`.
Not providing these when creating the graph will result in an :class:`AssertionError`.
This class also provides useful methods for adding new properties and plotting the graph.
Examples:
This creates a very simple graph with two connected nodes at *Darmstadt* and *Griesheim*.
>>> import numpy
>>> nodes = DataFrame(data={'Latitude (radians)': numpy.radians([49.872222, 49.863889]), \
'Longitude (radians)': numpy.radians([ 8.652778, 8.563889])})
>>> edges = numpy.array([[0, 1], ])
>>> graph = GeoNavigationGraph(nodes, edges)
>>> graph.neighbors
array([[1],
[0]], dtype=int32)
>>> graph.latitudes_degrees
0 49.872222
1 49.863889
Name: Latitude (radians), dtype: float64
Alternatively, such a graph can be created using `GeoNavigationGraph.from_coordinates_*`
>>> same_graph = GeoNavigationGraph.from_coordinates_degrees( \
latitudes=[49.872222, 49.863889], longitudes=[ 8.652778, 8.563889], edges=edges)
>>> graph == same_graph
True
Args:
nodes: See :class:`~pyrate.plan.graph.graph.NavigationGraph`.
This must contain columns ``"Latitude (radians)"`` (with values in :math:`[-π/2, +π/2]`) and
``"Longitude (radians)"`` (with values in :math:`[-π, +π)`).
edges: See :class:`~pyrate.plan.graph.graph.NavigationGraph`.
neighbours: See :class:`~pyrate.plan.graph.graph.NavigationGraph`.
max_neighbors: See :class:`~pyrate.plan.graph.graph.NavigationGraph`.
node_radius: The radius around each node of the area on the globe that it should represent, in meters,
non-negative. It can be interpreted as the radius of influence or of responsibility.
It may be an array of shape ``(num_nodes, )`` or a single scalar if the radius is uniform
across all nodes.
Setting this allows it to be omitted in some methods of this class, like in
:meth:`~append_property`.
"""
def __init__(self, *args, node_radius: Optional[Union[float, ndarray]] = None, **kwargs):
super().__init__(*args, **kwargs)
assert node_radius is None or numpy.all(node_radius >= 0)
self.node_radius = node_radius
assert "Latitude (radians)" in self.nodes, 'column "Latitude (radians)" missing'
assert "Longitude (radians)" in self.nodes, 'column "Longitude (radians)" missing'
@classmethod
def from_coordinates_radians(
cls,
latitudes: ndarray,
longitudes: ndarray,
node_properties: Optional[DataFrame] = None,
**kwargs: Any,
) -> "GeoNavigationGraph":
"""Creates a new geo-referenced navigation graph from the given coordinates and node properties.
The same as the constructor of :class:`GeoNavigationGraph`, except that the latitude, longitude and
properties of the nodes can be given separately. For clarity, everything should be passed as
keyword arguments.
Args:
latitudes: The latitudes of all nodes in radians in :math:`[-π/2, +π/2]`
longitudes: The longitudes of all nodes in radians in :math:`[-π, +π)`
node_properties: The properties of all nodes (will be modified if not set to ``None``)
**kwargs: Passed to the constructor of :class:`GeoNavigationGraph`
Returns:
A newly created graph
"""
if node_properties is None:
node_properties = DataFrame() # create an empty one
node_properties["Latitude (radians)"] = latitudes
node_properties["Longitude (radians)"] = longitudes
assert "nodes" not in kwargs, (
"do not pass nodes, instead explicitly set them via latitudes, "
"longitudes and node_properties or directly use the constructor instead"
)
return cls(node_properties, **kwargs)
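# A minimal usage sketch, assuming only the API shown in this module; the coordinates
# mirror the degree-based doctest in the class docstring:
#
#     graph = GeoNavigationGraph.from_coordinates_radians(
#         latitudes=numpy.radians([49.872222, 49.863889]),
#         longitudes=numpy.radians([8.652778, 8.563889]),
#         edges=numpy.array([[0, 1]]),
#     )
#     assert len(graph) == 2 and graph.num_edges == 1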
@classmethod
def from_coordinates_degrees(
cls, latitudes: ndarray, longitudes: ndarray, **kwargs: Any
) -> "GeoNavigationGraph":
"""The same as :func:`~from_coordinates_radians` except that the coordinates are in degrees.
For clarity, everything should be passes as keyword arguments.
Args:
latitudes: The latitudes of all nodes in degrees in :math:`[-90, +90]`
longitudes: The latitudes of all nodes in degrees in :math:`[-180, +180)`
kwargs**: passed to :func:`~from_coordinates_radians`
"""
return cls.from_coordinates_radians(radians(latitudes), radians(longitudes), **kwargs)
@staticmethod
def _serialized_attributes() -> List[str]:
"""The list of attributes that shall be (de)serialized (on top of the nodes and edges)."""
return NavigationGraph._serialized_attributes() + ["node_radius"]
@property
def latitudes_radians(self) -> Series:
"""The latitudes of all nodes in radians in :math:`[-π/2, +π/2]`."""
return self.nodes["Latitude (radians)"]
@property
def longitudes_radians(self) -> Series:
"""The longitudes of all nodes in radians in :math:`[-π, +π)`."""
return self.nodes["Longitude (radians)"]
@property
def latitudes_degrees(self) -> Series:
"""The latitudes of all nodes in degrees in :math:`[-90, +90]`."""
return degrees(self.latitudes_radians)
@property
def longitudes_degrees(self) -> Series:
"""The longitudes of all nodes in degrees in :math:`[-180, +180)`."""
return degrees(self.longitudes_radians)
@property
def node_properties(self) -> DataFrame:
"""The properties of all nodes as a view (as opposed to a copy).
This is the same as :attr:`~nodes`, but without the latitude and longitude values.
"""
return self.nodes.drop(columns=["Latitude (radians)", "Longitude (radians)"])
def clear_node_properties(self) -> None:
"""Deletes all properties but retains the coordinate values."""
self.nodes = self.nodes[["Latitude (radians)", "Longitude (radians)"]]
def append_property(
self,
transformer: BaseTransformer,
node_radius: Optional[Union[float, ndarray]] = None,
show_progress: bool = False,
) -> None:
"""Append the properties given by the transformer.
The name and data type are taken from the given ``transformer``.
Args:
transformer: The dimension/property that shall be queried for each node
node_radius: The radius around each node of the area on the globe that it should represent,
in meters, non-negative.
It may be an array of shape ``(num_nodes, )`` or a single scalar if the radius is
uniform across all nodes.
It may be omitted if :attr:`~node_radius` is set.
show_progress: Whether to print a simple progress bar
See Also:
:meth:`~append_properties`
Raises:
ValueError: if a property with that name is already present
"""
node_radius = node_radius if node_radius is not None else self.node_radius
assert node_radius is not None, (
"parameter node_radius must be set either with the method or the object attribute but is "
"missing on both"
)
with transformer:
new = transformer.get_transformed_at_nodes(
self.latitudes_radians, self.longitudes_radians, node_radius, show_progress=show_progress
)
self.nodes = self.nodes.join(new)
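# A hedged usage sketch: `ElevationTransformer` is a hypothetical BaseTransformer
# subclass (e.g. one backed by a raster dataset), standing in for any concrete transformer:
#
#     graph = GeoNavigationGraph.from_coordinates_degrees(
#         latitudes=[49.872222, 49.863889],
#         longitudes=[8.652778, 8.563889],
#         edges=numpy.array([[0, 1]]),
#         node_radius=5_000.0,  # each node represents a disk of 5 km radius
#     )
#     graph.append_property(ElevationTransformer())  # node_radius falls back to the attribute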
def append_properties(
self,
transformers: Sequence[BaseTransformer],
node_radius: Optional[Union[float, ndarray]] = None,
show_progress: bool = False,
) -> None:
"""Append multiple properties at once. This has the benefit of printing a combined progress bar.
Args:
transformers: The dimensions/properties that shall be queried for each node
node_radius: The radius around each node of the area on the globe that it should represent,
in meters, non-negative.
It may be an array of shape ``(num_nodes, )`` or a single scalar if the radius is
uniform across all nodes.
This value is uniform across all ``transformers``.
It may be omitted if :attr:`~node_radius` is set.
show_progress: Whether to print a simple progress bar
See Also:
:meth:`~append_property`
Raises:
ValueError: if a property with any given name is already present
"""
node_radius = node_radius if node_radius is not None else self.node_radius
assert node_radius is not None, (
"parameter node_radius must be set either with the method or the object attribute but is "
"missing on both"
)
for transformer in tqdm(transformers, unit=" transformers", colour="blue", disable=not show_progress):
self.append_property(transformer, node_radius, show_progress)
def __eq__(self, other: Any) -> bool:
return (
isinstance(other, GeoNavigationGraph)
and NavigationGraph.__eq__(self, other)
# array_equal copes with node_radius being None, a scalar or an ndarray,
# where a plain `==` on arrays would not reduce to a single bool
and numpy.array_equal(self.node_radius, other.node_radius)
)

View File

@ -0,0 +1,245 @@
"""This module provides generic navigation graphs."""
# Typing
from typing import Any
from typing import List
from typing import Optional
from typing import Sized
from typing import Type
from typing import TypeVar
# File system
from os import makedirs
from os.path import dirname
# Mathematics
from numpy import array_equal
from numpy import asarray
from numpy import compress
from numpy import cumsum
from numpy import empty
from numpy import int32
from numpy import logical_and
from numpy import logical_not
from numpy import ndarray
# Scientific
import h5py
from pandas import DataFrame
from pandas import read_hdf
NavigationGraphSubclass = TypeVar("NavigationGraphSubclass", bound="NavigationGraph")
class NavigationGraph(Sized):
"""A generic undirected graph that can be used for navigation.
It is represented by nodes and their properties as rows in a pandas dataframe, and edges as an array of
pairs of indices of neighboring nodes. Additionally, the :attr:`~neighbors` array lists all neighbors of
every node for faster access during graph search.
Args:
nodes: the nodes as a dataframe where each row is a node
edges: the edges of shape ``(number_of_edges, 2)``, where each row contains the indices of two
neighboring nodes
neighbours: the neighbors of all nodes with shape ``(number_of_nodes, max_neighbors_per_node)``,
where each row contains the indices of all neighbors of the node, filled with ``-1`` at
the end
max_neighbors: the maximum number of neighbors of any node (optional); this can be set to allow for
some optimizations (e.g. in the neighbor search)
Examples:
This creates a very simple graph in which nodes ``0`` and ``1`` as well as ``1`` and ``2`` are
connected to form a small chain.
>>> nodes = DataFrame(data={'property_1': [1, 2, 3], 'property_2': [10, 20, 30]})
>>> edges = asarray([[0, 1], [1, 2]])
>>> graph = NavigationGraph(nodes, edges)
>>> graph.neighbors
array([[ 1, -1],
[ 0, 2],
[ 1, -1]], dtype=int32)
>>> len(graph)
3
>>> graph.num_edges
2
See Also:
:class:`~pyrate.plan.graph.geo_graph.GeoNavigationGraph`:
A more specific implementation that references nodes to geographic locations and contains
useful methods for adding properties from datasets and plotting the graph
"""
def __init__(
self,
nodes: DataFrame,
edges: ndarray,
neighbours: Optional[ndarray] = None,
max_neighbors: Optional[int] = None,
) -> None:
super().__init__()
self.nodes = nodes
assert (
len(edges.shape) == 2 and edges.shape[1] == 2
), "the edges must be a 2D-array of shape (number_of_edges, 2)"
self.edges = edges
assert neighbours is None or neighbours.shape[0] == len(nodes)
self._neighbors = neighbours
assert max_neighbors is None or max_neighbors >= 0, "max_neighbors must be non-negative"
self.max_neighbors = max_neighbors
@property
def neighbors(self) -> ndarray:
"""The list of neighbors of each node, identified by their node index.
An array of dimension ``(number_of_nodes, max_neighbors_per_node)``, with each row containing the
indices of the neighbors of the node at that position and the rest of the row filled with ``-1``.
This might take a short while to be computed for the first time but the result is cached and
also serialized if present at the point of saving it to disk.
See :ref:`script-benchmark_graph_neighbor_search` for performance measurements and a link to an issue
about speeding up this search for neighbors.
"""
if self._neighbors is not None:
return self._neighbors
if self.nodes.empty:  # this requires a special case
self._neighbors = empty((0, 0), dtype=int32)
else:
# each row/inner list contains the neighbors of the node at the index
# and the rest of the row is filled with -1s
neighbors: List[List[int]] = [[] for _ in range(len(self))]
for from_node, to_node in self.edges:
neighbors[from_node].append(to_node)
neighbors[to_node].append(from_node)
# calculate length of maximal list
longest = len(max(neighbors, key=len))
# make the lists equal in length by filling with -1
neighbors = [x + [-1] * (longest - len(x)) for x in neighbors]
self._neighbors = asarray(neighbors, dtype=int32)
return self._neighbors
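# A small demonstration of the -1 padding, assuming a star-shaped graph where hub
# node 0 has three neighbors and each leaf has exactly one:
#
#     star = NavigationGraph(DataFrame({"p": [0, 1, 2, 3]}), asarray([[0, 1], [0, 2], [0, 3]]))
#     star.neighbors
#     # array([[ 1,  2,  3],
#     #        [ 0, -1, -1],
#     #        [ 0, -1, -1],
#     #        [ 0, -1, -1]], dtype=int32)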
def prune_nodes(self, keep_condition: ndarray) -> None:
"""Only retain the given nodes with their properties and appropriately update all edges and neighbors.
For example, on a graph representing the earth, pruning with ``keep_condition`` set to
``my_graph.nodes["elevation_to_sea_level"] < 0.0`` (i.e. keeping only ocean nodes) should decrease
the number of nodes and edges by roughly 30%, since about that share of the earth is land.
Args:
keep_condition: the nodes which to keep as a numpy array with boolean values
"""
assert keep_condition.shape == (len(self),), "keep condition shape does not match nodes"
# filter points
self.nodes = self.nodes[keep_condition]
# filter edges
keep_condition_edges = logical_and(keep_condition[self.edges[:, 0]], keep_condition[self.edges[:, 1]])
self.edges = compress(keep_condition_edges, self.edges, axis=0)
# then correct the indices that the entries in filtered_edges refer to by subtracting the number of
# removed entries before each one
index_shift = cumsum(logical_not(keep_condition)).astype(self.edges.dtype)
self.edges -= index_shift[self.edges]
# reset neighbors
self._neighbors = None
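# A worked sketch of the index correction, assuming a three-node chain whose first
# node is pruned: edge (0, 1) disappears with node 0, and the surviving edge (1, 2)
# is shifted down to (0, 1) because one removed node precedes both of its endpoints:
#
#     chain = NavigationGraph(DataFrame({"p": [1, 2, 3]}), asarray([[0, 1], [1, 2]]))
#     chain.prune_nodes(asarray([False, True, True]))
#     chain.edges  # array([[0, 1]])
#     len(chain)   # 2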
@staticmethod
def _serialized_attributes() -> List[str]:
"""The list of attributes that shall be (de)serialized (on top of the nodes and edges)."""
return ["max_neighbors"]
def to_disk(self, file_path: str, overwrite_existing: bool = False) -> None:
"""Save the graph to disk. Possibly missing parent directories are automatically created.
The data is stored in an interoperable `HDF5 <https://docs.h5py.org/en/stable/>`_ file with the
keys ``nodes``, ``edges`` and optionally ``neighbors``.
The ``nodes`` are compressed using the default settings of :meth:`pandas.DataFrame.to_hdf`.
The ``edges`` (and ``neighbors`` if present) are slightly compressed using the library
`h5py <https://docs.h5py.org/en/stable/high/dataset.html#filter-pipeline>`_ (using GZIP level 4).
See also `the available options in h5py
<https://docs.h5py.org/en/stable/faq.html#what-compression-processing-filters-are-supported>`_.
Args:
file_path: the path to the file where to store the graph; usually ends with ``.hdf5``
overwrite_existing: whether to overwrite the file if it already exists; otherwise, an error
is raised
Raises:
IOError: when the file cannot be accessed or written to, or it already exists and
``overwrite_existing`` is not set
See Also:
:meth:`~from_disk`
"""
compression_options = {"compression": "gzip", "compression_opts": 4}
# Create possibly missing parent directories, as promised in the docstring
makedirs(dirname(file_path) or ".", exist_ok=True)
with h5py.File(file_path, "w" if overwrite_existing else "w-") as graph_file:
graph_file.create_dataset("edges", data=self.edges, **compression_options)
if self._neighbors is not None:
graph_file.create_dataset("neighbors", data=self.neighbors, **compression_options)
# Serialize attributes; h5py cannot store None, so unset attributes are omitted
for attribute in self._serialized_attributes():
    value = getattr(self, attribute)
    if value is not None:
        graph_file.attrs[attribute] = value
# pandas automatically chooses an appropriate compression
self.nodes.to_hdf(file_path, key="nodes", mode="r+", append=True)
@classmethod
def from_disk(cls: Type[NavigationGraphSubclass], file_path: str) -> NavigationGraphSubclass:
"""Reads a file from disk.
Assumes an HDF5-based format compatible with the one created by :meth:`~to_disk`.
Args:
file_path: the path to the file where to read the graph from; usually ends with ``.hdf5``
Raises:
IOError: when the file cannot be accessed or read from
Returns:
The newly loaded navigation graph, which will be of a subclass of :class:`NavigationGraph` if this
method was called on that class.
See also:
:meth:`~to_disk`
"""
with h5py.File(file_path, "r") as graph_file:
edges = graph_file["edges"][:]
neighbors = graph_file["neighbors"][:] if "neighbors" in graph_file else None
# Deserialize attributes; ones omitted at save time (because they were None) default to None
attributes = {
attribute: graph_file.attrs.get(attribute) for attribute in cls._serialized_attributes()
}
nodes = read_hdf(file_path, key="nodes")
assert isinstance(nodes, DataFrame)
return cls(nodes, edges, neighbors, **attributes)
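# A round-trip sketch, assuming an illustrative file name in the working directory:
#
#     graph = NavigationGraph(DataFrame({"p": [1, 2, 3]}), asarray([[0, 1], [1, 2]]))
#     graph.to_disk("example_graph.hdf5", overwrite_existing=True)
#     assert NavigationGraph.from_disk("example_graph.hdf5") == graph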
@property
def num_edges(self) -> int:
"""Returns the number of edges. The number of nodes can be obtained via ``len(graph)``."""
return self.edges.shape[0]
def __len__(self) -> int:
return len(self.nodes)
def __eq__(self, other: Any) -> bool:
return (
isinstance(other, NavigationGraph)
and self.nodes.equals(other.nodes)
and array_equal(self.edges, other.edges)
# no need to check array_equal(self.neighbors, other.neighbors) as it is a derived property
and self.max_neighbors == other.max_neighbors
)

Some files were not shown because too many files have changed in this diff