Compare commits: main...privacy_zo (1 commit)

Commit: b1271f4dd0
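This compare view corresponds to an ordinary three-dot diff in git. The commands below are a minimal sketch for reproducing it locally, assuming the compared branch is available on the default remote; the branch name shown above is truncated (`privacy_zo`), so substitute the full name.

```bash
# Minimal sketch (assumes the remote is named "origin"; the branch name is
# truncated in this view, so replace it with the full branch name).
git fetch origin
git log --oneline main..privacy_zo    # the single commit listed above
git diff main...privacy_zo            # the per-file changes shown below
```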
@@ -1,10 +1,2 @@
-local
-*.user
-frontend/node_modules
-api/.pyenv
-.git
-cache
-data
-tile-generator/cache
-tile-generator/data
-tile-generator/build
+local/
+node_modules/
@@ -1,19 +0,0 @@
-root = true
-
-[*]
-end_of_line = lf
-insert_final_newline = true
-trim_trailing_whitespace = true
-charset = utf-8
-indent_style = space
-indent_size = 2
-
-[Makefile]
-indent_style = tab
-indent_size = 4
-
-[*.md]
-trim_trailing_whitespace = false
-
-[*.{py,rs}]
-indent_size = 4
@@ -1,20 +0,0 @@
-name: Build docker image
-on: [push]
-
-jobs:
-  build-image:
-    runs-on: ubuntu-latest
-    container:
-      image: catthehacker/ubuntu:act-latest
-    steps:
-      - name: Login to Forgejo docker registry
-        uses: docker/login-action@v3.0.0
-        with:
-          registry: git.pub.solar
-          username: hakkonaut
-          password: ${{ secrets.GIT_AUTH_TOKEN }}
-      - name: Build and push
-        uses: docker/build-push-action@v5.1.0
-        with:
-          push: true
-          tags: git.pub.solar/pub-solar/obs-portal:latest
48  .gitignore

@@ -1,3 +1,45 @@
-local
-data
-export
+# Logs
+logs
+*.log
+.DS_Store
+
+npm-debug.log*
+
+# Runtime data
+pids
+*.pid
+*.seed
+
+# Directory for instrumented libs generated by jscoverage/JSCover
+lib-cov
+
+# Coverage directory used by tools like istanbul
+coverage
+
+# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
+.grunt
+
+# node-waf configuration
+.lock-wscript
+
+# Compiled binary addons (http://nodejs.org/api/addons.html)
+build/Release
+
+# Dependency directory
+node_modules
+
+# Optional npm cache directory
+.npm
+
+# Optional REPL history
+.node_repl_history
+
+.idea
+
+# Storage place for local files, such as developer database etc.
+local/
+
+# We don't include this file in favor of package-lock.json -- we cannot have
+# both, because then developers will only update one of them and they'll
+# contradict. For now, npm shall be the canonical default (compare README.md).
+yarn.lock
3  .gitmodules

@@ -1,3 +0,0 @@
-[submodule "api/scripts"]
-	path = api/scripts
-	url = https://github.com/openbikesensor/OpenBikeSensor-Scripts
191  CHANGELOG.md

@@ -1,191 +0,0 @@
-# Changelog
-
-## 0.8.1
-
-### Improvements
-
-* The zone (urban/rural) is now also exported with the events GeoJson export.
-
-### Bug Fixes
-
-* Update to a current version of gpstime (python dependency) fix portal startup.
-
-## 0.8.0
-
-### Features
-
-* Bulk actions on users owned tracks (reprocess, download, make private, make public, delete) (#269, #38)
-* Easy sorting by device for "multi-device users" (e.g. group lending out OBSes)
-* Region display at higher zoom levels to easily find interesting areas (#112)
-* Export of road statistics on top of the already-existing event statistics (#341)
-
-### Improvements
-
-* Refactored database access to hopefully combat portal crashes (#337)
-* New infrastructure for map imports that makes import of larger maps possible on small VMs (#334)
-* Reference current postgres and postgis versions in docker-compose.yaml files (#286)
-* Configurable terms-and-conditions link (#320)
-* French translation by @cbiteau (#303)
-
-### Bug Fixes
-
-* Logout not working (#285)
-* Duplicate road usage hashes (#335, #253)
-* cannot import name .... (#338)
-
-## 0.7.0
-
-### Features
-
-* Add histogram of overtaking distances in road details panel
-* Flip table in road details panel and make it easier to read
-* Implement difference between urban and rural for events and road segments.
-* Better road zone detection in import
-* Make the frontend translatable and add German translation
-* Add time and user filters to map view (for logged-in users only)
-
-### Improvements
-
-* Make raw track not look like a river (#252)
-* Update many dependencies
-
-### Bug fixes
-
-* Overtaking events are now deleted when the parent track is deleted (#206)
-* Remove useless session creation (#192)
-* Remove some error logs for canceled requests (as the map page tends to do that quite a lot)
-* Fix ExportPage bounding box input
-
-
-## 0.6.2
-
-### Improvements
-
-* Prevent directory traversals inside container on python-served frontend.
-
-## 0.6.1
-
-### Improvements
-
-* Make road details request (clicking on a road segment in the map) way faster
-  by using PostGIS geometry index correctly (#226).
-
-## 0.6.0
-
-Starting in this version, the database schema is created through migrations
-instead of using the `reset_database.py` script. This means that for both the
-initial setup, as well as for upgrades, only the migrations have to be run.
-
-After updating and migrating, it is good practice to regenerate the SQL tile
-functions (`api/tools/prepare_sql_tiles.py`) as well. It doesn't matter if you
-do this when it is not required, so we've written a simple all-in-one update
-script that you can run to do all upgrade tasks. This is now in
-`api/tools/upgrade.py`.
-
-Please check [`UPGRADING.md`](./UPGRADING.md) for more details if you're
-upgrading an existing installation. It contains an important note for this
-upgrade in particular.
-
-## 0.5.1
-
-Maintenance release, only includes build, deployment and documentation changes.
-
-## 0.5.0
-
-### Features
-
-* Use discrete colors for distances, with greens only above 1.5m
-* Use viridis colormap for roads' count layers
-* Generate usage count information (how often has a road been traveled)
-* Project the whole track to the map, and show both versions
-* Log out of OpenID server when logging out of application
-* Convert speed units to km/h in frontend
-* Pages now have titles (#148)
-* Remove map from home page, it was empty anyway (#120)
-
-### Internal
-
-* Add alembic setup for migrating
-* Build osm2pgsql with -j4
-* Update sqlalchemy[asyncio] requirement from ~=1.4.31 to ~=1.4.32 in /api
-
-## 0.4.2
-
-### Features
-
-### Bugfixes
-
-* Fix export route, it should be a child of /api
-
-## 0.4.1
-
-### Features
-
-* Add page for exporting data through web frontend
-* Generate GPX track file when importing a track
-* Add GPX track export button on the track page (accessible for anybody who can
-  see the track)
-
-## 0.4.0
-
-### Improvements
-
-* Retry OpenID Connect connection if it fails on boot
-* Format log outputs with color and improve access log
-* Make pool_size and overflow configurable for worker and portal
-* Add a route for exporting events as GeoJSON/Shapefile
-* Point footer to forum, not slack (fixes #140)
-* Improve wording on profile page ("My" instead of "Your")
-* Show "My tracks" directly in main menu (fixes #136)
-
-### Bugfixes
-
-* Make sure the API can recover from the broken postgresql connection state
-* Remove duplicate events from the same track
-* Fix direction of road segments (fixes #142)
-* Solve a few problems with the colormap scales in the map view
-
-### Docs & deployment
-
-* Greatly improve deployement docs for a simple follow-along routine
-* Use environment variables (`OBS_*`) for configuration
-* Fix port numbers in example files and expose 3000 in the image
-* Add `LEAN_MODE` configuration to disable `road` database table usage and fall
-  back to Overpass API for processing tracks (see
-  [docs/lean-mode.md](docs/lean-mode.md)).
-* Read `config.overrides.py` file if it exists
-* Add osm2pgsql to portal image to be able to import OSM data from within the
-  container
-* Fix path to roads_import.lua in docs
-* Explain to use the portal service, instead of api, in production
-* Use entrypoint instead of command, so you can run process_track.py one-off tasks
-
-### Internals
-
-* Use custom `get_single_arg` everywhere, remove sanicargs (fixes #193)
-* Update requirements and make them consistent
-* Fix error handling, especially for file uploads
-
-
-## 0.3.4
-
-### Features
-
-* Reintroduce event view (fixes #111)
-* Add layer configuration panel to map page
-  - Allow choosing basemap style
-  - Add toggles for event and road layers
-  - Make untagged roads display optional
-  - Show a legend for event color
-  - Alow choosing attribute used for coloring road segments
-* Add optional banner to frontend via config entry (solves #128)
-
-### Bugfixes
-
-* Clicking on road without events should not cause 500 error
-* Improve mobile layout a bit (fixes #123)
-
-### Technical
-
-* Allow explicit configuration of api base url via `API_URL` config
-* Remove outdated "mapTileset" frontend config section
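The 0.6.0 notes in the changelog above reference the migration and SQL-tile tooling only by path; as a hedged sketch (invocation details assumed, not stated in the changelog), an upgrade of an existing installation would run roughly:

```bash
# Assumed invocations; the changelog names the scripts but not the exact commands.
python api/tools/upgrade.py            # runs migrations and the other upgrade tasks
python api/tools/prepare_sql_tiles.py  # regenerates the SQL tile functions
```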
674  COPYING

@@ -1,674 +0,0 @@
(Entire file removed: the verbatim text of the GNU General Public License, version 3, 29 June 2007, all 674 lines.)
64  Dockerfile

@@ -1,61 +1,11 @@
-# This dockerfile is for the API + Frontend production image
-
-#############################################
-# Build the frontend AS builder
-#############################################
-
-FROM node:18 as frontend-builder
-
-WORKDIR /opt/obs/frontend
-ADD frontend/package.json frontend/package-lock.json /opt/obs/frontend/
-RUN echo update-notifier=false >> ~/.npmrc
-RUN npm ci
-
-ADD frontend/tsconfig.json frontend/webpack.config.js /opt/obs/frontend/
-ADD frontend/src /opt/obs/frontend/src/
-ADD frontend/public /opt/obs/frontend/public/
-
-RUN npm run build
-
-#############################################
-# Build the API and add the built frontend to it
-#############################################
-
-FROM python:3.11.3-bullseye
-
-RUN apt-get update &&\
-    apt-get install -y \
-    libboost-dev \
-    libboost-system-dev \
-    libboost-filesystem-dev \
-    libexpat1-dev \
-    zlib1g-dev \
-    libbz2-dev \
-    libpq-dev \
-    libproj-dev \
-    lua5.3 \
-    liblua5.3-dev &&\
-    rm -rf /var/lib/apt/lists/*
-
-WORKDIR /opt/obs/api
-
-ADD api/requirements.txt /opt/obs/api/
-RUN pip install -r requirements.txt
-
-ADD tile-generator /opt/obs/tile-generator
-
-ADD api/scripts /opt/obs/scripts
-RUN pip install -e /opt/obs/scripts
-
-ADD api/setup.py /opt/obs/api/
-ADD api/alembic.ini /opt/obs/api/
-ADD api/migrations /opt/obs/api/migrations/
-ADD api/obs /opt/obs/api/obs/
-ADD api/tools /opt/obs/api/tools/
-RUN pip install -e /opt/obs/api/
-
-COPY --from=frontend-builder /opt/obs/frontend/build /opt/obs/frontend/build
-
-EXPOSE 3000
-
-CMD ["openbikesensor-api"]
+FROM node:14
+
+WORKDIR /opt/obsAPI
+ADD package.json package-lock.json /opt/obsAPI/
+RUN npm ci
+
+ADD src /opt/obsAPI/src/
+
+EXPOSE 8080
+ENV PORT=8080
+CMD ["npm", "start"]
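For context, the replacement Dockerfile above is a plain Node.js image that exposes port 8080 and starts the app with `npm start`. A rough usage sketch follows; the image tag and published port are arbitrary examples, not taken from the repository:

```bash
# Build the image from the repository root (where the Dockerfile lives),
# then run it with the container's port 8080 published on the host.
docker build -t obs-api .            # "obs-api" is an example tag
docker run --rm -p 8080:8080 obs-api
```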
292
README.md
292
README.md
|
@ -1,230 +1,100 @@
|
||||||
# OpenBikeSensor Portal
|
# OpenBikeSensor Web API
|
||||||
|
|
||||||
This repository contains the source code required to run the
|
The backend API for the [OpenBikeSensor](https://openbikesensor.org/) Web App.
|
||||||
[OpenBikeSensor](https://openbikesensor.org) data collection portal. It is
|
|
||||||
separated into components:
|
|
||||||
|
|
||||||
* **api**: The backend service, written in Python 3 with
|
## Direct setup
|
||||||
[Sanic](https://sanicframework.org/),
|
|
||||||
[SQLAlchemy](https://www.sqlalchemy.org/), and a PostgreSQL/PostGIS database
|
|
||||||
for storage. It also depends highly on
|
|
||||||
[OpenMapTiles](https://openmaptiles.org) to generate vector tiles of the
|
|
||||||
data.
|
|
||||||
* **frontend**: A React single-page application that allows access to the data,
|
|
||||||
provides summaries and visualizations, and lets users adjust settings and
|
|
||||||
manage and publish their tracks.
|
|
||||||
|
|
||||||
Check out the [Architecture Documentation](docs/architecture.md) for more
|
### Requirements
|
||||||
details on what parts the whole application is made of.
|
|
||||||
|
|
||||||
This project follows [semantic versioning](https://semver.org). Refer to [issue
|
* A working installation of npm and node.js - get the latest node.js LTS
|
||||||
#44](https://github.com/openbikesensor/portal/issues/44) for a description of
|
release at [the node.js homepage](https://nodejs.org/en/) and verify it's
|
||||||
what that means for our project and what is considered the public interface.
|
working via `node -v` and `npm -v` in a command prompt of your choice. At
|
||||||
|
least node version 10.x is required.
|
||||||
|
* A working installation of [Docker](https://www.docker.com) for the
|
||||||
|
containerized MongoDB. Alternatively, you can set up your own MongoDB
|
||||||
|
elsewhere.
|
||||||
|
|
||||||
## Clone the Project
|
### First start
|
||||||
|
|
||||||
First of all, you must clone this project. This project uses submodules,
|
To get started you first need to download all dependencies in the project's
|
||||||
thus ensure, that they are cloned as well:
|
root folder:
|
||||||
|
|
||||||
|
npm install
|
||||||
|
|
||||||
|
Next up we have to run a MongoDB instance. The following command uses docker,
|
||||||
|
it assumes you have the docker daemon installed and running. Working with
|
||||||
|
docker might require root privileges, depending on your docker setup, so you
|
||||||
|
might want to prefix the following command with `sudo`:
|
||||||
|
|
||||||
|
npm run mongo:start
|
||||||
|
|
||||||
|
The development server will be accessible at `http://localhost:3000/api` after
|
||||||
|
starting it like this:
|
||||||
|
|
||||||
|
npm run dev
|
||||||
|
|
||||||
|
To stop the database when you're done developing, run (potentially with sudo):
|
||||||
|
|
||||||
|
npm run mongo:stop
|
||||||
|
|
||||||
|
## Updating
|
||||||
|
|
||||||
|
If you run this by cloning the git repository and setting it up as a systemd
|
||||||
|
service, you can follow this procedure to update the application:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
git clone --recursive https://github.com/openbikesensor/portal
|
sudo systemctl stop obsApp.service
|
||||||
|
git pull
|
||||||
# ... or if you forgot the --recursive argument, you can run this in the
|
npm install
|
||||||
# repository's directory later:
|
npm run migrate:up
|
||||||
git submodule update --init --recursive
|
sudo systemctl start obsApp.service
|
||||||
```
|
```
|
||||||
|
|
||||||
## Production setup
|
## Docker setup
|
||||||
|
|
||||||
There is a guide for a deployment based on docker at
|
If you have docker and don't want to bother installing Node.js on your machine,
|
||||||
[docs/production-deployment.md](docs/production-deployment.md). Lots of
|
you can run the application inside docker as well:
|
||||||
non-docker deployment strategies are possible, but they are not "officially"
|
|
||||||
supported, so please do not expect the authors of the software to assist in
|
|
||||||
troubleshooting.
|
|
||||||
|
|
||||||
This is a rather complex application, and it is expected that you know the
|
docker-compose up -d
|
||||||
basics of deploying a modern web application securely onto a production server.
|
|
||||||
We are sorry that we cannot guide you through all the details of that, as we
|
|
||||||
just don't have the capacities to do so. Please research the respective topics
|
|
||||||
first. If you struggle with application-specific issues, please let us know, we
|
|
||||||
might be able to assist with those.
|
|
||||||
|
|
||||||
Please note that you will always need to install your own reverse proxy that
|
This will first build the `obs-api` image, which contains all the steps
|
||||||
terminates TLS for you and handles certificates. We do not support TLS directly
|
outlined above, and then run the services, both a mongodb and the api itself,
|
||||||
in the application; instead, please use this preferred method.
|
in docker containers. Interaction with the processes is different, though;
|
||||||
|
expect other guides or commands to work differently in this type of setup.
|
||||||
|
|
||||||
Upgrading and migrating are described in [UPGRADING.md](./UPGRADING.md) for each
|
|
||||||
version.
|
|
||||||
|
|
||||||
### Migrating (Production)
|
## Custom MongoDB installation
|
||||||
|
|
||||||
Migrations are done with
|
If you have your own MongoDB instance running somewhere, you can set the
|
||||||
[Alembic](https://alembic.sqlalchemy.org/en/latest/index.html), please refer to
|
environment variable `MONGODB_URL` when starting the server, and it will read
|
||||||
its documentation for help. Most of the time, running this command will do all
|
that URL for connecting.
|
||||||
the migrations you need:
|
|
||||||
|
export MONGODB_URL=mongodb://user:password@mongodb.example.com/obs-app-database
|
||||||
|
|
||||||
|
This does not work when using docker-compose, in that case, you will have to
|
||||||
|
modify the `docker-compose.yaml` to include that URL.
|
||||||
|
|
||||||
|
|
||||||
|
## E-Mail Setup
|
||||||
|
|
||||||
|
By default in development mode mails are not sent, but instead the mail data is
|
||||||
|
logged to the console. This can be overridden with the `--devSendMails` flag if
|
||||||
|
you start the application like so: `npm run dev -- --devSendMails`.
|
||||||
|
|
||||||
|
Mails are also always sent in production mode!
|
||||||
|
|
||||||
|
For actually sending e-mails the mailserver, sender, user and password for the
|
||||||
|
SMTP server need to be specified as environment variables:
|
||||||
|
|
||||||
|
* `MAILUSER` -- the smtp mailbox login name
|
||||||
|
* `MAILPW` -- password for the mailbox
|
||||||
|
* `MAILSERVER` -- the hostname of the SMTP server, e.g. `mail.example.com`
|
||||||
|
* `MAILSENDER` -- sender name, e.g. `noreply@example.com`
|
||||||
|
|
||||||
|
Full command example:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
docker-compose run --rm api tools/upgrade.py
|
MAILSERVER=mail.example.com MAILSENDER=noreply@example.com \
|
||||||
|
MAILUSER=my_mail_login MAILPW=hunter2 \
|
||||||
|
npm run dev -- --devSendMails
|
||||||
```
|
```
|
||||||
|
|
||||||
This command is equivalent to running migrations through *alembic*, then
|
|
||||||
regenerating the SQL functions that compute vector tiles directly in the
|
|
||||||
database:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# equivalent to the above command, you don't usually run these
|
|
||||||
docker-compose run --rm api alembic upgrade head
|
|
||||||
docker-compose run --rm api tools/prepare_sql_tiles
|
|
||||||
```
|
|
||||||
|
|
||||||
## Development setup
|
|
||||||
|
|
||||||
We've moved the whole development setup into Docker to make it easy for
|
|
||||||
everyone to get involved.
|
|
||||||
|
|
||||||
### Install docker
|
|
||||||
|
|
||||||
Please [install Docker Engine](https://docs.docker.com/engine/install/) as well as
|
|
||||||
[Docker Compose](https://docs.docker.com/compose/install/) onto your machine.
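
A quick sanity check that both tools are available (not part of the official install steps):

```bash
# Verify Docker Engine and Docker Compose are installed and on your PATH
docker --version
docker-compose version
```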
|
|
||||||
|
|
||||||
Then clone the repository as described above.
|
|
||||||
|
|
||||||
### Configure Keycloak
|
|
||||||
|
|
||||||
Login will not be possible until you configure the keycloak realm correctly. Boot your keycloak instance:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose up -d keycloak
|
|
||||||
```
|
|
||||||
|
|
||||||
Now navigate to http://localhost:3003/ and follow these steps:
|
|
||||||
|
|
||||||
- Click *Administration Console* and log in with `admin` / `admin`.
|
|
||||||
- Hover over the realm name on the top left and click *Add realm*.
|
|
||||||
- Name the Realm `obs-dev` (spelling matters) and create it.
|
|
||||||
- In the sidebar, navigate to *Configure* → *Clients*, and click *Create* on the top right.
|
|
||||||
- *Client ID* should be `portal`. Click *Save*.
|
|
||||||
- In the Tab *Settings*, edit the new client's *Access Type* to *confidential*
|
|
||||||
and enter as *Valid Redirect URIs*: `http://localhost:3000/login/redirect`,
|
|
||||||
then *Save*
|
|
||||||
- Under *Credentials*, copy the *Secret*. Create a file at `api/config.overrides.py` with the secret in it:
|
|
||||||
|
|
||||||
```python
|
|
||||||
KEYCLOAK_CLIENT_SECRET="your secret here"
|
|
||||||
```
|
|
||||||
|
|
||||||
You can use this file in development mode to change settings without editing
|
|
||||||
the git-controlled default file at `api/config.dev.py`. Options in this file
|
|
||||||
take precedence.
|
|
||||||
- In the sidebar, navigate to *Manage* → *Users*, and click *Add user* on the top right.
|
|
||||||
- Give the user a name (e.g. `test`), leave the rest as-is.
|
|
||||||
- Under the tab *Credentials*, choose a new password, and make it
|
|
||||||
non-temporary. Click *Set Password*.
|
|
||||||
|
|
||||||
We are going to automate this process. For now, you will have to repeat it
|
|
||||||
every time you reset your keycloak settings, which are stored in
|
|
||||||
PostgreSQL as well. Luckily, the script `api/tools/reset_database.py` does
|
|
||||||
*not* affect the state of the keycloak database, so this should be rather rare.
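
If you do want a fresh application database at some point (keycloak data stays untouched), a minimal sketch, assuming the development compose service name `api`:

```bash
# Drops and recreates the application schema; keycloak's own tables are not affected
docker-compose run --rm api python tools/reset_database.py
```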
|
|
||||||
|
|
||||||
### Prepare database
|
|
||||||
|
|
||||||
Start the PostgreSQL database:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose up -d postgres
|
|
||||||
```
|
|
||||||
|
|
||||||
The first time you start postgres, a lot of extensions will be installed. This
|
|
||||||
takes a while, so check the logs of the docker container until you see:
|
|
||||||
|
|
||||||
> PostgreSQL init process complete; ready for start up.
|
|
||||||
|
|
||||||
If you don't wait long enough, the following commands might fail. In this case,
|
|
||||||
you can always stop the container, remove the data directory (`local/postgres`)
|
|
||||||
and restart the process.
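
A minimal sketch of both steps, assuming the `postgres` service name and the `local/postgres` data directory used in this setup:

```bash
# Follow the database logs until the init-complete message appears
docker-compose logs -f postgres

# If initialization was interrupted, reset the data directory and start over
docker-compose stop postgres
rm -rf local/postgres
docker-compose up -d postgres
```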
|
|
||||||
|
|
||||||
Next, run the upgrade command to generate the database schema:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm api tools/upgrade.py
|
|
||||||
```
|
|
||||||
|
|
||||||
You will need to re-run this command after updates, to migrate the database and
|
|
||||||
(re-)create the functions in the SQL database that are used when generating
|
|
||||||
vector tiles.
|
|
||||||
|
|
||||||
You should also [import OpenStreetMap data](docs/osm-import.md) now.
|
|
||||||
|
|
||||||
### Boot the application
|
|
||||||
|
|
||||||
Now you can run the remaining parts of the application:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose up -d --build api worker frontend
|
|
||||||
```
|
|
||||||
|
|
||||||
Your frontend should be running at http://localhost:3001 and the API at
|
|
||||||
http://localhost:3000 -- but you probably only need to access the frontend for
|
|
||||||
testing.
|
|
||||||
|
|
||||||
### Migrating (Development)
|
|
||||||
|
|
||||||
Migrations are done with
|
|
||||||
[Alembic](https://alembic.sqlalchemy.org/en/latest/index.html), please refer to
|
|
||||||
its documentation for help. Most of the time, running this command will do all
|
|
||||||
the migrations you need:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm api alembic upgrade head
|
|
||||||
```
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Troubleshooting
|
|
||||||
|
|
||||||
If any step of the instructions does not work for you, please open an issue and
|
|
||||||
describe the problem you're having, as it is important to us that onboarding is
|
|
||||||
super easy :)
|
|
||||||
|
|
||||||
### Connecting to the PostgreSQL database
|
|
||||||
|
|
||||||
If you need to connect to your development PostgreSQL database, you should
|
|
||||||
install `psql` locally. The port 5432 is already forwarded, so you can connect with:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
psql -h localhost -U obs -d obs
|
|
||||||
```
|
|
||||||
|
|
||||||
The password is `obs` as well.
|
|
||||||
|
|
||||||
## License
|
|
||||||
|
|
||||||
Copyright (C) 2020-2021 OpenBikeSensor Contributors
|
|
||||||
Contact: https://openbikesensor.org
|
|
||||||
|
|
||||||
The OpenBikeSensor Portal is free software: you can redistribute it
|
|
||||||
and/or modify it under the terms of the GNU Lesser General Public License
|
|
||||||
as published by the Free Software Foundation, either version 3 of the
|
|
||||||
License, or (at your option) any later version.
|
|
||||||
|
|
||||||
The OpenBikeSensor Portal is distributed in the hope that it will be
|
|
||||||
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
|
|
||||||
General Public License for more details.
|
|
||||||
|
|
||||||
You should have received a copy of the GNU Lesser General Public License
|
|
||||||
along with the OpenBikeSensor Portal. If not, see
|
|
||||||
<http://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
See also [`COPYING`](./COPYING) and [`COPYING.LESSER`](./COPYING.LESSER).
|
|
||||||
|
|
||||||
The above does not apply to the files listed below, their respective licenses
|
|
||||||
are included in a file next to each of them, named accordingly:
|
|
||||||
|
|
||||||
* `frontend/src/mapstyles/bright.json`
|
|
||||||
* `frontend/src/mapstyles/positron.json`
|
|
||||||
|
|
||||||
There are lots of other licenses to consider when using this software,
|
|
||||||
especially in conjunction with imported data and other tools. Check out the
|
|
||||||
[Licenses Documentation](docs/licenses.md) for an (unofficial) overview of the
|
|
||||||
license landscape surrounding this project.
|
|
||||||
170 UPGRADING.md
@@ -1,170 +0,0 @@
|
||||||
# Upgrading
|
|
||||||
This document describes the general steps to upgrade between major changes.
|
|
||||||
Simple migrations, e.g. for adding schema changes, are not documented
|
|
||||||
explicitly. Their general usage is described in the [README](./README.md) (for
|
|
||||||
development) and [docs/production-deployment.md](docs/production-deployment.md) (for production).
|
|
||||||
|
|
||||||
|
|
||||||
## 0.8.1
|
|
||||||
|
|
||||||
- Get the release in your source folder (``git pull; git checkout 0.8.1`` and update submodules ``git submodule update --recursive``)
|
|
||||||
- Rebuild images ``docker-compose build``
|
|
||||||
- No database upgrade is required, but tile functions need an update:
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm portal tools/prepare_sql_tiles.py
|
|
||||||
```
|
|
||||||
- Start your portal and worker services. ``docker-compose up -d worker portal``
|
|
||||||
|
|
||||||
|
|
||||||
## 0.8.0
|
|
||||||
Upgrade to `0.7.x` first. See below for details. Then follow these steps:
|
|
||||||
|
|
||||||
> **Warning** The update includes a reprocessing of tracks after import. Depending on the number of tracks this can take a few hours. The portal is reachable during that time but events disappear and incrementally reappear during reimport.
|
|
||||||
|
|
||||||
> **Info** With this version the import process for OpenStreetMap data has changed: the [new process](docs/osm-import.md) is easier on resources and finally permits importing a full country on a low-end VM.
|
|
||||||
|
|
||||||
- Do your [usual backup](docs/production-deployment.md)
|
|
||||||
- Get the release in your source folder (``git pull; git checkout 0.8.0`` and update submodules ``git submodule update --recursive``)
|
|
||||||
- Rebuild images ``docker-compose build``
|
|
||||||
- Stop your portal and worker services ``docker-compose stop worker portal``
|
|
||||||
- Run the upgrade:
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm portal tools/upgrade.py
|
|
||||||
```
|
|
||||||
This automatically does the following:
|
|
||||||
- Migration of database schema using alembic.
|
|
||||||
- Upgrade of SQL tile schema to new schema.
|
|
||||||
- Import the nuts-regions from the web into the database.
|
|
||||||
- Trigger a re-import of all tracks.
|
|
||||||
- Start your portal and worker services. ``docker-compose up -d worker portal``
|
|
||||||
|
|
||||||
|
|
||||||
## 0.7.0
|
|
||||||
|
|
||||||
Upgrade to `0.6.x` first. See below for details. Then follow these steps:
|
|
||||||
|
|
||||||
- Rebuild images
|
|
||||||
- Stop your portal and worker services.
|
|
||||||
- **Migration with alembic**: required
|
|
||||||
- **Prepare SQL Tiles**: required
|
|
||||||
- Start your portal and worker services.
|
|
||||||
- **Reimport tracks**: no action required
|
|
||||||
- **OSM Import**: required
|
|
||||||
- **Config changes**: add `POSTGRES_MAX_OVERFLOW` and `POSTGRES_POOL_SIZE`
|
|
||||||
variables, see `api/config.py.example`
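
A sketch of the two additions, using the default values from `api/config.py.example`. The `api/config.py` path is an assumption; append the lines to wherever your config file actually lives:

```bash
# Add the new pool settings to the API config file
cat >> api/config.py <<'EOF'
POSTGRES_POOL_SIZE = 20
POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE
EOF
```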
|
|
||||||
|
|
||||||
## 0.6.0
|
|
||||||
|
|
||||||
**Make sure to upgrade to `0.5.1` first, by checking out that version tag and
|
|
||||||
running migrations, then coming back to this version.** This is required
|
|
||||||
because the migrations have been edited to create the initial database schema,
|
|
||||||
but if you run the 0.5.1 migrations first, your database will remember that it
|
|
||||||
already has all the tables created. This is not required if you set up a new
|
|
||||||
installation.
|
|
||||||
|
|
||||||
For this update, run these steps:
|
|
||||||
|
|
||||||
- Build new images
|
|
||||||
- Stop portal and worker services
|
|
||||||
- Run the new upgrade tool:
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm portal tools/upgrade.py
|
|
||||||
```
|
|
||||||
- Start portal and worker services
|
|
||||||
|
|
||||||
## 0.5.0
|
|
||||||
|
|
||||||
The upgrade requires the following steps in the given order
|
|
||||||
|
|
||||||
- Rebuild images
|
|
||||||
- Stop your portal and worker services.
|
|
||||||
- **Migration with alembic**: required
|
|
||||||
- **Prepare SQL Tiles**: required
|
|
||||||
- Start your portal and worker services.
|
|
||||||
- **Reimport tracks**: required
|
|
||||||
- **OSM Import**: no action required
|
|
||||||
- **Config changes**: none
|
|
||||||
|
|
||||||
## 0.4.1
|
|
||||||
|
|
||||||
You can, but do not have to, reimport all tracks. This will generate a GPX file
|
|
||||||
for each track and allow the users to download those. If a GPX file has not yet
|
|
||||||
been created, the download will fail. To reimport all tracks, log in to your
|
|
||||||
PostgreSQL database (instructions are in [README.md](./README.md) for
|
|
||||||
development and [docs/production-deployment.md](./docs/production-deployment.md) for production)
|
|
||||||
and run:
|
|
||||||
|
|
||||||
```sql
|
|
||||||
UPDATE track SET processing_status = 'queued';
|
|
||||||
```
|
|
||||||
|
|
||||||
You can do this selectively with `WHERE` statements.
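
For example, to only queue the tracks of a single user, a sketch assuming the development connection settings from the README and a hypothetical `author_id` value:

```bash
# Re-queue only the tracks owned by user 1 (adjust the WHERE clause as needed)
psql -h localhost -U obs -d obs \
  -c "UPDATE track SET processing_status = 'queued' WHERE author_id = 1;"
```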
|
|
||||||
|
|
||||||
Make sure your worker is running to process the queue.
|
|
||||||
|
|
||||||
## 0.4.0
|
|
||||||
|
|
||||||
* Rebuild your image, this may take longer than usual, as it will compile
|
|
||||||
`osm2pgsql` for you. Next time, it should be in your docker build cache and
|
|
||||||
be fast again.
|
|
||||||
* Add new config flags: `VERBOSE`, `LEAN_MODE`, `POSTGRES_POOL_SIZE`,
|
|
||||||
`POSTGRES_MAX_OVERFLOW`. Check the example config for sane default values.
|
|
||||||
* Re-run `tools/prepare_sql_tiles.py` again (see README)
|
|
||||||
* It has been made easier to import OSM data, check
|
|
||||||
[docs/production-deployment.md](./docs/production-deployment.md) for the sections "Download
|
|
||||||
OpenStreetMap maps" and "Import OpenStreetMap data". You can now download
|
|
||||||
multiple .pbf files and then import them at once, using the docker image
|
|
||||||
built with the `Dockerfile`. Alternatively, you can choose to enable [lean
|
|
||||||
mode](docs/lean-mode.md). You do not need to reimport data, but setting this
|
|
||||||
up now will make your life easier in the long run ;)
|
|
||||||
|
|
||||||
## v0.2 to v0.3 (MongoDB to PostgreSQL)
|
|
||||||
|
|
||||||
* Shut down all services
|
|
||||||
* Obviously, now is a good time to perform a full backup ;)
|
|
||||||
* Update the codebase (`git pull`, `git submodule update`).
|
|
||||||
* Update your ``docker-compose.yaml`` with the one from the ``deployment/examples``
|
|
||||||
folder.
|
|
||||||
* Leave the MongoDB service in place for now.
|
|
||||||
* Update all other service descriptions.
|
|
||||||
* You can remove `redis` already.
|
|
||||||
* Generate a better password than the default for your
|
|
||||||
postgres user.
|
|
||||||
* Traefik rules have been simplified as all routes are handled
|
|
||||||
by the portal service now.
|
|
||||||
* Start up the `mongo` and `postgres` services. Wait for postgres to finish
|
|
||||||
initializing (see [README](README.md)).
|
|
||||||
* Build the new image (e.g. with `docker-compose build portal`)
|
|
||||||
* Configure your API. The example config file is `api/config.py.example`, and
|
|
||||||
it will need to be mounted to `api/config.py` in the container. Ignore the
|
|
||||||
Keycloak options for now.
|
|
||||||
* Prepare the database:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm portal python tools/reset_database.py
|
|
||||||
docker-compose run --rm portal python tools/prepare_sql_tiles.py
|
|
||||||
```
|
|
||||||
* Import OSM data (see [README](README.md)).
|
|
||||||
* Run the database migration script:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm \
|
|
||||||
-v $PWD/export:/export \
|
|
||||||
portal \
|
|
||||||
python tools/import_from_mongodb.py mongodb://mongo/obs \
|
|
||||||
--keycloak-users-file /export/users.json
|
|
||||||
```
|
|
||||||
There is an option `--keep-api-keys` which means the users won't have to
|
|
||||||
reconfigure the devices in which they used their API key. **However**, please try
|
|
||||||
to avoid this option if at all possible, as the old keys are *very* insecure.
|
|
||||||
The default without this option is to generate a new, secure API key for each
|
|
||||||
user.
|
|
||||||
* Shut down the `mongo` service, you can now remove it from docker-compose.yaml
|
|
||||||
* Start `keycloak` and configure it, similarly to how it was configured in the
|
|
||||||
development setup (but choose more secure options). Update the API config
|
|
||||||
file to match your keycloak configuration. Import the file
|
|
||||||
`export/users.json` into your realm; it will re-add all the users from the
|
|
||||||
old installation. You should delete the file and `export/` folder afterwards.
|
|
||||||
* Start `portal`.
|
|
||||||
* Consider configuring a worker service. See [docs/production-deployment.md](./docs/production-deployment.md).
|
|
||||||
|
|
|
@@ -1,3 +0,0 @@
|
||||||
local/
|
|
||||||
node_modules/
|
|
||||||
node_modules
47 api/.gitignore (vendored)
@@ -1,47 +0,0 @@
|
||||||
# Logs
|
|
||||||
logs
|
|
||||||
*.log
|
|
||||||
.DS_Store
|
|
||||||
|
|
||||||
npm-debug.log*
|
|
||||||
|
|
||||||
# Runtime data
|
|
||||||
pids
|
|
||||||
*.pid
|
|
||||||
*.seed
|
|
||||||
|
|
||||||
# Directory for instrumented libs generated by jscoverage/JSCover
|
|
||||||
lib-cov
|
|
||||||
|
|
||||||
# Coverage directory used by tools like istanbul
|
|
||||||
coverage
|
|
||||||
|
|
||||||
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
|
|
||||||
.grunt
|
|
||||||
|
|
||||||
# node-waf configuration
|
|
||||||
.lock-wscript
|
|
||||||
|
|
||||||
# Compiled binary addons (http://nodejs.org/api/addons.html)
|
|
||||||
build/Release
|
|
||||||
|
|
||||||
# Dependency directory
|
|
||||||
node_modules
|
|
||||||
|
|
||||||
# Optional npm cache directory
|
|
||||||
.npm
|
|
||||||
|
|
||||||
# Optional REPL history
|
|
||||||
.node_repl_history
|
|
||||||
|
|
||||||
.idea
|
|
||||||
|
|
||||||
# Storage place for local files, such as developer database etc.
|
|
||||||
local/
|
|
||||||
|
|
||||||
# We don't include this file in favor of package-lock.json -- we cannot have
|
|
||||||
# both, because then developers will only update one of them and they'll
|
|
||||||
# contradict. For now, npm shall be the canonical default (compare README.md).
|
|
||||||
yarn.lock
|
|
||||||
|
|
||||||
config.overrides.py
|
|
|
@@ -1,16 +0,0 @@
|
||||||
FROM python:3.11.3-bullseye
|
|
||||||
|
|
||||||
WORKDIR /opt/obs/api
|
|
||||||
|
|
||||||
ADD scripts /opt/obs/scripts
|
|
||||||
RUN pip install -e /opt/obs/scripts
|
|
||||||
|
|
||||||
ADD requirements.txt /opt/obs/api/
|
|
||||||
RUN pip install -r requirements.txt
|
|
||||||
ADD setup.py /opt/obs/api/
|
|
||||||
ADD obs /opt/obs/api/obs/
|
|
||||||
RUN pip install -e .
|
|
||||||
|
|
||||||
EXPOSE 8000
|
|
||||||
|
|
||||||
CMD ["openbikesensor-api"]
102 api/alembic.ini
@@ -1,102 +0,0 @@
|
||||||
# A generic, single database configuration.
|
|
||||||
|
|
||||||
[alembic]
|
|
||||||
# path to migration scripts
|
|
||||||
script_location = migrations
|
|
||||||
|
|
||||||
# template used to generate migration files
|
|
||||||
# file_template = %%(rev)s_%%(slug)s
|
|
||||||
|
|
||||||
# sys.path path, will be prepended to sys.path if present.
|
|
||||||
# defaults to the current working directory.
|
|
||||||
prepend_sys_path = .
|
|
||||||
|
|
||||||
# timezone to use when rendering the date within the migration file
|
|
||||||
# as well as the filename.
|
|
||||||
# If specified, requires the python-dateutil library that can be
|
|
||||||
# installed by adding `alembic[tz]` to the pip requirements
|
|
||||||
# string value is passed to dateutil.tz.gettz()
|
|
||||||
# leave blank for localtime
|
|
||||||
# timezone =
|
|
||||||
|
|
||||||
# max length of characters to apply to the
|
|
||||||
# "slug" field
|
|
||||||
# truncate_slug_length = 40
|
|
||||||
|
|
||||||
# set to 'true' to run the environment during
|
|
||||||
# the 'revision' command, regardless of autogenerate
|
|
||||||
# revision_environment = false
|
|
||||||
|
|
||||||
# set to 'true' to allow .pyc and .pyo files without
|
|
||||||
# a source .py file to be detected as revisions in the
|
|
||||||
# versions/ directory
|
|
||||||
# sourceless = false
|
|
||||||
|
|
||||||
# version location specification; This defaults
|
|
||||||
# to api/migrations/versions. When using multiple version
|
|
||||||
# directories, initial revisions must be specified with --version-path.
|
|
||||||
# The path separator used here should be the separator specified by "version_path_separator" below.
|
|
||||||
# version_locations = %(here)s/bar:%(here)s/bat:api/migrations/versions
|
|
||||||
|
|
||||||
# version path separator; As mentioned above, this is the character used to split
|
|
||||||
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
|
||||||
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
|
||||||
# Valid values for version_path_separator are:
|
|
||||||
#
|
|
||||||
# version_path_separator = :
|
|
||||||
# version_path_separator = ;
|
|
||||||
# version_path_separator = space
|
|
||||||
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
|
||||||
|
|
||||||
# the output encoding used when revision files
|
|
||||||
# are written from script.py.mako
|
|
||||||
# output_encoding = utf-8
|
|
||||||
|
|
||||||
sqlalchemy.url = driver://user:pass@localhost/dbname
|
|
||||||
|
|
||||||
|
|
||||||
[post_write_hooks]
|
|
||||||
# post_write_hooks defines scripts or Python functions that are run
|
|
||||||
# on newly generated revision scripts. See the documentation for further
|
|
||||||
# detail and examples
|
|
||||||
|
|
||||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
|
||||||
# hooks = black
|
|
||||||
# black.type = console_scripts
|
|
||||||
# black.entrypoint = black
|
|
||||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
|
||||||
|
|
||||||
# Logging configuration
|
|
||||||
[loggers]
|
|
||||||
keys = root,sqlalchemy,alembic
|
|
||||||
|
|
||||||
[handlers]
|
|
||||||
keys = console
|
|
||||||
|
|
||||||
[formatters]
|
|
||||||
keys = generic
|
|
||||||
|
|
||||||
[logger_root]
|
|
||||||
level = WARN
|
|
||||||
handlers = console
|
|
||||||
qualname =
|
|
||||||
|
|
||||||
[logger_sqlalchemy]
|
|
||||||
level = WARN
|
|
||||||
handlers =
|
|
||||||
qualname = sqlalchemy.engine
|
|
||||||
|
|
||||||
[logger_alembic]
|
|
||||||
level = INFO
|
|
||||||
handlers =
|
|
||||||
qualname = alembic
|
|
||||||
|
|
||||||
[handler_console]
|
|
||||||
class = StreamHandler
|
|
||||||
args = (sys.stderr,)
|
|
||||||
level = NOTSET
|
|
||||||
formatter = generic
|
|
||||||
|
|
||||||
[formatter_generic]
|
|
||||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
|
||||||
datefmt = %H:%M:%S
|
|
|
@@ -1,35 +0,0 @@
|
||||||
HOST = "0.0.0.0"
|
|
||||||
PORT = 3000
|
|
||||||
DEBUG = True
|
|
||||||
VERBOSE = False
|
|
||||||
AUTO_RELOAD = True
|
|
||||||
SECRET = "!!!!!!!!!!!!CHANGE ME!!!!!!!!!!!!"
|
|
||||||
POSTGRES_URL = "postgresql+asyncpg://obs:obs@postgres/obs"
|
|
||||||
POSTGRES_POOL_SIZE = 20
|
|
||||||
POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE
|
|
||||||
KEYCLOAK_URL = "http://keycloak:8080/auth/realms/obs-dev/"
|
|
||||||
KEYCLOAK_CLIENT_ID = "portal"
|
|
||||||
KEYCLOAK_CLIENT_SECRET = "c385278e-bd2e-4f13-9937-34b0c0f44c2d"
|
|
||||||
DEDICATED_WORKER = True
|
|
||||||
API_URL = "http://localhost:3000/"
|
|
||||||
FRONTEND_URL = "http://localhost:3001/"
|
|
||||||
FRONTEND_HTTPS = False
|
|
||||||
FRONTEND_DIR = None
|
|
||||||
FRONTEND_CONFIG = {
|
|
||||||
"imprintUrl": "https://example.com/imprint",
|
|
||||||
"privacyPolicyUrl": "https://example.com/privacy",
|
|
||||||
# "termsUrl": "https://example.com/terms", # Link is only shown when set
|
|
||||||
"mapHome": {"zoom": 6, "longitude": 10.2, "latitude": 51.3},
|
|
||||||
# "banner": {"text": "This is a development installation.", "style": "info"},
|
|
||||||
}
|
|
||||||
|
|
||||||
TILES_FILE = None # "/tiles/tiles.mbtiles"
|
|
||||||
DATA_DIR = "/data"
|
|
||||||
ADDITIONAL_CORS_ORIGINS = [
|
|
||||||
"http://localhost:8880/", # for maputnik on 8880
|
|
||||||
"http://localhost:8888/", # for maputnik on 8888
|
|
||||||
]
|
|
||||||
TILE_SEMAPHORE_SIZE = 4
|
|
||||||
EXPORT_SEMAPHORE_SIZE = 4
|
|
||||||
|
|
||||||
# vim: set ft=python :
|
|
|
@@ -1,73 +0,0 @@
|
||||||
# Bind address of the server
|
|
||||||
HOST = "127.0.0.1"
|
|
||||||
PORT = 3000
|
|
||||||
|
|
||||||
# Extended log output, but slower
|
|
||||||
DEBUG = False
|
|
||||||
VERBOSE = DEBUG
|
|
||||||
AUTO_RELOAD = DEBUG
|
|
||||||
|
|
||||||
# Required to encrypt or sign sessions, cookies, tokens, etc.
|
|
||||||
SECRET = "!!!<<<CHANGEME>>>!!!"
|
|
||||||
|
|
||||||
# Connection to the database
|
|
||||||
POSTGRES_URL = "postgresql+asyncpg://user:pass@host/dbname"
|
|
||||||
POSTGRES_POOL_SIZE = 20
|
|
||||||
POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE
|
|
||||||
|
|
||||||
# URL to the keycloak realm, as reachable by the API service. This is not
|
|
||||||
# necessarily its publicly reachable URL; keycloak advertises that itself.
|
|
||||||
KEYCLOAK_URL = "http://localhost:1234/auth/realms/obs/"
|
|
||||||
|
|
||||||
# Auth client credentials
|
|
||||||
KEYCLOAK_CLIENT_ID = "portal"
|
|
||||||
KEYCLOAK_CLIENT_SECRET = "00000000-0000-0000-0000-000000000000"
|
|
||||||
|
|
||||||
# Whether the API should run the worker loop, or a dedicated worker is used
|
|
||||||
DEDICATED_WORKER = True
|
|
||||||
|
|
||||||
# The root of the frontend. Needed for redirecting after login, and for CORS.
|
|
||||||
# Set to None if frontend is served by the API.
|
|
||||||
FRONTEND_URL = None
|
|
||||||
FRONTEND_HTTPS = True
|
|
||||||
|
|
||||||
# Where to find the compiled frontend assets (must include index.html), or None
|
|
||||||
# to disable serving the frontend.
|
|
||||||
FRONTEND_DIR = "../frontend/build/"
|
|
||||||
|
|
||||||
# Can be an object or a JSON string
|
|
||||||
FRONTEND_CONFIG = {
|
|
||||||
"imprintUrl": "https://example.com/imprint",
|
|
||||||
"privacyPolicyUrl": "https://example.com/privacy",
|
|
||||||
# "termsUrl": "https://example.com/user_terms_and_conditions", # Link is only shown when set
|
|
||||||
"mapHome": {"zoom": 6, "longitude": 10.2, "latitude": 51.3},
|
|
||||||
"banner": {"text": "This is a test installation.", "style": "warning"},
|
|
||||||
}
|
|
||||||
|
|
||||||
# If the API should serve generated tiles, this is the path where the tiles are
|
|
||||||
# built. This is an experimental option and probably very inefficient; a proper
|
|
||||||
# tileserver should be preferred. Set to None to disable.
|
|
||||||
TILES_FILE = None
|
|
||||||
|
|
||||||
# Path overrides:
|
|
||||||
# API_ROOT_DIR = "??" # default: api/ inside repository
|
|
||||||
# DATA_DIR = "??" # default: $API_ROOT_DIR/..
|
|
||||||
# PROCESSING_DIR = "??" # default: DATA_DIR/processing
|
|
||||||
# PROCESSING_OUTPUT_DIR = "??" # default: DATA_DIR/processing-output
|
|
||||||
# TRACKS_DIR = "??" # default: DATA_DIR/tracks
|
|
||||||
# OBS_FACE_CACHE_DIR = "??" # default: DATA_DIR/obs-face-cache
|
|
||||||
|
|
||||||
# Additional allowed origins for CORS headers. The FRONTEND_URL is included by
|
|
||||||
# default. Python list, or whitespace separated string.
|
|
||||||
ADDITIONAL_CORS_ORIGINS = None
|
|
||||||
|
|
||||||
# How many asynchronous requests may be sent to the database to generate tile
|
|
||||||
# information. Should be less than POSTGRES_POOL_SIZE to leave some connections
|
|
||||||
# to the other features of the API ;)
|
|
||||||
TILE_SEMAPHORE_SIZE = 4
|
|
||||||
|
|
||||||
# How many asynchronous requests may generate exported data simultaneously.
|
|
||||||
# Keep this small.
|
|
||||||
EXPORT_SEMAPHORE_SIZE = 1
|
|
||||||
|
|
||||||
# vim: set ft=python :
|
|
|
@@ -1 +0,0 @@
|
||||||
Generic single-database configuration.
|
|
|
@@ -1,83 +0,0 @@
|
||||||
import asyncio
|
|
||||||
from logging.config import fileConfig
|
|
||||||
|
|
||||||
from sqlalchemy import engine_from_config
|
|
||||||
from sqlalchemy import pool
|
|
||||||
|
|
||||||
from alembic import context
|
|
||||||
|
|
||||||
# this is the Alembic Config object, which provides
|
|
||||||
# access to the values within the .ini file in use.
|
|
||||||
config = context.config
|
|
||||||
|
|
||||||
# Interpret the config file for Python logging.
|
|
||||||
# This line sets up loggers basically.
|
|
||||||
if config.config_file_name is not None:
|
|
||||||
fileConfig(config.config_file_name)
|
|
||||||
|
|
||||||
# add your model's MetaData object here
|
|
||||||
# for 'autogenerate' support
|
|
||||||
# from myapp import mymodel
|
|
||||||
# target_metadata = mymodel.Base.metadata
|
|
||||||
target_metadata = None
|
|
||||||
|
|
||||||
# other values from the config, defined by the needs of env.py,
|
|
||||||
# can be acquired:
|
|
||||||
# my_important_option = config.get_main_option("my_important_option")
|
|
||||||
# ... etc.
|
|
||||||
|
|
||||||
|
|
||||||
def do_run_migrations(connection):
|
|
||||||
context.configure(connection=connection, target_metadata=target_metadata)
|
|
||||||
|
|
||||||
with context.begin_transaction():
|
|
||||||
context.run_migrations()
|
|
||||||
|
|
||||||
|
|
||||||
def run_migrations_offline():
|
|
||||||
"""Run migrations in 'offline' mode.
|
|
||||||
|
|
||||||
This configures the context with just a URL
|
|
||||||
and not an Engine, though an Engine is acceptable
|
|
||||||
here as well. By skipping the Engine creation
|
|
||||||
we don't even need a DBAPI to be available.
|
|
||||||
|
|
||||||
Calls to context.execute() here emit the given string to the
|
|
||||||
script output.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from obs.api.app import app
|
|
||||||
|
|
||||||
url = app.config.POSTGRES_URL
|
|
||||||
context.configure(
|
|
||||||
url=url,
|
|
||||||
target_metadata=target_metadata,
|
|
||||||
literal_binds=True,
|
|
||||||
dialect_opts={"paramstyle": "named"},
|
|
||||||
)
|
|
||||||
|
|
||||||
with context.begin_transaction():
|
|
||||||
context.run_migrations()
|
|
||||||
|
|
||||||
|
|
||||||
async def run_migrations_online():
|
|
||||||
"""Run migrations in 'online' mode.
|
|
||||||
|
|
||||||
In this scenario we need to create an Engine
|
|
||||||
and associate a connection with the context.
|
|
||||||
|
|
||||||
"""
|
|
||||||
from obs.api.app import app, connect_db
|
|
||||||
|
|
||||||
url = app.config.POSTGRES_URL
|
|
||||||
async with connect_db(url) as engine:
|
|
||||||
async with engine.connect() as connection:
|
|
||||||
await connection.run_sync(do_run_migrations)
|
|
||||||
|
|
||||||
await engine.dispose()
|
|
||||||
|
|
||||||
|
|
||||||
if context.is_offline_mode():
|
|
||||||
run_migrations_offline()
|
|
||||||
else:
|
|
||||||
asyncio.run(run_migrations_online())
|
|
|
@@ -1,24 +0,0 @@
|
||||||
"""${message}
|
|
||||||
|
|
||||||
Revision ID: ${up_revision}
|
|
||||||
Revises: ${down_revision | comma,n}
|
|
||||||
Create Date: ${create_date}
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
${imports if imports else ""}
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = ${repr(up_revision)}
|
|
||||||
down_revision = ${repr(down_revision)}
|
|
||||||
branch_labels = ${repr(branch_labels)}
|
|
||||||
depends_on = ${repr(depends_on)}
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
${upgrades if upgrades else "pass"}
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
${downgrades if downgrades else "pass"}
|
|
|
@@ -1,16 +0,0 @@
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
def dbtype(name):
|
|
||||||
"""
|
|
||||||
Create a UserDefinedType for use in migrations as the type of a column,
|
|
||||||
when the type already exists in the database, but isn't available as a
|
|
||||||
proper sqlalchemy type.
|
|
||||||
"""
|
|
||||||
|
|
||||||
class TheType(sa.types.UserDefinedType):
|
|
||||||
def get_col_spec(self):
|
|
||||||
return name
|
|
||||||
|
|
||||||
TheType.__name__ = name
|
|
||||||
return TheType
|
|
|
@@ -1,39 +0,0 @@
|
||||||
"""create table road
|
|
||||||
|
|
||||||
Revision ID: 35e7f1768f9b
|
|
||||||
Revises: 5d75febe2d59
|
|
||||||
Create Date: 2022-03-30 21:36:48.157457
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import postgresql
|
|
||||||
|
|
||||||
from migrations.utils import dbtype
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "35e7f1768f9b"
|
|
||||||
down_revision = "920aed1450c9"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.create_table(
|
|
||||||
"road",
|
|
||||||
sa.Column(
|
|
||||||
"way_id", sa.BIGINT, primary_key=True, index=True, autoincrement=False
|
|
||||||
),
|
|
||||||
sa.Column("zone", dbtype("zone_type")),
|
|
||||||
sa.Column("name", sa.Text),
|
|
||||||
sa.Column("geometry", dbtype("geometry(LINESTRING,3857)")),
|
|
||||||
sa.Column("directionality", sa.Integer),
|
|
||||||
sa.Column("oneway", sa.Boolean),
|
|
||||||
)
|
|
||||||
op.execute(
|
|
||||||
"CREATE INDEX road_geometry_idx ON road USING GIST (geometry) WITH (FILLFACTOR=100);"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_table("road")
|
|
|
@@ -1,28 +0,0 @@
|
||||||
"""create extensions
|
|
||||||
|
|
||||||
Revision ID: 3856f240bb6d
|
|
||||||
Revises: a9627f63fbed
|
|
||||||
Create Date: 2022-03-30 21:31:06.282725
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "3856f240bb6d"
|
|
||||||
down_revision = None
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.execute('CREATE EXTENSION IF NOT EXISTS "hstore";')
|
|
||||||
op.execute('CREATE EXTENSION IF NOT EXISTS "postgis";')
|
|
||||||
op.execute('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.execute('DROP EXTENSION "hstore";')
|
|
||||||
op.execute('DROP EXTENSION "postgis";')
|
|
||||||
op.execute('DROP EXTENSION "uuid-ossp";')
|
|
|
@@ -1,30 +0,0 @@
|
||||||
"""transform overtaking_event geometry to 3857
|
|
||||||
|
|
||||||
Revision ID: 587e69ecb466
|
|
||||||
Revises: f4b0f460254d
|
|
||||||
Create Date: 2023-04-01 14:30:49.927505
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "587e69ecb466"
|
|
||||||
down_revision = "f4b0f460254d"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.execute("UPDATE overtaking_event SET geometry = ST_Transform(geometry, 3857);")
|
|
||||||
op.execute(
|
|
||||||
"ALTER TABLE overtaking_event ALTER COLUMN geometry TYPE geometry(POINT, 3857);"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.execute(
|
|
||||||
"ALTER TABLE overtaking_event ALTER COLUMN geometry TYPE geometry;"
|
|
||||||
)
|
|
||||||
op.execute("UPDATE overtaking_event SET geometry = ST_Transform(geometry, 4326);")
|
|
|
@@ -1,43 +0,0 @@
|
||||||
"""create table overtaking_event
|
|
||||||
|
|
||||||
Revision ID: 5d75febe2d59
|
|
||||||
Revises: 920aed1450c9
|
|
||||||
Create Date: 2022-03-30 21:36:37.687080
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
from migrations.utils import dbtype
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "5d75febe2d59"
|
|
||||||
down_revision = "9336eef458e7"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.create_table(
|
|
||||||
"overtaking_event",
|
|
||||||
sa.Column("id", sa.Integer, autoincrement=True, primary_key=True, index=True),
|
|
||||||
sa.Column(
|
|
||||||
"track_id", sa.Integer, sa.ForeignKey("track.id", ondelete="CASCADE")
|
|
||||||
),
|
|
||||||
sa.Column("hex_hash", sa.String, unique=True, index=True),
|
|
||||||
sa.Column("way_id", sa.BIGINT, index=True),
|
|
||||||
sa.Column("direction_reversed", sa.Boolean),
|
|
||||||
sa.Column("geometry", dbtype("GEOMETRY")),
|
|
||||||
sa.Column("latitude", sa.Float),
|
|
||||||
sa.Column("longitude", sa.Float),
|
|
||||||
sa.Column("time", sa.DateTime),
|
|
||||||
sa.Column("distance_overtaker", sa.Float),
|
|
||||||
sa.Column("distance_stationary", sa.Float),
|
|
||||||
sa.Column("course", sa.Float),
|
|
||||||
sa.Column("speed", sa.Float),
|
|
||||||
sa.Index("road_segment", "way_id", "direction_reversed"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_table("overtaking_event")
|
|
|
@@ -1,26 +0,0 @@
|
||||||
"""add_overtaking_event_index
|
|
||||||
|
|
||||||
|
|
||||||
Revision ID: 7868aed76122
|
|
||||||
Revises: 587e69ecb466
|
|
||||||
Create Date: 2023-07-16 13:37:17.694079
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = '7868aed76122'
|
|
||||||
down_revision = '587e69ecb466'
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.execute("CREATE INDEX IF NOT EXISTS ix_overtaking_event_geometry ON overtaking_event using GIST(geometry);")
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_index("ix_overtaking_event_geometry")
|
|
||||||
|
|
|
@@ -1,31 +0,0 @@
|
||||||
"""create enum processing_status
|
|
||||||
|
|
||||||
Revision ID: 920aed1450c9
|
|
||||||
Revises: 986c6953e431
|
|
||||||
Create Date: 2022-03-30 21:36:25.896192
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import postgresql
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "920aed1450c9"
|
|
||||||
down_revision = "986c6953e431"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def _get_enum_type():
|
|
||||||
return postgresql.ENUM(
|
|
||||||
"created", "queued", "processing", "complete", "error", name="processing_status"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
_get_enum_type().create(op.get_bind(), checkfirst=True)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
_get_enum_type().drop(op.get_bind())
|
|
|
@@ -1,42 +0,0 @@
|
||||||
"""create table comment
|
|
||||||
|
|
||||||
Revision ID: 9336eef458e7
|
|
||||||
Revises: 9d8c8c38a1d0
|
|
||||||
Create Date: 2022-03-30 21:37:02.080429
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects.postgresql import UUID
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "9336eef458e7"
|
|
||||||
down_revision = "d66baafab5ec"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
NOW = sa.text("NOW()")
|
|
||||||
|
|
||||||
op.create_table(
|
|
||||||
"comment",
|
|
||||||
sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
|
|
||||||
sa.Column("uid", UUID, server_default=sa.func.uuid_generate_v4()),
|
|
||||||
sa.Column("created_at", sa.DateTime, nullable=False, server_default=NOW),
|
|
||||||
sa.Column(
|
|
||||||
"updated_at", sa.DateTime, nullable=False, server_default=NOW, onupdate=NOW
|
|
||||||
),
|
|
||||||
sa.Column("body", sa.TEXT),
|
|
||||||
sa.Column(
|
|
||||||
"author_id", sa.Integer, sa.ForeignKey("user.id", ondelete="CASCADE")
|
|
||||||
),
|
|
||||||
sa.Column(
|
|
||||||
"track_id", sa.Integer, sa.ForeignKey("track.id", ondelete="CASCADE")
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_table("comment")
|
|
|
@@ -1,29 +0,0 @@
|
||||||
"""create enum zone_type
|
|
||||||
|
|
||||||
Revision ID: 986c6953e431
|
|
||||||
Revises: 3856f240bb6d
|
|
||||||
Create Date: 2022-03-30 21:36:19.888268
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import postgresql
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "986c6953e431"
|
|
||||||
down_revision = "3856f240bb6d"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def _get_enum_type():
|
|
||||||
return postgresql.ENUM("rural", "urban", "motorway", name="zone_type")
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
_get_enum_type().create(op.get_bind(), checkfirst=True)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
_get_enum_type().drop(op.get_bind())
|
|
|
@@ -1,26 +0,0 @@
|
||||||
"""add user display_name
|
|
||||||
|
|
||||||
Revision ID: 99a3d2eb08f9
|
|
||||||
Revises: a9627f63fbed
|
|
||||||
Create Date: 2022-09-13 07:30:18.747880
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "99a3d2eb08f9"
|
|
||||||
down_revision = "a9627f63fbed"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.add_column(
|
|
||||||
"user", sa.Column("display_name", sa.String, nullable=True), schema="public"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_column("user", "display_name", schema="public")
|
|
|
@@ -1,45 +0,0 @@
|
||||||
"""create table user
|
|
||||||
|
|
||||||
Revision ID: 9d8c8c38a1d0
|
|
||||||
Revises: d66baafab5ec
|
|
||||||
Create Date: 2022-03-30 21:36:59.375149
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "9d8c8c38a1d0"
|
|
||||||
down_revision = "35e7f1768f9b"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
NOW = sa.text("NOW()")
|
|
||||||
|
|
||||||
op.create_table(
|
|
||||||
"user",
|
|
||||||
sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
|
|
||||||
sa.Column("created_at", sa.DateTime, nullable=False, server_default=NOW),
|
|
||||||
sa.Column(
|
|
||||||
"updated_at", sa.DateTime, nullable=False, server_default=NOW, onupdate=NOW
|
|
||||||
),
|
|
||||||
sa.Column("sub", sa.String, unique=True, nullable=False),
|
|
||||||
sa.Column("username", sa.String, unique=True, nullable=False),
|
|
||||||
sa.Column("email", sa.String, nullable=False),
|
|
||||||
sa.Column("bio", sa.TEXT),
|
|
||||||
sa.Column("image", sa.String),
|
|
||||||
sa.Column(
|
|
||||||
"are_tracks_visible_for_all",
|
|
||||||
sa.Boolean,
|
|
||||||
server_default=sa.false(),
|
|
||||||
nullable=False,
|
|
||||||
),
|
|
||||||
sa.Column("api_key", sa.String),
|
|
||||||
sa.Column("match_by_username_email", sa.Boolean, server_default=sa.false()),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_table("user")
|
|
|
@@ -1,35 +0,0 @@
|
||||||
"""create table region
|
|
||||||
|
|
||||||
Revision ID: a049e5eb24dd
|
|
||||||
Revises: a9627f63fbed
|
|
||||||
Create Date: 2022-04-02 21:28:43.124521
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
from migrations.utils import dbtype
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "a049e5eb24dd"
|
|
||||||
down_revision = "99a3d2eb08f9"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.create_table(
|
|
||||||
"region",
|
|
||||||
sa.Column("id", sa.String(24), primary_key=True, index=True),
|
|
||||||
sa.Column("name", sa.Text),
|
|
||||||
sa.Column("geometry", dbtype("GEOMETRY(GEOMETRY,3857)"), index=False),
|
|
||||||
sa.Column("admin_level", sa.Integer, index=True),
|
|
||||||
)
|
|
||||||
op.execute(
|
|
||||||
"CREATE INDEX region_geometry_idx ON region USING GIST (geometry) WITH (FILLFACTOR=100);"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_table("region")
|
|
|
@@ -1,34 +0,0 @@
|
||||||
"""create table road_usage
|
|
||||||
|
|
||||||
Revision ID: a9627f63fbed
|
|
||||||
Revises:
|
|
||||||
Create Date: 2022-03-16 20:26:17.449569
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "a9627f63fbed"
|
|
||||||
down_revision = "5d75febe2d59"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.create_table(
|
|
||||||
"road_usage",
|
|
||||||
sa.Column("id", sa.Integer, autoincrement=True, primary_key=True, index=True),
|
|
||||||
sa.Column(
|
|
||||||
"track_id", sa.Integer, sa.ForeignKey("track.id", ondelete="CASCADE")
|
|
||||||
),
|
|
||||||
sa.Column("hex_hash", sa.String, unique=True, index=True),
|
|
||||||
sa.Column("way_id", sa.BIGINT, index=True),
|
|
||||||
sa.Column("time", sa.DateTime),
|
|
||||||
sa.Column("direction_reversed", sa.Boolean),
|
|
||||||
sa.Index("road_usage_segment", "way_id", "direction_reversed"),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_table("road_usage")
|
|
|
@@ -1,39 +0,0 @@
|
||||||
"""add import groups
|
|
||||||
|
|
||||||
Revision ID: b8b0fbae50a4
|
|
||||||
Revises: f7b21148126a
|
|
||||||
Create Date: 2023-03-26 09:41:36.621203
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "b8b0fbae50a4"
|
|
||||||
down_revision = "f7b21148126a"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.add_column(
|
|
||||||
"road",
|
|
||||||
sa.Column("import_group", sa.String(), nullable=True),
|
|
||||||
)
|
|
||||||
op.add_column(
|
|
||||||
"region",
|
|
||||||
sa.Column("import_group", sa.String(), nullable=True),
|
|
||||||
)
|
|
||||||
|
|
||||||
# Set existing to "osm2pgsql"
|
|
||||||
road = sa.table("road", sa.column("import_group", sa.String))
|
|
||||||
op.execute(road.update().values(import_group="osm2pgsql"))
|
|
||||||
|
|
||||||
region = sa.table("region", sa.column("import_group", sa.String))
|
|
||||||
op.execute(region.update().values(import_group="osm2pgsql"))
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_column("road", "import_group")
|
|
||||||
op.drop_column("region", "import_group")
|
|
|
@@ -1,66 +0,0 @@
|
||||||
"""create table track
|
|
||||||
|
|
||||||
Revision ID: d66baafab5ec
|
|
||||||
Revises: 35e7f1768f9b
|
|
||||||
Create Date: 2022-03-30 21:36:54.848452
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
from sqlalchemy.dialects import postgresql
|
|
||||||
from migrations.utils import dbtype
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "d66baafab5ec"
|
|
||||||
down_revision = "9d8c8c38a1d0"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
NOW = sa.text("NOW()")
|
|
||||||
|
|
||||||
op.create_table(
|
|
||||||
"track",
|
|
||||||
sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
|
|
||||||
sa.Column("slug", sa.String, unique=True, nullable=False, index=True),
|
|
||||||
sa.Column("created_at", sa.DateTime, nullable=False, server_default=NOW),
|
|
||||||
sa.Column(
|
|
||||||
"updated_at", sa.DateTime, nullable=False, server_default=NOW, onupdate=NOW
|
|
||||||
),
|
|
||||||
sa.Column("title", sa.String),
|
|
||||||
sa.Column(
|
|
||||||
"processing_status",
|
|
||||||
dbtype("processing_status"),
|
|
||||||
server_default=sa.literal("created"),
|
|
||||||
),
|
|
||||||
sa.Column("processing_queued_at", sa.DateTime),
|
|
||||||
sa.Column("processed_at", sa.DateTime),
|
|
||||||
sa.Column("processing_log", sa.TEXT),
|
|
||||||
sa.Column(
|
|
||||||
"customized_title", sa.Boolean, server_default=sa.false(), nullable=False
|
|
||||||
),
|
|
||||||
sa.Column("description", sa.TEXT),
|
|
||||||
sa.Column("public", sa.Boolean, server_default=sa.false()),
|
|
||||||
sa.Column("uploaded_by_user_agent", sa.String),
|
|
||||||
sa.Column("original_file_name", sa.String),
|
|
||||||
sa.Column("original_file_hash", sa.String, nullable=False),
|
|
||||||
sa.Column(
|
|
||||||
"author_id",
|
|
||||||
sa.Integer,
|
|
||||||
sa.ForeignKey("user.id", ondelete="CASCADE"),
|
|
||||||
nullable=False,
|
|
||||||
),
|
|
||||||
sa.Column("recorded_at", sa.DateTime),
|
|
||||||
sa.Column("recorded_until", sa.DateTime),
|
|
||||||
sa.Column("duration", sa.Float),
|
|
||||||
sa.Column("length", sa.Float),
|
|
||||||
sa.Column("segments", sa.Integer),
|
|
||||||
sa.Column("num_events", sa.Integer),
|
|
||||||
sa.Column("num_measurements", sa.Integer),
|
|
||||||
sa.Column("num_valid", sa.Integer),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_table("track")
|
|
|
@@ -1,24 +0,0 @@
|
||||||
"""add osm id indexes
|
|
||||||
|
|
||||||
Revision ID: f4b0f460254d
|
|
||||||
Revises: b8b0fbae50a4
|
|
||||||
Create Date: 2023-03-30 10:56:22.066768
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "f4b0f460254d"
|
|
||||||
down_revision = "b8b0fbae50a4"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.execute("CREATE INDEX IF NOT EXISTS ix_road_way_id ON road (way_id);")
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_index("ix_road_way_id")
|
|
|
@@ -1,41 +0,0 @@
|
||||||
"""add user_device
|
|
||||||
|
|
||||||
Revision ID: f7b21148126a
|
|
||||||
Revises: a9627f63fbed
|
|
||||||
Create Date: 2022-09-15 17:48:06.764342
|
|
||||||
|
|
||||||
"""
|
|
||||||
from alembic import op
|
|
||||||
import sqlalchemy as sa
|
|
||||||
|
|
||||||
|
|
||||||
# revision identifiers, used by Alembic.
|
|
||||||
revision = "f7b21148126a"
|
|
||||||
down_revision = "a049e5eb24dd"
|
|
||||||
branch_labels = None
|
|
||||||
depends_on = None
|
|
||||||
|
|
||||||
|
|
||||||
def upgrade():
|
|
||||||
op.create_table(
|
|
||||||
"user_device",
|
|
||||||
sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
|
|
||||||
sa.Column("user_id", sa.Integer, sa.ForeignKey("user.id", ondelete="CASCADE")),
|
|
||||||
sa.Column("identifier", sa.String, nullable=False),
|
|
||||||
sa.Column("display_name", sa.String, nullable=True),
|
|
||||||
sa.Index("user_id_identifier", "user_id", "identifier", unique=True),
|
|
||||||
)
|
|
||||||
op.add_column(
|
|
||||||
"track",
|
|
||||||
sa.Column(
|
|
||||||
"user_device_id",
|
|
||||||
sa.Integer,
|
|
||||||
sa.ForeignKey("user_device.id", ondelete="RESTRICT"),
|
|
||||||
nullable=True,
|
|
||||||
),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def downgrade():
|
|
||||||
op.drop_column("track", "user_device_id")
|
|
||||||
op.drop_table("user_device")
|
|
|
@ -1 +0,0 @@
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
@ -1 +0,0 @@
__version__ = "0.8.1"
@ -1,363 +0,0 @@
import asyncio
import logging
import re

from json import JSONEncoder, dumps
from functools import wraps, partial
from urllib.parse import urlparse
from os.path import dirname, join, normpath, abspath, isfile
from datetime import datetime, date

from sanic import Sanic, Blueprint
from sanic.response import (
    text,
    json as json_response,
    file as file_response,
    html as html_response,
)
from sanic.exceptions import Unauthorized, SanicException
from sanic_session import Session, InMemorySessionInterface

from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from obs.api.db import User, make_session, connect_db
from obs.api.cors import setup_options, add_cors_headers
from obs.api.utils import get_single_arg

log = logging.getLogger(__name__)


class SanicAccessMessageFilter(logging.Filter):
    """
    A filter that modifies the log message of a sanic.access log entry to
    include useful information.
    """

    def filter(self, record):
        record.msg = f"{record.request} -> {record.status}"
        return True


def configure_sanic_logging():
    for logger_name in ["sanic.root", "sanic.access", "sanic.error"]:
        logger = logging.getLogger(logger_name)
        for handler in logger.handlers:
            logger.removeHandler(handler)

    logger = logging.getLogger("sanic.access")
    for filter_ in logger.filters:
        logger.removeFilter(filter_)
    logger.addFilter(SanicAccessMessageFilter())
    logging.getLogger("sanic.root").setLevel(logging.WARNING)


app = Sanic(
    "openbikesensor-api",
    env_prefix="OBS_",
)
configure_sanic_logging()

app.config.update(
    dict(
        DEBUG=False,
        VERBOSE=False,
        AUTO_RELOAD=False,
        POSTGRES_POOL_SIZE=20,
        POSTGRES_MAX_OVERFLOW=40,
        DEDICATED_WORKER=True,
        FRONTEND_URL=None,
        FRONTEND_HTTPS=True,
        TILES_FILE=None,
        TILE_SEMAPHORE_SIZE=4,
        EXPORT_SEMAPHORE_SIZE=1,
    )
)

# overwrite from defaults again
app.config.load_environment_vars("OBS_")

if isfile("./config.py"):
    app.update_config("./config.py")

# For developers to override the config without committing it
if isfile("./config.overrides.py"):
    app.update_config("./config.overrides.py")

c = app.config

api = Blueprint("api", url_prefix="/api")
auth = Blueprint("auth", url_prefix="")

TILE_REQUEST_CANCELLED = re.compile(
    r"Connection lost before response written.*GET /tiles"
)


class NoConnectionLostFilter(logging.Filter):
    def filter(record):
        return not TILE_REQUEST_CANCELLED.match(record.getMessage())


logging.getLogger("sanic.error").addFilter(NoConnectionLostFilter)


def setup_cors(app):
    frontend_url = app.config.get("FRONTEND_URL")
    additional_origins = app.config.get("ADDITIONAL_CORS_ORIGINS")
    if not frontend_url and not additional_origins:
        # No CORS configured
        return

    origins = []
    if frontend_url:
        u = urlparse(frontend_url)
        origins.append(f"{u.scheme}://{u.netloc}")

    if isinstance(additional_origins, str):
        origins += re.split(r"\s+", additional_origins)
    elif isinstance(additional_origins, list):
        origins += additional_origins
    elif additional_origins is not None:
        raise ValueError(
            "invalid option type for ADDITIONAL_CORS_ORIGINS, must be list or space separated str"
        )

    app.ctx.cors_origins = origins

    # Add OPTIONS handlers to any route that is missing it
    app.register_listener(setup_options, "before_server_start")

    # Fill in CORS headers
    app.register_middleware(add_cors_headers, "response")


setup_cors(app)


@app.exception(SanicException, BaseException)
async def _handle_sanic_errors(_request, exception):
    if isinstance(exception, asyncio.CancelledError):
        return None

    log.error("Exception in handler: %s", exception, exc_info=True)
    return json_response(
        {
            "errors": {
                type(exception).__name__: str(exception),
            },
        },
        status=exception.status_code if hasattr(exception, "status_code") else 500,
    )


# Configure paths
def configure_paths(c):
    c.API_ROOT_DIR = c.get("API_ROOT_DIR") or abspath(
        join(dirname(__file__), "..", "..")
    )
    c.DATA_DIR = c.get("DATA_DIR") or normpath(join(c.API_ROOT_DIR, "../data"))
    c.PROCESSING_DIR = c.get("PROCESSING_DIR") or join(c.DATA_DIR, "processing")
    c.PROCESSING_OUTPUT_DIR = c.get("PROCESSING_OUTPUT_DIR") or join(
        c.DATA_DIR, "processing-output"
    )
    c.TRACKS_DIR = c.get("TRACKS_DIR") or join(c.DATA_DIR, "tracks")
    c.OBS_FACE_CACHE_DIR = c.get("OBS_FACE_CACHE_DIR") or join(
        c.DATA_DIR, "obs-face-cache"
    )
    c.FRONTEND_DIR = c.get("FRONTEND_DIR")


configure_paths(app.config)


# TODO: use a different interface, maybe backed by the PostgreSQL, to allow
# scaling the API
Session(app, interface=InMemorySessionInterface())


@app.before_server_start
async def app_connect_db(app, loop):
    app.ctx._db_engine_ctx = connect_db(
        app.config.POSTGRES_URL,
        app.config.POSTGRES_POOL_SIZE,
        app.config.POSTGRES_MAX_OVERFLOW,
    )
    app.ctx._db_engine = await app.ctx._db_engine_ctx.__aenter__()

    if app.config.TILE_SEMAPHORE_SIZE:
        app.ctx.tile_semaphore = asyncio.Semaphore(app.config.TILE_SEMAPHORE_SIZE)

    if app.config.EXPORT_SEMAPHORE_SIZE:
        app.ctx.export_semaphore = asyncio.Semaphore(app.config.EXPORT_SEMAPHORE_SIZE)


@app.after_server_stop
async def app_disconnect_db(app, loop):
    if hasattr(app.ctx, "_db_engine_ctx"):
        await app.ctx._db_engine_ctx.__aexit__(None, None, None)


def remove_right(l, r):
    if l.endswith(r):
        return l[: -len(r)]
    return l


@app.middleware("request")
async def inject_arg_getter(req):
    req.ctx.get_single_arg = partial(get_single_arg, req)


@app.middleware("request")
async def inject_urls(req):
    if req.app.config.FRONTEND_HTTPS:
        req.ctx.frontend_scheme = "https"
    elif req.app.config.FRONTEND_URL:
        req.ctx.frontend_scheme = (
            "http" if req.app.config.FRONTEND_URL.startswith("http://") else "https"
        )
    else:
        req.ctx.frontend_scheme = req.scheme

    if req.app.config.get("API_URL"):
        req.ctx.api_url = req.app.config.API_URL.rstrip("/")
        api_url_parsed = urlparse(req.ctx.api_url)
        req.ctx.api_scheme = api_url_parsed.scheme  # just use the same for now
        req.ctx.api_base_path = api_url_parsed.path
    else:
        req.ctx.api_scheme = req.ctx.frontend_scheme  # just use the same for now
        req.ctx.api_base_path = remove_right(req.server_path, req.path)
        req.ctx.api_url = (
            f"{req.ctx.frontend_scheme}://{req.host}{req.ctx.api_base_path}"
        )

    if req.app.config.FRONTEND_URL:
        req.ctx.frontend_base_path = "/" + urlparse(
            req.app.config.FRONTEND_URL
        ).path.strip("/")
        req.ctx.frontend_url = req.app.config.FRONTEND_URL.rstrip("/")
    elif app.config.FRONTEND_DIR:
        req.ctx.frontend_base_path = req.ctx.api_base_path
        req.ctx.frontend_url = req.ctx.api_url
    else:
        req.ctx.frontend_base_path = "/"
        req.ctx.frontend_url = (
            f"{req.ctx.frontend_scheme}://{req.host}{req.ctx.frontend_base_path}"
        )


@app.middleware("request")
async def inject_session(req):
    req.ctx._session_ctx = make_session()
    req.ctx.db = await req.ctx._session_ctx.__aenter__()


@app.middleware("response")
async def close_session(req, response):
    if hasattr(req.ctx, "_session_ctx"):
        await req.ctx.db.close()
        await req.ctx._session_ctx.__aexit__(None, None, None)


@app.middleware("request")
async def load_user(req):
    user_id = req.ctx.session.get("user_id")
    user = None
    if user_id:
        user = (
            await req.ctx.db.execute(select(User).where(User.id == user_id))
        ).scalar()

    req.ctx.user = user


def require_auth(fn):
    @wraps(fn)
    def wrapper(req, *args, **kwargs):
        if not req.ctx.user:
            raise Unauthorized("Login required")
        return fn(req, *args, **kwargs)

    return wrapper


def read_api_key(fn):
    """
    A middleware decorator to read the API Key of a user. It is an opt-in to
    allow usage with API Keys on certain urls. Combine with require_auth to
    actually check whether a user was authenticated through this. If a login
    session exists, the api key is ignored.
    """

    @wraps(fn)
    async def wrapper(req, *args, **kwargs):
        # try to parse a token if one exists, unless a user is already authenticated
        if (
            not req.ctx.user
            and isinstance(req.token, str)
            and req.token.lower().startswith("obsuserid ")
        ):
            try:
                api_key = req.token.split()[1]
            except LookupError:
                api_key = None

            if api_key:
                user = (
                    await req.ctx.db.execute(
                        select(User).where(User.api_key == api_key.strip())
                    )
                ).scalar()

                if not user:
                    raise Unauthorized("invalid OBSUserId token")

                req.ctx.user = user

        return await fn(req, *args, **kwargs)

    return wrapper


class CustomJsonEncoder(JSONEncoder):
    def default(self, obj):
        if isinstance(obj, (datetime, date)):
            return obj.isoformat() + "+0000"  # explicit UTC for javascript <3

        # Let the base class default method raise the TypeError
        return super().default(obj)


def json(*args, **kwargs):
    return json_response(*args, **kwargs, dumps=partial(dumps, cls=CustomJsonEncoder))


from .routes import (
    info,
    login,
    stats,
    tracks,
    users,
    exports,
)

from .routes import tiles, mapdetails
from .routes import frontend


app.blueprint(api)
app.blueprint(auth)

if not app.config.DEDICATED_WORKER:

    async def worker():
        from obs.api.process import process_tracks_loop

        # run forever
        await process_tracks_loop(10)

    app.add_task(worker())
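For reference, a small self-contained sketch (not part of this diff) of how the CustomJsonEncoder above renders timestamps, so clients can rely on the explicit "+0000" UTC suffix:

from datetime import datetime
from json import JSONEncoder, dumps

class CustomJsonEncoder(JSONEncoder):
    def default(self, obj):
        # Same behavior as the encoder in the deleted module above.
        if isinstance(obj, datetime):
            return obj.isoformat() + "+0000"  # explicit UTC marker
        return super().default(obj)

print(dumps({"createdAt": datetime(2023, 3, 30, 10, 56)}, cls=CustomJsonEncoder))
# -> {"createdAt": "2023-03-30T10:56:00+0000"}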
@ -1,68 +0,0 @@
from collections import defaultdict
from typing import Dict, FrozenSet, Iterable

from sanic import Sanic, response
from sanic_routing.router import Route


def _add_cors_headers(request, response, methods: Iterable[str]) -> None:
    allow_methods = list(set(methods))

    if "OPTIONS" not in allow_methods:
        allow_methods.append("OPTIONS")

    origin = request.headers.get("origin")
    if origin in request.app.ctx.cors_origins:
        headers = {
            "Access-Control-Allow-Methods": ",".join(allow_methods),
            "Access-Control-Allow-Origin": origin,
            "Access-Control-Allow-Credentials": "true",
            "Access-Control-Allow-Headers": (
                "origin, content-type, accept, "
                "authorization, x-xsrf-token, x-request-id"
            ),
            "Access-Control-Expose-Headers": "content-disposition",
        }
        response.headers.extend(headers)


def add_cors_headers(request, response):
    if request.method != "OPTIONS":
        methods = [method for method in request.route.methods]
        _add_cors_headers(request, response, methods)


def _compile_routes_needing_options(routes: Dict[str, Route]) -> Dict[str, FrozenSet]:
    needs_options = defaultdict(list)
    # This is 21.12 and later. You will need to change this for older versions.
    for route in routes.values():
        if "OPTIONS" not in route.methods:
            needs_options[route.uri].extend(route.methods)

    return {uri: frozenset(methods) for uri, methods in dict(needs_options).items()}


def _options_wrapper(handler, methods):
    def wrapped_handler(request, *args, **kwargs):
        nonlocal methods
        return handler(request, methods)

    return wrapped_handler


async def options_handler(request, methods) -> response.HTTPResponse:
    resp = response.empty()
    _add_cors_headers(request, resp, methods)
    return resp


def setup_options(app: Sanic, _):
    app.router.reset()
    needs_options = _compile_routes_needing_options(app.router.routes_all)
    for uri, methods in needs_options.items():
        app.add_route(
            _options_wrapper(options_handler, methods),
            uri,
            methods=["OPTIONS"],
        )
    app.router.finalize()
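The origins that _add_cors_headers checks against come from FRONTEND_URL plus ADDITIONAL_CORS_ORIGINS, assembled by setup_cors in the application module above. A hedged configuration sketch for a config.overrides.py, with placeholder hostnames:

# config.overrides.py -- example values only; the hostnames are placeholders
FRONTEND_URL = "https://portal.example.com"
# setup_cors() accepts either a list or a space-separated string here.
ADDITIONAL_CORS_ORIGINS = "https://staging.example.com http://localhost:3000"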
@ -1,578 +0,0 @@
import hashlib
from contextvars import ContextVar
from contextlib import asynccontextmanager
from datetime import datetime
import os
from os.path import exists, join, dirname
from json import loads
import re
import math
import aiofiles
import random
import string
import secrets
from slugify import slugify
import logging

from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.orm import sessionmaker as SessionMaker, relationship
from sqlalchemy.types import UserDefinedType, BIGINT, TEXT
from sqlalchemy import (
    Boolean,
    Column,
    DateTime,
    Enum as SqlEnum,
    Float,
    ForeignKey,
    Index,
    Integer,
    String,
    false,
    func,
    select,
    text,
    literal,
    Text,
)
from sqlalchemy.dialects.postgresql import UUID


log = logging.getLogger(__name__)

Base = declarative_base()


engine = None
sessionmaker: SessionMaker


@asynccontextmanager
async def make_session():
    async with sessionmaker(autoflush=True) as session:
        yield session


async def drop_all():
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)


async def init_models():
    async with engine.begin() as conn:
        await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "hstore";'))
        await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "postgis";'))
        await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";'))
        await conn.run_sync(Base.metadata.create_all)


def random_string(length):
    letters = string.ascii_lowercase + string.digits
    return "".join(random.choice(letters) for _ in range(length))


@asynccontextmanager
async def connect_db(url, pool_size=10, max_overflow=20):
    global engine, sessionmaker

    engine = create_async_engine(
        url, echo=False, pool_size=pool_size, max_overflow=max_overflow
    )
    sessionmaker = SessionMaker(engine, class_=AsyncSession, expire_on_commit=False)

    yield engine

    # for AsyncEngine created in function scope, close and
    # clean-up pooled connections
    await engine.dispose()

    engine = None
    sessionmaker = None


ZoneType = SqlEnum("rural", "urban", "motorway", name="zone_type")
ProcessingStatus = SqlEnum(
    "created", "queued", "processing", "complete", "error", name="processing_status"
)


class Geometry(UserDefinedType):
    def get_col_spec(self):
        return "GEOMETRY"

    def bind_expression(self, bindvalue):
        return func.ST_GeomFromGeoJSON(bindvalue, type_=self)

    def column_expression(self, col):
        return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)


class LineString(UserDefinedType):
    def get_col_spec(self):
        return "geometry(LineString, 3857)"

    def bind_expression(self, bindvalue):
        return func.ST_GeomFromGeoJSON(bindvalue, type_=self)

    def column_expression(self, col):
        return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)


class GeometryGeometry(UserDefinedType):
    def get_col_spec(self):
        return "geometry(GEOMETRY, 3857)"

    def bind_expression(self, bindvalue):
        return func.ST_GeomFromGeoJSON(bindvalue, type_=self)

    def column_expression(self, col):
        return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)


class OvertakingEvent(Base):
    __tablename__ = "overtaking_event"
    __table_args__ = (Index("road_segment", "way_id", "direction_reversed"),)

    id = Column(Integer, autoincrement=True, primary_key=True, index=True)
    track_id = Column(Integer, ForeignKey("track.id", ondelete="CASCADE"))
    hex_hash = Column(String, unique=True, index=True)
    way_id = Column(BIGINT, index=True)

    # whether we were traveling along the way in reverse direction
    direction_reversed = Column(Boolean)

    geometry = Column(Geometry)
    latitude = Column(Float)
    longitude = Column(Float)
    time = Column(DateTime)
    distance_overtaker = Column(Float)
    distance_stationary = Column(Float)
    course = Column(Float)
    speed = Column(Float)

    def __repr__(self):
        return f"<OvertakingEvent {self.id}>"


class Road(Base):
    __tablename__ = "road"
    way_id = Column(BIGINT, primary_key=True, index=True, autoincrement=False)
    zone = Column(ZoneType)
    name = Column(Text)
    geometry = Column(LineString)
    directionality = Column(Integer)
    oneway = Column(Boolean)
    import_group = Column(String)

    __table_args__ = (
        # We keep the index name as osm2pgsql created it, way back when.
        Index(
            "road_geometry_idx",
            "geometry",
            postgresql_using="gist",
            postgresql_with={"fillfactor": 100},
        ),
    )

    def to_dict(self):
        return {
            "way_id": self.way_id,
            "zone": self.zone,
            "name": self.name,
            "directionality": self.directionality,
            "oneway": self.oneway,
            "geometry": loads(self.geometry),
        }


class RoadUsage(Base):
    __tablename__ = "road_usage"
    __table_args__ = (Index("road_usage_segment", "way_id", "direction_reversed"),)

    id = Column(Integer, autoincrement=True, primary_key=True, index=True)
    track_id = Column(Integer, ForeignKey("track.id", ondelete="CASCADE"))
    hex_hash = Column(String, unique=True, index=True)
    way_id = Column(BIGINT, index=True)
    time = Column(DateTime)
    direction_reversed = Column(Boolean)

    def __repr__(self):
        return f"<RoadUsage {self.id}>"

    def __hash__(self):
        return int(self.hex_hash, 16)

    def __eq__(self, other):
        return self.hex_hash == other.hex_hash


NOW = text("NOW()")


class DuplicateTrackFileError(ValueError):
    pass


class Track(Base):
    __tablename__ = "track"
    id = Column(Integer, primary_key=True, autoincrement=True)
    slug = Column(String, unique=True, nullable=False, index=True)

    created_at = Column(DateTime, nullable=False, server_default=NOW)
    updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)

    title = Column(String)

    processing_status = Column(ProcessingStatus, server_default=literal("created"))
    processing_queued_at = Column(DateTime)
    processed_at = Column(DateTime)

    processing_log = Column(TEXT)

    # Set to true if the user customized the title. Disables auto-generating
    # an updated title when the track is (re-)processed.
    customized_title = Column(Boolean, server_default=false(), nullable=False)

    # A user-provided description of the track. May contain markdown.
    description = Column(TEXT)

    # Whether this track is visible (anonymized) in the public track list or not.
    public = Column(Boolean, server_default=false())

    # Whether this track should be exported to the public track database
    # (after anonymization).
    # include_in_public_database = Column(Boolean, server_default=false())

    # The user agent string, or a part thereof, that was used to upload this
    # track. Usually contains only the OBS version, other user agents are
    # discarded due to being irrelevant.
    uploaded_by_user_agent = Column(String)

    # The name of the original file, as provided during upload. Used for
    # providing a download with the same name, and for display in the
    # frontend.
    original_file_name = Column(String)

    # A hash of the original file's contents. Nobody can upload the same track twice.
    original_file_hash = Column(String, nullable=False)

    author_id = Column(
        Integer, ForeignKey("user.id", ondelete="CASCADE"), nullable=False
    )

    user_device_id = Column(
        Integer,
        ForeignKey("user_device.id", ondelete="RESTRICT"),
        nullable=True,
    )

    # Statistics... maybe we'll drop some of this if we can easily compute them from SQL
    recorded_at = Column(DateTime)
    recorded_until = Column(DateTime)
    duration = Column(Float)
    length = Column(Float)
    segments = Column(Integer)
    num_events = Column(Integer)
    num_measurements = Column(Integer)
    num_valid = Column(Integer)

    def to_dict(self, for_user_id=None):
        result = {
            "id": self.id,
            "slug": self.slug,
            "title": self.title,
            "description": self.description,
            "createdAt": self.created_at,
            "updatedAt": self.updated_at,
            "public": self.public,
            "processingStatus": self.processing_status,
            "recordedAt": self.recorded_at,
            "recordedUntil": self.recorded_until,
            "duration": self.duration,
            "length": self.length,
            "numEvents": self.num_events,
            "numValid": self.num_valid,
            "numMeasurements": self.num_measurements,
        }

        if for_user_id is not None and for_user_id == self.author_id:
            result["uploadedByUserAgent"] = self.uploaded_by_user_agent
            result["originalFileName"] = self.original_file_name
            result["userDeviceId"] = self.user_device_id

        if self.author:
            result["author"] = self.author.to_dict(for_user_id=for_user_id)

        return result

    def is_visible_to_private(self, user):
        return user is not None and user.id == self.author_id

    def is_visible_to(self, user):
        return self.is_visible_to_private(user) or self.public

    def generate_slug(self, new_title_or_filename=None):
        input_text = new_title_or_filename or self.title or self.original_file_name

        if input_text is not None:
            self.slug = slugify(input_text, separator="_") + "-"
        else:
            self.slug = ""

        # make unique
        self.slug += random_string(8)

    async def prevent_duplicates(self, session, file_body):
        hex_hash = hashlib.sha512(file_body).hexdigest()

        duplicate_count = await session.scalar(
            select(func.count())
            .select_from(Track)
            .where(
                Track.original_file_hash == hex_hash,
                Track.author_id == self.author_id,
                Track.id != self.id,
            )
        )

        if duplicate_count:
            raise DuplicateTrackFileError()

        self.original_file_hash = hex_hash

    async def write_to_original_file(self, config, body):
        mode = "wb" if isinstance(body, bytes) else "wt"

        target = self.get_original_file_path(config)
        os.makedirs(dirname(target), exist_ok=True)
        async with aiofiles.open(target, mode=mode) as f:
            await f.write(body)

    def queue_processing(self):
        self.processing_status = "queued"
        self.processing_queued_at = datetime.utcnow()

    def auto_generate_title(self):
        if self.customized_title:
            return

        # Try to figure out when this file was recorded. Either we have it in the
        # statistics, e.g. after parsing and processing the track, or we can maybe
        # derive it from the filename.
        recorded_at = self.recorded_at

        if not recorded_at and self.original_file_name:
            match = re.match(
                r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}\.[0-9]{2}\.[0-9]{2}",
                self.original_file_name,
            )
            if match:
                try:
                    recorded_at = datetime.fromisoformat(match[0])
                except ValueError:
                    pass

        if recorded_at:
            daytime = _get_daytime(recorded_at)
            self.title = f"{daytime} ride on {recorded_at.strftime('%a, %x')}"
            return

        # Detecting recording date failed, use filename
        if self.original_file_name:
            words = self.original_file_name
            words = re.sub(r"(\.obsdata)?\.csv$", "", words)
            words = re.split(r"\W+", words)
            words[0] = words[0][0].upper() + words[0][1:]
            self.title = " ".join(words)

    @property
    def file_path(self):
        return join(self.author.username, self.slug)

    def get_original_file_path(self, config):
        return join(config.TRACKS_DIR, self.file_path, "original.csv")


class User(Base):
    __tablename__ = "user"
    id = Column(Integer, autoincrement=True, primary_key=True)
    created_at = Column(DateTime, nullable=False, server_default=NOW)
    updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)
    sub = Column(String, unique=True, nullable=False)
    username = Column(String, unique=True, nullable=False)
    display_name = Column(String, nullable=True)
    email = Column(String, nullable=False)
    bio = Column(TEXT)
    image = Column(String)
    are_tracks_visible_for_all = Column(Boolean, server_default=false(), nullable=False)
    api_key = Column(String)

    # This user can be matched by the email address from the auth service
    # instead of having to match by `sub`. If a matching user logs in, the
    # `sub` is updated to the new sub and this flag is disabled. This is for
    # migrating *to* the external authentication scheme.
    match_by_username_email = Column(Boolean, server_default=false())

    def generate_api_key(self):
        """
        Generates a new :py:obj:`api_key` into this instance. The new key is
        sourced from a secure random source and is urlsafe.
        """
        self.api_key = secrets.token_urlsafe(24)

    def to_dict(self, for_user_id=None):
        result = {
            "id": self.id,
            "displayName": self.display_name or self.username,
            "bio": self.bio,
            "image": self.image,
        }
        if for_user_id == self.id:
            result["username"] = self.username
        return result

    async def rename(self, config, new_name):
        old_name = self.username

        renames = [
            (join(basedir, old_name), join(basedir, new_name))
            for basedir in [config.PROCESSING_OUTPUT_DIR, config.TRACKS_DIR]
        ]

        for src, dst in renames:
            if exists(dst):
                raise FileExistsError(
                    f"cannot move {src!r} to {dst!r}, destination exists"
                )

        for src, dst in renames:
            if not exists(src):
                log.debug("Rename user %s: Not moving %s, not found", self.id, src)
            else:
                log.info("Rename user %s: Moving %s to %s", self.id, src, dst)
                os.rename(src, dst)

        self.username = new_name


class UserDevice(Base):
    __tablename__ = "user_device"
    id = Column(Integer, autoincrement=True, primary_key=True)
    user_id = Column(Integer, ForeignKey("user.id", ondelete="CASCADE"))
    identifier = Column(String, nullable=False)
    display_name = Column(String, nullable=True)

    __table_args__ = (
        Index("user_id_identifier", "user_id", "identifier", unique=True),
    )

    def to_dict(self, for_user_id=None):
        if for_user_id != self.user_id:
            return {}

        return {
            "id": self.id,
            "identifier": self.identifier,
            "displayName": self.display_name,
        }


class Comment(Base):
    __tablename__ = "comment"
    id = Column(Integer, autoincrement=True, primary_key=True)
    uid = Column(UUID, server_default=func.uuid_generate_v4())

    created_at = Column(DateTime, nullable=False, server_default=NOW)
    updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)

    body = Column(TEXT)

    author_id = Column(Integer, ForeignKey("user.id", ondelete="CASCADE"))

    track_id = Column(Integer, ForeignKey("track.id", ondelete="CASCADE"))

    def to_dict(self, for_user_id=None):
        return {
            "id": self.uid,
            "body": self.body,
            "author": self.author.to_dict(for_user_id=for_user_id),
            "createdAt": self.created_at,
        }


class Region(Base):
    __tablename__ = "region"

    id = Column(String(24), primary_key=True, index=True)
    name = Column(Text)
    geometry = Column(GeometryGeometry)
    admin_level = Column(Integer, index=True)
    import_group = Column(String)

    __table_args__ = (
        # We keep the index name as osm2pgsql created it, way back when.
        Index(
            "region_geometry_idx",
            "geometry",
            postgresql_using="gist",
            postgresql_with={"fillfactor": 100},
        ),
    )


Comment.author = relationship("User", back_populates="authored_comments")
User.authored_comments = relationship(
    "Comment",
    order_by=Comment.created_at,
    back_populates="author",
    passive_deletes=True,
)

Track.author = relationship("User", back_populates="authored_tracks")
User.authored_tracks = relationship(
    "Track", order_by=Track.created_at, back_populates="author", passive_deletes=True
)

Comment.track = relationship("Track", back_populates="comments")
Track.comments = relationship(
    "Comment", order_by=Comment.created_at, back_populates="track", passive_deletes=True
)

OvertakingEvent.track = relationship("Track", back_populates="overtaking_events")
Track.overtaking_events = relationship(
    "OvertakingEvent",
    order_by=OvertakingEvent.time,
    back_populates="track",
    passive_deletes=True,
)

Track.user_device = relationship("UserDevice", back_populates="tracks")
UserDevice.tracks = relationship(
    "Track",
    order_by=Track.created_at,
    back_populates="user_device",
    passive_deletes=False,
)


# 0..4 Night, 4..10 Morning, 10..14 Noon, 14..18 Afternoon, 18..22 Evening, 22..00 Night
# Two hour intervals
_DAYTIMES = [
    "Night",  # 0h - 2h
    "Night",  # 2h - 4h
    "Morning",  # 4h - 6h
    "Morning",  # 6h - 8h
    "Morning",  # 8h - 10h
    "Noon",  # 10h - 12h
    "Noon",  # 12h - 14h
    "Afternoon",  # 14h - 16h
    "Afternoon",  # 16h - 18h
    "Evening",  # 18h - 20h
    "Evening",  # 20h - 22h
    "Night",  # 22h - 24h
]


def _get_daytime(d):
    return _DAYTIMES[math.floor((d.hour % 24) / 2)]
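A quick sanity check (not part of the source) of the _DAYTIMES lookup used by auto_generate_title above: the recording hour is mapped into two-hour buckets by integer division.

from datetime import datetime

# Mirrors _get_daytime(): hour // 2 indexes the two-hour bucket list.
assert datetime(2023, 3, 30, 9, 15).hour // 2 == 4   # bucket 4 -> "Morning"
assert datetime(2023, 3, 30, 21, 0).hour // 2 == 10  # bucket 10 -> "Evening"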
@ -1,377 +0,0 @@
import logging
import os
import json
import asyncio
import hashlib
import struct
import pytz
from os.path import join
from datetime import datetime

from sqlalchemy import delete, func, select, and_
from sqlalchemy.orm import joinedload

from obs.face.importer import ImportMeasurementsCsv
from obs.face.geojson import ExportMeasurements
from obs.face.annotate import AnnotateMeasurements
from obs.face.filter import (
    AnonymizationMode,
    ChainFilter,
    ConfirmedFilter,
    DistanceMeasuredFilter,
    PrivacyFilter,
    PrivacyZone,
    PrivacyZonesFilter,
    RequiredFieldsFilter,
)

from obs.face.osm import DataSource, DatabaseTileSource

from obs.api.db import OvertakingEvent, RoadUsage, Track, UserDevice, make_session
from obs.api.app import app

log = logging.getLogger(__name__)


def get_data_source():
    """
    Creates a data source based on the configuration of the portal. In *lean*
    mode, the OverpassTileSource is used to fetch data on demand. In normal
    mode, the roads database is used.
    """
    return DataSource(DatabaseTileSource())


async def process_tracks_loop(delay):
    while True:
        try:
            async with make_session() as session:
                track = (
                    await session.execute(
                        select(Track)
                        .where(Track.processing_status == "queued")
                        .order_by(Track.processing_queued_at)
                        .options(joinedload(Track.author))
                    )
                ).scalar()

                if track is None:
                    await asyncio.sleep(delay)
                    continue

                data_source = get_data_source()
                await process_track(session, track, data_source)
        except BaseException:
            log.exception("Failed to process track. Will continue.")
            await asyncio.sleep(1)
            continue


async def process_tracks(tracks):
    """
    Processes the tracks and writes event data to the database.

    :param tracks: A list of track IDs or slugs which should be processed.
    """
    data_source = get_data_source()

    async with make_session() as session:
        for track_id_or_slug in tracks:
            track = (
                await session.execute(
                    select(Track)
                    .where(
                        Track.id == track_id_or_slug
                        if isinstance(track_id_or_slug, int)
                        else Track.slug == track_id_or_slug
                    )
                    .options(joinedload(Track.author))
                )
            ).scalar()

            if not track:
                raise ValueError(f"Track {track_id_or_slug!r} not found.")

            await process_track(session, track, data_source)


def to_naive_utc(t):
    if t is None:
        return None
    return t.astimezone(pytz.UTC).replace(tzinfo=None)


async def export_gpx(track, filename, name):
    import xml.etree.ElementTree as ET

    gpx = ET.Element("gpx")
    metadata = ET.SubElement(gpx, "metadata")
    ET.SubElement(metadata, "name").text = name

    trk = ET.SubElement(gpx, "trk")

    ET.SubElement(trk, "name").text = name
    ET.SubElement(trk, "type").text = "Cycling"

    trkseg = ET.SubElement(trk, "trkseg")

    for point in track:
        trkpt = ET.SubElement(
            trkseg, "trkpt", lat=str(point["latitude"]), lon=str(point["longitude"])
        )
        ET.SubElement(trkpt, "time").text = point["time"].isoformat()

    et = ET.ElementTree(gpx)
    et.write(filename, encoding="utf-8", xml_declaration=True)


async def process_track(session, track, data_source):
    try:
        track.processing_status = "processing"
        track.processed_at = datetime.utcnow()
        await session.commit()

        original_file_path = track.get_original_file_path(app.config)

        output_dir = join(
            app.config.PROCESSING_OUTPUT_DIR, track.author.username, track.slug
        )
        os.makedirs(output_dir, exist_ok=True)

        log.info("Annotating and filtering CSV file")
        imported_data, statistics, track_metadata = ImportMeasurementsCsv().read(
            original_file_path,
            user_id="dummy",  # TODO: user username or id or nothing?
            dataset_id=Track.slug,  # TODO: use track id or slug or nothing?
            return_metadata=True,
        )

        annotator = AnnotateMeasurements(
            data_source,
            cache_dir=app.config.OBS_FACE_CACHE_DIR,
            fully_annotate_unconfirmed=True,
        )
        input_data = await annotator.annotate(imported_data)

        track_filter = ChainFilter(
            RequiredFieldsFilter(),
            PrivacyFilter(
                user_id_mode=AnonymizationMode.REMOVE,
                measurement_id_mode=AnonymizationMode.REMOVE,
            ),
            # TODO: load user privacy zones and create a PrivacyZonesFilter() from them
        )
        measurements_filter = DistanceMeasuredFilter()
        overtaking_events_filter = ConfirmedFilter()

        track_points = track_filter.filter(input_data, log=log)
        measurements = measurements_filter.filter(track_points, log=log)
        overtaking_events = overtaking_events_filter.filter(measurements, log=log)

        exporter = ExportMeasurements("measurements.dummy")
        await exporter.add_measurements(measurements)
        measurements_json = exporter.get_data()
        del exporter

        exporter = ExportMeasurements("overtaking_events.dummy")
        await exporter.add_measurements(overtaking_events)
        overtaking_events_json = exporter.get_data()
        del exporter

        track_json = {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": [[m["longitude"], m["latitude"]] for m in track_points],
            },
        }

        track_raw_json = {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": [
                    [m["longitude_GPS"], m["latitude_GPS"]] for m in track_points
                ],
            },
        }

        for output_filename, data in [
            ("measurements.json", measurements_json),
            ("overtakingEvents.json", overtaking_events_json),
            ("track.json", track_json),
            ("trackRaw.json", track_raw_json),
        ]:
            target = join(output_dir, output_filename)
            log.debug("Writing file %s", target)
            with open(target, "w") as fp:
                json.dump(data, fp, indent=4)

        await export_gpx(track_points, join(output_dir, "track.gpx"), track.slug)

        log.info("Clearing old track data...")
        await clear_track_data(session, track)
        await session.commit()

        device_identifier = track_metadata.get("DeviceId")
        if device_identifier:
            if isinstance(device_identifier, list):
                device_identifier = device_identifier[0]

            log.info("Finding or creating device %s", device_identifier)
            user_device = (
                await session.execute(
                    select(UserDevice).where(
                        and_(
                            UserDevice.user_id == track.author_id,
                            UserDevice.identifier == device_identifier,
                        )
                    )
                )
            ).scalar()

            log.debug("user_device is %s", user_device)

            if not user_device:
                user_device = UserDevice(
                    user_id=track.author_id, identifier=device_identifier
                )
                log.debug("Create new device for this user")
                session.add(user_device)

            track.user_device = user_device
        else:
            log.info("No DeviceId in track metadata.")

        log.info("Import events into database...")
        await import_overtaking_events(session, track, overtaking_events)

        log.info("import road usages...")
        await import_road_usages(session, track, track_points)

        log.info("Write track statistics and update status...")
        track.recorded_at = to_naive_utc(statistics["t_min"])
        track.recorded_until = to_naive_utc(statistics["t_max"])
        track.duration = statistics["t"]
        track.length = statistics["d"]
        track.segments = statistics["n_segments"]
        track.num_events = statistics["n_confirmed"]
        track.num_measurements = statistics["n_measurements"]
        track.num_valid = statistics["n_valid"]
        track.processing_status = "complete"
        track.processed_at = datetime.utcnow()
        await session.commit()

        log.info("Track %s imported.", track.slug)
    except BaseException as e:
        await clear_track_data(session, track)
        track.processing_status = "error"
        track.processing_log = str(e)
        track.processed_at = datetime.utcnow()

        await session.commit()
        raise


async def clear_track_data(session, track):
    track.recorded_at = None
    track.recorded_until = None
    track.duration = None
    track.length = None
    track.segments = None
    track.num_events = None
    track.num_measurements = None
    track.num_valid = None

    await session.execute(
        delete(OvertakingEvent).where(OvertakingEvent.track_id == track.id)
    )
    await session.execute(delete(RoadUsage).where(RoadUsage.track_id == track.id))


async def import_overtaking_events(session, track, overtaking_events):
    # We use a dictionary to prevent per-track hash collisions, ignoring all
    # but the first event of the same hash
    event_models = {}

    for m in overtaking_events:
        hex_hash = hashlib.sha256(
            struct.pack(
                "ddQ", m["latitude"], m["longitude"], int(m["time"].timestamp())
            )
        ).hexdigest()

        event_models[hex_hash] = OvertakingEvent(
            track_id=track.id,
            hex_hash=hex_hash,
            way_id=m.get("OSM_way_id"),
            direction_reversed=m.get("OSM_way_orientation", 0) < 0,
            geometry=func.ST_Transform(
                func.ST_GeomFromGeoJSON(
                    json.dumps(
                        {
                            "type": "Point",
                            "coordinates": [m["longitude"], m["latitude"]],
                        }
                    )
                ),
                3857,
            ),
            latitude=m["latitude"],
            longitude=m["longitude"],
            time=m["time"].astimezone(pytz.utc).replace(tzinfo=None),
            distance_overtaker=m["distance_overtaker"],
            distance_stationary=m["distance_stationary"],
            course=m["course"],
            speed=m["speed"],
        )

    session.add_all(event_models.values())


def get_road_usages(track_points):
    last_key = None
    last = None

    for p in track_points:
        way_id = p.get("OSM_way_id")
        direction_reversed = p.get("OSM_way_orientation", 0) < 0

        key = (way_id, direction_reversed)

        if last_key is None or last_key[0] is None:
            last = p
            last_key = key
            continue

        if last_key != key:
            if last_key[0] is not None:
                yield last
            last_key = key
            last = p

    if last is not None and last_key[0] is not None:
        yield last


async def import_road_usages(session, track, track_points):
    usages = set()
    for p in get_road_usages(track_points):
        direction_reversed = p.get("OSM_way_orientation", 0) < 0
        way_id = p.get("OSM_way_id")
        time = p["time"]

        hex_hash = hashlib.sha256(
            struct.pack("dQ", way_id, int(time.timestamp()))
        ).hexdigest()

        usages.add(
            RoadUsage(
                track_id=track.id,
                hex_hash=hex_hash,
                way_id=way_id,
                time=time.astimezone(pytz.utc).replace(tzinfo=None),
                direction_reversed=direction_reversed,
            )
        )
    session.add_all(usages)
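For illustration (not part of the diff), the deduplication key built in import_overtaking_events packs latitude, longitude and the UNIX timestamp before hashing, so re-processing the same track produces the same hex_hash and the unique index on overtaking_event.hex_hash prevents duplicate rows. A minimal sketch with example coordinates:

import hashlib
import struct
from datetime import datetime, timezone

lat, lon = 50.94, 6.96  # example coordinates, placeholders
t = datetime(2023, 3, 30, 10, 56, tzinfo=timezone.utc)

# Same packing scheme as import_overtaking_events(): two doubles and one
# unsigned 64-bit integer, hashed with SHA-256.
hex_hash = hashlib.sha256(
    struct.pack("ddQ", lat, lon, int(t.timestamp()))
).hexdigest()
print(hex_hash[:16])  # stable across runs for the same inputs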
@ -1,261 +0,0 @@
|
||||||
import json
|
|
||||||
from enum import Enum
|
|
||||||
from contextlib import contextmanager
|
|
||||||
import zipfile
|
|
||||||
import io
|
|
||||||
import re
|
|
||||||
import math
|
|
||||||
from sqlite3 import connect
|
|
||||||
|
|
||||||
import shapefile
|
|
||||||
from obs.api.db import OvertakingEvent
|
|
||||||
from sqlalchemy import select, func, text
|
|
||||||
from sanic.response import raw
|
|
||||||
from sanic.exceptions import InvalidUsage
|
|
||||||
|
|
||||||
from obs.api.app import api, json as json_response
|
|
||||||
from obs.api.utils import use_request_semaphore
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class ExportFormat(str, Enum):
|
|
||||||
SHAPEFILE = "shapefile"
|
|
||||||
GEOJSON = "geojson"
|
|
||||||
|
|
||||||
|
|
||||||
def parse_bounding_box(input_string):
|
|
||||||
left, bottom, right, top = map(float, input_string.split(","))
|
|
||||||
return func.ST_SetSRID(
|
|
||||||
func.ST_MakeBox2D(
|
|
||||||
func.ST_Point(left, bottom),
|
|
||||||
func.ST_Point(right, top),
|
|
||||||
),
|
|
||||||
4326,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
PROJECTION_4326 = (
|
|
||||||
'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],'
|
|
||||||
'AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
|
|
||||||
'UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@contextmanager
|
|
||||||
def shapefile_zip(shape_type=shapefile.POINT, basename="events"):
|
|
||||||
zip_buffer = io.BytesIO()
|
|
||||||
shp, shx, dbf = (io.BytesIO() for _ in range(3))
|
|
||||||
writer = shapefile.Writer(
|
|
||||||
shp=shp, shx=shx, dbf=dbf, shapeType=shape_type, encoding="utf8"
|
|
||||||
)
|
|
||||||
|
|
||||||
yield writer, zip_buffer
|
|
||||||
|
|
||||||
writer.balance()
|
|
||||||
writer.close()
|
|
||||||
|
|
||||||
zip_file = zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False)
|
|
||||||
zip_file.writestr(f"{basename}.shp", shp.getbuffer())
|
|
||||||
zip_file.writestr(f"{basename}.shx", shx.getbuffer())
|
|
||||||
zip_file.writestr(f"{basename}.dbf", dbf.getbuffer())
|
|
||||||
zip_file.writestr(f"{basename}.prj", PROJECTION_4326)
|
|
||||||
zip_file.close()
|
|
||||||
|
|
||||||
|
|
||||||
@api.get(r"/export/events")
|
|
||||||
async def export_events(req):
|
|
||||||
async with use_request_semaphore(req, "export_semaphore", timeout=30):
|
|
||||||
bbox = req.ctx.get_single_arg("bbox", default="-180,-90,180,90")
|
|
||||||
assert re.match(r"(-?\d+\.?\d+,?){4}", bbox)
|
|
||||||
bbox = list(map(float, bbox.split(",")))
|
|
||||||
|
|
||||||
fmt = req.ctx.get_single_arg("fmt", convert=ExportFormat)
|
|
||||||
|
|
||||||
events = await req.ctx.db.stream(
|
|
||||||
text(
|
|
||||||
"""
|
|
||||||
SELECT
|
|
||||||
ST_AsGeoJSON(ST_Transform(geometry, 4326)) AS geometry,
|
|
||||||
distance_overtaker,
|
|
||||||
distance_stationary,
|
|
||||||
way_id,
|
|
||||||
direction,
|
|
||||||
speed,
|
|
||||||
time_stamp,
|
|
||||||
course,
|
|
||||||
zone
|
|
||||||
FROM
|
|
||||||
layer_obs_events(
|
|
||||||
ST_Transform(ST_MakeEnvelope(:bbox0, :bbox1, :bbox2, :bbox3, 4326), 3857),
|
|
||||||
19,
|
|
||||||
NULL,
|
|
||||||
'1900-01-01'::timestamp,
|
|
||||||
'2100-01-01'::timestamp
|
|
||||||
)
|
|
||||||
"""
|
|
||||||
).bindparams(bbox0=bbox[0], bbox1=bbox[1], bbox2=bbox[2], bbox3=bbox[3])
|
|
||||||
)
|
|
||||||
|
|
||||||
if fmt == ExportFormat.SHAPEFILE:
|
|
||||||
with shapefile_zip(basename="events") as (writer, zip_buffer):
|
|
||||||
writer.field("distance_overtaker", "N", decimal=4)
|
|
||||||
writer.field("distance_stationary", "N", decimal=4)
|
|
||||||
writer.field("way_id", "N", decimal=0)
|
|
||||||
writer.field("direction", "N", decimal=0)
|
|
||||||
writer.field("course", "N", decimal=4)
|
|
||||||
writer.field("speed", "N", decimal=4)
|
|
||||||
writer.field("zone", "C")
|
|
||||||
|
|
||||||
async for event in events:
|
|
||||||
coords = json.loads(event.geometry)["coordinates"]
|
|
||||||
writer.point(*coords)
|
|
||||||
writer.record(
|
|
||||||
distance_overtaker=event.distance_overtaker,
|
|
||||||
distance_stationary=event.distance_stationary,
|
|
||||||
direction=event.direction,
|
|
||||||
way_id=event.way_id,
|
|
||||||
course=event.course,
|
|
||||||
speed=event.speed,
|
|
||||||
zone=event.zone
|
|
||||||
# "time"=event.time,
|
|
||||||
)
|
|
||||||
|
|
||||||
return raw(zip_buffer.getbuffer())
|
|
||||||
|
|
||||||
if fmt == ExportFormat.GEOJSON:
|
|
||||||
features = []
|
|
||||||
async for event in events:
|
|
||||||
geom = json.loads(event.geometry)
|
|
||||||
features.append(
|
|
||||||
{
|
|
||||||
"type": "Feature",
|
|
||||||
"geometry": geom,
|
|
||||||
"properties": {
|
|
||||||
"distance_overtaker": event.distance_overtaker
|
|
||||||
if event.distance_overtaker is not None
|
|
||||||
and not math.isnan(event.distance_overtaker)
|
|
||||||
else None,
|
|
||||||
"distance_stationary": event.distance_stationary
|
|
||||||
if event.distance_stationary is not None
|
|
||||||
and not math.isnan(event.distance_stationary)
|
|
||||||
else None,
|
|
||||||
"direction": event.direction
|
|
||||||
if event.direction is not None
|
|
||||||
and not math.isnan(event.direction)
|
|
||||||
else None,
|
|
||||||
"way_id": event.way_id,
|
|
||||||
"course": event.course
|
|
||||||
if event.course is not None and not math.isnan(event.course)
|
|
||||||
else None,
|
|
||||||
"speed": event.speed
|
|
||||||
if event.speed is not None and not math.isnan(event.speed)
|
|
||||||
else None,
|
|
||||||
"time": event.time_stamp,
|
|
||||||
"zone": event.zone,
|
|
||||||
},
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
geojson = {"type": "FeatureCollection", "features": features}
|
|
||||||
return json_response(geojson)
|
|
||||||
|
|
||||||
raise InvalidUsage("unknown export format")
|
|
||||||
|
|
||||||
|
|
||||||
@api.get(r"/export/segments")
async def export_segments(req):
    async with use_request_semaphore(req, "export_semaphore", timeout=30):
        bbox = req.ctx.get_single_arg("bbox", default="-180,-90,180,90")
        assert re.match(r"(-?\d+\.?\d+,?){4}", bbox)
        bbox = list(map(float, bbox.split(",")))

        fmt = req.ctx.get_single_arg("fmt", convert=ExportFormat)
        segments = await req.ctx.db.stream(
            text(
                """
                SELECT
                    ST_AsGeoJSON(ST_Transform(geometry, 4326)) AS geometry,
                    way_id,
                    distance_overtaker_mean,
                    distance_overtaker_min,
                    distance_overtaker_max,
                    distance_overtaker_median,
                    overtaking_event_count,
                    usage_count,
                    direction,
                    zone,
                    offset_direction,
                    distance_overtaker_array
                FROM
                    layer_obs_roads(
                        ST_Transform(ST_MakeEnvelope(:bbox0, :bbox1, :bbox2, :bbox3, 4326), 3857),
                        11,
                        NULL,
                        '1900-01-01'::timestamp,
                        '2100-01-01'::timestamp
                    )
                WHERE usage_count > 0
                """
            ).bindparams(bbox0=bbox[0], bbox1=bbox[1], bbox2=bbox[2], bbox3=bbox[3])
        )

        if fmt == ExportFormat.SHAPEFILE:
            with shapefile_zip(shape_type=3, basename="segments") as (
                writer,
                zip_buffer,
            ):
                writer.field("distance_overtaker_mean", "N", decimal=4)
                writer.field("distance_overtaker_max", "N", decimal=4)
                writer.field("distance_overtaker_min", "N", decimal=4)
                writer.field("distance_overtaker_median", "N", decimal=4)
                writer.field("overtaking_event_count", "N", decimal=4)
                writer.field("usage_count", "N", decimal=4)
                writer.field("way_id", "N", decimal=0)
                writer.field("direction", "N", decimal=0)
                writer.field("zone", "C")

                async for segment in segments:
                    # the query aliases ST_AsGeoJSON(...) AS geometry, so read
                    # that column instead of the raw function name
                    geom = json.loads(segment.geometry)
                    writer.line([geom["coordinates"]])
                    writer.record(
                        distance_overtaker_mean=segment.distance_overtaker_mean,
                        distance_overtaker_median=segment.distance_overtaker_median,
                        distance_overtaker_max=segment.distance_overtaker_max,
                        distance_overtaker_min=segment.distance_overtaker_min,
                        usage_count=segment.usage_count,
                        overtaking_event_count=segment.overtaking_event_count,
                        direction=segment.direction,
                        way_id=segment.way_id,
                        zone=segment.zone,
                    )

            return raw(zip_buffer.getbuffer())

        if fmt == ExportFormat.GEOJSON:
            features = []
            async for segment in segments:
                features.append(
                    {
                        "type": "Feature",
                        "geometry": json.loads(segment.geometry),
                        "properties": {
                            "distance_overtaker_mean": segment.distance_overtaker_mean,
                            "distance_overtaker_max": segment.distance_overtaker_max,
                            "distance_overtaker_median": segment.distance_overtaker_median,
                            "overtaking_event_count": segment.overtaking_event_count,
                            "usage_count": segment.usage_count,
                            "distance_overtaker_array": segment.distance_overtaker_array,
                            "direction": segment.direction,
                            "way_id": segment.way_id,
                            "zone": segment.zone,
                        },
                    }
                )

            geojson = {"type": "FeatureCollection", "features": features}
            return json_response(geojson)

        raise InvalidUsage("unknown export format")
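
# Illustrative usage (not part of the original module): both export endpoints
# take `bbox` as "minlon,minlat,maxlon,maxlat" and `fmt` selecting the output
# format. Assuming the ExportFormat values are "geojson" and "shapefile", a
# request could look like this (hypothetical coordinates):
#
#   GET /export/events?bbox=9.17,48.76,9.20,48.79&fmt=geojson
#   GET /export/segments?bbox=9.17,48.76,9.20,48.79&fmt=shapefile
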
@@ -1,57 +0,0 @@
from os.path import join, exists, isfile, abspath

import sanic.response as response
from sanic.exceptions import NotFound

from obs.api.app import app

if app.config.FRONTEND_CONFIG:

    @app.get("/config.json")
    def get_frontend_config(req):
        result = {
            "basename": req.ctx.frontend_base_path,
            **req.app.config.FRONTEND_CONFIG,
            "apiUrl": f"{req.ctx.api_url}/api",
            "loginUrl": f"{req.ctx.api_url}/login",
            "obsMapSource": {
                "type": "vector",
                "tiles": [
                    req.ctx.api_url
                    + req.app.url_for("tiles", zoom="000", x="111", y="222.pbf")
                    .replace("000", "{z}")
                    .replace("111", "{x}")
                    .replace("222", "{y}")
                ],
                "minzoom": 0,
                "maxzoom": 14,
            },
        }

        return response.json(result)
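
    # Illustrative example (not part of the original file): for a hypothetical
    # deployment at https://portal.example.com the handler above would return
    # roughly this config.json (remaining keys come from FRONTEND_CONFIG):
    #
    #   {
    #     "basename": "/",
    #     "apiUrl": "https://portal.example.com/api",
    #     "loginUrl": "https://portal.example.com/login",
    #     "obsMapSource": {
    #       "type": "vector",
    #       "tiles": ["https://portal.example.com/tiles/{z}/{x}/{y}.pbf"],
    #       "minzoom": 0,
    #       "maxzoom": 14
    #     }
    #   }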


INDEX_HTML = (
    join(app.config.FRONTEND_DIR, "index.html")
    if app.config.get("FRONTEND_DIR")
    else None
)
if INDEX_HTML and exists(INDEX_HTML):
    with open(INDEX_HTML, "rt") as f:
        index_file_contents = f.read()

    @app.get("/<path:path>")
    def get_frontend_static(req, path):
        if path.startswith("api/"):
            raise NotFound()

        file = join(app.config.FRONTEND_DIR, path)
        if not abspath(file).startswith(abspath(app.config.FRONTEND_DIR)):
            raise NotFound()

        if not exists(file) or not path or not isfile(file):
            return response.html(
                index_file_contents.replace("__BASE_HREF__", req.ctx.frontend_url + "/")
            )

        return response.file(file)

@@ -1,18 +0,0 @@
import logging

from obs.api.app import api

from sanic.response import json

log = logging.getLogger(__name__)

from obs.api import __version__ as version


@api.route("/info")
async def info(req):
    return json(
        {
            "version": version,
        }
    )

@@ -1,173 +0,0 @@
import asyncio
import logging
import re

from requests.exceptions import RequestException

from sqlalchemy import and_, select

from oic import rndstr
from oic.oic import Client
from oic.oic.message import AuthorizationResponse, RegistrationResponse
from oic.utils.authn.client import CLIENT_AUTHN_METHOD

from obs.api.app import auth, api
from obs.api.db import User

from sanic.response import json, redirect

log = logging.getLogger(__name__)

client = Client(client_authn_method=CLIENT_AUTHN_METHOD)

# Do not show verbose library output, even when the application is in debug mode
logging.getLogger("oic").setLevel(logging.INFO)


@auth.before_server_start
async def connect_auth_client(app, loop):
    client.allow["issuer_mismatch"] = True
    try:
        client.provider_config(app.config.KEYCLOAK_URL)
        client.store_registration_info(
            RegistrationResponse(
                client_id=app.config.KEYCLOAK_CLIENT_ID,
                client_secret=app.config.KEYCLOAK_CLIENT_SECRET,
            )
        )
    except RequestException:
        log.exception(f"could not connect to {app.config.KEYCLOAK_URL}")
        log.info("will retry")
        await asyncio.sleep(2)
        log.info("retrying")
        await connect_auth_client(app, loop)


@auth.route("/login")
async def login(req):
    next_url = req.ctx.get_single_arg("next", default=None)

    session = req.ctx.session
    session["state"] = rndstr()
    session["nonce"] = rndstr()
    session["next"] = next_url
    args = {
        "client_id": client.client_id,
        "response_type": "code",
        "scope": ["openid"],
        "nonce": session["nonce"],
        "redirect_uri": req.ctx.api_url + "/login/redirect",
        "state": session["state"],
    }

    auth_req = client.construct_AuthorizationRequest(request_args=args)
    login_url = auth_req.request(client.authorization_endpoint)

    return redirect(login_url)


@auth.route("/login/redirect")
async def login_redirect(req):
    session = req.ctx.session

    auth_response = client.parse_response(
        AuthorizationResponse, info=dict(req.query_args), sformat="dict"
    )
    code = auth_response["code"]
    state = auth_response["state"]

    assert "state" in session
    assert state == session["state"]

    client.do_access_token_request(
        state=state,
        request_args={"code": code},
        authn_method="client_secret_basic",
    )

    userinfo = client.do_user_info_request(state=state)

    # {'sub': '3798e2da-b208-4a1a-98c0-08fecfea1345', 'email_verified': True, 'preferred_username': 'test', 'email': 'test@example.com'}
    sub = userinfo["sub"]
    preferred_username = userinfo["preferred_username"]
    email = userinfo.get("email")

    clean_username = re.sub(r"[^a-zA-Z0-9_.-]", "", preferred_username)
    if clean_username != preferred_username:
        log.warning(
            "Username %r contained invalid characters and was changed to %r",
            preferred_username,
            clean_username,
        )
        preferred_username = clean_username

    if email is None:
        raise ValueError(
            "user has no email set, please configure keycloak to require emails"
        )

    user = (await req.ctx.db.execute(select(User).where(User.sub == sub))).scalar()

    if user is None:
        user = (
            await req.ctx.db.execute(
                select(User).where(
                    # combine with SQL AND (not a Python "and"), so all three
                    # conditions actually end up in the query
                    and_(
                        User.email == email,
                        User.username == preferred_username,
                        User.match_by_username_email,
                    )
                )
            )
        ).scalar()

        if user:
            log.info(
                "Re-matched existing user %s (sub: %s) based on email and username (%s)",
                user.id,
                user.sub,
                preferred_username,
            )
            user.match_by_username_email = False
            user.sub = sub

    if user is None:
        log.info(
            "Registering new user with sub %r (preferred username: %s)",
            sub,
            preferred_username,
        )
        user = User(sub=sub, username=preferred_username, email=email)
        req.ctx.db.add(user)
    else:
        log.info(
            "Logged in known user (id: %s, sub: %s, %s).",
            user.id,
            user.sub,
            preferred_username,
        )

        if email != user.email:
            log.debug("Updating user (id: %s) email from auth system.", user.id)
            user.email = email

        if preferred_username != user.username:
            log.debug("Updating user (id: %s) username from auth system.", user.id)
            await user.rename(req.app.config, preferred_username)

    await req.ctx.db.commit()

    session["user_id"] = user.id

    next_ = session.pop("next", "/") or "/"
    return redirect(next_)


@api.route("/logout")
async def logout(req):
    session = req.ctx.session
    if "user_id" in session:
        del session["user_id"]

    auth_req = client.construct_EndSessionRequest(state=session["state"])
    logout_url = auth_req.request(client.end_session_endpoint)

    return redirect(logout_url + f"&post_logout_redirect_uri={req.ctx.api_url}/logout")

@@ -1,147 +0,0 @@
import json
from functools import partial
import logging
import numpy
import math

from sqlalchemy import select, func, column

import sanic.response as response
from sanic.exceptions import InvalidUsage

from obs.api.app import api
from obs.api.db import Road, OvertakingEvent, Track
from obs.api.utils import round_to

round_distance = partial(round_to, multiples=0.001)
round_speed = partial(round_to, multiples=0.1)

log = logging.getLogger(__name__)


def get_bearing(b, a):
    # longitude, latitude
    dL = b[0] - a[0]
    X = numpy.cos(b[1]) * numpy.sin(dL)
    Y = numpy.cos(a[1]) * numpy.sin(b[1]) - numpy.sin(a[1]) * numpy.cos(
        b[1]
    ) * numpy.cos(dL)
    return numpy.arctan2(Y, X) + 0.5 * math.pi


# Bins for histogram on overtaker distances. 0, 0.25, ... 2.25, infinity
DISTANCE_BINS = numpy.arange(0, 2.5, 0.25).tolist() + [float("inf")]
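
# Illustrative example (not part of the original module): with these bin
# edges, numpy.histogram sorts overtaker distances (in metres) into 0.25 m
# buckets, and everything above 2.25 m ends up in the last, open-ended bin.
#
#   >>> import numpy
#   >>> bins = numpy.arange(0, 2.5, 0.25).tolist() + [float("inf")]
#   >>> numpy.histogram([0.8, 1.1, 1.45, 2.6], bins=bins)[0].tolist()
#   [0, 0, 0, 1, 1, 1, 0, 0, 0, 1]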


@api.route("/mapdetails/road", methods=["GET"])
async def mapdetails_road(req):
    longitude = req.ctx.get_single_arg("longitude", convert=float)
    latitude = req.ctx.get_single_arg("latitude", convert=float)
    radius = req.ctx.get_single_arg("radius", default=100, convert=float)

    if not (1 <= radius <= 1000):
        raise InvalidUsage("`radius` parameter must be between 1 and 1000")

    road_geometry = Road.geometry
    point = func.ST_Transform(
        func.ST_GeomFromGeoJSON(
            json.dumps(
                {
                    # GeoJSON type names are case-sensitive ("Point", not "point")
                    "type": "Point",
                    "coordinates": [longitude, latitude],
                }
            )
        ),
        3857,
    )

    road = (
        await req.ctx.db.execute(
            select(Road)
            .where(func.ST_DWithin(road_geometry, point, radius))
            .order_by(func.ST_Distance(road_geometry, point))
            .limit(1)
        )
    ).scalar()

    if road is None:
        return response.json({})

    arrays = (
        await req.ctx.db.execute(
            select(
                [
                    OvertakingEvent.distance_overtaker,
                    OvertakingEvent.distance_stationary,
                    OvertakingEvent.speed,
                    # Keep this as the last entry always, so the data/mask
                    # split and the partition() below keep working.
                    OvertakingEvent.direction_reversed,
                ]
            ).where(OvertakingEvent.way_id == road.way_id)
        )
    ).all()

    arrays = numpy.array(arrays).T

    if len(arrays) == 0:
        arrays = numpy.array([[], [], [], []], dtype=float)

    data, mask = arrays[:-1], arrays[-1]
    data = data.astype(numpy.float64)
    mask = mask.astype(bool)

    def partition(arr, cond):
        return arr[:, cond], arr[:, ~cond]

    forwards, backwards = partition(data, ~mask)

    def array_stats(arr, rounder, bins=30):
        if len(arr):
            arr = arr[~numpy.isnan(arr)]

        n = len(arr)

        hist, bins = numpy.histogram(arr, bins=bins)

        return {
            "statistics": {
                "count": n,
                "mean": rounder(numpy.mean(arr)) if n else None,
                "min": rounder(numpy.min(arr)) if n else None,
                "max": rounder(numpy.max(arr)) if n else None,
                "median": rounder(numpy.median(arr)) if n else None,
            },
            "histogram": {
                "bins": [None if math.isinf(b) else b for b in bins.tolist()],
                "counts": hist.tolist(),
                "zone": road.zone,
            },
            "values": list(map(rounder, arr.tolist())),
        }

    bearing = None

    geom = json.loads(road.geometry)
    if geom["type"] == "LineString":
        coordinates = geom["coordinates"]
        bearing = get_bearing(coordinates[0], coordinates[-1])
        # convert to degrees, as this is more natural to understand for consumers
        bearing = round_to((bearing / math.pi * 180 + 360) % 360, 1)

    def get_direction_stats(direction_arrays, backwards=False):
        return {
            "bearing": ((bearing + 180) % 360 if backwards else bearing)
            if bearing is not None
            else None,
            "distanceOvertaker": array_stats(
                direction_arrays[0], round_distance, bins=DISTANCE_BINS
            ),
            "distanceStationary": array_stats(
                direction_arrays[1], round_distance, bins=DISTANCE_BINS
            ),
            "speed": array_stats(direction_arrays[2], round_speed),
        }

    return response.json(
        {
            "road": road.to_dict(),
            "forwards": get_direction_stats(forwards),
            "backwards": get_direction_stats(backwards, True),
        }
    )

@@ -1,206 +0,0 @@
import logging
from datetime import datetime
from typing import Optional
from operator import and_
from functools import reduce

from sqlalchemy import distinct, select, func, desc

from sanic.response import json

from obs.api.app import api
from obs.api.db import Track, OvertakingEvent, User, Region, UserDevice
from obs.api.utils import round_to


log = logging.getLogger(__name__)


# round to this number of meters for privacy reasons
TRACK_LENGTH_ROUNDING = 1000

# round to this number of seconds for privacy reasons
TRACK_DURATION_ROUNDING = 120

# Everything before this date is probably parsed incorrectly
MINIMUM_RECORDING_DATE = datetime(2010, 1, 1)
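
# Illustrative example (not part of the original module): round_to() snaps the
# published totals to these multiples, e.g.
#
#   >>> round_to(12345, TRACK_LENGTH_ROUNDING)    # metres
#   12000
#   >>> round_to(3721, TRACK_DURATION_ROUNDING)   # seconds
#   3720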


@api.route("/stats")
async def stats(req):
    user = req.ctx.get_single_arg("user", default=None)
    start = req.ctx.get_single_arg("start", default=None, convert=datetime)
    end = req.ctx.get_single_arg("end", default=None, convert=datetime)

    conditions = [
        Track.recorded_at != None,
        Track.recorded_at > MINIMUM_RECORDING_DATE,
    ]

    if start is not None:
        conditions.append(Track.recorded_at >= start)

    if end is not None:
        conditions.append(Track.recorded_at < end)

    # Only the user can look for their own stats, for now
    by_user = (
        user is not None and req.ctx.user is not None and req.ctx.user.id == int(user)
    )
    if by_user:
        conditions.append(Track.author_id == req.ctx.user.id)

    track_condition = reduce(and_, conditions)
    # operator.and_ builds a SQL AND here; a plain Python "and" would silently
    # drop all but one of the conditions
    public_track_condition = and_(Track.public, track_condition)

    query = (
        select(
            [
                func.count().label("publicTrackCount"),
                func.sum(Track.duration).label("trackDuration"),
                func.sum(Track.length).label("trackLength"),
            ]
        )
        .select_from(Track)
        .where(public_track_condition)
    )

    public_track_count, track_duration, track_length = (
        await req.ctx.db.execute(query)
    ).first()

    # This is required because SQL returns NULL when the input set to a
    # SUM() aggregation is empty.
    track_duration = track_duration or 0
    track_length = track_length or 0

    user_count = (
        1
        if by_user
        else (await req.ctx.db.execute(select(func.count()).select_from(User))).scalar()
    )
    track_count = (
        await req.ctx.db.execute(
            select(func.count()).select_from(Track).where(track_condition)
        )
    ).scalar()
    event_count = (
        await req.ctx.db.execute(
            select(func.count())
            .select_from(OvertakingEvent)
            .join(OvertakingEvent.track)
            .where(track_condition)
        )
    ).scalar()
    device_count = (
        await req.ctx.db.execute(
            select(func.count(distinct(UserDevice.id)))
            .select_from(UserDevice)
            .join(Track.user_device)
            .where(track_condition)
        )
    ).scalar()

    result = {
        "numEvents": event_count,
        "userCount": user_count,
        "trackLength": round_to(track_length or 0, TRACK_LENGTH_ROUNDING),
        "trackDuration": round_to(track_duration or 0, TRACK_DURATION_ROUNDING),
        "publicTrackCount": public_track_count,
        "trackCount": track_count,
        "deviceCount": device_count,
    }

    return json(result)


# const trackCount = await Track.find(trackFilter).count();
#
# const publicTrackCount = await Track.find({
#   ...trackFilter,
#   public: true,
# }).count();
#
# const userCount = await User.find({
#   ...(userFilter
#     ? { _id: userFilter }
#     : {
#         createdAt: dateFilter,
#       }),
# }).count();
#
# const trackStats = await Track.aggregate([
#   { $match: trackFilter },
#   {
#     $addFields: {
#       trackLength: {
#         $cond: [{ $lt: ['$statistics.length', 500000] }, '$statistics.length', 0],
#       },
#       numEvents: '$statistics.numEvents',
#       trackDuration: {
#         $cond: [
#           { $and: ['$statistics.recordedUntil', { $gt: ['$statistics.recordedAt', new Date('2010-01-01')] }] },
#           { $subtract: ['$statistics.recordedUntil', '$statistics.recordedAt'] },
#           0,
#         ],
#       },
#     },
#   },
#   { $project: { trackLength: true, numEvents: true, trackDuration: true } },
#   {
#     $group: {
#       _id: 'sum',
#       trackLength: { $sum: '$trackLength' },
#       numEvents: { $sum: '$numEvents' },
#       trackDuration: { $sum: '$trackDuration' },
#     },
#   },
# ]);
#
# const [trackLength, numEvents, trackDuration] =
#   trackStats.length > 0
#     ? [trackStats[0].trackLength, trackStats[0].numEvents, trackStats[0].trackDuration]
#     : [0, 0, 0];
#
# const trackLengthPrivatized = Math.floor(trackLength / TRACK_LENGTH_ROUNDING) * TRACK_LENGTH_ROUNDING;
# const trackDurationPrivatized =
#   Math.round(trackDuration / 1000 / TRACK_DURATION_ROUNDING) * TRACK_DURATION_ROUNDING;
#
# return res.json({
#   publicTrackCount,
#   trackLength: trackLengthPrivatized,
#   trackDuration: trackDurationPrivatized,
#   numEvents,
#   trackCount,
#   userCount,
# });
# }),
# );


@api.route("/stats/regions")
async def stats_regions(req):
    query = (
        select(
            [
                Region.id,
                Region.name,
                func.count(OvertakingEvent.id).label("overtaking_event_count"),
            ]
        )
        .select_from(Region)
        .join(
            OvertakingEvent,
            func.ST_Within(OvertakingEvent.geometry, Region.geometry),
        )
        .group_by(
            Region.id,
            Region.name,
            Region.geometry,
        )
        .having(func.count(OvertakingEvent.id) > 0)
        .order_by(desc("overtaking_event_count"))
    )

    regions = list(map(dict, (await req.ctx.db.execute(query)).all()))
    return json(regions)

@@ -1,129 +0,0 @@
from gzip import decompress
from sqlite3 import connect
from datetime import datetime, time, timedelta
from typing import Optional, Tuple

import dateutil.parser
from sanic.exceptions import Forbidden, InvalidUsage
from sanic.response import raw

from sqlalchemy import text

from obs.api.app import app
from obs.api.utils import use_request_semaphore


def get_tile(filename, zoom, x, y):
    """
    Inspired by:
    https://github.com/TileStache/TileStache/blob/master/TileStache/MBTiles.py
    """

    db = connect(filename)
    db.text_factory = bytes

    fmt = db.execute("SELECT value FROM metadata WHERE name='format'").fetchone()[0]
    if fmt != b"pbf":
        raise ValueError("mbtiles file is in wrong format: %s" % fmt)

    content = db.execute(
        "SELECT tile_data FROM tiles WHERE zoom_level=? AND tile_column=? AND tile_row=?",
        (zoom, x, (2**zoom - 1) - y),
    ).fetchone()
    return content and content[0] or None
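
# Illustrative note (not part of the original module): MBTiles store rows in
# TMS order, so the XYZ y coordinate is flipped before the lookup. At zoom 14,
# for example, y=5612 maps to tile_row = (2**14 - 1) - 5612 = 10771.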


def round_date(date, to="weeks", up=False):
    if to != "weeks":
        raise ValueError(f"cannot round to {to}")

    midnight = time(0, 0, 0, 0)
    start_of_day = date.date()  # ignore time
    weekday = date.weekday()

    is_rounded = date.time() == midnight and weekday == 0
    if is_rounded:
        return date

    if up:
        return datetime.combine(start_of_day + timedelta(days=7 - weekday), midnight)
    else:
        return datetime.combine(start_of_day - timedelta(days=weekday), midnight)
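
# Illustrative example (not part of the original module): rounding a Wednesday
# afternoon to week boundaries snaps to the surrounding Mondays.
#
#   >>> round_date(datetime(2023, 5, 3, 14, 30), up=False)
#   datetime.datetime(2023, 5, 1, 0, 0)
#   >>> round_date(datetime(2023, 5, 3, 14, 30), up=True)
#   datetime.datetime(2023, 5, 8, 0, 0)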


# regenerate approx. once each day
TILE_CACHE_MAX_AGE = 3600 * 24


def get_filter_options(
    req,
) -> Tuple[Optional[int], Optional[datetime], Optional[datetime]]:
    """
    Returns parsed, validated and normalized options for filtering map data, a
    tuple of

    * user_id (int|None)
    * start (datetime|None)
    * end (datetime|None)
    """
    user_id = req.ctx.get_single_arg("user", default=None, convert=int)
    if user_id is not None and (req.ctx.user is None or req.ctx.user.id != user_id):
        raise Forbidden()

    parse_date = lambda s: dateutil.parser.parse(s)
    start = req.ctx.get_single_arg("start", default=None, convert=parse_date)
    end = req.ctx.get_single_arg("end", default=None, convert=parse_date)

    start = round_date(start, to="weeks", up=False) if start else None
    end = round_date(end, to="weeks", up=True) if end else None

    if start is not None and end is not None and start >= end:
        raise InvalidUsage(
            "end date must be later than start date (note: dates are rounded to weeks)"
        )

    return user_id, start, end


@app.route(r"/tiles/<zoom:int>/<x:int>/<y:(\d+)\.pbf>")
async def tiles(req, zoom: int, x: int, y: str):
    async with use_request_semaphore(req, "tile_semaphore"):
        if app.config.get("TILES_FILE"):
            tile = get_tile(req.app.config.TILES_FILE, int(zoom), int(x), int(y))

        else:
            user_id, start, end = get_filter_options(req)

            tile = await req.ctx.db.scalar(
                text(
                    "select data from getmvt(:zoom, :x, :y, :user_id, :min_time, :max_time) as b(data, key);"
                ).bindparams(
                    zoom=int(zoom),
                    x=int(x),
                    y=int(y),
                    user_id=user_id,
                    min_time=start,
                    max_time=end,
                )
            )

        gzip = "gzip" in req.headers.get("accept-encoding", "")

        headers = {}
        headers["Vary"] = "Accept-Encoding"

        if req.app.config.DEBUG:
            headers["Cache-Control"] = "no-cache"
        else:
            headers["Cache-Control"] = f"public, max-age={TILE_CACHE_MAX_AGE}"

        # The tiles in the mbtiles file are gzip-compressed already, so we
        # actually serve them as-is, and only decompress them if the browser
        # doesn't accept gzip
        if gzip:
            headers["Content-Encoding"] = "gzip"

        if not gzip:
            tile = decompress(tile)

        return raw(tile, content_type="application/x-protobuf", headers=headers)

@@ -1,473 +0,0 @@
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
from datetime import date
|
|
||||||
from json import load as jsonload
|
|
||||||
from os.path import join, exists, isfile
|
|
||||||
|
|
||||||
from sanic.exceptions import InvalidUsage, NotFound, Forbidden
|
|
||||||
from sanic.response import file_stream, empty
|
|
||||||
from slugify import slugify
|
|
||||||
from sqlalchemy import select, func, and_
|
|
||||||
from sqlalchemy.orm import joinedload
|
|
||||||
|
|
||||||
from obs.api.app import api, require_auth, read_api_key, json
|
|
||||||
from obs.api.db import Track, Comment, DuplicateTrackFileError
|
|
||||||
from obs.api.utils import tar_of_tracks
|
|
||||||
|
|
||||||
log = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def normalize_user_agent(user_agent):
|
|
||||||
if not user_agent:
|
|
||||||
return None
|
|
||||||
|
|
||||||
m = re.match(r"\bOBS\/[^\s]+", user_agent)
|
|
||||||
return m[0] if m else None
|
|
||||||
|
|
||||||
|
|
||||||
async def _return_tracks(req, extend_query, limit, offset, order_by=None):
|
|
||||||
if limit <= 0 or limit > 1000:
|
|
||||||
raise InvalidUsage("invalid limit")
|
|
||||||
|
|
||||||
if offset < 0:
|
|
||||||
raise InvalidUsage("offset must be positive")
|
|
||||||
|
|
||||||
count_query = extend_query(
|
|
||||||
select(func.count()).select_from(Track).join(Track.author)
|
|
||||||
)
|
|
||||||
track_count = await req.ctx.db.scalar(count_query)
|
|
||||||
|
|
||||||
query = (
|
|
||||||
extend_query(select(Track).options(joinedload(Track.author)))
|
|
||||||
.limit(limit)
|
|
||||||
.offset(offset)
|
|
||||||
.order_by(order_by if order_by is not None else Track.created_at)
|
|
||||||
)
|
|
||||||
|
|
||||||
tracks = (await req.ctx.db.execute(query)).scalars()
|
|
||||||
|
|
||||||
return json(
|
|
||||||
{
|
|
||||||
"trackCount": track_count,
|
|
||||||
"tracks": list(
|
|
||||||
map(
|
|
||||||
lambda t: t.to_dict(
|
|
||||||
for_user_id=req.ctx.user.id if req.ctx.user else None
|
|
||||||
),
|
|
||||||
tracks,
|
|
||||||
)
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@api.get("/tracks")
|
|
||||||
async def get_tracks(req):
|
|
||||||
limit = req.ctx.get_single_arg("limit", default=20, convert=int)
|
|
||||||
offset = req.ctx.get_single_arg("offset", default=0, convert=int)
|
|
||||||
# author = req.ctx.get_single_arg("author", default=None, convert=int)
|
|
||||||
|
|
||||||
def extend_query(q):
|
|
||||||
q = q.where(Track.public)
|
|
||||||
|
|
||||||
# if author is not None:
|
|
||||||
# q = q.where(Track.author_id == author)
|
|
||||||
|
|
||||||
return q
|
|
||||||
|
|
||||||
return await _return_tracks(req, extend_query, limit, offset)
|
|
||||||
|
|
||||||
|
|
||||||
def parse_boolean(s):
|
|
||||||
if s is None:
|
|
||||||
return None
|
|
||||||
|
|
||||||
s = s.lower()
|
|
||||||
if s in ("true", "1", "yes", "y", "t"):
|
|
||||||
return True
|
|
||||||
if s in ("false", "0", "no", "n", "f"):
|
|
||||||
return False
|
|
||||||
|
|
||||||
raise ValueError("invalid value for boolean")
|
|
||||||
|
|
||||||
|
|
||||||
@api.get("/tracks/feed")
|
|
||||||
@require_auth
|
|
||||||
async def get_feed(req):
|
|
||||||
limit = req.ctx.get_single_arg("limit", default=20, convert=int)
|
|
||||||
offset = req.ctx.get_single_arg("offset", default=0, convert=int)
|
|
||||||
user_device_id = req.ctx.get_single_arg("user_device_id", default=None, convert=int)
|
|
||||||
|
|
||||||
order_by_columns = {
|
|
||||||
"recordedAt": Track.recorded_at,
|
|
||||||
"title": Track.title,
|
|
||||||
"visibility": Track.public,
|
|
||||||
"length": Track.length,
|
|
||||||
"duration": Track.duration,
|
|
||||||
"user_device_id": Track.user_device_id,
|
|
||||||
}
|
|
||||||
order_by = req.ctx.get_single_arg(
|
|
||||||
"order_by", default=None, convert=order_by_columns.get
|
|
||||||
)
|
|
||||||
|
|
||||||
reversed_ = req.ctx.get_single_arg("reversed", convert=parse_boolean, default=False)
|
|
||||||
if reversed_:
|
|
||||||
order_by = order_by.desc()
|
|
||||||
|
|
||||||
public = req.ctx.get_single_arg("public", convert=parse_boolean, default=None)
|
|
||||||
|
|
||||||
def extend_query(q):
|
|
||||||
q = q.where(Track.author_id == req.ctx.user.id)
|
|
||||||
|
|
||||||
if user_device_id is not None:
|
|
||||||
q = q.where(Track.user_device_id == user_device_id)
|
|
||||||
|
|
||||||
if public is not None:
|
|
||||||
q = q.where(Track.public == public)
|
|
||||||
|
|
||||||
return q
|
|
||||||
|
|
||||||
return await _return_tracks(req, extend_query, limit, offset, order_by)
|
|
||||||
|
|
||||||
|
|
||||||
@api.post("/tracks/bulk")
|
|
||||||
@require_auth
|
|
||||||
async def tracks_bulk_action(req):
|
|
||||||
body = req.json
|
|
||||||
action = body["action"]
|
|
||||||
track_slugs = body["tracks"]
|
|
||||||
|
|
||||||
if action not in ("delete", "makePublic", "makePrivate", "reprocess", "download"):
|
|
||||||
raise InvalidUsage("invalid action")
|
|
||||||
|
|
||||||
query = select(Track).where(
|
|
||||||
and_(Track.author_id == req.ctx.user.id, Track.slug.in_(track_slugs))
|
|
||||||
)
|
|
||||||
|
|
||||||
files = set()
|
|
||||||
|
|
||||||
for track in (await req.ctx.db.execute(query)).scalars():
|
|
||||||
if action == "delete":
|
|
||||||
await req.ctx.db.delete(track)
|
|
||||||
elif action == "makePublic":
|
|
||||||
if not track.public:
|
|
||||||
track.queue_processing()
|
|
||||||
track.public = True
|
|
||||||
elif action == "makePrivate":
|
|
||||||
if track.public:
|
|
||||||
track.queue_processing()
|
|
||||||
track.public = False
|
|
||||||
elif action == "reprocess":
|
|
||||||
track.queue_processing()
|
|
||||||
elif action == "download":
|
|
||||||
files.add(track.get_original_file_path(req.app.config))
|
|
||||||
|
|
||||||
await req.ctx.db.commit()
|
|
||||||
|
|
||||||
if action == "download":
|
|
||||||
username_slug = slugify(req.ctx.user.username, separator="-")
|
|
||||||
date_str = date.today().isoformat()
|
|
||||||
file_basename = f"tracks_{username_slug}_{date_str}"
|
|
||||||
|
|
||||||
await tar_of_tracks(req, files, file_basename)
|
|
||||||
return
|
|
||||||
|
|
||||||
return empty()
|
|
||||||
|
|
||||||
|
|
||||||
@api.post("/tracks")
|
|
||||||
@read_api_key
|
|
||||||
@require_auth
|
|
||||||
async def post_track(req):
|
|
||||||
try:
|
|
||||||
file = req.files["body"][0]
|
|
||||||
except LookupError as e:
|
|
||||||
raise InvalidUsage(
|
|
||||||
'Track upload needs a single file in "body" multipart field'
|
|
||||||
) from e
|
|
||||||
|
|
||||||
try:
|
|
||||||
body = req.json["track"]
|
|
||||||
except (LookupError, InvalidUsage):
|
|
||||||
body = {}
|
|
||||||
|
|
||||||
title = body.get("title")
|
|
||||||
public = body.get("public")
|
|
||||||
|
|
||||||
track = Track(
|
|
||||||
title=title,
|
|
||||||
customized_title=bool(title),
|
|
||||||
author=req.ctx.user,
|
|
||||||
public=public
|
|
||||||
if public is not None
|
|
||||||
else req.ctx.user.are_tracks_visible_for_all,
|
|
||||||
)
|
|
||||||
track.generate_slug()
|
|
||||||
try:
|
|
||||||
await track.prevent_duplicates(req.ctx.db, file.body)
|
|
||||||
except DuplicateTrackFileError:
|
|
||||||
raise InvalidUsage("Track file is not unique")
|
|
||||||
|
|
||||||
track.uploaded_by_user_agent = normalize_user_agent(req.headers["user-agent"])
|
|
||||||
track.original_file_name = file.name
|
|
||||||
await track.write_to_original_file(req.app.config, file.body)
|
|
||||||
track.queue_processing()
|
|
||||||
track.auto_generate_title()
|
|
||||||
|
|
||||||
req.ctx.db.add(track)
|
|
||||||
await req.ctx.db.commit()
|
|
||||||
|
|
||||||
return await get_track(req, track.slug)
|
|
||||||
|
|
||||||
|
|
||||||
async def _load_track(req, slug, raise_not_found=True):
|
|
||||||
track = (
|
|
||||||
await req.ctx.db.execute(
|
|
||||||
select(Track)
|
|
||||||
.where(Track.slug == slug)
|
|
||||||
.options(joinedload(Track.author))
|
|
||||||
.limit(1)
|
|
||||||
)
|
|
||||||
).scalar()
|
|
||||||
|
|
||||||
if raise_not_found and track is None:
|
|
||||||
raise NotFound()
|
|
||||||
|
|
||||||
if not track.is_visible_to(req.ctx.user):
|
|
||||||
raise Forbidden()
|
|
||||||
|
|
||||||
return track
|
|
||||||
|
|
||||||
|
|
||||||
@api.get("/tracks/<slug:str>")
|
|
||||||
async def get_track(req, slug: str):
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
return json(
|
|
||||||
{"track": track.to_dict(for_user_id=req.ctx.user.id if req.ctx.user else None)},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@api.delete("/tracks/<slug:str>")
|
|
||||||
@require_auth
|
|
||||||
async def delete_track(req, slug: str):
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
if not track.is_visible_to_private(req.ctx.user):
|
|
||||||
raise Forbidden()
|
|
||||||
|
|
||||||
await req.ctx.db.delete(track)
|
|
||||||
await req.ctx.db.commit()
|
|
||||||
|
|
||||||
return empty()
|
|
||||||
|
|
||||||
|
|
||||||
@api.get("/tracks/<slug:str>/data")
|
|
||||||
async def get_track_data(req, slug: str):
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
|
|
||||||
FILE_BY_KEY = {
|
|
||||||
"measurements": "measurements.json",
|
|
||||||
"overtakingEvents": "overtakingEvents.json",
|
|
||||||
"track": "track.json",
|
|
||||||
"trackRaw": "trackRaw.json",
|
|
||||||
}
|
|
||||||
|
|
||||||
result = {}
|
|
||||||
|
|
||||||
for key, filename in FILE_BY_KEY.items():
|
|
||||||
file_path = join(
|
|
||||||
req.app.config.PROCESSING_OUTPUT_DIR, track.file_path, filename
|
|
||||||
)
|
|
||||||
if not exists(file_path) or not isfile(file_path):
|
|
||||||
continue
|
|
||||||
|
|
||||||
with open(file_path) as f:
|
|
||||||
result[key] = jsonload(f)
|
|
||||||
|
|
||||||
return json(
|
|
||||||
result,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@api.get("/tracks/<slug:str>/download/original.csv")
|
|
||||||
async def download_original_file(req, slug: str):
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
|
|
||||||
if not track.is_visible_to_private(req.ctx.user):
|
|
||||||
raise Forbidden()
|
|
||||||
|
|
||||||
return await file_stream(
|
|
||||||
track.get_original_file_path(req.app.config),
|
|
||||||
mime_type="text/csv",
|
|
||||||
filename=f"{slug}.csv",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@api.get("/tracks/<slug:str>/download/track.gpx")
|
|
||||||
async def download_track_gpx(req, slug: str):
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
|
|
||||||
if not track.is_visible_to(req.ctx.user):
|
|
||||||
raise Forbidden()
|
|
||||||
|
|
||||||
file_path = join(req.app.config.PROCESSING_OUTPUT_DIR, track.file_path, "track.gpx")
|
|
||||||
if not exists(file_path) or not isfile(file_path):
|
|
||||||
raise NotFound()
|
|
||||||
|
|
||||||
return await file_stream(
|
|
||||||
file_path,
|
|
||||||
mime_type="application/gpx+xml",
|
|
||||||
filename=f"{slug}.gpx",
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@api.put("/tracks/<slug:str>")
|
|
||||||
@require_auth
|
|
||||||
async def put_track(req, slug: str):
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
|
|
||||||
if track.author_id != req.ctx.user.id:
|
|
||||||
raise Forbidden()
|
|
||||||
|
|
||||||
try:
|
|
||||||
body = req.json["track"]
|
|
||||||
except BaseException:
|
|
||||||
body = {}
|
|
||||||
|
|
||||||
if "title" in body:
|
|
||||||
track.title = (body["title"] or "").strip() or None
|
|
||||||
track.customized_title = track.title is not None
|
|
||||||
|
|
||||||
if "description" in body:
|
|
||||||
track.description = (body["description"] or "").strip() or None
|
|
||||||
|
|
||||||
process = False
|
|
||||||
|
|
||||||
if "public" in body:
|
|
||||||
public = bool(body["public"])
|
|
||||||
process = process or (public != track.public) # if changed
|
|
||||||
track.public = public
|
|
||||||
|
|
||||||
if "body" in req.files:
|
|
||||||
try:
|
|
||||||
file = req.files["body"][0]
|
|
||||||
except LookupError as e:
|
|
||||||
raise InvalidUsage(
|
|
||||||
'Track upload needs a single file in "body" multipart field'
|
|
||||||
) from e
|
|
||||||
|
|
||||||
await track.prevent_duplicates(req.ctx.db, file.body)
|
|
||||||
track.uploaded_by_user_agent = normalize_user_agent(req.headers["user-agent"])
|
|
||||||
track.original_file_name = file.name or (track.slug + ".csv")
|
|
||||||
await track.write_to_original_file(req.app.config, file.body)
|
|
||||||
process = True
|
|
||||||
|
|
||||||
if process:
|
|
||||||
track.queue_processing()
|
|
||||||
|
|
||||||
track.auto_generate_title()
|
|
||||||
await req.ctx.db.commit()
|
|
||||||
|
|
||||||
track = await _load_track(req, track.slug)
|
|
||||||
return json(
|
|
||||||
{"track": track.to_dict(for_user_id=req.ctx.user.id)},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@api.get("/tracks/<slug:str>/comments")
|
|
||||||
async def get_track_comments(req, slug: str):
|
|
||||||
limit = req.ctx.get_single_arg("limit", default=20, convert=int)
|
|
||||||
offset = req.ctx.get_single_arg("offset", default=0, convert=int)
|
|
||||||
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
|
|
||||||
comment_count = await req.ctx.db.scalar(
|
|
||||||
select(func.count()).select_from(Comment).where(Comment.track_id == track.id)
|
|
||||||
)
|
|
||||||
|
|
||||||
query = (
|
|
||||||
select(Comment)
|
|
||||||
.options(joinedload(Comment.author))
|
|
||||||
.where(Comment.track_id == track.id)
|
|
||||||
.order_by(Comment.created_at.desc())
|
|
||||||
.limit(limit)
|
|
||||||
.offset(offset)
|
|
||||||
)
|
|
||||||
|
|
||||||
comments = (await req.ctx.db.execute(query)).scalars()
|
|
||||||
|
|
||||||
return json(
|
|
||||||
{
|
|
||||||
"commentCount": comment_count,
|
|
||||||
"comments": list(
|
|
||||||
map(
|
|
||||||
lambda c: c.to_dict(
|
|
||||||
for_user_id=req.ctx.user.id if req.ctx.user else None
|
|
||||||
),
|
|
||||||
comments,
|
|
||||||
)
|
|
||||||
),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@api.post("/tracks/<slug:str>/comments")
|
|
||||||
@require_auth
|
|
||||||
async def post_track_comment(req, slug: str):
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
|
|
||||||
body = req.json.get("comment", {}).get("body")
|
|
||||||
if not isinstance(body, str):
|
|
||||||
raise InvalidUsage("no comment given")
|
|
||||||
|
|
||||||
# Ensure body is not empty
|
|
||||||
body = body.strip()
|
|
||||||
if not body:
|
|
||||||
raise InvalidUsage("empty comment")
|
|
||||||
|
|
||||||
comment = Comment(
|
|
||||||
body=body,
|
|
||||||
track_id=track.id,
|
|
||||||
author_id=req.ctx.user.id,
|
|
||||||
)
|
|
||||||
|
|
||||||
req.ctx.db.add(comment)
|
|
||||||
await req.ctx.db.commit()
|
|
||||||
|
|
||||||
await req.ctx.db.refresh(comment)
|
|
||||||
|
|
||||||
comment = (
|
|
||||||
await req.ctx.db.execute(
|
|
||||||
select(Comment)
|
|
||||||
.options(joinedload(Comment.author))
|
|
||||||
.where(Comment.id == comment.id)
|
|
||||||
.limit(1)
|
|
||||||
)
|
|
||||||
).scalar()
|
|
||||||
|
|
||||||
return json({"comment": comment.to_dict(for_user_id=req.ctx.user.id)})
|
|
||||||
|
|
||||||
|
|
||||||
@api.delete("/tracks/<slug:str>/comments/<uid:str>")
|
|
||||||
@require_auth
|
|
||||||
async def delete_track_comment(req, slug: str, uid: str):
|
|
||||||
track = await _load_track(req, slug)
|
|
||||||
|
|
||||||
comment = (
|
|
||||||
await req.ctx.db.execute(
|
|
||||||
select(Comment)
|
|
||||||
.options(joinedload(Comment.author))
|
|
||||||
.where(and_(Comment.track_id == track.id, Comment.uid == uid))
|
|
||||||
.limit(1)
|
|
||||||
)
|
|
||||||
).scalar()
|
|
||||||
|
|
||||||
if not comment:
|
|
||||||
raise NotFound()
|
|
||||||
|
|
||||||
if comment.author_id != req.ctx.user.id:
|
|
||||||
raise Forbidden()
|
|
||||||
|
|
||||||
await req.ctx.db.delete(comment)
|
|
||||||
await req.ctx.db.commit()
|
|
||||||
|
|
||||||
return empty()
|
|
|
@@ -1,95 +0,0 @@
import logging

from sanic.response import json
from sanic.exceptions import InvalidUsage, Forbidden, NotFound
from sqlalchemy import and_, select

from obs.api.app import api, require_auth
from obs.api.db import UserDevice

log = logging.getLogger(__name__)

from obs.api import __version__ as version


def user_to_json(user):
    return {
        "id": user.id,
        "username": user.username,
        "displayName": user.display_name,
        "email": user.email,
        "bio": user.bio,
        "image": user.image,
        "areTracksVisibleForAll": user.are_tracks_visible_for_all,
        "apiKey": user.api_key,
    }
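
# Illustrative example (not part of the original module): a serialized user
# might look like this (hypothetical values):
#
#   {
#     "id": 7,
#     "username": "testuser",
#     "displayName": "Test User",
#     "email": "test@example.com",
#     "bio": null,
#     "image": null,
#     "areTracksVisibleForAll": false,
#     "apiKey": "..."
#   }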


@api.get("/user")
async def get_user(req):
    return json(user_to_json(req.ctx.user) if req.ctx.user else None)


@api.get("/user/devices")
async def get_user_devices(req):
    if not req.ctx.user:
        raise Forbidden()

    query = (
        select(UserDevice)
        .where(UserDevice.user_id == req.ctx.user.id)
        .order_by(UserDevice.id)
    )

    devices = (await req.ctx.db.execute(query)).scalars()

    return json([device.to_dict(req.ctx.user.id) for device in devices])


@api.put("/user/devices/<device_id:int>")
async def put_user_device(req, device_id):
    if not req.ctx.user:
        raise Forbidden()

    body = req.json

    query = (
        select(UserDevice)
        .where(and_(UserDevice.user_id == req.ctx.user.id, UserDevice.id == device_id))
        .limit(1)
    )

    device = (await req.ctx.db.execute(query)).scalar()

    if device is None:
        raise NotFound()

    new_name = body.get("displayName", "").strip()
    if new_name and device.display_name != new_name:
        device.display_name = new_name
        await req.ctx.db.commit()

    return json(device.to_dict())


@api.put("/user")
@require_auth
async def put_user(req):
    user = req.ctx.user
    data = req.json

    for key in ["email", "bio", "image"]:
        if key in data and isinstance(data[key], (str, type(None))):
            setattr(user, key, data[key])

    if "displayName" in data:
        user.display_name = data["displayName"] or None

    if "areTracksVisibleForAll" in data:
        user.are_tracks_visible_for_all = bool(data["areTracksVisibleForAll"])

    if data.get("updateApiKey"):
        user.generate_api_key()

    await req.ctx.db.commit()
    return json(user_to_json(req.ctx.user))

@@ -1,162 +0,0 @@
import asyncio
from contextlib import asynccontextmanager
from datetime import datetime
import logging
from os.path import commonpath, join, relpath
import queue
import tarfile

import dateutil.parser
from sanic.exceptions import InvalidUsage, ServiceUnavailable

log = logging.getLogger(__name__)

RAISE = object()


def get_single_arg(req, name, default=RAISE, convert=None):
    try:
        value = req.args[name][0]
    except LookupError as e:
        if default is RAISE:
            raise InvalidUsage(f"missing `{name}`") from e

        value = default

    if convert is not None and value is not None:
        if convert is datetime or convert in ("date", "datetime"):
            convert = lambda s: dateutil.parser.parse(s)

        try:
            value = convert(value)
        except (ValueError, TypeError) as e:
            raise InvalidUsage(f"invalid `{name}`: {str(e)}") from e

    return value
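
# Illustrative usage (not part of the original module): for a request like
# GET /tracks?limit=20&start=2023-01-01 a handler can read query arguments as
#
#   limit = get_single_arg(req, "limit", default=10, convert=int)        # -> 20
#   start = get_single_arg(req, "start", default=None, convert=datetime)
#
# where passing `datetime` as the converter falls back to dateutil parsing.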


def round_to(value: float, multiples: float) -> float:
    if value is None:
        return None
    return round(value / multiples) * multiples
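
# Illustrative example (not part of the original module):
#
#   >>> round_to(1400, 1000)
#   1000
#   >>> round_to(1600, 1000)
#   2000
#   >>> round_to(None, 1000) is None
#   True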


def chunk_list(lst, n):
    for s in range(0, len(lst), n):
        yield lst[s : s + n]


class chunk:
    def __init__(self, iterable, n):
        self.iterable = iterable
        self.n = n

    def __iter__(self):
        if isinstance(self.iterable, list):
            yield from chunk_list(self.iterable, self.n)
            return

        it = iter(self.iterable)
        while True:
            current = []
            try:
                for _ in range(self.n):
                    current.append(next(it))
                yield current
            except StopIteration:
                if current:
                    yield current
                break

    async def __aiter__(self):
        if hasattr(self.iterable, "__iter__"):
            for item in self:
                yield item
            return

        it = self.iterable.__aiter__()
        while True:
            current = []
            try:
                for _ in range(self.n):
                    current.append(await it.__anext__())
                yield current
            except StopAsyncIteration:
                if len(current):
                    yield current
                break
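
# Illustrative example (not part of the original module): `chunk` groups a
# (sync or async) iterable into lists of at most n items.
#
#   >>> list(chunk([1, 2, 3, 4, 5], 2))
#   [[1, 2], [3, 4], [5]]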


async def tar_of_tracks(req, files, file_basename="tracks"):
    response = await req.respond(
        content_type="application/x-gtar",
        headers={
            "content-disposition": f'attachment; filename="{file_basename}.tar.bz2"'
        },
    )

    helper = StreamerHelper(response)

    tar = tarfile.open(name=None, fileobj=helper, mode="w|bz2", bufsize=256 * 512)

    root = commonpath(list(files))
    for fname in files:
        log.info("Write file to tar: %s", fname)
        with open(fname, "rb") as fobj:
            tarinfo = tar.gettarinfo(fname)
            tarinfo.name = join(file_basename, relpath(fname, root))
            tar.addfile(tarinfo, fobj)
            await helper.send_all()
    tar.close()
    await helper.send_all()

    await response.eof()


class StreamerHelper:
    def __init__(self, response):
        self.response = response
        self.towrite = queue.Queue()

    def write(self, data):
        self.towrite.put(data)

    async def send_all(self):
        while True:
            try:
                tosend = self.towrite.get(block=False)
                await self.response.send(tosend)
            except queue.Empty:
                break


@asynccontextmanager
async def use_request_semaphore(req, semaphore_name, timeout=10):
    """
    If configured, acquire the named semaphore for this request and release it
    after the context has finished.

    If the semaphore cannot be acquired within the timeout, issue a 503 Service
    Unavailable error response that describes that the database is overloaded,
    so users know what the problem is.

    Operates as a noop when the semaphore is not enabled.
    """
    semaphore = getattr(req.app.ctx, semaphore_name, None)

    if semaphore is None:
        yield
        return

    try:
        await asyncio.wait_for(semaphore.acquire(), timeout)

        try:
            yield
        finally:
            semaphore.release()

    except asyncio.TimeoutError:
        raise ServiceUnavailable(
            "Too many requests, database overloaded. Please retry later."
        )

@@ -1 +0,0 @@
__path__ = __import__("pkgutil").extend_path(__path__, __name__)

@@ -1,66 +0,0 @@
#!/usr/bin/env python3

import math
import sys
import os
import argparse
import asyncio
import logging

import coloredlogs

from obs.api.app import app
from obs.api.db import connect_db

log = logging.getLogger(__name__)


def format_size(n, b=1024):
    if n == 0:
        return "0 B"
    if n < 0:
        # recurse on the absolute value, otherwise this never terminates
        return "-" + format_size(-n, b)
    e = math.floor(math.log(n, b))
    prefixes = ["", "Ki", "Mi", "Gi", "Ti"] if b == 1024 else ["", "K", "M", "G", "T"]
    e = min(e, len(prefixes) - 1)
    r = n / b**e
    s = f"{r:0.2f}" if e > 0 else str(n)
    return f"{s} {prefixes[e]}B"
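
# Illustrative example (not part of the original module):
#
#   >>> format_size(0)
#   '0 B'
#   >>> format_size(1536)
#   '1.50 KiB'
#   >>> format_size(-2048)
#   '-2.00 KiB'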


class AccessLogFilter(logging.Filter):
    def filter(self, record):
        if not record.msg:
            record.msg = (
                f"{record.request} - {record.status} ({format_size(record.byte)})"
            )
        return True


def main():
    debug = app.config.DEBUG

    coloredlogs.install(
        level=logging.DEBUG if app.config.get("VERBOSE", debug) else logging.INFO,
        milliseconds=True,
        isatty=True,
    )

    for ln in ["sanic.root", "sanic.error", "sanic.access"]:
        l = logging.getLogger(ln)
        for h in list(l.handlers):
            l.removeHandler(h)

    logging.getLogger("sanic.access").addFilter(AccessLogFilter())

    app.run(
        host=app.config.HOST,
        port=app.config.PORT,
        debug=debug,
        auto_reload=app.config.get("AUTO_RELOAD", debug),
        access_log=True,
    )


if __name__ == "__main__":
    main()

@@ -1,191 +0,0 @@
|
||||||
#!/usr/bin/env python3
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import re
|
|
||||||
import msgpack
|
|
||||||
|
|
||||||
import osmium
|
|
||||||
import shapely.wkb as wkb
|
|
||||||
from shapely.ops import transform
|
|
||||||
|
|
||||||
HIGHWAY_TYPES = {
|
|
||||||
"trunk",
|
|
||||||
"primary",
|
|
||||||
"secondary",
|
|
||||||
"tertiary",
|
|
||||||
"unclassified",
|
|
||||||
"residential",
|
|
||||||
"trunk_link",
|
|
||||||
"primary_link",
|
|
||||||
"secondary_link",
|
|
||||||
"tertiary_link",
|
|
||||||
"living_street",
|
|
||||||
"service",
|
|
||||||
"track",
|
|
||||||
"road",
|
|
||||||
}
|
|
||||||
ZONE_TYPES = {
|
|
||||||
"urban",
|
|
||||||
"rural",
|
|
||||||
"motorway",
|
|
||||||
}
|
|
||||||
URBAN_TYPES = {
|
|
||||||
"residential",
|
|
||||||
"living_street",
|
|
||||||
"road",
|
|
||||||
}
|
|
||||||
MOTORWAY_TYPES = {
|
|
||||||
"motorway",
|
|
||||||
"motorway_link",
|
|
||||||
}
|
|
||||||
|
|
||||||
ADMIN_LEVEL_MIN = 2
|
|
||||||
ADMIN_LEVEL_MAX = 8
|
|
||||||
MINSPEED_RURAL = 60
|
|
||||||
|
|
||||||
ONEWAY_YES = {"yes", "true", "1"}
|
|
||||||
ONEWAY_REVERSE = {"reverse", "-1"}
|
|
||||||
|
|
||||||
|
|
||||||
def parse_number(tag):
|
|
||||||
if not tag:
|
|
||||||
return None
|
|
||||||
|
|
||||||
match = re.search(r"[0-9]+", tag)
|
|
||||||
if not match:
|
|
||||||
return None
|
|
||||||
|
|
||||||
digits = match.group(0)
|
|
||||||
try:
|
|
||||||
return int(digits)
|
|
||||||
except ValueError:
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def determine_zone(tags):
|
|
||||||
highway = tags.get("highway")
|
|
||||||
zone = tags.get("zone:traffic")
|
|
||||||
|
|
||||||
if zone is not None:
|
|
||||||
if "rural" in zone:
|
|
||||||
return "rural"
|
|
||||||
|
|
||||||
if "motorway" in zone:
|
|
||||||
return "motorway"
|
|
||||||
|
|
||||||
return "urban"
|
|
||||||
|
|
||||||
# From here on we are guessing based on other tags
|
|
||||||
|
|
||||||
if highway in URBAN_TYPES:
|
|
||||||
return "urban"
|
|
||||||
|
|
||||||
if highway in MOTORWAY_TYPES:
|
|
||||||
return "motorway"
|
|
||||||
|
|
||||||
maxspeed_source = tags.get("source:maxspeed")
|
|
||||||
if maxspeed_source and "rural" in maxspeed_source:
|
|
||||||
return "rural"
|
|
||||||
if maxspeed_source and "urban" in maxspeed_source:
|
|
||||||
return "urban"
|
|
||||||
|
|
||||||
for key in ["maxspeed", "maxspeed:forward", "maxspeed:backward"]:
|
|
||||||
maxspeed = parse_number(tags.get(key))
|
|
||||||
if maxspeed is not None and maxspeed > MINSPEED_RURAL:
|
|
||||||
return "rural"
|
|
||||||
|
|
||||||
# default to urban if we have no idea
|
|
||||||
return "urban"
|
|
||||||
|
|
||||||
|
|
||||||
def determine_direction(tags, zone):
|
|
||||||
if (
|
|
||||||
tags.get("oneway") in ONEWAY_YES
|
|
||||||
or tags.get("junction") == "roundabout"
|
|
||||||
or zone == "motorway"
|
|
||||||
):
|
|
||||||
return 1, True
|
|
||||||
|
|
||||||
if tags.get("oneway") in ONEWAY_REVERSE:
|
|
||||||
return -1, True
|
|
||||||
|
|
||||||
return 0, False
|
|
||||||
|
|
||||||
|
|
||||||
class StreamPacker:
|
|
||||||
def __init__(self, stream, *args, **kwargs):
|
|
||||||
self.stream = stream
|
|
||||||
self.packer = msgpack.Packer(*args, autoreset=False, **kwargs)
|
|
||||||
|
|
||||||
def _write_out(self):
|
|
||||||
if hasattr(self.packer, "getbuffer"):
|
|
||||||
chunk = self.packer.getbuffer()
|
|
||||||
else:
|
|
||||||
chunk = self.packer.bytes()
|
|
||||||
|
|
||||||
self.stream.write(chunk)
|
|
||||||
self.packer.reset()
|
|
||||||
|
|
||||||
def pack(self, *args, **kwargs):
|
|
||||||
self.packer.pack(*args, **kwargs)
|
|
||||||
self._write_out()
|
|
||||||
|
|
||||||
def pack_array_header(self, *args, **kwargs):
|
|
||||||
self.packer.pack_array_header(*args, **kwargs)
|
|
||||||
self._write_out()
|
|
||||||
|
|
||||||
def pack_map_header(self, *args, **kwargs):
|
|
||||||
self.packer.pack_map_header(*args, **kwargs)
|
|
||||||
self._write_out()
|
|
||||||
|
|
||||||
def pack_map_pairs(self, *args, **kwargs):
|
|
||||||
self.packer.pack_map_pairs(*args, **kwargs)
|
|
||||||
self._write_out()
|
|
||||||
|
|
||||||
|
|
||||||
# A global factory that creates WKB from a osmium geometry
|
|
||||||
wkbfab = osmium.geom.WKBFactory()
|
|
||||||
|
|
||||||
from pyproj import Transformer
|
|
||||||
|
|
||||||
project = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True).transform
|
|
||||||
|
|
||||||
|
|
||||||
class OSMHandler(osmium.SimpleHandler):
|
|
||||||
def __init__(self, packer):
|
|
||||||
self.packer = packer
|
|
||||||
super().__init__()
|
|
||||||
|
|
||||||
def way(self, way):
|
|
||||||
tags = way.tags
|
|
||||||
|
|
||||||
highway = tags.get("highway")
|
|
||||||
if not highway or highway not in HIGHWAY_TYPES:
|
|
||||||
return
|
|
||||||
|
|
||||||
access = tags.get("access", None)
|
|
||||||
bicycle = tags.get("bicycle", None)
|
|
||||||
if access == "no" and bicycle not in ["designated", "yes", "permissive", "destination"]:
|
|
||||||
return
|
|
||||||
|
|
||||||
zone = determine_zone(tags)
|
|
||||||
directionality, oneway = determine_direction(tags, zone)
|
|
||||||
name = tags.get("name")
|
|
||||||
|
|
||||||
geometry = wkb.loads(wkbfab.create_linestring(way), hex=True)
|
|
||||||
geometry = transform(project, geometry)
|
|
||||||
geometry = wkb.dumps(geometry)
|
|
||||||
self.packer.pack(
|
|
||||||
[b"\x01", way.id, name, zone, directionality, oneway, geometry]
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
with open(sys.argv[2], "wb") as fout:
|
|
||||||
packer = StreamPacker(fout)
|
|
||||||
osmhandler = OSMHandler(packer)
|
|
||||||
osmhandler.apply_file(sys.argv[1], locations=True)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
|
@@ -1,22 +0,0 @@
coloredlogs~=15.0.1
sanic==22.6.2
oic~=1.5.0
sanic-session~=0.8.0
python-slugify~=6.1.2
motor~=3.1.1
pyyaml~=5.3.1
-e git+https://github.com/openmaptiles/openmaptiles-tools#egg=openmaptiles-tools
sqlparse~=0.4.3
sqlalchemy[asyncio]~=1.4.46
asyncpg~=0.27.0
pyshp~=2.3.1
alembic~=1.9.4
stream-zip~=0.0.50
msgpack~=1.0.5
osmium~=3.6.0
psycopg~=3.1.8
shapely~=2.0.1
pyproj~=3.4.1
aiohttp~=3.8.1
# sanic requires websockets and chokes on >=10 in 22.6.2
websockets<11

@@ -1 +0,0 @@
Subproject commit 664e4d606416417c0651ea1748d32dd36209be6a

api/setup.py
@@ -1,34 +0,0 @@
from setuptools import setup, find_packages

setup(
    name="openbikesensor-api",
    version="0.0.1",
    author="OpenBikeSensor Contributors",
    license="LGPL-3.0",
    description="OpenBikeSensor Portal API",
    url="https://github.com/openbikesensor/portal",
    packages=find_packages(),
    package_data={},
    install_requires=[
        "coloredlogs~=15.0.1",
        "sanic==22.6.2",
        "oic>=1.3.0, <2",
        "sanic-session~=0.8.0",
        "python-slugify>=5.0.2,<6.2.0",
        "motor>=2.5.1,<3.1.2",
        "pyyaml<6",
        "sqlparse~=0.4.3",
        "openmaptiles-tools",  # install from git
        "pyshp>=2.2,<2.4",
        "sqlalchemy[asyncio]~=1.4.46",
        "asyncpg~=0.27.0",
        "alembic~=1.9.4",
        "stream-zip~=0.0.50",
    ],
    entry_points={
        "console_scripts": [
            "openbikesensor-api=obs.bin.openbikesensor_api:main",
            "openbikesensor-transform-osm=obs.bin.openbikesensor_transform_osm:main",
        ]
    },
)

@@ -1,188 +0,0 @@
#!/usr/bin/env python3
import argparse
import asyncio
import logging
import json
from datetime import datetime
from uuid import uuid4

from sqlalchemy import select

from motor.motor_asyncio import AsyncIOMotorClient

from obs.api.db import make_session, connect_db, User, Track, Comment
from obs.api.app import app

log = logging.getLogger(__name__)


async def main():
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    parser = argparse.ArgumentParser(
        description="processes a single track for use in the portal, "
        "using the obs.face algorithms"
    )

    parser.add_argument(
        "mongodb_url",
        metavar="MONGODB_URL",
        help="url to the mongodb, in format mongodb://user:pass@host/dbname",
    )

    parser.add_argument(
        "--keycloak-users-file",
        metavar="FILE",
        type=argparse.FileType("wt", encoding="utf-8"),
        help="a file to write a JSON of all old users to, for importing to keycloak",
        default=None,
    )

    parser.add_argument(
        "--keep-api-keys",
        action="store_true",
        help="keep the old API keys (very insecure!) instead of generating new ones",
        default=False,
    )

    args = parser.parse_args()

    if args.keep_api_keys:
        log.warning(
            "Importing users with their old API keys. These keys are very insecure and "
            "could provide access to user data to third parties. Consider to notify "
            "your users about the need to generate a new API key through their profile pages."
        )

    async with connect_db(app.config.POSTGRES_URL):
        async with make_session() as session:
            mongo = AsyncIOMotorClient(args.mongodb_url).get_default_database()

            log.debug("Connected to mongodb and postgres.")
            user_id_map = await import_users(
                mongo, session, args.keycloak_users_file, args.keep_api_keys
            )

            await import_tracks(mongo, session, user_id_map)

            await session.commit()


async def import_users(mongo, session, keycloak_users_file, keep_api_keys):
    keycloak_users = []

    old_id_by_email = {}
    async for user in mongo.users.find({}):
        old_id_by_email[user["email"]] = user["_id"]

        new_user = User(
            sub=str(uuid4()),
            email=user["email"],
            username=user["username"],
            bio=user.get("bio"),
            image=user.get("image"),
            are_tracks_visible_for_all=user.get("areTracksVisibleForAll") or False,
            created_at=user.get("createdAt") or datetime.utcnow(),
            updated_at=user.get("updatedAt") or datetime.utcnow(),
            match_by_username_email=True,
        )

        if keep_api_keys:
            new_user.api_key = str(user["_id"])
        else:
            new_user.generate_api_key()

        if keycloak_users_file:
            needs_email_verification = user.get("needsEmailValidation", True)
            required_actions = ["UPDATE_PASSWORD"]
            if needs_email_verification:
                required_actions.append("VERIFY_EMAIL")

            keycloak_users.append(
                {
                    "username": new_user.username,
                    "email": new_user.email,
                    "enabled": True,
                    "requiredActions": required_actions,
                    "emailVerified": not needs_email_verification,
                }
            )

        session.add(new_user)
        log.info("Creating user %s", new_user.username)

    await session.commit()

    id_map = {}
    result = await session.scalars(select(User))
    for user in result:
        id_map[old_id_by_email[user.email]] = user.id

    if keycloak_users_file:
        json.dump({"users": keycloak_users}, keycloak_users_file, indent=4)
        log.info("Wrote keycloak users file to %s.", keycloak_users_file.name)

    return id_map


def parse_datetime(s):
    if isinstance(s, str):
        return datetime.fromisoformat(s)
    return s


async def import_tracks(mongo, session, user_id_map):
    track_count = 0

    async for track in mongo.tracks.find({}):
        stats = track.get("statistics") or {}
        new_track = Track(
            created_at=parse_datetime(track.get("createdAt")) or datetime.utcnow(),
            updated_at=parse_datetime(track.get("updatedAt")) or datetime.utcnow(),
            slug=track["slug"],
            title=track.get("title"),
            processing_status=track.get("processingStatus") or "pending",
            processing_log=track.get("processingLog"),
            customized_title=bool(track.get("customizedTitle")),
            description=track.get("description"),
            public=track.get("public"),
            uploaded_by_user_agent=track.get("uploadedByUserAgent"),
            original_file_name=track.get("originalFileName"),
            original_file_hash=track.get("originalFileHash"),
            # statistics
            recorded_at=parse_datetime(stats.get("recordedAt")),
            recorded_until=parse_datetime(stats.get("recordedUntil")),
            duration=stats.get("duration"),
            length=stats.get("length"),
            segments=stats.get("segments"),
            num_events=stats.get("num_events"),
            num_measurements=stats.get("num_measurements"),
            num_valid=stats.get("numValid"),
            author_id=user_id_map[track["author"]],
        )

        session.add(new_track)

        comment_ids = track.get("comments") or []
        if comment_ids:
            async for comment in mongo.comments.find({"_id": {"$in": comment_ids}}):
                new_comment = Comment(
                    created_at=parse_datetime(comment.get("createdAt"))
                    or datetime.utcnow(),
                    updated_at=parse_datetime(comment.get("updatedAt"))
                    or datetime.utcnow(),
                    body=comment.get("body"),
                    author_id=user_id_map[comment["author"]],
                )
                new_track.comments.append(new_comment)
                session.add(new_comment)

        track_count += 1

    log.info("Created %s tracks", track_count)

    await session.commit()


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,108 +0,0 @@
#!/usr/bin/env python3

from dataclasses import dataclass
import asyncio
from os.path import basename, splitext
import sys
import logging

import msgpack
import psycopg

from obs.api.app import app
from obs.api.utils import chunk

log = logging.getLogger(__name__)


ROAD_BUFFER = 1000
AREA_BUFFER = 100


@dataclass
class Road:
    way_id: int
    name: str
    zone: str
    directionality: int
    oneway: int
    geometry: bytes


def read_file(filename):
    """
    Reads a file iteratively, yielding objects as they
    appear. Those may be mixed.
    """

    with open(filename, "rb") as f:
        unpacker = msgpack.Unpacker(f)
        try:
            while True:
                type_id, *data = unpacker.unpack()

                if type_id == b"\x01":
                    yield Road(*data)

        except msgpack.OutOfData:
            pass


async def import_osm(connection, filename, import_group=None):
    if import_group is None:
        import_group = splitext(basename(filename))[0]

    # Pass 1: Find IDs only
    road_ids = []
    for item in read_file(filename):
        road_ids.append(item.way_id)

    async with connection.cursor() as cursor:
        log.info("Pass 1: Delete previously imported data")

        log.debug("Delete import group %s", import_group)
        await cursor.execute(
            "DELETE FROM road WHERE import_group = %s", (import_group,)
        )

        log.debug("Delete roads by way_id")
        for ids in chunk(road_ids, 10000):
            await cursor.execute("DELETE FROM road WHERE way_id = ANY(%s)", (ids,))

        # Pass 2: Import
        log.info("Pass 2: Import roads")
        amount = 0
        for items in chunk(read_file(filename), 10000):
            amount += 10000
            log.info(f"...{amount}/{len(road_ids)} ({100*amount/len(road_ids)}%)")
            async with cursor.copy(
                "COPY road (way_id, name, zone, directionality, oneway, geometry, import_group) FROM STDIN"
            ) as copy:
                for item in items:
                    await copy.write_row(
                        (
                            item.way_id,
                            item.name,
                            item.zone,
                            item.directionality,
                            item.oneway,
                            bytes.hex(item.geometry),
                            import_group,
                        )
                    )


async def main():
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    url = app.config.POSTGRES_URL
    url = url.replace("+asyncpg", "")

    async with await psycopg.AsyncConnection.connect(url) as connection:
        for filename in sys.argv[1:]:
            log.debug("Loading file: %s", filename)
            await import_osm(connection, filename)


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,93 +0,0 @@
#!/usr/bin/env python3

"""
This script downloads and/or imports regions for statistical analysis into the
PostGIS database. The regions are sourced from:

* EU countries are covered by
  [NUTS](https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/administrative-units-statistical-units/nuts).
"""

import tempfile
from dataclasses import dataclass
import json
import asyncio
from os.path import basename, splitext
import sys
import logging
from typing import Optional

import aiohttp
import psycopg

from obs.api.app import app
from obs.api.utils import chunk

log = logging.getLogger(__name__)

NUTS_URL = "https://gisco-services.ec.europa.eu/distribution/v2/nuts/geojson/NUTS_RG_01M_2021_3857.geojson"

from pyproj import Transformer

project = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True).transform
from shapely.ops import transform
from shapely.geometry import shape
import shapely.wkb as wkb


async def import_nuts(
    connection, filename=None, level: int = 3, import_group: Optional[str] = None
):
    if import_group is None:
        import_group = f"nuts{level}"

    if filename:
        log.info("Load NUTS from file")
        with open(filename) as f:
            data = json.load(f)
    else:
        log.info("Download NUTS regions from europa.eu")
        async with aiohttp.ClientSession() as session:
            async with session.get(NUTS_URL) as resp:
                data = await resp.json(content_type=None)

    async with connection.cursor() as cursor:
        log.info(
            "Delete previously imported regions with import group %s", import_group
        )
        await cursor.execute(
            "DELETE FROM region WHERE import_group = %s", (import_group,)
        )

        log.info("Import regions")
        async with cursor.copy(
            "COPY region (id, name, geometry, import_group) FROM STDIN"
        ) as copy:
            for feature in data["features"]:
                if feature["properties"]["LEVL_CODE"] == level:
                    geometry = shape(feature["geometry"])
                    # geometry = transform(project, geometry)
                    geometry = wkb.dumps(geometry)
                    geometry = bytes.hex(geometry)
                    await copy.write_row(
                        (
                            feature["properties"]["NUTS_ID"],
                            feature["properties"]["NUTS_NAME"],
                            geometry,
                            import_group,
                        )
                    )


async def main():
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    url = app.config.POSTGRES_URL
    url = url.replace("+asyncpg", "")

    async with await psycopg.AsyncConnection.connect(url) as connection:
        await import_nuts(connection, sys.argv[1] if len(sys.argv) > 1 else None)


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,196 +0,0 @@
#!/usr/bin/env python3
import logging
import asyncio
import tempfile
import re
import os
import glob
from os.path import normpath, abspath, join
from typing import List, Tuple


from sqlalchemy import text
import sqlparse
from openmaptiles.sqltomvt import MvtGenerator

from obs.api.app import app
from obs.api.db import connect_db, make_session

log = logging.getLogger(__name__)


TILE_GENERATOR = normpath(
    abspath(join(app.config.API_ROOT_DIR, "..", "tile-generator"))
)
TILESET_FILE = join(TILE_GENERATOR, "openbikesensor.yaml")

EXTRA_ARGS = [
    # name, type, default
    ("user_id", "integer", "NULL"),
    ("min_time", "timestamp", "NULL"),
    ("max_time", "timestamp", "NULL"),
]


class CustomMvtGenerator(MvtGenerator):
    def generate_sqltomvt_func(self, fname, extra_args: List[Tuple[str, str]]) -> str:
        """
        Creates a SQL function that returns a single bytea value or null. This
        method is overridden to allow for custom arguments in the created function.
        """
        extra_args_types = "".join([f", {a[1]}" for a in extra_args])
        extra_args_definitions = "".join(
            [f", {a[0]} {a[1]} DEFAULT {a[2]}" for a in extra_args]
        )

        return f"""\
DROP FUNCTION IF EXISTS {fname}(integer, integer, integer{extra_args_types});
CREATE FUNCTION {fname}(zoom integer, x integer, y integer{extra_args_definitions})
RETURNS {'TABLE(mvt bytea, key text)' if self.key_column else 'bytea'} AS $$
{self.generate_sql()};
$$ LANGUAGE SQL STABLE CALLED ON NULL INPUT;"""


def parse_pg_url(url=app.config.POSTGRES_URL):
    m = re.match(
        r"^postgresql\+asyncpg://(?P<user>.*):(?P<password>.*)@(?P<host>.*)(:(?P<port>\d+))?/(?P<database>[^/]+)$",
        url,
    )

    return (
        m["user"] or "",
        m["password"] or "",
        m["host"],
        m["port"] or "5432",
        m["database"],
    )


async def main():
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    await prepare_sql_tiles()


async def prepare_sql_tiles():
    with tempfile.TemporaryDirectory() as build_dir:
        await generate_data_yml(build_dir)
        sql_snippets = await generate_sql(build_dir)
        await import_sql(sql_snippets)


async def _run(cmd):
    if isinstance(cmd, list):
        cmd = " ".join(cmd)
    proc = await asyncio.create_subprocess_shell(
        cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
    )

    stdout, stderr = await proc.communicate()

    if proc.returncode != 0:
        log.error(stderr.decode("utf-8"))
        raise RuntimeError("external program failed: %s" % str(cmd))

    return stdout.decode("utf-8")


async def generate_data_yml(build_dir):
    stdout = await _run(
        [
            "python",
            "$(which generate-tm2source)",
            TILESET_FILE,
            *sum(
                zip(
                    ["--user", "--password", "--host", "--port", "--database"],
                    parse_pg_url(),
                ),
                (),
            ),
        ]
    )

    tm2source = join(build_dir, "openbikesensor.tm2source")
    os.makedirs(tm2source, exist_ok=True)

    with open(join(tm2source, "data.yml"), "wt") as f:
        f.write(stdout)


async def generate_sql(build_dir):
    sql_dir = join(build_dir, "sql")

    await _run(f"python $(which generate-sql) {TILESET_FILE!r} --dir {sql_dir!r}")

    sql_snippet_files = [
        *sorted(
            glob.glob(
                join(
                    app.config.API_ROOT_DIR, "src", "openmaptiles-tools", "sql", "*.sql"
                )
            )
        ),
        join(sql_dir, "run_first.sql"),
        *sorted(glob.glob(join(sql_dir, "parallel", "*.sql"))),
        join(sql_dir, "run_last.sql"),
    ]

    sql_snippets = [
        "CREATE EXTENSION IF NOT EXISTS hstore;"
        "CREATE EXTENSION IF NOT EXISTS postgis;"
    ]
    for filename in sql_snippet_files:
        with open(filename, "rt") as f:
            sql_snippets.append(f.read())

    mvt = CustomMvtGenerator(
        tileset=TILESET_FILE,
        postgis_ver="3.0.1",
        zoom="zoom",
        x="x",
        y="y",
        gzip=True,
        test_geometry=False,  # ?
        key_column=True,
    )
    getmvt_sql = mvt.generate_sqltomvt_func("getmvt", EXTRA_ARGS)

    # drop old versions of the function
    sql_snippets.append("DROP FUNCTION IF EXISTS getmvt(integer, integer, integer);")
    sql_snippets.append(getmvt_sql)

    return sql_snippets


async def import_sql(sql_snippets):
    statements = sum(map(sqlparse.split, sql_snippets), [])
    async with connect_db(
        app.config.POSTGRES_URL,
        app.config.POSTGRES_POOL_SIZE,
        app.config.POSTGRES_MAX_OVERFLOW,
    ):
        for i, statement in enumerate(statements):
            clean_statement = sqlparse.format(
                statement,
                truncate_strings=20,
                strip_comments=True,
                keyword_case="upper",
            )

            if not clean_statement:
                continue

            log.debug(
                "Running SQL statement %d of %d (%s...)",
                i + 1,
                len(statements),
                clean_statement[:40],
            )

            async with make_session() as session:
                await session.execute(text(statement))
                await session.commit()


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,46 +0,0 @@
#!/usr/bin/env python3
import argparse
import logging
import asyncio

from obs.api.db import connect_db
from obs.api.app import app
from obs.api.process import process_tracks, process_tracks_loop

log = logging.getLogger(__name__)


async def main():
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    parser = argparse.ArgumentParser(
        description="processes a single track for use in the portal, "
        "using the obs.face algorithms"
    )

    parser.add_argument(
        "--loop-delay",
        action="store",
        type=int,
        default=10,
        help="delay between loops, if no track was found in the queue (polling)",
    )

    parser.add_argument(
        "tracks",
        metavar="ID_OR_SLUG",
        nargs="*",
        help="ID or slug of tracks to process, if not passed, the queue is processed in a loop",
    )

    args = parser.parse_args()

    async with connect_db(app.config.POSTGRES_URL, app.config.POSTGRES_POOL_SIZE, app.config.POSTGRES_MAX_OVERFLOW):
        if args.tracks:
            await process_tracks(args.tracks)
        else:
            await process_tracks_loop(args.loop_delay)


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,30 +0,0 @@
#!/usr/bin/env python3
import logging
import asyncio

from sqlalchemy import text

from obs.api.app import app
from obs.api.db import connect_db, make_session

log = logging.getLogger(__name__)


async def main():
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    await reimport_tracks()


async def reimport_tracks():
    async with connect_db(
        app.config.POSTGRES_URL,
        app.config.POSTGRES_POOL_SIZE,
        app.config.POSTGRES_MAX_OVERFLOW,
    ):
        async with make_session() as session:
            await session.execute(text("UPDATE track SET processing_status = 'queued';"))
            await session.commit()


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,36 +0,0 @@
#!/usr/bin/env python3
import logging
import asyncio
import argparse

from obs.api.db import drop_all, init_models, connect_db
from obs.api.app import app

log = logging.getLogger(__name__)


async def main():
    parser = argparse.ArgumentParser(
        description="drops the whole database, and possibly creates new table schema"
    )

    parser.add_argument(
        "-s",
        "--create-schema",
        action="store_true",
        help="create the schema",
    )

    args = parser.parse_args()

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    async with connect_db(app.config.POSTGRES_URL):
        await drop_all()
        if args.create_schema:
            await init_models()
            log.info("Database initialized.")


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,6 +0,0 @@
#!/usr/bin/env python3

from obs.bin.openbikesensor_transform_osm import main

if __name__ == "__main__":
    main()

@@ -1,32 +0,0 @@
#!/usr/bin/env python3
import asyncio
import logging

log = logging.getLogger(__name__)

from prepare_sql_tiles import prepare_sql_tiles, _run

from import_regions import main as import_nuts

from reimport_tracks import main as reimport_tracks


async def _migrate():
    await _run("alembic upgrade head")


async def main():
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    log.info("Running migrations...")
    await _migrate()
    log.info("Preparing SQL tiles...")
    await prepare_sql_tiles()
    log.info("Importing nuts regions...")
    await import_nuts()
    log.info("Nuts regions imported, scheduling reimport of tracks")
    await reimport_tracks()


if __name__ == "__main__":
    asyncio.run(main())

@@ -1,49 +0,0 @@
###################################################
# Keycloak
###################################################

OBS_KEYCLOAK_URI=login.example.com

# Postgres

OBS_KEYCLOAK_POSTGRES_USER=obs
OBS_KEYCLOAK_POSTGRES_PASSWORD=<<TODO>>
OBS_KEYCLOAK_POSTGRES_DB=obs
OBS_POSTGRES_MAX_OVERFLOW=20
OBS_POSTGRES_POOL_SIZE=40

# KeyCloak

OBS_KEYCLOAK_POSTGRES_HOST=postgres-keycloak
OBS_KEYCLOAK_ADMIN_USER=admin
OBS_KEYCLOAK_ADMIN_PASSWORD=<<TODO>>
OBS_KEYCLOAK_REALM=obs
OBS_KEYCLOAK_PORTAL_REDIRECT_URI=https://portal.example.com/*

###################################################
# Portal
###################################################

OBS_PORTAL_URI=portal.example.com

# Postgres + osm2pgsql

OBS_POSTGRES_HOST=postgres
OBS_POSTGRES_USER=obs
OBS_POSTGRES_PASSWORD=<<TODO>>
OBS_POSTGRES_DB=obs

# Portal

OBS_HOST=0.0.0.0
OBS_PORT=3000
OBS_SECRET=<<TODO>>
OBS_POSTGRES_URL=postgresql+asyncpg://obs:<<TODO>>@postgres/obs
OBS_KEYCLOAK_URL=https://login.example.com/auth/realms/obs/
OBS_KEYCLOAK_CLIENT_ID=portal
OBS_KEYCLOAK_CLIENT_SECRET=<<TODO>>
OBS_DEDICATED_WORKER="True"
OBS_DATA_DIR=/data
OBS_PROXIES_COUNT=1

###################################################

@@ -1,63 +0,0 @@
# Bind address of the server
# HOST = "127.0.0.1"
# PORT = 3000

# Extended log output, but slower
DEBUG = False
VERBOSE = DEBUG
AUTO_RELOAD = DEBUG

# Required to encrypt or sign sessions, cookies, tokens, etc.
# SECRET = "!!!<<<CHANGEME>>>!!!"

# Connection to the database
# POSTGRES_URL = "postgresql+asyncpg://user:pass@host/dbname"
# POSTGRES_POOL_SIZE = 20
# POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE

# URL to the keycloak realm, as reachable by the API service. This is not
# necessarily its publicly reachable URL, keycloak advertises that itself.
# KEYCLOAK_URL = "http://localhost:1234/auth/realms/obs/"

# Auth client credentials
# KEYCLOAK_CLIENT_ID = "portal"
# KEYCLOAK_CLIENT_SECRET = "00000000-0000-0000-0000-000000000000"

# Whether the API should run the worker loop, or a dedicated worker is used
# DEDICATED_WORKER = True

# The root of the frontend. Needed for redirecting after login, and for CORS.
# Set to None if frontend is served by the API.
FRONTEND_URL = None
FRONTEND_HTTPS = True

# Where to find the compiled frontend assets (must include index.html), or None
# to disable serving the frontend.
FRONTEND_DIR = "../frontend/build/"

# Can be an object or a JSON string
FRONTEND_CONFIG = {
    "imprintUrl": "https://example.com/imprint",
    "privacyPolicyUrl": "https://example.com/privacy",
    "mapHome": {"zoom": 6, "longitude": 10.2, "latitude": 51.3},
    "banner": {"text": "This is a test installation.", "style": "warning"},
}

# If the API should serve generated tiles, this is the path where the tiles are
# built. This is an experimental option and probably very inefficient, a proper
# tileserver should be preferred. Set to None to disable.
TILES_FILE = None

# Path overrides:
# API_ROOT_DIR = "??"  # default: api/ inside repository
# DATA_DIR = "??"  # default: $API_ROOT_DIR/..
# PROCESSING_DIR = "??"  # default: DATA_DIR/processing
# PROCESSING_OUTPUT_DIR = "??"  # default: DATA_DIR/processing-output
# TRACKS_DIR = "??"  # default: DATA_DIR/tracks
# OBS_FACE_CACHE_DIR = "??"  # default: DATA_DIR/obs-face-cache

# Additional allowed origins for CORS headers. The FRONTEND_URL is included by
# default. Python list, or whitespace separated string.
ADDITIONAL_CORS_ORIGINS = None

# vim: set ft=python :

@@ -1,22 +0,0 @@
events {}
http {
    proxy_cache_path /data/nginx/cache levels=1:2 keys_zone=STATIC:10m
        inactive=24h max_size=1g;
    server {
        location ~* ^/tiles/\d[012]?/[^?]+$ {
            proxy_pass http://portal:3000;
            proxy_set_header Host $host:3000;
            proxy_buffering on;
            proxy_cache_methods GET HEAD;
            proxy_cache STATIC;
            proxy_cache_valid 200 1d;
            proxy_cache_use_stale error timeout invalid_header updating
                http_500 http_502 http_503 http_504;
        }
        location / {
            proxy_pass http://portal:3000;
            proxy_set_header Host $host:3000;
        }
    }
}

@@ -1,30 +0,0 @@
# https://doc.traefik.io/traefik/v2.4/routing/entrypoints/
[entryPoints]
  [entryPoints.web]
    address = ":80"

    [entryPoints.web.http]
      [entryPoints.web.http.redirections]
        [entryPoints.web.http.redirections.entryPoint]
          to = "websecure"
          scheme = "https"

  [entryPoints.websecure]
    address = ":443"

# Enable API
[api]
  dashboard = false

# Enable docker backend
[providers.docker]
  network = "gateway"

# https://doc.traefik.io/traefik/v2.4/https/acme/#configuration-examples
[certificatesResolvers.leresolver.acme]
  email = "info@example.com"
  storage = "acme/acme.json"

  [certificatesResolvers.leresolver.acme.httpChallenge]
    # used during the challenge
    entryPoint = "web"

@@ -1,150 +0,0 @@
version: '3.5'

networks:
  gateway:
    external: true
    name: gateway
  backend:
    internal: true

services:

  ############################################################
  # Portal
  ############################################################

  postgres:
    image: "openmaptiles/postgis:7.0"
    environment:
      - POSTGRES_DB=${OBS_POSTGRES_DB}
      - POSTGRES_USER=${OBS_POSTGRES_USER}
      - POSTGRES_PASSWORD=${OBS_POSTGRES_PASSWORD}
    volumes:
      - ./data/postgres/data:/var/lib/postgresql/data
    networks:
      - backend

  portal:
    image: openbikesensor-portal
    build:
      context: ./source
    env_file: .env
    volumes:
      - ./data/api-data:${OBS_DATA_DIR}
      - ./config/config.py:/opt/obs/api/config.py
      - ./data/tiles/:/tiles
      - ./data/pbf/:/pbf
    restart: on-failure
    depends_on:
      - traefik
      - postgres
      - worker
      # - keycloak
    labels:
      - traefik.http.routers.portal.rule=Host(`${OBS_PORTAL_URI}`)
      - traefik.http.routers.portal.entrypoints=websecure
      - traefik.http.routers.portal.tls=true
      - traefik.http.routers.portal.tls.certresolver=leresolver
      - traefik.docker.network=gateway
      # - traefik.http.services.portal.loadbalancer.server.port=3000
    networks:
      - gateway
      - backend

  worker:
    image: openbikesensor-portal
    build:
      context: ./source
    env_file: .env
    volumes:
      - ./data/api-data:${OBS_DATA_DIR}
      - ./config/config.py:/opt/obs/api/config.py
    restart: on-failure
    depends_on:
      - postgres
    networks:
      - backend
    command:
      - python
      - tools/process_track.py

  ############################################################
  # Traefik
  ############################################################

  traefik:
    image: traefik:2.4.8
    restart: always
    ports:
      - "80:80"
      - "443:443"
      # The Web UI (enabled by [api] in traefik.toml)
      # - "8080:8080"

    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ./config/traefik.toml:/traefik.toml
      - ./config/usersfile:/usersfile
      - ./config/acme:/acme

    networks:
      - gateway

    labels:
      # global redirect from http to https
      - "traefik.http.routers.http-catchall.rule=hostregexp(`{host:.+}`)"
      - "traefik.http.routers.http-catchall.entrypoints=web"
      # Define middlewares to be used
      - "traefik.http.routers.http-catchall.middlewares=redirect-http-to-https"
      # Configure middlewares
      - "traefik.http.middlewares.redirect-http-to-https.redirectscheme.scheme=https"

  ############################################################
  # Keycloak
  ############################################################

  keycloak:
    image: jboss/keycloak:15.1.0
    restart: always
    networks:
      - gateway
      - backend
    env_file: .env
    environment:
      # database
      - DB_VENDOR=postgres
      - DB_ADDR=${OBS_KEYCLOAK_POSTGRES_HOST}
      - DB_DATABASE=${OBS_KEYCLOAK_POSTGRES_DB}
      - DB_USER=${OBS_KEYCLOAK_POSTGRES_USER}
      - DB_PASSWORD=${OBS_KEYCLOAK_POSTGRES_PASSWORD}
      # admin user
      - KEYCLOAK_USER=${OBS_KEYCLOAK_ADMIN_USER}
      - KEYCLOAK_PASSWORD=${OBS_KEYCLOAK_ADMIN_PASSWORD}
      - PROXY_ADDRESS_FORWARDING=true
      - OBS_KEYCLOAK_PORTAL_REDIRECT_URI=${OBS_KEYCLOAK_PORTAL_REDIRECT_URI}
    depends_on:
      - traefik
      - postgres-keycloak
    labels:
      - "traefik.http.routers.login.rule=Host(`${OBS_KEYCLOAK_URI}`)"
      - "traefik.http.routers.login.entrypoints=websecure"
      - "traefik.http.routers.login.tls=true"
      - "traefik.http.routers.login.tls.certresolver=leresolver"
      # This container runs on two ports (8080/tcp, 8443/tcp). Tell traefik which one to use.
      - "traefik.http.services.login.loadbalancer.server.port=8080"
      # This container runs on more than one network. Tell traefik which one to use.
      - "traefik.docker.network=gateway"

  postgres-keycloak:
    image: postgres:15
    restart: always
    networks:
      - backend
    volumes:
      - ./data/postgres-keycloak:/var/lib/postgresql/data
    environment:
      - POSTGRES_DB=${OBS_KEYCLOAK_POSTGRES_DB}
      - POSTGRES_USER=${OBS_KEYCLOAK_POSTGRES_USER}
      - POSTGRES_PASSWORD=${OBS_KEYCLOAK_POSTGRES_PASSWORD}
    labels:
      - traefik.enable=false

@@ -1,107 +1,33 @@
-# This docker-compose file is intended for development use. You can simply run
-# `docker-compose up -d` in the repository and it should build and run all
-# required parts of the application. See README.md for details.
-#
-# For a production docker setup, please check the corresponding documentation.
-
 version: '3'
 
 services:
-  postgres:
-    image: "openmaptiles/postgis:7.0"
-    environment:
-      POSTGRES_USER: obs
-      POSTGRES_PASSWORD: obs
-      POSTGRES_DB: obs
-    ports:
-      - '5432:5432'
+  mongo:
+    image: mongo
+    tty: true
     volumes:
-      - ./local/postgres/data:/var/lib/postgresql/data
+      - ./local/mongo:/data/db
+    ports:
+      - '27017:27017'
+    restart: on-failure
 
   api:
-    image: openbikesensor-api
-    tty: true
+    image: obs-api
     build:
-      context: ./api/
-      dockerfile: Dockerfile
+      context: .
+      dockerfile: ./Dockerfile
     volumes:
-      - ./api/obs:/opt/obs/api/obs
-      - ./api/scripts/obs:/opt/obs/scripts/obs
-      - ./api/tools:/opt/obs/api/tools
-      - ./api/config.dev.py:/opt/obs/api/config.py
-      - ./api/config.overrides.py:/opt/obs/api/config.overrides.py
-      - ./frontend/build:/opt/obs/frontend/build
-      - ./tile-generator:/opt/obs/tile-generator
+      - ./src:/opt/obsAPI/src
       - ./local/api-data:/data
-      - ./tile-generator/data/:/tiles
-      - ./api/migrations:/opt/obs/api/migrations
-      - ./api/alembic.ini:/opt/obs/api/alembic.ini
-      - ./local/pbf:/pbf
-      - ./local/obsdata:/obsdata
-    depends_on:
-      - postgres
-      - keycloak
+    environment:
+      - PORT=3000
+      - MONGODB_URL=mongodb://mongo/obsTest
+      - DATA_DIR=/data
+    links:
+      - mongo
     ports:
       - '3000:3000'
     restart: on-failure
-    command:
-      - openbikesensor-api
-
-  worker:
-    image: openbikesensor-api
-    tty: true
-    build:
-      context: ./api/
-      dockerfile: Dockerfile
-    volumes:
-      - ./api/obs:/opt/obs/api/obs
-      - ./api/scripts/obs:/opt/obs/scripts/obs
-      - ./api/tools:/opt/obs/api/tools
-      - ./api/config.dev.py:/opt/obs/api/config.py
-      - ./api/config.overrides.py:/opt/obs/api/config.overrides.py
-      - ./local/api-data:/data
-    depends_on:
-      - postgres
-      - keycloak
-    restart: on-failure
-    entrypoint:
-      - python
-      - tools/process_track.py
-
-  frontend:
-    image: openbikesensor-frontend
-    build:
-      context: ./frontend
-    volumes:
-      - ./frontend/src:/opt/obs/frontend/src
-      - ./frontend/public:/opt/obs/frontend/public
-      - ./frontend/tsconfig.json:/opt/obs/frontend/tsconfig.json
-      - ./frontend/package.json:/opt/obs/frontend/package.json
-      - ./frontend/webpack.config.js:/opt/obs/frontend/webpack.config.js
-    depends_on:
-      - api
-    environment:
-      # used for proxy only
-      - API_URL=http://api:3000/
-    ports:
-      - '3001:3001'
-    restart: on-failure
     command:
       - npm
-      - start
+      - run
+      - dev
-  keycloak:
-    image: jboss/keycloak
-    ports:
-      - 3003:8080
-    depends_on:
-      - postgres
-    environment:
-      KEYCLOAK_USER: admin
-      KEYCLOAK_PASSWORD: admin
-      KEYCLOAK_FRONTEND_URL: http://localhost:3003/auth/
-      DB_VENDOR: postgres
-      DB_ADDR: postgres
-      DB_DATABASE: obs
-      DB_USER: obs
-      DB_PASSWORD: obs

File diff suppressed because one or more lines are too long
Binary file not shown.
Before: 234 KiB
@@ -1,196 +0,0 @@
# Portal Architecture

Here I try to describe how the portal works in general, and which parts are
needed and developed in this repository. There is some variation possible for a
full setup, for example the data flow of the rendered data tiles can be
different. This article describes the standard production setup.

## General overview of the components

* **api**: A python process using Sanic to provide a HTTP interface. Everything
  revolves around this.
* **postgresql**: A database instance.
* **frontend**: A React based web application.
* **worker**: Optional, a dedicated process for processing of tracks.
* **keycloak**: An installation of [Keycloak](https://www.keycloak.org/) which
  stores user credentials and provides a secure login, registration, password
  recovery, and more.
* **tools**: Scripts to run as an operator of the application for various setup
  and maintenance tasks.

![Architecture Overview](./architecture-portal.png)

## PostgreSQL

This is a database instance running the modified postgresql docker image
`openmaptiles/postgis:6.0`. This includes the extensions `postgis` and
`hstore`, among others, used for geospatial data processing.

You can try to use an external postgresql installation instead of the docker
image, however, a lot of prerequisites have to be installed into that database.

You can check out how the docker image is generated in [its
repository](https://github.com/openmaptiles/openmaptiles-tools/tree/master/docker/postgis)
and try to replicate that setup. However, this is generally not supported by
the developers of the OpenBikeSensor portal.

## API

The API is written in Python 3 with [Sanic](https://sanicframework.org/) for
HTTP handling. It supports Python 3.6+ and comes with a list of required
dependencies. One of those is `openmaptiles-tools`, which is installed from
git (see `api/requirements.txt`). The API also depends on the `obs.face`
package, which is included as a submodule and developed [in its own
repository](https://github.com/openbikesensor/OpenBikeSensor-Scripts).

The API has the following tasks:

* Handle user authentication through keycloak
* Receive track uploads and serve track data and statistics via a RESTful API
* Process received tracks (unless using a dedicated worker, see below)
* Publish vector tiles directly from the database (if installed and configured)

### Authentication

The frontend can redirect to `$API_URL/login` to trigger a login. The API
negotiates a session and redirects the user agent to the keycloak instance.
Upon successful authentication, it receives user data and generates a user
object (or discovers the existing one) for the authenticated keycloak user.

A session is instantiated and kept alive through a session cookie. The API
currently stores session data in memory, so scaling the API process to more
replicas is not yet supported.

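In plain OpenID Connect terms, the redirect to Keycloak looks roughly like the
following sketch. This is not the portal's actual handler (the API uses the
`oic` client library for this); the endpoint path and parameter names are the
generic Keycloak/OIDC conventions, and the URL and client id are placeholders
from the example configuration.

```python
# Rough sketch only: building the authorization redirect in plain OIDC terms.
# The real implementation uses the `oic` library; names below are generic.
from urllib.parse import urlencode

KEYCLOAK_URL = "https://login.example.com/auth/realms/obs/"  # KEYCLOAK_URL from config
CLIENT_ID = "portal"  # KEYCLOAK_CLIENT_ID from config

def authorization_redirect(redirect_uri: str, state: str) -> str:
    # The user agent is sent to Keycloak's authorization endpoint; Keycloak
    # later redirects back with a code that the API exchanges for user data.
    params = {
        "client_id": CLIENT_ID,
        "response_type": "code",
        "scope": "openid profile email",
        "redirect_uri": redirect_uri,
        "state": state,
    }
    return KEYCLOAK_URL + "protocol/openid-connect/auth?" + urlencode(params)
```
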
### RESTful API

There is not a lot to talk about here. The routes are pretty self explanatory,
please refer to the code for the current API. Consider it unstable as of now.

There are routes for general info (version number), track and recording
statistics (by user and time range), user management and track management.

### Track processing

If a dedicated worker is not used, the API runs the same logic as the worker
(see below), in an asyncio "background" task. It is however *not* threaded, so
it may block API requests while processing tracks. This is the reason why a
dedicated worker is recommended, though for a simple or low traffic setup, it
is definitely not required. Configure whether you're using a dedicated worker
through the `DEDICATED_WORKER` api config flag.

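A minimal sketch of that wiring, assuming the `process_tracks_loop` coroutine
from `obs.api.process` (the same one the worker tool uses) and Sanic's generic
`add_task` mechanism; the actual startup code in the API may look different:

```python
# Sketch: run the worker loop inside the API process when no dedicated worker
# is configured. Not the actual startup code, just the idea described above.
from obs.api.app import app
from obs.api.process import process_tracks_loop

if not app.config.get("DEDICATED_WORKER", True):
    # Runs as an asyncio task next to the request handlers, which is why long
    # track processing can delay API responses.
    app.add_task(process_tracks_loop(10))
```
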
### Publish vector tiles

Thanks to the [OpenMapTiles](https://openmaptiles.org/) project, we're able to
generate vector tiles from live data, directly in the PostGIS database. The
general workflow is as follows:

* We have defined a schema compatible with the `openmaptiles-tools` collection
  that defines how to collect geospatial data from the postgresql database.
  This depends on its `postgis` extension for computing geospatial information
  (e.g. intersecting with a bounding box). This schema consists of a number of
  layers, which contain SQL code that is used to produce the layer's geometries
  and their attached properties.
* The `tools/prepare_sql_tiles.py` tool calls the respective scripts from
  `openmaptiles-tools`, to compile all required SQL code into functions,
  generate the "destination" function `getmvt` for generating a vector tile,
  and store these [User-Defined
  Functions](https://www.postgresql.org/docs/current/xfunc.html) in the
  database.
* When a tile is requested from the Map Renderer through
  `/tiles/{z}/{x}/{y}.pbf`, the API calls `getmvt` to have postgresql generate
  the tile's content on the fly, and serves the result through HTTP.

For all of this to work, the `openmaptiles-tools` must be installed, and the
database has to be prepared with the functions once, by use of the
`api/tools/prepare_sql_tiles.py` script. That script should be rerun every time the
schema changes, but doesn't need to be used if the data in the database was
edited, e.g. by uploading and processing a new track.

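As an illustration only (not the portal's actual route code), answering a tile
request then amounts to something like the sketch below, assuming the `getmvt`
function created by `prepare_sql_tiles.py` and an async SQLAlchemy session as
used elsewhere in the API:

```python
# Sketch of the /tiles/{z}/{x}/{y}.pbf idea; assumes getmvt() exists in the
# database and was generated with gzip=True, so its output is pre-compressed.
from sanic import response
from sqlalchemy import text

async def serve_tile(session, zoom: int, x: int, y: int):
    row = (
        await session.execute(
            text("SELECT mvt FROM getmvt(:zoom, :x, :y)"),
            {"zoom": zoom, "x": x, "y": y},
        )
    ).first()
    if not row or row[0] is None:
        return response.empty(status=404)
    return response.raw(
        row[0],
        content_type="application/vnd.mapbox-vector-tile",
        headers={"Content-Encoding": "gzip"},  # payload is already gzipped
    )
```
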
## Frontend

The frontend is written in React, using Semantic UI
([semantic-ui-react](https://react.semantic-ui.com/) and
[semantic-ui-less](https://www.npmjs.com/package/semantic-ui-less)), compiled
with Webpack. In a simple production setup, the frontend is compiled statically
and served by the API.

The `openbikesensor-portal` image (`Dockerfile` in repo root) performs the
build step and stores the compiled bundle and assets in
`/opt/obs/frontend/build`. The API process can simply serve the files from there.

This is done with a catchall route in `obs.api.routes.frontend`, which
determines whether to serve the `index.html` or an asset file. This ensures
that deep URLs in the frontend receive the index file, as frontend routing is
done in the JavaScript code by `react-router`.

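Conceptually the catchall works like the following simplified sketch; the real
route in `obs.api.routes.frontend` handles configuration and edge cases in
more detail:

```python
# Simplified sketch of the catchall idea: serve a real asset if it exists,
# otherwise fall back to index.html so react-router can resolve the URL.
from os.path import isfile, join, normpath
from sanic import Sanic, response

app = Sanic("frontend-sketch")
FRONTEND_DIR = "../frontend/build/"  # FRONTEND_DIR from config

@app.get("/<path:path>")
async def frontend_catchall(request, path):
    candidate = normpath(join(FRONTEND_DIR, path))
    # Only serve files that really live inside the frontend build directory.
    if candidate.startswith(normpath(FRONTEND_DIR)) and isfile(candidate):
        return await response.file(candidate)
    return await response.file(join(FRONTEND_DIR, "index.html"))
```
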
In a development setup the frontend is served by a hot reloading development
|
|
||||||
server (`webpack-dev-server`), compiling into memory and updating as files
|
|
||||||
change. The frontend is then configured to communicate with the API on a
|
|
||||||
different URL (usually a different port on localhost), which the API has to
|
|
||||||
allow with CORS. It is configured to do so with the `FRONTEND_URL` and
|
|
||||||
`ADDITIONAL_CORS_ORIGINS` config options.
|
|
||||||
|
|
||||||
### Maps in the Frontend
|
|
||||||
|
|
||||||
The map data is visualized using
|
|
||||||
[maplibre-gl](https://github.com/MapLibre/maplibre-gl-js), a JavaScript library
|
|
||||||
for rendering (vector) maps in the browser.
|
|
||||||
|
|
||||||
The frontend combines a basemap (for example
|
|
||||||
[Positron](https://github.com/openmaptiles/positron-gl-style) with vector tiles
|
|
||||||
from Mapbox or from a custom OpenMapTiles schema vector tile source) with the
|
|
||||||
overlay data and styles. The overlay data is generated by the API
|
|
||||||
|
|
||||||
|
|
||||||
## Worker
|
|
||||||
|
|
||||||
The Worker's job is to import the uploaded track files. The track files are
|
|
||||||
stored as-is in the filesystem, and will usually follow the [OpenBikeSensor CSV
|
|
||||||
Format](https://github.com/openbikesensor/OpenBikeSensorFirmware/blob/master/docs/software/firmware/csv_format.md),
|
|
||||||
as they are generated by the measuring device.
|
|
||||||
|
|
||||||
The worker imports and uses the
|
|
||||||
[`obs.face`](https://github.com/openbikesensor/OpenBikeSensor-Scripts) scripts
|
|
||||||
to transform the data and extract the relevant events. Those are written into
|
|
||||||
the PostgreSQL database, such that it is easy to do statistics on them and
|
|
||||||
generate vector tiles with SQL code (see "Publish vector tiles" above).
|
|
||||||
|
|
||||||
|
|
||||||
The worker determines in a loop which track to process by looking for the
oldest unprocessed track in the database, i.e. an entry in the `track` table
with the column `processing_status` set to `"queued"`. After processing the
track, the loop restarts after a short delay. If the worker has not found any
track to process, the delay is longer (typically 10s), to generate less load on
the database and CPU.
|
|
||||||
|
|
||||||
This means that after a track is uploaded, its processing starts within 0-10
seconds. Bulk-reprocessing is possible by simply altering the
`processing_status` of all tracks you want to reprocess in the database
directly, e.g. using the `psql` command line client, for example:
|
|
||||||
|
|
||||||
```postgresql
UPDATE track SET processing_status = 'queued' WHERE author_id = 100;
```
|
|
||||||
|
|
||||||
The worker script is
|
|
||||||
[`api/tools/process_track.py`](../api/tools/process_track.py). It has its own
|
|
||||||
command line parser with `--help` option, and uses the `config.py` from the API
|
|
||||||
for determining the connection to the PostgreSQL database.
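
The polling logic described above boils down to something like the following
sketch. It is heavily simplified; the real code lives in
`api/tools/process_track.py`, and the two helper functions here are
placeholders, not actual API functions:

```python
import asyncio

IDLE_DELAY = 10  # seconds to wait when no queued track was found
BUSY_DELAY = 1   # short pause between two processed tracks

async def get_next_queued_track():
    """Placeholder: fetch the oldest track with processing_status = 'queued'."""
    return None

async def process_track(track):
    """Placeholder: run the obs.face based import for a single track."""

async def worker_loop():
    while True:
        track = await get_next_queued_track()
        if track is None:
            # nothing to do -- wait longer to keep database and CPU load low
            await asyncio.sleep(IDLE_DELAY)
            continue
        await process_track(track)
        await asyncio.sleep(BUSY_DELAY)

if __name__ == "__main__":
    asyncio.run(worker_loop())
```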
|
|
||||||
|
|
||||||
|
|
||||||
## Keycloak
|
|
||||||
|
|
||||||
The use of Keycloak as an authentication provider simplifies the code of the
portal immensely and lets us focus on actual features instead of authentication
and its security.
|
|
||||||
|
|
||||||
The portal might be compatible with other OpenID Connect providers, but only
the use of Keycloak is tested and documented. You can try to integrate with a
different provider -- if changes to the code are needed for this, please let us
know and/or create a Pull Request to share them and make the software better!
|
|
||||||
|
|
||||||
The Keycloak configuration is rather straightforward, and it is described
briefly for a testing setup in [README.md](../README.md).
|
|
||||||
|
|
||||||
For the full, secure setup, make sure to reference the Keycloak documentation
|
|
||||||
at <https://www.keycloak.org/documentation>.
|
|
|
@ -1,77 +0,0 @@
|
||||||
# Licenses
|
|
||||||
|
|
||||||
The world of software licenses is a confusing one, and you should be aware of a
|
|
||||||
few licenses that apply to you if you use this software and its dependencies,
|
|
||||||
or the data generated with it.
|
|
||||||
|
|
||||||
**Disclaimer:** This document is just an overview of things to look out for,
|
|
||||||
and no legal advice. It may even be incorrect or incomplete. Do your own
|
|
||||||
research or consult a professional.
|
|
||||||
|
|
||||||
#### Glossary
|
|
||||||
|
|
||||||
* OSM: [OpenStreetMap](https://openstreetmap.org)
|
|
||||||
* OBS: [OpenBikeSensor](https://openbikesensor.org)
|
|
||||||
* OMT: [OpenMapTiles](https://openmaptiles.org)
|
|
||||||
|
|
||||||
## OpenBikeSensor Portal
|
|
||||||
|
|
||||||
Our own software (the Python packages `obs.face` and `obs.api`, the React
frontend, the accompanying documentation, etc.) is licensed as LGPL-3.0. You
will find the respective license files in the repositories.
|
|
||||||
|
|
||||||
## OpenStreetMap data
|
|
||||||
|
|
||||||
The moment you start importing OpenStreetMap (OSM) data into your system, its
|
|
||||||
license applies to everything you derive from it. This usually means you'll
|
|
||||||
have to visibly credit the OpenStreetMap contributors. Details can be found
|
|
||||||
here: <https://www.openstreetmap.org/copyright>.
|
|
||||||
|
|
||||||
## OpenMapTiles project
|
|
||||||
|
|
||||||
The OpenMapTiles (OMT) project has a confusing multi-license setup, with different
|
|
||||||
licenses covering the code, the schema, and the derived or generated data.
|
|
||||||
|
|
||||||
In any case, we're using all of that, so make sure to credit the project as
required by their license when using the data in a different context. The
frontend is set up to show the appropriate attributions; you should do the same
when building a different application with the same data.
|
|
||||||
|
|
||||||
## Vector tiles from the API
|
|
||||||
|
|
||||||
These are generated using OSM data *and* OMT tools. Both have to be credited.
|
|
||||||
Thanks to OSM being distributed under [ODbL
|
|
||||||
1.0](https://opendatacommons.org/licenses/odbl/), you will have to distribute
|
|
||||||
this combined data under the same license.
|
|
||||||
|
|
||||||
## OBS-only data exports
|
|
||||||
|
|
||||||
As of the writing of this document, there is no implemented export of OBS data
|
|
||||||
(not including OSM data) from the API. We will at some point generate datasets
|
|
||||||
of OBS data for use with other tools or to merge into a pool database, and
|
|
||||||
these datasets will not contain OSM-licensed data, so they will have their own
|
|
||||||
license. The portal software will not dictate the license, so as a portal
|
|
||||||
operator, you will have to decide this for your platform, and obtain consent
|
|
||||||
from your users to publish their uploaded data or derivatives thereof under
|
|
||||||
your chosen license.
|
|
||||||
|
|
||||||
The OBS community will at some point decide on an open license for the data
|
|
||||||
generated by the community-run portal instance(s) and pool servers, and will
|
|
||||||
recommend the same license to all portal operators for interoperability
|
|
||||||
reasons.
|
|
||||||
|
|
||||||
|
|
||||||
## Your basemap provider
|
|
||||||
|
|
||||||
You have to figure out who you have to credit for the basemap you are using,
|
|
||||||
regardless of whether you use a provider or host them yourself. In many cases,
|
|
||||||
you will need to credit at least OpenStreetMap (for the input data) and
|
|
||||||
OpenMapTiles (for the data scheme and tools used). If using a commercial
|
|
||||||
provider (such as Mapbox or MapTiler), read their TOS.
|
|
||||||
|
|
||||||
## Code dependencies
|
|
||||||
|
|
||||||
Please check the licenses of our code dependencies to figure out whether they
|
|
||||||
restrict what you can do with them. See `frontend/package.json` and
|
|
||||||
`api/requirements.txt` for which dependencies are required for installation and
|
|
||||||
operation.
|
|
|
@ -1,103 +0,0 @@
|
||||||
# Importing OpenStreetMap data
|
|
||||||
|
|
||||||
The application requires a lot of data from OpenStreetMap to work.
|
|
||||||
|
|
||||||
The required information is stored in the PostgreSQL database and used when
|
|
||||||
processing tracks, as well as for vector tile generation. The process applies
|
|
||||||
to both development and production setups. For development, you should choose a
|
|
||||||
small area for testing, such as your local county or city, to keep the amount
|
|
||||||
of data small. For production use you have to import the whole region you are
|
|
||||||
serving.
|
|
||||||
|
|
||||||
## General pipeline overview
|
|
||||||
|
|
||||||
1. Download OpenStreetMap data as one or more `.osm.pbf` files.
|
|
||||||
2. Transform this data to generate geometry data for all roads and regions, so
|
|
||||||
we don't need to look up nodes separately. This step requires a lot of CPU
|
|
||||||
and memory, so it can be done "offline" on a high power machine.
|
|
||||||
3. Import the transformed data into the PostgreSQL/PostGIS database.
|
|
||||||
|
|
||||||
## Community hosted transformed data
|
|
||||||
|
|
||||||
Since the first two steps are the same for everybody, the community will soon
|
|
||||||
provide a service where relatively up-to-date transformed data can be
|
|
||||||
downloaded for direct import. Stay tuned.
|
|
||||||
|
|
||||||
## Download data
|
|
||||||
|
|
||||||
[GeoFabrik](https://download.geofabrik.de) kindly hosts extracts of the
OpenStreetMap planet by region. Download all regions you're interested in from
there in `.osm.pbf` format, with the tool of your choice, e.g.:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
wget -P local/pbf/ https://download.geofabrik.de/europe/germany/baden-wuerttemberg-latest.osm.pbf
|
|
||||||
```
|
|
||||||
|
|
||||||
## Transform data
|
|
||||||
|
|
||||||
To transform downloaded data, you can either use the docker image from a
|
|
||||||
development or production environment, or locally install the API into your
|
|
||||||
python environment. Then run the `api/tools/transform_osm.py` script on the data.
|
|
||||||
|
|
||||||
```bash
|
|
||||||
api/tools/transform_osm.py baden-wuerttemberg-latest.osm.pbf baden-wuerttemberg-latest.msgpack
|
|
||||||
```
|
|
||||||
|
|
||||||
In dockerized setups, make sure to mount your data somewhere in the container
|
|
||||||
and also mount a directory where the result can be written. The development
|
|
||||||
setup takes care of this, so you can use:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm api tools/transform_osm.py \
|
|
||||||
/pbf/baden-wuerttemberg-latest.osm.pbf /obsdata/baden-wuerttemberg-latest.msgpack
|
|
||||||
```
|
|
||||||
|
|
||||||
Repeat this command for every file you want to transform.
|
|
||||||
|
|
||||||
## Import transformed data
|
|
||||||
|
|
||||||
The command for importing looks like this:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
api/tools/import_osm.py baden-wuerttemberg-latest.msgpack
|
|
||||||
```
|
|
||||||
|
|
||||||
This tool reads your application config from `config.py`, so set that up first
|
|
||||||
as if you were setting up your application.
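
A minimal, hypothetical `config.py` for this purpose could look like the
following; the option name `POSTGRES_URL` is the one used elsewhere in these
docs, while the DSN itself is a placeholder you need to adapt:

```python
# Placeholder connection string -- adjust user, password, host and database
# to match your setup before running the import.
POSTGRES_URL = "postgresql+asyncpg://obs:obs@localhost/obs"
```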
|
|
||||||
|
|
||||||
In dockerized setups, make sure to mount your data somewhere in the container.
|
|
||||||
Again, the development setup takes care of this, so you can use:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm api tools/import_osm.py \
|
|
||||||
/obsdata/baden-wuerttemberg-latest.msgpack
|
|
||||||
```
|
|
||||||
|
|
||||||
The import process should take a few seconds to minutes, depending on the area
size. You can run the process multiple times, with the same or different area
files, to import or update the data. You can update only one region and leave
the others as they are, or add more filenames to the command line to
bulk-import data.
|
|
||||||
|
|
||||||
## How this works
|
|
||||||
|
|
||||||
* The transformation is done with a python script that uses
|
|
||||||
[pyosmium](https://osmcode.org/pyosmium/) to read the `.osm.pbf` file. This
|
|
||||||
script then filters the data for only the required objects (such as road
|
|
||||||
segments and administrative areas), and extracts the interesting information
|
|
||||||
from those objects.
|
|
||||||
* The node geolocations are looked up to generate a geometry for each object.
|
|
||||||
This requires a lot of memory to run efficiently.
|
|
||||||
* The geometry is projected to [Web Mercator](https://epsg.io/3857) in this
  step to avoid continuous transformation when tiles are generated later. Most
  operations will work fine in this projection. Projection is done with the
  [pyproj](https://pypi.org/project/pyproj/) library.
|
|
||||||
* The output is written to a binary file in a very simple format using
  [msgpack](https://github.com/msgpack/msgpack-python), which is far more
  efficient than (Geo-)JSON, for example. This format is streamable, so the
  generated file never has to be held in memory as a whole while writing or
  reading.
|
|
||||||
* The import script reads the msgpack file and sends it to the database using
  [psycopg](https://www.psycopg.org/). This is done because it supports
  PostgreSQL's `COPY FROM` statement, which enables much faster writes to the
  database than a traditional `INSERT VALUES`. The file is streamed directly
  to the database, so it is never read into memory (see the sketch below).
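
As a rough illustration of that streaming approach (not the actual
`import_osm.py` code; the table and column names are invented for the example):

```python
# Stream records from a msgpack file into PostgreSQL via COPY, so neither
# the whole file nor the whole result set is ever held in memory.
import msgpack
import psycopg

def stream_import(path: str, dsn: str) -> None:
    with psycopg.connect(dsn) as conn:
        with conn.cursor() as cur:
            # "road" and its columns are made-up names for this sketch
            with cur.copy("COPY road (way_id, name) FROM STDIN") as copy:
                with open(path, "rb") as f:
                    for way_id, name in msgpack.Unpacker(f, raw=False):
                        copy.write_row((way_id, name))
```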
|
|
|
@ -1,414 +0,0 @@
|
||||||
# Deploying an OpenBikeSensor Portal with Docker
|
|
||||||
|
|
||||||
## Introduction
|
|
||||||
|
|
||||||
The main idea of this document is to provide an easy docker-based
production-ready setup of the OpenBikeSensor portal. It uses [the Traefik
proxy](https://doc.traefik.io/traefik/) as a reverse proxy, which listens on
ports 80 and 443. Based on some labels, Traefik routes the domains to the
corresponding docker containers.
|
|
||||||
|
|
||||||
## Requirements
|
|
||||||
|
|
||||||
This guide requires a Linux system with `docker` and `docker-compose` installed.
Ensure that your system is up to date.
|
|
||||||
|
|
||||||
> TODO
|
|
||||||
|
|
||||||
```bash
|
|
||||||
apt install docker.io docker-compose pwgen
|
|
||||||
```
|
|
||||||
|
|
||||||
## Before Getting Started
|
|
||||||
|
|
||||||
The example configurations assume two domains, which point to the server's IP
address. This documentation uses `portal.example.com` and `login.example.com`.
The API is hosted at `https://portal.example.com/api`, while the main frontend
is reachable at the domain root.
|
|
||||||
|
|
||||||
## Setup instructions
|
|
||||||
|
|
||||||
First of all, log in to your system via SSH.
|
|
||||||
|
|
||||||
### Create working directory
|
|
||||||
|
|
||||||
Create a folder somewhere on your system; in this guide we use
`/opt/openbikesensor`:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
mkdir /opt/openbikesensor
|
|
||||||
```
|
|
||||||
|
|
||||||
### Clone the repository
|
|
||||||
|
|
||||||
Clone the repository to `/opt/openbikesensor/`:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
git clone --recursive https://github.com/openbikesensor/portal source/
|
|
||||||
# If you accidentally cloned without --recursive, fix it by running:
|
|
||||||
# git submodule update --init --recursive
|
|
||||||
```
|
|
||||||
|
|
||||||
### Copy predefined configuration files
|
|
||||||
|
|
||||||
```bash
|
|
||||||
mkdir -p /opt/openbikesensor/config
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
cp -r source/deployment/config source/deployment/docker-compose.yaml source/deployment/.env .
|
|
||||||
```
|
|
||||||
|
|
||||||
### Create a Docker network
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker network create gateway
|
|
||||||
```
|
|
||||||
|
|
||||||
### Traefik
|
|
||||||
|
|
||||||
#### Configure `traefik.toml`
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
nano config/traefik.toml
|
|
||||||
```
|
|
||||||
|
|
||||||
Configure your email in the `config/traefik.toml`. This email is used by
|
|
||||||
*Let's Encrypt* to send you some emails regarding your certificates.
|
|
||||||
|
|
||||||
#### Start Traefik
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
docker-compose up -d traefik
|
|
||||||
docker-compose logs -f traefik
|
|
||||||
```
|
|
||||||
|
|
||||||
> traefik_1 | time="2022-01-03T13:02:36Z" level=info msg="Configuration loaded from file: /traefik.toml"
|
|
||||||
|
|
||||||
### Generate passwords
|
|
||||||
|
|
||||||
Generate three passwords, for example with `pwgen`:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pwgen -n 20
|
|
||||||
```
|
|
||||||
|
|
||||||
They will be used in the next steps.
|
|
||||||
|
|
||||||
### KeyCloak
|
|
||||||
|
|
||||||
#### Configure `.env`
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
nano .env
|
|
||||||
```
|
|
||||||
|
|
||||||
Configure:
|
|
||||||
* `OBS_KEYCLOAK_URI`:
|
|
||||||
* The subdomain of your keycloak
|
|
||||||
* `OBS_KEYCLOAK_POSTGRES_PASSWORD`
|
|
||||||
* One of the generated passwords for the KeyCloak-postgres
|
|
||||||
* `OBS_KEYCLOAK_ADMIN_PASSWORD`:
|
|
||||||
* One of the generated passwords for the KeyCloak-admin
|
|
||||||
* `OBS_KEYCLOAK_PORTAL_REDIRECT_URI`:
  * The Redirect URI, e.g. the subdomain of your portal (ensure it ends with `/*`)
|
|
||||||
|
|
||||||
#### Start KeyCloak
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose up -d keycloak
|
|
||||||
docker-compose logs -f keycloak
|
|
||||||
```
|
|
||||||
|
|
||||||
Wait until postgres and keycloak are started:
|
|
||||||
|
|
||||||
> keycloak_1 | 13:08:55,558 INFO [org.jboss.as] (Controller Boot Thread) WFLYSRV0051: Admin console listening on http://127.0.0.1:9990
|
|
||||||
|
|
||||||
Open:
|
|
||||||
|
|
||||||
* https://login.example.com/
|
|
||||||
* Test login to the admin console with your admin account
|
|
||||||
|
|
||||||
#### Configure Realm and Client
|
|
||||||
|
|
||||||
Jump into the KeyCloak container:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose exec keycloak /bin/bash
|
|
||||||
```
|
|
||||||
|
|
||||||
Since we have configured the `.env` file, we can now run the following commands
to create a realm and a client:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
# Login
|
|
||||||
/opt/jboss/keycloak/bin/kcadm.sh config credentials --server http://localhost:8080/auth --realm master --user $KEYCLOAK_USER --password $KEYCLOAK_PASSWORD
|
|
||||||
|
|
||||||
# Create Realm
|
|
||||||
/opt/jboss/keycloak/bin/kcadm.sh create realms -s realm=$OBS_KEYCLOAK_REALM -s enabled=true -o
|
|
||||||
|
|
||||||
# Create a client and remember the unique id of the client
|
|
||||||
CID=$(/opt/jboss/keycloak/bin/kcadm.sh create clients -r $OBS_KEYCLOAK_REALM -s clientId=portal -s "redirectUris=[\"$OBS_KEYCLOAK_PORTAL_REDIRECT_URI\"]" -i)
|
|
||||||
|
|
||||||
# Create a secret for the client
|
|
||||||
/opt/jboss/keycloak/bin/kcadm.sh create clients/$CID/client-secret -r $OBS_KEYCLOAK_REALM
|
|
||||||
|
|
||||||
# Get the secret of the client
|
|
||||||
/opt/jboss/keycloak/bin/kcadm.sh get clients/$CID/client-secret -r $OBS_KEYCLOAK_REALM
|
|
||||||
```
|
|
||||||
|
|
||||||
Exit the container with `exit`. Configure the client secret:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
nano .env
|
|
||||||
```
|
|
||||||
|
|
||||||
Configure:
|
|
||||||
* `OBS_KEYCLOAK_CLIENT_SECRET`:
|
|
||||||
* Use the obtained client secret
|
|
||||||
|
|
||||||
#### Create a user
|
|
||||||
|
|
||||||
* Log in to your Keycloak with the admin user and select the realm `obs`
* Create a user with username and email for the realm `obs` (*Hint*: email is required by the portal)
* Configure a password in the tab `Credentials` as well
|
|
||||||
|
|
||||||
### Portal
|
|
||||||
|
|
||||||
#### Configure Postgres
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
nano .env
|
|
||||||
```
|
|
||||||
|
|
||||||
Configure:
|
|
||||||
* `OBS_POSTGRES_HOST`:
  * This should be the postgres container, e.g. `postgres`
* `OBS_POSTGRES_USER`:
  * The default postgres user is `obs`
* `OBS_POSTGRES_PASSWORD`:
  * Use one of the generated passwords for postgres
* `OBS_POSTGRES_DB`:
  * The default postgres database is `obs`
* `OBS_POSTGRES_URL`:
  * Use the same information as above to configure the `POSTGRES_URL`;
    this one is used by the portal.
|
|
||||||
|
|
||||||
#### Start Postgres for the portal
|
|
||||||
|
|
||||||
```
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
docker-compose up -d postgres
|
|
||||||
docker-compose logs -f postgres
|
|
||||||
```
|
|
||||||
Wait until started:
|
|
||||||
|
|
||||||
> postgres_1 | PostgreSQL init process complete; ready for start up.
|
|
||||||
|
|
||||||
|
|
||||||
#### Build the portal image
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
docker-compose build portal
|
|
||||||
```
|
|
||||||
|
|
||||||
*Hint*: This may take up to 10 minutes. In the future, we will provide a prebuilt image.
|
|
||||||
|
|
||||||
#### Prepare database
|
|
||||||
|
|
||||||
Run the following scripts to prepare the database:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm portal tools/upgrade.py
|
|
||||||
```
|
|
||||||
|
|
||||||
For more details, see [README.md](../README.md) under "Prepare database".
|
|
||||||
|
|
||||||
#### Import OpenStreetMap data
|
|
||||||
|
|
||||||
Follow [these instructions](./osm-import.md).
|
|
||||||
|
|
||||||
|
|
||||||
#### Configure portal
|
|
||||||
|
|
||||||
The portal can be configured via env vars or via the `config.py`.
It's important to know that the `config.py` overrides the env vars.
All env vars start with `OBS_` and will be handled by the application without the prefix.
For example, the env var `OBS_SECRET` is the same as `SECRET` within the `config.py` and will be available as `SECRET` within the application.
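
For illustration (the variable name follows the rule above; the value is just a
placeholder):

```python
# Setting the env var OBS_SECRET=... has the same effect as this line in
# config.py -- and config.py wins if both are set.
SECRET = "replace-me-with-a-generated-uuid"
```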
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
nano .env
|
|
||||||
```
|
|
||||||
|
|
||||||
Configure:
|
|
||||||
|
|
||||||
* `OBS_PORTAL_URI`:
|
|
||||||
* The subdomain of your portal
|
|
||||||
* `OBS_SECRET`:
|
|
||||||
* Generate a UUID with `uuidgen` and use it as the secret
|
|
||||||
* `OBS_POSTGRES_URL`:
|
|
||||||
* Should be configured already
|
|
||||||
* `OBS_KEYCLOAK_URL`:
|
|
||||||
* You can find it as the `issuer` when you click on *OpenID Endpoint Configuration* in the realm `obs`
|
|
||||||
* `OBS_KEYCLOAK_CLIENT_SECRET`:
|
|
||||||
* Should be configured already
|
|
||||||
* `OBS_DEDICATED_WORKER`:
  * Should be set to `"True"`, since the worker will be started with the portal
* `OBS_DATA_DIR`:
  * The data dir must be the same for the portal and the worker.
    The default is `/data` within the containers
|
|
||||||
* `OBS_PROXIES_COUNT`:
|
|
||||||
* This sets `PROXIES_COUNT = 1` in your config
|
|
||||||
* Read the [Sanic docs](https://sanicframework.org/en/guide/advanced/proxy-headers.html)
|
|
||||||
for why this needs to be done. If your reverse proxy supports it, you can also
|
|
||||||
use a forwarded secret to secure your proxy target from spoofing. This is not
|
|
||||||
required if your application server does not listen on a public interface, but
|
|
||||||
it is recommended anyway, if possible.
|
|
||||||
|
|
||||||
Have a look at the `config.py` to see which other variables may affect you.
|
|
||||||
|
|
||||||
#### Start the portal
|
|
||||||
|
|
||||||
```bash
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
docker-compose up -d portal
|
|
||||||
docker-compose logs -f portal worker
|
|
||||||
```
|
|
||||||
|
|
||||||
> portal_1 | [2022-01-03 13:37:48 +0000] [1] [INFO] Goin' Fast @ http://0.0.0.0:3000
|
|
||||||
|
|
||||||
This also starts a dedicated worker container to handle the tracks.
|
|
||||||
|
|
||||||
#### Test the portal
|
|
||||||
|
|
||||||
* Open: https://portal.example.com/ (URL depends on your setup)
|
|
||||||
* Login with the user
|
|
||||||
* Upload a track via My Tracks
|
|
||||||
|
|
||||||
You should see something like:
|
|
||||||
|
|
||||||
> worker_1 | INFO: Track uuqvcvlm imported.
|
|
||||||
|
|
||||||
When you click on *My Tracks*, you should see it on a map.
|
|
||||||
|
|
||||||
#### Configure the map position
|
|
||||||
|
|
||||||
Open the tab *Map* and zoom to the desired position. The URL contains the
corresponding GPS position, for example:
|
|
||||||
|
|
||||||
> 14/53.86449349032097/10.696108517499198
|
|
||||||
|
|
||||||
Configure the map position by setting `mapHome` in the variable `FRONTEND_CONFIG` in the `config.py`, then restart the portal:
|
|
||||||
|
|
||||||
```
|
|
||||||
cd /opt/openbikesensor/
|
|
||||||
nano config/config.py
|
|
||||||
|
|
||||||
docker-compose restart portal
|
|
||||||
```
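
The relevant part of `config/config.py` could then look like this (a sketch;
keep your other `FRONTEND_CONFIG` entries as they are, and the coordinates are
taken from the example URL above):

```python
FRONTEND_CONFIG = {
    # ... your existing entries ...
    "mapHome": {
        "zoom": 14,
        "latitude": 53.86449349032097,
        "longitude": 10.696108517499198,
    },
}
```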
|
|
||||||
|
|
||||||
**Hint**: You may need to disable the browser cache to see the change.
|
|
||||||
|
|
||||||
The tab *Map* should now open at the selected map section.
Once you have uploaded some tracks, your map should show a color overlay on the streets.
|
|
||||||
|
|
||||||
## Miscellaneous
|
|
||||||
|
|
||||||
### Logs
|
|
||||||
|
|
||||||
To read the logs, run
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose logs -f
|
|
||||||
```
|
|
||||||
|
|
||||||
If something went wrong, you can reconfigure your config files and rerun:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose up -d
|
|
||||||
```
|
|
||||||
|
|
||||||
### Updates
|
|
||||||
|
|
||||||
Before updating make sure that you have properly backed-up your instance so you
|
|
||||||
can always roll back to a pre-update state.
|
|
||||||
|
|
||||||
#### Migrating
|
|
||||||
|
|
||||||
Migrations are done with
|
|
||||||
[Alembic](https://alembic.sqlalchemy.org/en/latest/index.html), please refer to
|
|
||||||
its documentation for help. Most of the time, running this command will do all
|
|
||||||
the migrations you need:
|
|
||||||
|
|
||||||
```bash
|
|
||||||
docker-compose run --rm portal alembic upgrade head
|
|
||||||
```
|
|
||||||
|
|
||||||
You are advised to create a backup (see below) before running a migration, and
|
|
||||||
to shut down the services before the migration and start them afterwards.
|
|
||||||
|
|
||||||
### Backups
|
|
||||||
|
|
||||||
To back up your instance's private data you only need to back up the `$ROOT` folder.
This should contain everything needed to start your instance again; no persistent
data lives in docker containers. You should stop the containers for a clean backup.
|
|
||||||
|
|
||||||
This backup contains the imported OSM data as well. That is of course a lot of
|
|
||||||
redundant data, but very nice to have for a quick restore operation. If you
|
|
||||||
want to generate smaller, nonredundant backups, or backups during live
|
|
||||||
operation of the database, use a tool like `pg_dump` and extract only the
|
|
||||||
required tables:
|
|
||||||
|
|
||||||
* `road_usage`
|
|
||||||
* `overtaking_event`
|
|
||||||
* `track`
|
|
||||||
* `user` (make sure to reference `public.user`, not the postgres user table)
|
|
||||||
* `comment`
|
|
||||||
|
|
||||||
You might also instead use the `--exclude-table` option to ignore the `road`
|
|
||||||
table only (adjust connection parameters and names):
|
|
||||||
|
|
||||||
```bash
|
|
||||||
pg_dump -h localhost -d obs -U obs -n public -T road -f backup-`date +%F`.sql
|
|
||||||
```
|
|
||||||
|
|
||||||
Also back up the raw uploaded files, i.e. the `local/api-data/tracks`
|
|
||||||
directory. The processed data can be regenerated, but you can also back that
|
|
||||||
up, from `local/api-data/processing-output`.
|
|
||||||
|
|
||||||
Finally, make sure to create a backup of your keycloak instance. Refer to the
|
|
||||||
keycloak documentation for how to export its data in a restorable way. This
|
|
||||||
should work very well if you are storing keycloak data in the PostgreSQL and
|
|
||||||
exporting that with an exclusion pattern instead of an explicit list.
|
|
||||||
|
|
||||||
And then, please test your backup and restore strategy before going live, or at
|
|
||||||
least before you need it!
|
|
||||||
|
|
||||||
|
|
||||||
### Connecting to the PostgreSQL database
|
|
||||||
|
|
||||||
Here are the quick steps for connecting to your PostgreSQL database, should you
|
|
||||||
need that:
|
|
||||||
|
|
||||||
* Add the `gateway` network to your `postgres` service.
|
|
||||||
* Add a port forwarding to your `postgres` service:
|
|
||||||
```yaml
|
|
||||||
ports:
|
|
||||||
- 127.0.0.1:25432:5432
|
|
||||||
```
|
|
||||||
* Run `docker-compose up -d postgres` again
|
|
||||||
* You can now connect from your server to the PostgreSQL service with:
|
|
||||||
|
|
||||||
```
|
|
||||||
psql -h localhost -U obs -d obs -p 25432
|
|
||||||
```
|
|
||||||
|
|
||||||
You will need your database password for the connection.
|
|
||||||
* If you do not want to install `psql` outside your container, you can use an
|
|
||||||
SSH tunnel from your local machine to your server and run `psql` locally.
|
|
|
@ -1 +0,0 @@
|
||||||
node_modules
|
|
24
frontend/.gitignore
vendored
24
frontend/.gitignore
vendored
|
@ -1,24 +0,0 @@
|
||||||
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
|
|
||||||
|
|
||||||
# dependencies
|
|
||||||
/node_modules
|
|
||||||
/.pnp
|
|
||||||
.pnp.js
|
|
||||||
|
|
||||||
# testing
|
|
||||||
/coverage
|
|
||||||
|
|
||||||
# production
|
|
||||||
/build
|
|
||||||
|
|
||||||
# misc
|
|
||||||
.DS_Store
|
|
||||||
.env.local
|
|
||||||
.env.development.local
|
|
||||||
.env.test.local
|
|
||||||
.env.production.local
|
|
||||||
|
|
||||||
npm-debug.log*
|
|
||||||
yarn-debug.log*
|
|
||||||
yarn-error.log*
|
|
||||||
.eslintcache
|
|
|
@ -1,10 +0,0 @@
|
||||||
FROM node:17
|
|
||||||
|
|
||||||
WORKDIR /opt/obs/frontend
|
|
||||||
ADD package.json package-lock.json /opt/obs/frontend/
|
|
||||||
RUN echo update-notifier=false >> ~/.npmrc
|
|
||||||
RUN npm ci
|
|
||||||
|
|
||||||
ADD tsconfig.json webpack.config.js /opt/obs/frontend/
|
|
||||||
ADD public/ /opt/obs/frontend/public/
|
|
||||||
ADD src/ /opt/obs/frontend/src/
|
|
|
@ -1,34 +0,0 @@
|
||||||
#############################################
|
|
||||||
# Build the frontend AS builder
|
|
||||||
#############################################
|
|
||||||
|
|
||||||
FROM node:14 as builder
|
|
||||||
|
|
||||||
WORKDIR /opt/obs/frontend
|
|
||||||
ADD package.json package-lock.json /opt/obs/frontend/
|
|
||||||
RUN echo update-notifier=false >> ~/.npmrc
|
|
||||||
RUN npm ci
|
|
||||||
|
|
||||||
ADD tsconfig.json webpack.config.js /opt/obs/frontend/
|
|
||||||
ADD public/ /opt/obs/frontend/public/
|
|
||||||
ADD src/ /opt/obs/frontend/src/
|
|
||||||
|
|
||||||
# [Optional] Add the URL prefix. It must also be set in the docker-compose.yml
|
|
||||||
# ENV PUBLIC_URL=/prefix
|
|
||||||
|
|
||||||
RUN npm run build
|
|
||||||
|
|
||||||
#############################################
|
|
||||||
# Serve the frontend
|
|
||||||
#############################################
|
|
||||||
|
|
||||||
# Use apache
|
|
||||||
FROM httpd:2.4
|
|
||||||
# Enable rewrite_module
|
|
||||||
RUN sed -i '/LoadModule rewrite_module/s/^#//g' /usr/local/apache2/conf/httpd.conf
|
|
||||||
# Allow .htaccess within /usr/local/apache2/htdocs
|
|
||||||
RUN sed -i '/<Directory "\/usr\/local\/apache2\/htdocs">/,/<\/Directory>/ s/AllowOverride None/AllowOverride all/' /usr/local/apache2/conf/httpd.conf
|
|
||||||
|
|
||||||
COPY --from=builder /opt/obs/frontend/build /usr/local/apache2/htdocs
|
|
||||||
COPY apache/.htaccess /usr/local/apache2/htdocs/
|
|
||||||
RUN chmod -R a+rX /usr/local/apache2/htdocs
|
|
|
@ -1,6 +0,0 @@
|
||||||
# @see: https://create-react-app.dev/docs/deployment/#serving-apps-with-client-side-routing
|
|
||||||
|
|
||||||
Options -MultiViews
|
|
||||||
RewriteEngine On
|
|
||||||
RewriteCond %{REQUEST_FILENAME} !-f
|
|
||||||
RewriteRule ^ index.html [QSA,L]
|
|
|
@ -1,18 +0,0 @@
|
||||||
{
|
|
||||||
"apiUrl": "https://portal.example.com/api",
|
|
||||||
"loginUrl": "https://portal.example.com/login",
|
|
||||||
"imprintUrl": "https://example.com/imprint",
|
|
||||||
"privacyPolicyUrl": "https://example.com/privacy",
|
|
||||||
"basename": "/",
|
|
||||||
"mapHome": {
|
|
||||||
"zoom": 6,
|
|
||||||
"longitude": 10.2,
|
|
||||||
"latitude": 51.3
|
|
||||||
},
|
|
||||||
"obsMapSource": {
|
|
||||||
"type": "vector",
|
|
||||||
"tiles": ["https://portal.example.com/tiles/{z}/{x}/{y}.pbf"],
|
|
||||||
"minzoom": 0,
|
|
||||||
"maxzoom": 14
|
|
||||||
}
|
|
||||||
}
|
|
20031
frontend/package-lock.json
generated
20031
frontend/package-lock.json
generated
File diff suppressed because it is too large
Load diff
|
@ -1,95 +0,0 @@
|
||||||
{
|
|
||||||
"name": "openbikesensor-portal-frontend",
|
|
||||||
"version": "0.0.0",
|
|
||||||
"private": true,
|
|
||||||
"scripts": {
|
|
||||||
"start": "webpack-dev-server --env development",
|
|
||||||
"build": "webpack --env production"
|
|
||||||
},
|
|
||||||
"dependencies": {
|
|
||||||
"@babel/runtime": "^7.16.3",
|
|
||||||
"@turf/bbox": "^6.5.0",
|
|
||||||
"classnames": "^2.3.1",
|
|
||||||
"colormap": "^2.3.2",
|
|
||||||
"downloadjs": "^1.4.7",
|
|
||||||
"echarts": "^5.3.2",
|
|
||||||
"echarts-for-react": "^3.0.2",
|
|
||||||
"fomantic-ui-less": "^2.8.8",
|
|
||||||
"i18next-browser-languagedetector": "^6.1.4",
|
|
||||||
"i18next-http-backend": "^1.4.1",
|
|
||||||
"immer": "^9.0.7",
|
|
||||||
"luxon": "^1.28.0",
|
|
||||||
"maplibre-gl": "^1.15.2",
|
|
||||||
"mini-css-extract-plugin": "^2.4.5",
|
|
||||||
"pkce": "^1.0.0-beta2",
|
|
||||||
"postcss-flexbugs-fixes": "^5.0.2",
|
|
||||||
"postcss-loader": "^6.2.1",
|
|
||||||
"postcss-normalize": "^10.0.1",
|
|
||||||
"postcss-preset-env": "^7.0.1",
|
|
||||||
"proj4": "^2.7.5",
|
|
||||||
"react": "^17.0.2",
|
|
||||||
"react-dom": "^17.0.2",
|
|
||||||
"react-helmet": "^6.1.0",
|
|
||||||
"react-hook-form": "^6.15.8",
|
|
||||||
"react-i18next": "^11.18.1",
|
|
||||||
"react-map-gl": "^6.1.17",
|
|
||||||
"react-markdown": "^5.0.3",
|
|
||||||
"react-redux": "^7.2.6",
|
|
||||||
"react-router-dom": "^5.3.0",
|
|
||||||
"redux": "^4.1.2",
|
|
||||||
"redux-localstorage": "^0.4.1",
|
|
||||||
"resolve-url-loader": "^4.0.0",
|
|
||||||
"rxjs": "^6.6.7",
|
|
||||||
"rxjs-hooks": "^0.6.2",
|
|
||||||
"sass": "^1.43.5",
|
|
||||||
"semantic-ui-react": "^2.0.4",
|
|
||||||
"ts-loader": "^9.2.6",
|
|
||||||
"typescript": "^4.7.4",
|
|
||||||
"yaml-loader": "^0.8.0"
|
|
||||||
},
|
|
||||||
"eslintConfig": {
|
|
||||||
"extends": [
|
|
||||||
"react-app",
|
|
||||||
"plugin:prettier/recommended"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"browserslist": {
|
|
||||||
"production": [
|
|
||||||
">0.2%",
|
|
||||||
"not dead",
|
|
||||||
"not op_mini all"
|
|
||||||
],
|
|
||||||
"development": [
|
|
||||||
"last 1 chrome version",
|
|
||||||
"last 1 firefox version",
|
|
||||||
"last 1 safari version"
|
|
||||||
]
|
|
||||||
},
|
|
||||||
"devDependencies": {
|
|
||||||
"@babel/core": "^7.16.0",
|
|
||||||
"@babel/plugin-transform-runtime": "^7.16.4",
|
|
||||||
"@babel/preset-env": "^7.16.4",
|
|
||||||
"@babel/preset-react": "^7.16.0",
|
|
||||||
"@babel/preset-typescript": "^7.16.0",
|
|
||||||
"@pmmmwh/react-refresh-webpack-plugin": "^0.5.2",
|
|
||||||
"@types/lodash": "^4.14.177",
|
|
||||||
"@types/node": "^14.17.34",
|
|
||||||
"@types/react": "^17.0.37",
|
|
||||||
"@types/react-dom": "^17.0.11",
|
|
||||||
"@types/react-redux": "^7.1.20",
|
|
||||||
"@types/react-router-dom": "^5.3.2",
|
|
||||||
"babel-loader": "^8.2.3",
|
|
||||||
"css-loader": "^5.2.7",
|
|
||||||
"eslint-config-prettier": "^8.5.0",
|
|
||||||
"eslint-config-react-app": "^7.0.1",
|
|
||||||
"eslint-plugin-prettier": "^4.2.1",
|
|
||||||
"html-webpack-plugin": "^5.5.0",
|
|
||||||
"less-loader": "^10.2.0",
|
|
||||||
"prettier": "^2.7.1",
|
|
||||||
"react-refresh": "^0.11.0",
|
|
||||||
"style-loader": "^3.3.1",
|
|
||||||
"webpack": "^5.64.4",
|
|
||||||
"webpack-cli": "^4.9.1",
|
|
||||||
"webpack-dev-server": "^4.5.0"
|
|
||||||
}
|
|
||||||
}
|
|
Binary file not shown.
Before Width: | Height: | Size: 5.8 KiB |
Binary file not shown.
Before Width: | Height: | Size: 15 KiB |
Binary file not shown.
Before Width: | Height: | Size: 3.8 KiB |
Binary file not shown.
Before Width: | Height: | Size: 15 KiB |
|
@ -1,183 +0,0 @@
|
||||||
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
|
||||||
<svg
|
|
||||||
xmlns:dc="http://purl.org/dc/elements/1.1/"
|
|
||||||
xmlns:cc="http://creativecommons.org/ns#"
|
|
||||||
xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#"
|
|
||||||
xmlns:svg="http://www.w3.org/2000/svg"
|
|
||||||
xmlns="http://www.w3.org/2000/svg"
|
|
||||||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
|
|
||||||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
|
|
||||||
width="180"
|
|
||||||
height="180"
|
|
||||||
viewBox="0 0 47.625001 47.625001"
|
|
||||||
version="1.1"
|
|
||||||
id="svg32"
|
|
||||||
inkscape:version="1.0.1 (3bc2e813f5, 2020-09-07)"
|
|
||||||
sodipodi:docname="OpenBikeSensor.svg">
|
|
||||||
<title
|
|
||||||
id="title25">ObenBikeSensor Logo</title>
|
|
||||||
<defs
|
|
||||||
id="defs26" />
|
|
||||||
<sodipodi:namedview
|
|
||||||
id="base"
|
|
||||||
pagecolor="#ffffff"
|
|
||||||
bordercolor="#666666"
|
|
||||||
borderopacity="1.0"
|
|
||||||
inkscape:pageopacity="0.0"
|
|
||||||
inkscape:pageshadow="2"
|
|
||||||
inkscape:zoom="1.979899"
|
|
||||||
inkscape:cx="201.42447"
|
|
||||||
inkscape:cy="210.76485"
|
|
||||||
inkscape:document-units="mm"
|
|
||||||
inkscape:current-layer="layer1"
|
|
||||||
inkscape:document-rotation="0"
|
|
||||||
showgrid="false"
|
|
||||||
inkscape:window-width="1920"
|
|
||||||
inkscape:window-height="1142"
|
|
||||||
inkscape:window-x="0"
|
|
||||||
inkscape:window-y="27"
|
|
||||||
inkscape:window-maximized="1"
|
|
||||||
fit-margin-top="0"
|
|
||||||
fit-margin-left="0"
|
|
||||||
fit-margin-right="0"
|
|
||||||
fit-margin-bottom="0"
|
|
||||||
inkscape:pagecheckerboard="false"
|
|
||||||
units="px" />
|
|
||||||
<metadata
|
|
||||||
id="metadata29">
|
|
||||||
<rdf:RDF>
|
|
||||||
<cc:Work
|
|
||||||
rdf:about="">
|
|
||||||
<dc:format>image/svg+xml</dc:format>
|
|
||||||
<dc:type
|
|
||||||
rdf:resource="http://purl.org/dc/dcmitype/StillImage" />
|
|
||||||
<dc:title>ObenBikeSensor Logo</dc:title>
|
|
||||||
<dc:identifier>https://www.openbikesensor.org/</dc:identifier>
|
|
||||||
<dc:contributor>
|
|
||||||
<cc:Agent>
|
|
||||||
<dc:title>The font is made available under the SIL Open Font v1.1 license.
|
|
||||||
The metainfo for packaging is made available under CC-BY-SA 4.0 license.
|
|
||||||
|
|
||||||
Copyright © 2017 Datto Inc. https://www.datto.com/fonts/d-din/</dc:title>
|
|
||||||
</cc:Agent>
|
|
||||||
</dc:contributor>
|
|
||||||
<dc:date>05.01.2021</dc:date>
|
|
||||||
<cc:license
|
|
||||||
rdf:resource="http://creativecommons.org/licenses/by-nc-sa/4.0/" />
|
|
||||||
</cc:Work>
|
|
||||||
<cc:License
|
|
||||||
rdf:about="http://creativecommons.org/licenses/by-nc-sa/4.0/">
|
|
||||||
<cc:permits
|
|
||||||
rdf:resource="http://creativecommons.org/ns#Reproduction" />
|
|
||||||
<cc:permits
|
|
||||||
rdf:resource="http://creativecommons.org/ns#Distribution" />
|
|
||||||
<cc:requires
|
|
||||||
rdf:resource="http://creativecommons.org/ns#Notice" />
|
|
||||||
<cc:requires
|
|
||||||
rdf:resource="http://creativecommons.org/ns#Attribution" />
|
|
||||||
<cc:prohibits
|
|
||||||
rdf:resource="http://creativecommons.org/ns#CommercialUse" />
|
|
||||||
<cc:permits
|
|
||||||
rdf:resource="http://creativecommons.org/ns#DerivativeWorks" />
|
|
||||||
<cc:requires
|
|
||||||
rdf:resource="http://creativecommons.org/ns#ShareAlike" />
|
|
||||||
</cc:License>
|
|
||||||
</rdf:RDF>
|
|
||||||
</metadata>
|
|
||||||
<g
|
|
||||||
inkscape:label="text"
|
|
||||||
inkscape:groupmode="layer"
|
|
||||||
id="layer1"
|
|
||||||
style="display:inline"
|
|
||||||
transform="translate(-50.984116,-107.55921)">
|
|
||||||
<g
|
|
||||||
id="g453"
|
|
||||||
transform="matrix(0.40064407,0,0,0.40722908,30.654488,63.527687)">
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 240.02293,503.36909 v -35.86042 h 10.10153 10.10152 v 12.62691 12.6269 h 42.93149 42.93148 v 10.60661 10.6066 h -42.93148 -42.93149 v 12.62691 12.6269 h -10.10152 -10.10153 z"
|
|
||||||
id="path401"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 384.90976,558.4839 c -8.01151,-1.91689 -14.1203,-8.67189 -17.39451,-19.23458 -1.45394,-4.69046 -1.72833,-9.9859 -1.72833,-33.35485 0,-30.98455 0.59528,-34.83053 6.80973,-43.99648 4.49493,-6.62975 9.40731,-9.03653 18.44409,-9.03653 8.64706,0 13.7048,2.33029 18.42657,8.48983 6.19075,8.07582 6.82768,12.28073 6.82354,45.04825 -0.004,28.46722 -0.0715,29.46557 -2.40527,35.35534 -5.17516,13.06071 -16.64086,19.68036 -28.97582,16.72902 z m 11.60077,-17.69467 c 5.86294,-3.03185 6.61847,-6.61224 6.96982,-33.0293 0.34879,-26.22531 -0.20608,-30.31732 -4.80968,-35.46964 -5.21109,-5.83222 -14.51084,-3.70354 -18.06837,4.13579 -1.42745,3.14551 -1.68339,7.55371 -1.68339,28.99412 0,22.81142 0.19069,25.70769 1.94959,29.60997 3.01098,6.68012 9.34862,9.0135 15.64203,5.75906 z"
|
|
||||||
id="path403"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 428.92146,505.38939 v -52.02285 h 13.08226 c 17.73684,0 22.4243,1.1082 27.91596,6.59986 6.27145,6.27145 8.88214,15.24842 8.30541,28.55847 -0.63336,14.61701 -5.17385,23.28004 -14.45337,27.57632 -2.99701,1.38757 -6.45394,1.87647 -13.38452,1.89292 l -9.34391,0.0222 v 19.69797 19.69798 h -6.06092 -6.06091 z m 28.58935,-4.61331 c 5.53376,-1.53668 7.76758,-5.66876 7.77236,-14.37721 0.007,-12.49556 -3.36126,-15.85974 -15.87835,-15.85974 h -8.36153 v 14.98393 c 0,8.24116 0.30304,15.28697 0.67343,15.65736 1.0491,1.0491 11.52597,0.78089 15.79409,-0.40434 z"
|
|
||||||
id="path405"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 488.52046,505.89447 v -52.52793 h 22.72843 22.72843 v 8.58629 8.5863 h -16.16244 -16.16244 v 13.13198 13.13199 h 13.63706 13.63706 v 8.58629 8.5863 H 515.2895 501.65244 v 13.63706 13.63706 h 16.16244 16.16244 v 8.58629 8.5863 h -22.72843 -22.72843 z"
|
|
||||||
id="path407"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 544.07885,505.34324 v -52.06901 l 6.78747,0.29869 6.78747,0.29869 12.73,31.1682 c 7.0015,17.1425 13.23906,31.91598 13.86125,32.82995 0.84624,1.24309 1.14622,-6.67321 1.19066,-31.42073 l 0.0594,-33.08249 h 6.56599 6.56599 v 52.02285 52.02286 h -6.90504 -6.90505 l -12.99304,-31.88387 c -7.14617,-17.53613 -13.35756,-32.10915 -13.80308,-32.3845 -0.44552,-0.27534 -0.81004,14.0724 -0.81004,31.88387 v 32.3845 h -6.56599 -6.56599 z"
|
|
||||||
id="path409"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="M 240.02293,631.15338 V 577.6153 h 17.21284 c 19.47478,0 22.39499,0.79963 28.07206,7.68689 8.51547,10.33075 8.0101,32.96229 -0.93582,41.90822 l -2.03411,2.03411 3.21254,3.65889 c 5.34118,6.08326 6.42521,9.69686 6.44473,21.48348 0.0194,11.73717 -1.02934,15.88428 -5.61957,22.2209 -5.10441,7.04643 -8.65418,8.03569 -28.92754,8.0615 l -17.42513,0.0222 z m 34.61153,34.35251 c 3.114,-2.44948 5.30465,-10.5546 4.30004,-15.90961 -1.75686,-9.36491 -5.64583,-11.75482 -18.20344,-11.18667 l -7.07107,0.31992 -0.28027,14.39467 -0.28028,14.39468 h 9.48796 c 7.94689,0 9.90361,-0.32696 12.04706,-2.01299 z M 271.3075,620.7423 c 7.26338,-2.49824 9.26926,-15.96143 3.41472,-22.91915 -2.50552,-2.97764 -2.73446,-3.03526 -12.06065,-3.03526 h -9.50666 v 12.96363 c 0,7.12999 0.30305,13.26667 0.67344,13.63706 1.11343,1.11342 13.71796,0.64739 17.47915,-0.64628 z"
|
|
||||||
id="path413"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="M 304.67269,631.15338 V 577.6153 h 6.566 6.56599 v 53.53808 53.53809 h -6.56599 -6.566 z"
|
|
||||||
id="path415"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="M 332.95697,631.15338 V 577.6153 h 6.56599 6.56599 v 24.95077 c 0,15.10605 0.36452,24.57212 0.92385,23.99112 0.50812,-0.52781 6.45595,-11.75565 13.21742,-24.95077 l 12.29356,-23.99112 h 8.05541 c 4.43047,0 7.86351,0.34093 7.62897,0.75761 -0.23454,0.41669 -5.55505,10.01092 -11.82337,21.32052 l -11.39695,20.5629 12.37997,30.95488 c 6.80898,17.02518 12.6136,31.54322 12.89915,32.26232 0.40374,1.0167 -1.22419,1.24132 -7.32077,1.01015 l -7.83996,-0.29729 -9.10775,-23.48604 c -5.00926,-12.91733 -9.37895,-23.48464 -9.71041,-23.48292 -0.33147,0.002 -2.76187,3.86633 -5.4009,8.58801 l -4.79822,8.58487 v 15.15058 15.15058 h -6.56599 -6.56599 z"
|
|
||||||
id="path417"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="M 398.61688,631.15338 V 577.6153 h 23.73859 23.73858 v 8.5863 8.58629 h -16.66752 -16.66751 v 13.63706 13.63706 h 14.14213 14.14214 v 8.5863 8.58629 h -14.14214 -14.14213 v 14.14214 14.14214 h 16.66751 16.66752 v 8.58629 8.5863 h -23.73858 -23.73859 z"
|
|
||||||
id="path419"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="M 575.39358,651.86151 V 639.2346 H 515.2895 455.18542 v -10.10152 -10.10153 h 60.10408 60.10408 v -12.6269 -12.62691 h 10.10152 10.10153 v 35.35534 35.35534 H 585.4951 575.39358 Z"
|
|
||||||
id="path421"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 254.08568,807.55922 c -9.7468,-3.48446 -15.21048,-11.83576 -16.77487,-25.64057 l -0.60098,-5.3033 h 6.07313 6.07313 l 0.62175,3.78807 c 1.53055,9.32506 8.4043,14.00169 16.85737,11.46909 5.61489,-1.68226 8.02536,-5.80974 8.02536,-13.74193 0,-8.78657 -2.28623,-12.08845 -9.80256,-14.15732 -15.20607,-4.18547 -19.76035,-7.24809 -23.62539,-15.88737 -1.97987,-4.42548 -2.37813,-7.00208 -2.38941,-15.45861 -0.0128,-9.58906 0.18058,-10.53482 3.51101,-17.17259 4.69445,-9.35632 9.51893,-12.82476 18.6473,-13.40601 5.21565,-0.33211 7.40408,0.0266 11.11109,1.8211 7.10945,3.44163 12.26305,12.24759 14.04909,24.00571 l 0.65212,4.29315 h -6.57793 c -6.29612,0 -6.57792,-0.10537 -6.57792,-2.45962 0,-3.55237 -1.56218,-7.13639 -4.16921,-9.56521 -2.50528,-2.33402 -9.61515,-2.92049 -13.05934,-1.07721 -3.00443,1.60792 -4.99481,6.42641 -4.99481,12.09189 0,9.11233 3.18576,12.42396 14.64721,15.22589 11.63441,2.8442 17.41881,8.18893 20.27259,18.73165 2.27779,8.41483 1.59261,21.21436 -1.51224,28.24958 -5.47965,12.41623 -18.48051,18.47501 -30.45649,14.19361 z"
|
|
||||||
id="path423"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 298.61178,755.40215 v -52.52794 h 22.72843 22.72843 v 8.5863 8.5863 H 327.9062 311.74376 v 13.13198 13.13198 h 13.63706 13.63706 v 8.5863 8.5863 h -13.63706 -13.63706 v 14.14213 14.14214 h 16.16244 16.16244 v 8.08122 8.08122 h -22.72843 -22.72843 z"
|
|
||||||
id="path425"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 355.18032,755.40215 v -52.52794 h 6.37553 6.37553 l 12.85146,31.56727 c 7.06831,17.362 13.28373,32.24912 13.81206,33.0825 0.61881,0.97611 0.96609,-10.2554 0.97605,-31.56727 l 0.0155,-33.0825 h 6.56599 6.566 v 52.57409 52.57409 l -6.66561,-0.2987 -6.66561,-0.29869 -13.28491,-32.65153 -13.2849,-32.65153 -0.26634,32.90407 -0.26634,32.90407 h -6.55219 -6.55219 z"
|
|
||||||
id="path427"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 433.44836,806.52236 c -3.79553,-1.65382 -9.07168,-7.56593 -11.17276,-12.51948 -0.87494,-2.06276 -2.18875,-6.83923 -2.9196,-10.61438 l -1.3288,-6.86391 6.19126,0.29792 6.19126,0.29792 0.58407,3.53553 c 0.82854,5.01541 2.62182,7.89327 6.3259,10.1518 6.32813,3.85855 14.83723,1.59927 17.44023,-4.63061 1.70406,-4.07838 1.86277,-11.06557 0.34992,-15.40532 -1.35728,-3.89351 -3.83671,-5.53349 -12.33365,-8.15793 -9.28931,-2.86918 -12.07632,-4.47783 -16.29335,-9.40448 -4.51227,-5.27157 -5.87928,-10.12391 -5.78538,-20.53571 0.15341,-17.00938 6.32106,-27.01874 18.29321,-29.68769 9.44408,-2.10537 18.88099,2.40489 23.34661,11.15823 1.09014,2.13685 2.69624,6.67286 3.56911,10.08002 1.87378,7.31407 1.43222,7.90519 -5.92274,7.9289 -4.47364,0.0144 -4.79823,-0.15876 -4.79823,-2.56008 0,-3.37284 -2.44801,-8.01361 -5.26976,-9.99004 -1.23796,-0.8671 -4.23022,-1.57655 -6.64946,-1.57655 -7.76375,0 -12.59682,7.49399 -10.84734,16.81951 1.02669,5.47277 4.98779,8.74026 14.39249,11.87231 11.29866,3.7628 14.36635,6.02117 17.97052,13.22951 2.81628,5.63255 3.03044,6.84626 3.03013,17.1726 -2.7e-4,9.4552 -0.34911,11.88243 -2.33999,16.2819 -4.43018,9.78989 -12.10891,14.56604 -23.29304,14.48821 -3.12475,-0.0217 -7.05353,-0.63742 -8.73061,-1.36818 z"
|
|
||||||
id="path431"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 495.07778,807.05632 c -5.40964,-2.45755 -9.90831,-7.78758 -12.62098,-14.95335 -5.11907,-13.5225 -5.1074,-59.90922 0.0185,-73.45101 4.46055,-11.78412 12.66067,-17.42901 24.11488,-16.60049 11.97283,0.86604 19.71765,8.89134 22.85039,23.67794 2.01585,9.51484 2.04895,49.67557 0.0487,59.1126 -2.44554,11.538 -7.82978,19.15118 -15.93509,22.5318 -5.02118,2.09427 -13.48778,1.94878 -18.47639,-0.31749 z m 17.72256,-18.35792 c 4.39267,-4.70236 5.22902,-11.02602 4.84534,-36.63572 -0.38594,-25.7615 -1.18846,-28.94284 -8.16035,-32.3496 -7.21055,-3.52337 -13.70844,0.41586 -16.04388,9.72631 -1.82925,7.29248 -1.83569,44.78604 -0.009,51.9679 1.78934,7.03487 5.61953,10.36035 11.93271,10.36035 3.78584,0 5.05889,-0.52552 7.43507,-3.06924 z"
|
|
||||||
id="path433"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d="m 542.05854,755.30029 v -52.62978 l 16.92006,0.38417 c 18.30586,0.41563 20.38377,0.94495 25.70473,6.54788 1.49807,1.57746 3.77091,5.14095 5.05076,7.91887 2.00154,4.34434 2.327,6.67553 2.327,16.66751 0,10.71938 -0.22179,12.06843 -2.87114,17.46381 -1.57912,3.21588 -4.19289,6.82428 -5.80837,8.01865 -1.61548,1.19438 -2.93724,2.69518 -2.93724,3.33511 0,0.63993 3.40926,10.88522 7.57614,22.76729 4.16688,11.88208 7.57615,21.7281 7.57615,21.88003 0,0.15194 -3.38989,0.27625 -7.53309,0.27625 h -7.53309 l -6.795,-21.2132 -6.795,-21.21321 h -5.87496 -5.87496 v 21.21321 21.2132 h -6.566 -6.56599 z m 33.47446,-8.81982 c 2.70487,-2.41681 3.27483,-3.74204 3.67267,-8.53948 0.64422,-7.76843 -0.34881,-12.1351 -3.4693,-15.25559 -2.44777,-2.44777 -3.28611,-2.63859 -11.59222,-2.63859 h -8.95362 v 14.64721 14.64721 h 8.57035 c 7.98252,0 8.78998,-0.19622 11.77212,-2.86076 z"
|
|
||||||
id="path435"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
</g>
|
|
||||||
<path
|
|
||||||
style="fill:#000000;fill-rule:evenodd;stroke-width:1.01015"
|
|
||||||
d=""
|
|
||||||
id="path411"
|
|
||||||
transform="scale(0.26458333)" />
|
|
||||||
</g>
|
|
||||||
</svg>
|
|
Before Width: | Height: | Size: 14 KiB |
|
@ -1,28 +0,0 @@
|
||||||
{
|
|
||||||
"short_name": "OBS Portal",
|
|
||||||
"name": "OpenBikeSensor Portal",
|
|
||||||
"description": "Upload the tracks recorded with your OpenBikeSensor device to this portal and participate in the accumulation of Open Data.",
|
|
||||||
"icons": [
|
|
||||||
{
|
|
||||||
"src": "favicon.ico",
|
|
||||||
"sizes": "64x64 32x32 24x24 16x16",
|
|
||||||
"type": "image/x-icon"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"src": "android-chrome-192x192.png",
|
|
||||||
"type": "image/png",
|
|
||||||
"sizes": "192x192"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"src": "android-chrome-512x512.png",
|
|
||||||
"type": "image/png",
|
|
||||||
"sizes": "512x512"
|
|
||||||
}
|
|
||||||
],
|
|
||||||
"start_url": ".",
|
|
||||||
"display": "standalone",
|
|
||||||
"theme_color": "#114594",
|
|
||||||
"background_color": "#ffffff",
|
|
||||||
"manifest_version": 2,
|
|
||||||
"version": "0.0.0"
|
|
||||||
}
|
|
|
@ -1,3 +0,0 @@
|
||||||
# https://www.robotstxt.org/robotstxt.html
|
|
||||||
User-agent: *
|
|
||||||
Disallow:
|
|
|
@ -1,146 +0,0 @@
|
||||||
@import 'styles.less';
|
|
||||||
|
|
||||||
:global(#root) {
|
|
||||||
display: flex;
|
|
||||||
flex-direction: column;
|
|
||||||
justify-content: stretch;
|
|
||||||
|
|
||||||
html,
|
|
||||||
body,
|
|
||||||
& {
|
|
||||||
min-height: 100%;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.footer {
|
|
||||||
margin-top: auto;
|
|
||||||
min-height: 12rem;
|
|
||||||
padding: 2rem 0;
|
|
||||||
|
|
||||||
background: @obsColorB4;
|
|
||||||
color: @obsColorW;
|
|
||||||
|
|
||||||
h1,
|
|
||||||
h2,
|
|
||||||
h3,
|
|
||||||
h4,
|
|
||||||
h5,
|
|
||||||
h6 {
|
|
||||||
&:global(.ui.header) {
|
|
||||||
color: inherit;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
a {
|
|
||||||
&,
|
|
||||||
&:hover {
|
|
||||||
color: inherit;
|
|
||||||
}
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.avatar {
|
|
||||||
width: 2.5em;
|
|
||||||
height: 2.5em;
|
|
||||||
display: inline-block;
|
|
||||||
vertical-align: -0.6em;
|
|
||||||
border-radius: 100%;
|
|
||||||
|
|
||||||
> img {
|
|
||||||
border-radius: 100%;
|
|
||||||
width: 100%;
|
|
||||||
height: 100%;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.headline {
|
|
||||||
left: 0;
|
|
||||||
top: 0;
|
|
||||||
right: 0;
|
|
||||||
position: fixed;
|
|
||||||
// border-bottom: 1px solid #E0E0E4;
|
|
||||||
background: white;
|
|
||||||
z-index: 100;
|
|
||||||
box-shadow: 0 0 10px -6px black;
|
|
||||||
}
|
|
||||||
|
|
||||||
.pageTitle a {
|
|
||||||
font-weight: 600;
|
|
||||||
font-size: 18pt;
|
|
||||||
|
|
||||||
&,
|
|
||||||
&:hover {
|
|
||||||
color: @obsColorB4;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.menu.menu {
|
|
||||||
flex: 0 0 auto;
|
|
||||||
margin: 0;
|
|
||||||
|
|
||||||
> :global(.ui.container) {
|
|
||||||
height: @menuHeight;
|
|
||||||
align-items: stretch;
|
|
||||||
}
|
|
||||||
|
|
||||||
align-items: center;
|
|
||||||
justify-content: center;
|
|
||||||
color: white;
|
|
||||||
border-radius: 0;
|
|
||||||
border-right: 0;
|
|
||||||
border-left: 0;
|
|
||||||
border-top: 0;
|
|
||||||
|
|
||||||
ul {
|
|
||||||
margin: 0;
|
|
||||||
padding: 0;
|
|
||||||
list-style: none;
|
|
||||||
display: flex;
|
|
||||||
justify-content: flex-end;
|
|
||||||
align-items: baseline;
|
|
||||||
|
|
||||||
li {
|
|
||||||
padding: 1rem;
|
|
||||||
display: block;
|
|
||||||
|
|
||||||
a {
|
|
||||||
color: #877;
|
|
||||||
text-decoration: none;
|
|
||||||
|
|
||||||
&:hover {
|
|
||||||
text-decoration: underline;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
@media @mobile {
|
|
||||||
.menu.menu {
|
|
||||||
> :global(.ui.container) {
|
|
||||||
height: @menuHeightMobile;
|
|
||||||
align-items: stretch;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
.banner {
|
|
||||||
padding: 8px;
|
|
||||||
z-index: 100;
|
|
||||||
border-bottom: 1px solid #ddd;
|
|
||||||
|
|
||||||
&.warning {
|
|
||||||
background: #ffd54f;
|
|
||||||
border-color: #fbc02d;
|
|
||||||
color: #263238;
|
|
||||||
}
|
|
||||||
&.info {
|
|
||||||
background: #4fc3f7;
|
|
||||||
border-color: #0d47a1;
|
|
||||||
color: #0d47a1;
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,267 +0,0 @@
import React from 'react'
import classnames from 'classnames'
import {connect} from 'react-redux'
import {List, Grid, Container, Menu, Header, Dropdown} from 'semantic-ui-react'
import {BrowserRouter as Router, Switch, Route, Link} from 'react-router-dom'
import {useObservable} from 'rxjs-hooks'
import {from} from 'rxjs'
import {pluck} from 'rxjs/operators'
import {Helmet} from 'react-helmet'
import {useTranslation} from 'react-i18next'

import {useConfig} from 'config'
import styles from './App.module.less'
import {AVAILABLE_LOCALES, setLocale} from 'i18n'

import {
  AcknowledgementsPage,
  ExportPage,
  HomePage,
  LoginRedirectPage,
  LogoutPage,
  NotFoundPage,
  MapPage,
  SettingsPage,
  TrackEditor,
  TrackPage,
  TracksPage,
  UploadPage,
  MyTracksPage,
} from 'pages'
import {Avatar, LoginButton} from 'components'
import api from 'api'

// This component removes the "navigate" prop before rendering a Menu.Item,
// which is a workaround for an annoying warning that is somehow caused by the
// <Link /> and <Menu.Item /> combination.
function MenuItemForLink({navigate, ...props}) {
  return (
    <Menu.Item
      {...props}
      onClick={(e) => {
        e.preventDefault()
        navigate()
      }}
    />
  )
}

function DropdownItemForLink({navigate, ...props}) {
  return (
    <Dropdown.Item
      {...props}
      onClick={(e) => {
        e.preventDefault()
        navigate()
      }}
    />
  )
}

function Banner({text, style = 'warning'}: {text: string; style: 'warning' | 'info'}) {
  return <div className={classnames(styles.banner, styles[style])}>{text}</div>
}

const App = connect((state) => ({login: state.login}))(function App({login}) {
  const {t} = useTranslation()
  const config = useConfig()
  const apiVersion = useObservable(() => from(api.get('/info')).pipe(pluck('version')))

  const hasMap = Boolean(config?.obsMapSource)

  React.useEffect(() => {
    api.loadUser()
  }, [])

  return config ? (
    <Router basename={config.basename}>
      <Helmet>
        <meta charSet="utf-8" />
        <title>OpenBikeSensor Portal</title>
      </Helmet>
      {config?.banner && <Banner {...config.banner} />}
      <Menu className={styles.menu} stackable>
        <Container>
          <Link to="/" component={MenuItemForLink} header className={styles.pageTitle}>
            OpenBikeSensor
          </Link>

          {hasMap && (
            <Link component={MenuItemForLink} to="/map" as="a">
              {t('App.menu.map')}
            </Link>
          )}

          <Link component={MenuItemForLink} to="/tracks" as="a">
            {t('App.menu.tracks')}
          </Link>

          <Link component={MenuItemForLink} to="/export" as="a">
            {t('App.menu.export')}
          </Link>

          <Menu.Menu position="right">
            {login ? (
              <>
                <Link component={MenuItemForLink} to="/my/tracks" as="a">
                  {t('App.menu.myTracks')}
                </Link>
                <Dropdown item trigger={<Avatar user={login} className={styles.avatar} />}>
                  <Dropdown.Menu>
                    <Link
                      to="/upload"
                      component={DropdownItemForLink}
                      icon="cloud upload"
                      text={t('App.menu.uploadTracks')}
                    />
                    <Link to="/settings" component={DropdownItemForLink} icon="cog" text={t('App.menu.settings')} />
                    <Dropdown.Divider />
                    <Link to="/logout" component={DropdownItemForLink} icon="sign-out" text={t('App.menu.logout')} />
                  </Dropdown.Menu>
                </Dropdown>
              </>
            ) : (
              <Menu.Item>
                <LoginButton compact />
              </Menu.Item>
            )}
          </Menu.Menu>
        </Container>
      </Menu>

      <Switch>
        <Route path="/" exact>
          <HomePage />
        </Route>
        {hasMap && (
          <Route path="/map" exact>
            <MapPage />
          </Route>
        )}
        <Route path="/tracks" exact>
          <TracksPage />
        </Route>
        <Route path="/my/tracks" exact>
          <MyTracksPage />
        </Route>
        <Route path={`/tracks/:slug`} exact>
          <TrackPage />
        </Route>
        <Route path={`/tracks/:slug/edit`} exact>
          <TrackEditor />
        </Route>
        <Route path="/export" exact>
          <ExportPage />
        </Route>
        <Route path="/acknowledgements" exact>
          <AcknowledgementsPage />
        </Route>
        <Route path="/redirect" exact>
          <LoginRedirectPage />
        </Route>
        <Route path="/logout" exact>
          <LogoutPage />
        </Route>
        {login && (
          <>
            <Route path="/upload" exact>
              <UploadPage />
            </Route>
            <Route path="/settings" exact>
              <SettingsPage />
            </Route>
          </>
        )}
        <Route>
          <NotFoundPage />
        </Route>
      </Switch>

      <div className={styles.footer}>
        <Container>
          <Grid columns={4} stackable>
            <Grid.Row>
              <Grid.Column>
                <Header as="h5">{t('App.footer.aboutTheProject')}</Header>
                <List>
                  <List.Item>
                    <a href="https://openbikesensor.org/" target="_blank" rel="noreferrer">
                      openbikesensor.org
                    </a>
                  </List.Item>
                </List>
              </Grid.Column>

              <Grid.Column>
                <Header as="h5">{t('App.footer.getInvolved')}</Header>
                <List>
                  <List.Item>
                    <a href="https://forum.openbikesensor.org/" target="_blank" rel="noreferrer">
                      {t('App.footer.getHelpInForum')}
                    </a>
                  </List.Item>
                  <List.Item>
                    <a href="https://github.com/openbikesensor/portal/issues/new" target="_blank" rel="noreferrer">
                      {t('App.footer.reportAnIssue')}
                    </a>
                  </List.Item>
                  <List.Item>
                    <a href="https://github.com/openbikesensor/portal" target="_blank" rel="noreferrer">
                      {t('App.footer.development')}
                    </a>
                  </List.Item>
                </List>
              </Grid.Column>

              <Grid.Column>
                <Header as="h5">{t('App.footer.thisInstallation')}</Header>
                <List>
                  <List.Item>
                    <a href={config?.privacyPolicyUrl} target="_blank" rel="noreferrer">
                      {t('App.footer.privacyPolicy')}
                    </a>
                  </List.Item>
                  <List.Item>
                    <a href={config?.imprintUrl} target="_blank" rel="noreferrer">
                      {t('App.footer.imprint')}
                    </a>
                  </List.Item>
                  {config?.termsUrl && (
                    <List.Item>
                      <a href={config?.termsUrl} target="_blank" rel="noreferrer">
                        {t('App.footer.terms')}
                      </a>
                    </List.Item>
                  )}
                  <List.Item>
                    <a
                      href={`https://github.com/openbikesensor/portal${
                        apiVersion ? `/releases/tag/${apiVersion}` : ''
                      }`}
                      target="_blank"
                      rel="noreferrer"
                    >
                      {apiVersion ? t('App.footer.version', {apiVersion}) : t('App.footer.versionLoading')}
                    </a>
                  </List.Item>
                </List>
              </Grid.Column>

              <Grid.Column>
                <Header as="h5">{t('App.footer.changeLanguage')}</Header>
                <List>
                  {AVAILABLE_LOCALES.map((locale) => (
                    <List.Item key={locale}>
                      <a onClick={() => setLocale(locale)}>{t(`locales.${locale}`)}</a>
                    </List.Item>
                  ))}
                </List>
              </Grid.Column>
            </Grid.Row>
          </Grid>
        </Container>
      </div>
    </Router>
  ) : null
})

export default App
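For context on the removed Banner component above: it only renders when the frontend configuration provides a banner entry, and its style selects between the .warning and .info rules of the removed stylesheet. Below is a hedged sketch of such a configuration object; only the keys actually referenced in the code above (apiUrl, basename, obsMapSource, banner) are taken from this diff, the values are illustrative assumptions.

// Illustrative only — not part of this diff. Shape of the config object
// consumed via useConfig() / configPromise in the code above.
const exampleConfig = {
  apiUrl: 'https://portal.example.com/api', // base URL used by the API wrapper (assumed value)
  basename: '/',                            // passed to <Router basename={...}>
  obsMapSource: null,                       // any truthy value enables the map menu entry and route
  banner: {style: 'info', text: 'Welcome to this OpenBikeSensor installation!'},
}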
@ -1,140 +0,0 @@
import {stringifyParams} from 'query'
import globalStore from 'store'
import {setLogin} from 'reducers/login'
import configPromise from 'config'
import download from 'downloadjs'

function getFileNameFromContentDispostionHeader(contentDisposition) {
  const standardPattern = /filename=(["']?)(.+)\1/i
  const wrongPattern = /filename=([^"'][^;"'\n]+)/i

  if (standardPattern.test(contentDisposition)) {
    return contentDisposition.match(standardPattern)[2]
  }

  if (wrongPattern.test(contentDisposition)) {
    return contentDisposition.match(wrongPattern)[1]
  }
}

class RequestError extends Error {
  constructor(message, errors) {
    super(message)
    this.errors = errors
  }
}

class API {
  constructor(store) {
    this.store = store
  }

  async loadUser() {
    try {
      const result = await this.get('/user')
      this.store.dispatch(setLogin(result))
    } catch {
      this.store.dispatch(setLogin(null))
    }
  }

  async logout() {
    const config = await configPromise
    const url = new URL(config.apiUrl + '/logout')
    url.searchParams.append('next', window.location.href) // bring us back to the current page
    window.location.href = url.toString()
  }

  async makeLoginUrl() {
    const config = await configPromise
    const url = new URL(config.loginUrl || config.apiUrl + '/login')
    url.searchParams.append('next', window.location.href) // bring us back to the current page
    return url.toString()
  }

  async fetch(url, options = {}) {
    const config = await configPromise

    const {returnResponse = false, ...fetchOptions} = options

    const response = await window.fetch(config.apiUrl + url, {
      ...fetchOptions,
      credentials: 'include',
    })

    if (response.status === 401) {
      throw new Error('401 Unauthorized')
    }

    if (returnResponse) {
      if (response.status === 200) {
        return response
      } else if (response.status === 204) {
        return null
      } else {
        throw new RequestError('Error code ' + response.status)
      }
    }

    let json
    try {
      json = await response.json()
    } catch (err) {
      json = null
    }

    if (response.status === 200) {
      return json
    } else if (response.status === 204) {
      return null
    } else {
      throw new RequestError('Error code ' + response.status, json?.errors)
    }
  }

  async post(url, {body: body_, ...options}) {
    let body = body_
    let headers = {...(options.headers || {})}

    if (!(typeof body === 'string' || body instanceof FormData)) {
      body = JSON.stringify(body)
      headers['Content-Type'] = 'application/json'
    }

    return await this.fetch(url, {
      method: 'post',
      ...options,
      body,
      headers,
    })
  }

  async get(url, {query, ...options} = {}) {
    const queryString = query ? stringifyParams(query) : null
    return await this.fetch(url + (queryString ? '?' + queryString : ''), {method: 'get', ...options})
  }

  async delete(url, options = {}) {
    return await this.get(url, {...options, method: 'delete'})
  }

  async put(url, options = {}) {
    return await this.post(url, {...options, method: 'put'})
  }

  async downloadFile(url, options = {}) {
    const res = await this.fetch(url, {returnResponse: true, ...options})
    const blob = await res.blob()
    const filename = getFileNameFromContentDispostionHeader(res.headers.get('content-disposition'))
    const contentType = res.headers.get('content-type')

    // Apparently this workaround is needed for some browsers
    const newBlob = new Blob([blob], {type: contentType})

    download(newBlob, filename, contentType)
  }
}

const api = new API(globalStore)

export default api
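To make the removed API wrapper easier to follow, here is a hedged usage sketch. The endpoint paths are assumptions for illustration only; the concrete routes are defined by the backend, not by this file.

// Illustrative only — endpoints below are assumed, not taken from this diff.
import api from 'api'

async function fetchTrack(slug) {
  // get() appends stringified query parameters and resolves with parsed JSON,
  // or throws RequestError for non-2xx responses.
  return await api.get(`/tracks/${slug}`, {query: {details: true}})
}

function downloadTrackFile(slug) {
  // downloadFile() reads the filename from the Content-Disposition header
  // and hands the blob to downloadjs.
  return api.downloadFile(`/tracks/${slug}/download`) // assumed endpoint
}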
Some files were not shown because too many files have changed in this diff.