Compare commits
524 commits
Author | SHA1 | Date | |
---|---|---|---|
Benjamin Yule Bädorf | 710a37dac3 | ||
fbf4d739f5 | |||
ec669fa077 | |||
f7c0d48c22 | |||
7bffc3a2b3 | |||
241a43c4ad | |||
4940679201 | |||
6d35001f8d | |||
c41aa3f6a0 | |||
278bcfc603 | |||
ba7de7582d | |||
4fa1d31f33 | |||
be6c736148 | |||
1faaa6e7b4 | |||
4f44cc0e56 | |||
74c7e6444e | |||
4ebffc529f | |||
b9c9a61ca1 | |||
f23ecc37e4 | |||
d29c68432d | |||
5b91449749 | |||
31d8390bdc | |||
14c7f6e88b | |||
c897412f99 | |||
43765092c3 | |||
1a1232f2a7 | |||
4a87489b3f | |||
7dd6b68da8 | |||
0233045959 | |||
b1cfd30da9 | |||
da82303042 | |||
497e1b739a | |||
d8e8d9aec1 | |||
e1763e0d3c | |||
de029fa3d2 | |||
0766467412 | |||
edc3c37abb | |||
41ce56ac09 | |||
7ff88aba15 | |||
a6811d4ba2 | |||
d3fbb113f3 | |||
c249b1638e | |||
dd2e995720 | |||
612a443dde | |||
1c53230b4d | |||
7e44f6d31d | |||
a946ea53c9 | |||
fb3e8bf701 | |||
56c9d2e455 | |||
c359d945da | |||
a66d96568e | |||
dc89db5471 | |||
10fd02804e | |||
5108eb02ce | |||
251be4a699 | |||
dd72ed791f | |||
ce8054b7ae | |||
0d9ddf4884 | |||
6fb5dfe6de | |||
10f6b0c0c9 | |||
8ce5816f53 | |||
dd912bcd0d | |||
39d90b3606 | |||
e13bc759d7 | |||
0a18cda691 | |||
761908a987 | |||
c4cc4a9078 | |||
ac90d50239 | |||
59f074cb28 | |||
4c1c95e4ff | |||
69d7f64ead | |||
276a2ddc69 | |||
de8d371b65 | |||
cf8358d14b | |||
eda3bf2688 | |||
df0466c6f1 | |||
9882b2041f | |||
a7566fb6b3 | |||
b6cf59a09d | |||
2f8e40db08 | |||
fa29deb397 | |||
665816cc98 | |||
0d44560830 | |||
61b74e90fd | |||
2c27a2c549 | |||
141460c79f | |||
4fe7d45dec | |||
cbab83e6e3 | |||
5a78d7eb38 | |||
56905fdf75 | |||
6c458a43f6 | |||
84ab957aa0 | |||
ed272b4e4a | |||
b9aaf23e0a | |||
78dca1477c | |||
215801f2b0 | |||
6d71b88010 | |||
e0070fc794 | |||
518bcd81ef | |||
7ae4ebebb6 | |||
382db5a11e | |||
3a97b07325 | |||
bea4174b37 | |||
78561d5929 | |||
7e51976c06 | |||
ec53591ce0 | |||
9e80113089 | |||
e7b02b170e | |||
94d23adcd2 | |||
d889abc798 | |||
1d2218b2df | |||
c1ccec9664 | |||
dec165341b | |||
426e6c8593 | |||
8d1d575215 | |||
0b5fe015d9 | |||
61890c6a5c | |||
c1c3797eb8 | |||
fc930fe433 | |||
5cfc8aae39 | |||
8096c2c2d2 | |||
c3ed4f24dd | |||
1a3b971a71 | |||
201db32050 | |||
a737d1ac1b | |||
57af4614b1 | |||
8878a71c14 | |||
dcfcd21c96 | |||
c02b40b0d3 | |||
e0cb36565a | |||
7716da8844 | |||
5beb5ac0d3 | |||
598ba8d187 | |||
24aaca654f | |||
373fab6e90 | |||
00f018c61c | |||
4a3681ec16 | |||
854332a188 | |||
7ed2e8532e | |||
3b21459805 | |||
10ced9d65e | |||
a736984265 | |||
e60c42990b | |||
3d1ac596b2 | |||
4e31f21059 | |||
d7a172b39c | |||
36f1675577 | |||
85e5e1ba65 | |||
f158414f24 | |||
6f7c8d54f2 | |||
ab6cc6f6d0 | |||
fe7d7ce274 | |||
76943fb1f0 | |||
248f8b4a6f | |||
a977e2d1c3 | |||
2cff606092 | |||
a85379418e | |||
1533fdc450 | |||
f1f40a254a | |||
682b41f2a4 | |||
c020839b31 | |||
31fac13f8a | |||
5f3ac69f60 | |||
ed9ed68d83 | |||
2755d6b2b5 | |||
617011c528 | |||
76b1b41b4b | |||
1ad5fe562e | |||
a11a3c4b8c | |||
a3d548cd4b | |||
eda0fe29b2 | |||
a0852fdc41 | |||
e266a4f40a | |||
0cbf03cd56 | |||
4907f038da | |||
8bc83a5f18 | |||
afc801aefc | |||
225a238e77 | |||
5e8830cc15 | |||
f70f4d5716 | |||
f36e38b10b | |||
ad5a0bfbf6 | |||
8ba5d8e3ad | |||
66dd84982c | |||
8728347695 | |||
cb6c94f7a5 | |||
04bf99b7cb | |||
2fd664f79a | |||
2e50e0c59c | |||
62528a04da | |||
96d157b226 | |||
c61157aca3 | |||
f229ab4112 | |||
4417263019 | |||
e5b48f8ffd | |||
4d0002e6d8 | |||
959cb7d2b7 | |||
850b907995 | |||
f0f804ae76 | |||
d7d00ac3fd | |||
6126e2273b | |||
388539fd71 | |||
b1071a34d3 | |||
36d6bb026c | |||
157b970b29 | |||
76270d199e | |||
1c52ce7de9 | |||
6893d7b56f | |||
36fd8c492c | |||
6ef233a2a2 | |||
85fcdea403 | |||
9a1c412597 | |||
5a5948b653 | |||
f3a1ca4165 | |||
34660b266c | |||
cb837ef5f2 | |||
e09c257995 | |||
0c43e49bb4 | |||
51f75fcf61 | |||
49f7827b51 | |||
8f2861a8c9 | |||
40d23c537e | |||
a013dae3fe | |||
bc17c72fdb | |||
f5be2b20f8 | |||
835aeeb483 | |||
fd06baeeb5 | |||
509e784521 | |||
abb935694e | |||
38e14c0084 | |||
b72499b29e | |||
2a9e3549b5 | |||
741ff0d488 | |||
21055e669a | |||
82f20e6354 | |||
af3e9574e4 | |||
7e33fb6424 | |||
70fa1a41c4 | |||
a71dadfc7f | |||
600457fe19 | |||
a884ac88d8 | |||
8135d4ed51 | |||
ba887e2208 | |||
ec7a4506f9 | |||
5a7900d269 | |||
bdc68e950e | |||
3ef6dcf5d9 | |||
d10b91804c | |||
01bde30d0c | |||
71a04b1611 | |||
7fc9558e42 | |||
8bb5d71186 | |||
412349cf4f | |||
1735f44769 | |||
e82f2c9a0e | |||
0f816e1680 | |||
e4e9f921b6 | |||
9df2914b86 | |||
af174bc930 | |||
12224db3b9 | |||
85911a2c97 | |||
b43f7a2ebb | |||
4505ddd0ee | |||
b8ab7da1a9 | |||
5ac2900e63 | |||
6a34eaf819 | |||
0d49945018 | |||
96642d2255 | |||
b66784f1ed | |||
15aaf06168 | |||
41e7fb001c | |||
530c604623 | |||
8a4fbf954c | |||
2ce0338f38 | |||
3da467800d | |||
0c256d8923 | |||
1c09725ff1 | |||
57976b4cec | |||
ee6163b301 | |||
057e2bcc6c | |||
442143a651 | |||
c63fd49245 | |||
6fd56334e2 | |||
076bc988df | |||
810bd39152 | |||
ab6e2b8a74 | |||
184e5c6f8f | |||
7b6e8cdf65 | |||
258c0fbaac | |||
33a8706109 | |||
724e48f738 | |||
6add053a92 | |||
dd5b31dcc9 | |||
cdcee6e29c | |||
a5f5acd6d5 | |||
f1f7ff1976 | |||
6158589b6f | |||
4e45ec6744 | |||
e95f5096db | |||
25ec75e781 | |||
5395712c3a | |||
e99aa62639 | |||
ee13e8e2f5 | |||
3db5132199 | |||
1669713fc5 | |||
6b38540586 | |||
f0c715bcbc | |||
83e945c7ff | |||
4bf23143e0 | |||
776275c52b | |||
69e8591fae | |||
09fe1a7ac0 | |||
99f33aa988 | |||
84683c7789 | |||
be4e0055cf | |||
3aee226630 | |||
7d2c45da43 | |||
456554c1de | |||
2592fe029a | |||
5309527c3e | |||
6e19411314 | |||
7669330aaa | |||
86bbf50ea2 | |||
4003d5e938 | |||
40784ba51e | |||
525004ab15 | |||
004ad46251 | |||
2f375dc24d | |||
2595c0bcc4 | |||
7ad5fad056 | |||
61efdeb673 | |||
fe3aa7a8f6 | |||
38b1b92210 | |||
15dfb2dc3b | |||
9c7b1bc662 | |||
618230601e | |||
989b294791 | |||
e481356af4 | |||
c283bed13c | |||
54cc80e5bc | |||
66ab9a73ef | |||
9ade5ecc7a | |||
88ea2a44d3 | |||
63adbc58cd | |||
0e5600b38d | |||
8fc755f153 | |||
ce805556ec | |||
8c6579b9bf | |||
4f382819fd | |||
1c39476ac5 | |||
04add36a5b | |||
40f336336b | |||
a7bbc50ac1 | |||
a8a416ca68 | |||
75323ebc79 | |||
8dec4c8262 | |||
9a13631097 | |||
6ed9edf191 | |||
bf1536f443 | |||
5fcb959002 | |||
f183b9e33a | |||
b06b7ba1ad | |||
93678375c2 | |||
a321cff6ea | |||
49101489fe | |||
947058e987 | |||
2d8194c7c2 | |||
2b9bdb6ff1 | |||
fe62af9d97 | |||
c90d4da97a | |||
8998ffa10a | |||
16903042bc | |||
2762e1f923 | |||
004deb8e60 | |||
4b270877ca | |||
5d96009b01 | |||
ea106539c6 | |||
12ef37392b | |||
12686abe14 | |||
f54fe701e7 | |||
7add8caaa1 | |||
bde1b77b48 | |||
ea6a631560 | |||
53e8d3ea45 | |||
fbc0e26912 | |||
e20b291823 | |||
b541f0b3fe | |||
b6aa0cb1c9 | |||
131afd5adc | |||
c85f261292 | |||
c353d2afc1 | |||
da33d814ae | |||
32e86be1d1 | |||
07ec5b40a0 | |||
67b6341f68 | |||
e3ec5ce1f9 | |||
f2fa806cab | |||
79f3469df8 | |||
ec60fc3873 | |||
a866eb3ab7 | |||
65b261630f | |||
6e5555782d | |||
e8a66fa7b9 | |||
ddeb5f9195 | |||
4742a6df13 | |||
e74d2c9130 | |||
8fd28dd130 | |||
0cd5525401 | |||
7901a78754 | |||
6b1e6eb06c | |||
62e8ff241c | |||
d81baaed1e | |||
2996ec35c3 | |||
9e6ee5deba | |||
33b14b16a3 | |||
7bba46fa52 | |||
305850ea63 | |||
67c6b3a725 | |||
0784f916b6 | |||
50d6697b5b | |||
22ce863ad4 | |||
90a36f8304 | |||
ee0e77c85b | |||
4eb504d719 | |||
59651d3cb9 | |||
4c28187741 | |||
e52575ab87 | |||
c7202eadd2 | |||
85d93fe598 | |||
1705d03683 | |||
4fe6d77a23 | |||
aaea78daf7 | |||
35fc7193b7 | |||
14039f30f3 | |||
88937b2f49 | |||
f38823d6c5 | |||
5b676bdc03 | |||
c143500605 | |||
dab902aa22 | |||
7162589617 | |||
e6350cdb66 | |||
00eece8406 | |||
3bb87989b2 | |||
1dce9aba60 | |||
fb1d04d98e | |||
54d0a56b9a | |||
630f8ca10c | |||
b34fbb1ee7 | |||
420b4f2a85 | |||
41313f6f63 | |||
5c28f1d344 | |||
95a28ec457 | |||
9b633a3c88 | |||
f2e6c062d3 | |||
94dae5e88a | |||
fb8e622d7c | |||
451abe28b5 | |||
16b89ce08b | |||
bc682a22a1 | |||
7aee81dcee | |||
ce2a27ed51 | |||
cf30037a85 | |||
798ff9fb1b | |||
5193acd9cd | |||
741ee78de3 | |||
5e213b8ea5 | |||
ecaf398f3b | |||
ba0db9a175 | |||
f5b6350d8a | |||
8612bb08a6 | |||
3d8238307a | |||
a4756873e5 | |||
c380b0d1fb | |||
73e855550c | |||
34042ede54 | |||
76620c5e8f | |||
9a7043ea71 | |||
817de8fae5 | |||
31af59819e | |||
c32a475b96 | |||
760ea4c013 | |||
912aebb9d5 | |||
44a18657f6 | |||
23843481c6 | |||
9002802d70 | |||
4f5177a5fc | |||
a00189d56a | |||
7cdf6b2d17 | |||
09dfe24045 | |||
e9ea0645c9 | |||
840ecc6d6f | |||
13b6dd8691 | |||
bca3582e30 | |||
764a711a9e | |||
39f5a011ed | |||
b80b219748 | |||
e1a9898fd9 | |||
da7f3fb432 | |||
46466874da | |||
5b4ec4d69e | |||
a0d9c2dbc4 | |||
e10b29e0c6 | |||
2c5ba1d5a0 | |||
4779965377 | |||
470dfe339d | |||
ccd3d80bae | |||
ad448efd7c | |||
d5a1eed27a | |||
cc4679d048 | |||
40882549f7 | |||
c53796f9b6 | |||
7ab7e4918e | |||
eef5deca70 | |||
6a1b193e06 | |||
45bbde1037 | |||
d1d7921808 | |||
6297fcd56f | |||
12bd42a3bb | |||
39e1d2a9f4 | |||
768f0f541b | |||
5ce2947fea | |||
6ba29e68a0 | |||
1e0544802f | |||
254b262a72 |
10
.dockerignore
Normal file
10
.dockerignore
Normal file
|
@ -0,0 +1,10 @@
|
|||
local
|
||||
*.user
|
||||
frontend/node_modules
|
||||
api/.pyenv
|
||||
.git
|
||||
cache
|
||||
data
|
||||
tile-generator/cache
|
||||
tile-generator/data
|
||||
tile-generator/build
|
19
.editorconfig
Normal file
19
.editorconfig
Normal file
|
@ -0,0 +1,19 @@
|
|||
root = true
|
||||
|
||||
[*]
|
||||
end_of_line = lf
|
||||
insert_final_newline = true
|
||||
trim_trailing_whitespace = true
|
||||
charset = utf-8
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
[Makefile]
|
||||
indent_style = tab
|
||||
indent_size = 4
|
||||
|
||||
[*.md]
|
||||
trim_trailing_whitespace = false
|
||||
|
||||
[*.{py,rs}]
|
||||
indent_size = 4
|
20
.forgejo/workflows/build-image.yml
Normal file
20
.forgejo/workflows/build-image.yml
Normal file
|
@ -0,0 +1,20 @@
|
|||
name: Build docker image
|
||||
on: [push]
|
||||
|
||||
jobs:
|
||||
build-image:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: catthehacker/ubuntu:act-latest
|
||||
steps:
|
||||
- name: Login to Forgejo docker registry
|
||||
uses: docker/login-action@v3.0.0
|
||||
with:
|
||||
registry: git.pub.solar
|
||||
username: hakkonaut
|
||||
password: ${{ secrets.GIT_AUTH_TOKEN }}
|
||||
- name: Build and push
|
||||
uses: docker/build-push-action@v5.1.0
|
||||
with:
|
||||
push: true
|
||||
tags: git.pub.solar/pub-solar/obs-portal:latest
|
2
.gitignore
vendored
2
.gitignore
vendored
|
@ -1 +1,3 @@
|
|||
local
|
||||
data
|
||||
export
|
||||
|
|
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
|
@ -0,0 +1,3 @@
|
|||
[submodule "api/scripts"]
|
||||
path = api/scripts
|
||||
url = https://github.com/openbikesensor/OpenBikeSensor-Scripts
|
191
CHANGELOG.md
Normal file
191
CHANGELOG.md
Normal file
|
@ -0,0 +1,191 @@
|
|||
# Changelog
|
||||
|
||||
## 0.8.1
|
||||
|
||||
### Improvements
|
||||
|
||||
* The zone (urban/rural) is now also exported with the events GeoJson export.
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Update to a current version of gpstime (python dependency) fixes portal startup.
|
||||
|
||||
## 0.8.0
|
||||
|
||||
### Features
|
||||
|
||||
* Bulk actions on users owned tracks (reprocess, download, make private, make public, delete) (#269, #38)
|
||||
* Easy sorting by device for "multi-device users" (e.g. group lending out OBSes)
|
||||
* Region display at higher zoom levels to easily find interesting areas (#112)
|
||||
* Export of road statistics on top of the already-existing event statistics (#341)
|
||||
|
||||
### Improvements
|
||||
|
||||
* Refactored database access to hopefully combat portal crashes (#337)
|
||||
* New infrastructure for map imports that makes import of larger maps possible on small VMs (#334)
|
||||
* Reference current postgres and postgis versions in docker-compose.yaml files (#286)
|
||||
* Configurable terms-and-conditions link (#320)
|
||||
* French translation by @cbiteau (#303)
|
||||
|
||||
### Bug Fixes
|
||||
|
||||
* Logout not working (#285)
|
||||
* Duplicate road usage hashes (#335, #253)
|
||||
* cannot import name .... (#338)
|
||||
|
||||
## 0.7.0
|
||||
|
||||
### Features
|
||||
|
||||
* Add histogram of overtaking distances in road details panel
|
||||
* Flip table in road details panel and make it easier to read
|
||||
* Implement difference between urban and rural for events and road segments.
|
||||
* Better road zone detection in import
|
||||
* Make the frontend translatable and add German translation
|
||||
* Add time and user filters to map view (for logged-in users only)
|
||||
|
||||
### Improvements
|
||||
|
||||
* Make raw track not look like a river (#252)
|
||||
* Update many dependencies
|
||||
|
||||
### Bug fixes
|
||||
|
||||
* Overtaking events are now deleted when the parent track is deleted (#206)
|
||||
* Remove useless session creation (#192)
|
||||
* Remove some error logs for canceled requests (as the map page tends to do that quite a lot)
|
||||
* Fix ExportPage bounding box input
|
||||
|
||||
|
||||
## 0.6.2
|
||||
|
||||
### Improvements
|
||||
|
||||
* Prevent directory traversals inside container on python-served frontend.
|
||||
|
||||
## 0.6.1
|
||||
|
||||
### Improvements
|
||||
|
||||
* Make road details request (clicking on a road segment in the map) way faster
|
||||
by using PostGIS geometry index correctly (#226).
|
||||
|
||||
## 0.6.0
|
||||
|
||||
Starting in this version, the database schema is created through migrations
|
||||
instead of using the `reset_database.py` script. This means that for both the
|
||||
initial setup, as well as for upgrades, only the migrations have to be run.
|
||||
|
||||
After updating and migrating, it is good practice to regenerate the SQL tile
|
||||
functions (`api/tools/prepare_sql_tiles.py`) as well. It doesn't matter if you
|
||||
do this when it is not required, so we've written a simple all-in-one update
|
||||
script that you can run to do all upgrade tasks. This is now in
|
||||
`api/tools/upgrade.py`.
|
||||
|
||||
Please check [`UPGRADING.md`](./UPGRADING.md) for more details if you're
|
||||
upgrading an existing installation. It contains an important note for this
|
||||
upgrade in particular.
|
||||
|
||||
## 0.5.1
|
||||
|
||||
Maintenance release, only includes build, deployment and documentation changes.
|
||||
|
||||
## 0.5.0
|
||||
|
||||
### Features
|
||||
|
||||
* Use discrete colors for distances, with greens only above 1.5m
|
||||
* Use viridis colormap for roads' count layers
|
||||
* Generate usage count information (how often has a road been traveled)
|
||||
* Project the whole track to the map, and show both versions
|
||||
* Log out of OpenID server when logging out of application
|
||||
* Convert speed units to km/h in frontend
|
||||
* Pages now have titles (#148)
|
||||
* Remove map from home page, it was empty anyway (#120)
|
||||
|
||||
### Internal
|
||||
|
||||
* Add alembic setup for migrating
|
||||
* Build osm2pgsql with -j4
|
||||
* Update sqlalchemy[asyncio] requirement from ~=1.4.31 to ~=1.4.32 in /api
|
||||
|
||||
## 0.4.2
|
||||
|
||||
### Features
|
||||
|
||||
### Bugfixes
|
||||
|
||||
* Fix export route, it should be a child of /api
|
||||
|
||||
## 0.4.1
|
||||
|
||||
### Features
|
||||
|
||||
* Add page for exporting data through web frontend
|
||||
* Generate GPX track file when importing a track
|
||||
* Add GPX track export button on the track page (accessible for anybody who can
|
||||
see the track)
|
||||
|
||||
## 0.4.0
|
||||
|
||||
### Improvements
|
||||
|
||||
* Retry OpenID Connect connection if it fails on boot
|
||||
* Format log outputs with color and improve access log
|
||||
* Make pool_size and overflow configurable for worker and portal
|
||||
* Add a route for exporting events as GeoJSON/Shapefile
|
||||
* Point footer to forum, not slack (fixes #140)
|
||||
* Improve wording on profile page ("My" instead of "Your")
|
||||
* Show "My tracks" directly in main menu (fixes #136)
|
||||
|
||||
### Bugfixes
|
||||
|
||||
* Make sure the API can recover from the broken postgresql connection state
|
||||
* Remove duplicate events from the same track
|
||||
* Fix direction of road segments (fixes #142)
|
||||
* Solve a few problems with the colormap scales in the map view
|
||||
|
||||
### Docs & deployment
|
||||
|
||||
* Greatly improve deployment docs for a simple follow-along routine
|
||||
* Use environment variables (`OBS_*`) for configuration
|
||||
* Fix port numbers in example files and expose 3000 in the image
|
||||
* Add `LEAN_MODE` configuration to disable `road` database table usage and fall
|
||||
back to Overpass API for processing tracks (see
|
||||
[docs/lean-mode.md](docs/lean-mode.md)).
|
||||
* Read `config.overrides.py` file if it exists
|
||||
* Add osm2pgsql to portal image to be able to import OSM data from within the
|
||||
container
|
||||
* Fix path to roads_import.lua in docs
|
||||
* Explain to use the portal service, instead of api, in production
|
||||
* Use entrypoint instead of command, so you can run process_track.py one-off tasks
|
||||
|
||||
### Internals
|
||||
|
||||
* Use custom `get_single_arg` everywhere, remove sanicargs (fixes #193)
|
||||
* Update requirements and make them consistent
|
||||
* Fix error handling, especially for file uploads
|
||||
|
||||
|
||||
## 0.3.4
|
||||
|
||||
### Features
|
||||
|
||||
* Reintroduce event view (fixes #111)
|
||||
* Add layer configuration panel to map page
|
||||
- Allow choosing basemap style
|
||||
- Add toggles for event and road layers
|
||||
- Make untagged roads display optional
|
||||
- Show a legend for event color
|
||||
- Allow choosing attribute used for coloring road segments
|
||||
* Add optional banner to frontend via config entry (solves #128)
|
||||
|
||||
### Bugfixes
|
||||
|
||||
* Clicking on road without events should not cause 500 error
|
||||
* Improve mobile layout a bit (fixes #123)
|
||||
|
||||
### Technical
|
||||
|
||||
* Allow explicit configuration of api base url via `API_URL` config
|
||||
* Remove outdated "mapTileset" frontend config section
|
674
COPYING
Normal file
674
COPYING
Normal file
|
@ -0,0 +1,674 @@
|
|||
GNU GENERAL PUBLIC LICENSE
|
||||
Version 3, 29 June 2007
|
||||
|
||||
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
Preamble
|
||||
|
||||
The GNU General Public License is a free, copyleft license for
|
||||
software and other kinds of works.
|
||||
|
||||
The licenses for most software and other practical works are designed
|
||||
to take away your freedom to share and change the works. By contrast,
|
||||
the GNU General Public License is intended to guarantee your freedom to
|
||||
share and change all versions of a program--to make sure it remains free
|
||||
software for all its users. We, the Free Software Foundation, use the
|
||||
GNU General Public License for most of our software; it applies also to
|
||||
any other work released this way by its authors. You can apply it to
|
||||
your programs, too.
|
||||
|
||||
When we speak of free software, we are referring to freedom, not
|
||||
price. Our General Public Licenses are designed to make sure that you
|
||||
have the freedom to distribute copies of free software (and charge for
|
||||
them if you wish), that you receive source code or can get it if you
|
||||
want it, that you can change the software or use pieces of it in new
|
||||
free programs, and that you know you can do these things.
|
||||
|
||||
To protect your rights, we need to prevent others from denying you
|
||||
these rights or asking you to surrender the rights. Therefore, you have
|
||||
certain responsibilities if you distribute copies of the software, or if
|
||||
you modify it: responsibilities to respect the freedom of others.
|
||||
|
||||
For example, if you distribute copies of such a program, whether
|
||||
gratis or for a fee, you must pass on to the recipients the same
|
||||
freedoms that you received. You must make sure that they, too, receive
|
||||
or can get the source code. And you must show them these terms so they
|
||||
know their rights.
|
||||
|
||||
Developers that use the GNU GPL protect your rights with two steps:
|
||||
(1) assert copyright on the software, and (2) offer you this License
|
||||
giving you legal permission to copy, distribute and/or modify it.
|
||||
|
||||
For the developers' and authors' protection, the GPL clearly explains
|
||||
that there is no warranty for this free software. For both users' and
|
||||
authors' sake, the GPL requires that modified versions be marked as
|
||||
changed, so that their problems will not be attributed erroneously to
|
||||
authors of previous versions.
|
||||
|
||||
Some devices are designed to deny users access to install or run
|
||||
modified versions of the software inside them, although the manufacturer
|
||||
can do so. This is fundamentally incompatible with the aim of
|
||||
protecting users' freedom to change the software. The systematic
|
||||
pattern of such abuse occurs in the area of products for individuals to
|
||||
use, which is precisely where it is most unacceptable. Therefore, we
|
||||
have designed this version of the GPL to prohibit the practice for those
|
||||
products. If such problems arise substantially in other domains, we
|
||||
stand ready to extend this provision to those domains in future versions
|
||||
of the GPL, as needed to protect the freedom of users.
|
||||
|
||||
Finally, every program is threatened constantly by software patents.
|
||||
States should not allow patents to restrict development and use of
|
||||
software on general-purpose computers, but in those that do, we wish to
|
||||
avoid the special danger that patents applied to a free program could
|
||||
make it effectively proprietary. To prevent this, the GPL assures that
|
||||
patents cannot be used to render the program non-free.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow.
|
||||
|
||||
TERMS AND CONDITIONS
|
||||
|
||||
0. Definitions.
|
||||
|
||||
"This License" refers to version 3 of the GNU General Public License.
|
||||
|
||||
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||
works, such as semiconductor masks.
|
||||
|
||||
"The Program" refers to any copyrightable work licensed under this
|
||||
License. Each licensee is addressed as "you". "Licensees" and
|
||||
"recipients" may be individuals or organizations.
|
||||
|
||||
To "modify" a work means to copy from or adapt all or part of the work
|
||||
in a fashion requiring copyright permission, other than the making of an
|
||||
exact copy. The resulting work is called a "modified version" of the
|
||||
earlier work or a work "based on" the earlier work.
|
||||
|
||||
A "covered work" means either the unmodified Program or a work based
|
||||
on the Program.
|
||||
|
||||
To "propagate" a work means to do anything with it that, without
|
||||
permission, would make you directly or secondarily liable for
|
||||
infringement under applicable copyright law, except executing it on a
|
||||
computer or modifying a private copy. Propagation includes copying,
|
||||
distribution (with or without modification), making available to the
|
||||
public, and in some countries other activities as well.
|
||||
|
||||
To "convey" a work means any kind of propagation that enables other
|
||||
parties to make or receive copies. Mere interaction with a user through
|
||||
a computer network, with no transfer of a copy, is not conveying.
|
||||
|
||||
An interactive user interface displays "Appropriate Legal Notices"
|
||||
to the extent that it includes a convenient and prominently visible
|
||||
feature that (1) displays an appropriate copyright notice, and (2)
|
||||
tells the user that there is no warranty for the work (except to the
|
||||
extent that warranties are provided), that licensees may convey the
|
||||
work under this License, and how to view a copy of this License. If
|
||||
the interface presents a list of user commands or options, such as a
|
||||
menu, a prominent item in the list meets this criterion.
|
||||
|
||||
1. Source Code.
|
||||
|
||||
The "source code" for a work means the preferred form of the work
|
||||
for making modifications to it. "Object code" means any non-source
|
||||
form of a work.
|
||||
|
||||
A "Standard Interface" means an interface that either is an official
|
||||
standard defined by a recognized standards body, or, in the case of
|
||||
interfaces specified for a particular programming language, one that
|
||||
is widely used among developers working in that language.
|
||||
|
||||
The "System Libraries" of an executable work include anything, other
|
||||
than the work as a whole, that (a) is included in the normal form of
|
||||
packaging a Major Component, but which is not part of that Major
|
||||
Component, and (b) serves only to enable use of the work with that
|
||||
Major Component, or to implement a Standard Interface for which an
|
||||
implementation is available to the public in source code form. A
|
||||
"Major Component", in this context, means a major essential component
|
||||
(kernel, window system, and so on) of the specific operating system
|
||||
(if any) on which the executable work runs, or a compiler used to
|
||||
produce the work, or an object code interpreter used to run it.
|
||||
|
||||
The "Corresponding Source" for a work in object code form means all
|
||||
the source code needed to generate, install, and (for an executable
|
||||
work) run the object code and to modify the work, including scripts to
|
||||
control those activities. However, it does not include the work's
|
||||
System Libraries, or general-purpose tools or generally available free
|
||||
programs which are used unmodified in performing those activities but
|
||||
which are not part of the work. For example, Corresponding Source
|
||||
includes interface definition files associated with source files for
|
||||
the work, and the source code for shared libraries and dynamically
|
||||
linked subprograms that the work is specifically designed to require,
|
||||
such as by intimate data communication or control flow between those
|
||||
subprograms and other parts of the work.
|
||||
|
||||
The Corresponding Source need not include anything that users
|
||||
can regenerate automatically from other parts of the Corresponding
|
||||
Source.
|
||||
|
||||
The Corresponding Source for a work in source code form is that
|
||||
same work.
|
||||
|
||||
2. Basic Permissions.
|
||||
|
||||
All rights granted under this License are granted for the term of
|
||||
copyright on the Program, and are irrevocable provided the stated
|
||||
conditions are met. This License explicitly affirms your unlimited
|
||||
permission to run the unmodified Program. The output from running a
|
||||
covered work is covered by this License only if the output, given its
|
||||
content, constitutes a covered work. This License acknowledges your
|
||||
rights of fair use or other equivalent, as provided by copyright law.
|
||||
|
||||
You may make, run and propagate covered works that you do not
|
||||
convey, without conditions so long as your license otherwise remains
|
||||
in force. You may convey covered works to others for the sole purpose
|
||||
of having them make modifications exclusively for you, or provide you
|
||||
with facilities for running those works, provided that you comply with
|
||||
the terms of this License in conveying all material for which you do
|
||||
not control copyright. Those thus making or running the covered works
|
||||
for you must do so exclusively on your behalf, under your direction
|
||||
and control, on terms that prohibit them from making any copies of
|
||||
your copyrighted material outside their relationship with you.
|
||||
|
||||
Conveying under any other circumstances is permitted solely under
|
||||
the conditions stated below. Sublicensing is not allowed; section 10
|
||||
makes it unnecessary.
|
||||
|
||||
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||
|
||||
No covered work shall be deemed part of an effective technological
|
||||
measure under any applicable law fulfilling obligations under article
|
||||
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||
similar laws prohibiting or restricting circumvention of such
|
||||
measures.
|
||||
|
||||
When you convey a covered work, you waive any legal power to forbid
|
||||
circumvention of technological measures to the extent such circumvention
|
||||
is effected by exercising rights under this License with respect to
|
||||
the covered work, and you disclaim any intention to limit operation or
|
||||
modification of the work as a means of enforcing, against the work's
|
||||
users, your or third parties' legal rights to forbid circumvention of
|
||||
technological measures.
|
||||
|
||||
4. Conveying Verbatim Copies.
|
||||
|
||||
You may convey verbatim copies of the Program's source code as you
|
||||
receive it, in any medium, provided that you conspicuously and
|
||||
appropriately publish on each copy an appropriate copyright notice;
|
||||
keep intact all notices stating that this License and any
|
||||
non-permissive terms added in accord with section 7 apply to the code;
|
||||
keep intact all notices of the absence of any warranty; and give all
|
||||
recipients a copy of this License along with the Program.
|
||||
|
||||
You may charge any price or no price for each copy that you convey,
|
||||
and you may offer support or warranty protection for a fee.
|
||||
|
||||
5. Conveying Modified Source Versions.
|
||||
|
||||
You may convey a work based on the Program, or the modifications to
|
||||
produce it from the Program, in the form of source code under the
|
||||
terms of section 4, provided that you also meet all of these conditions:
|
||||
|
||||
a) The work must carry prominent notices stating that you modified
|
||||
it, and giving a relevant date.
|
||||
|
||||
b) The work must carry prominent notices stating that it is
|
||||
released under this License and any conditions added under section
|
||||
7. This requirement modifies the requirement in section 4 to
|
||||
"keep intact all notices".
|
||||
|
||||
c) You must license the entire work, as a whole, under this
|
||||
License to anyone who comes into possession of a copy. This
|
||||
License will therefore apply, along with any applicable section 7
|
||||
additional terms, to the whole of the work, and all its parts,
|
||||
regardless of how they are packaged. This License gives no
|
||||
permission to license the work in any other way, but it does not
|
||||
invalidate such permission if you have separately received it.
|
||||
|
||||
d) If the work has interactive user interfaces, each must display
|
||||
Appropriate Legal Notices; however, if the Program has interactive
|
||||
interfaces that do not display Appropriate Legal Notices, your
|
||||
work need not make them do so.
|
||||
|
||||
A compilation of a covered work with other separate and independent
|
||||
works, which are not by their nature extensions of the covered work,
|
||||
and which are not combined with it such as to form a larger program,
|
||||
in or on a volume of a storage or distribution medium, is called an
|
||||
"aggregate" if the compilation and its resulting copyright are not
|
||||
used to limit the access or legal rights of the compilation's users
|
||||
beyond what the individual works permit. Inclusion of a covered work
|
||||
in an aggregate does not cause this License to apply to the other
|
||||
parts of the aggregate.
|
||||
|
||||
6. Conveying Non-Source Forms.
|
||||
|
||||
You may convey a covered work in object code form under the terms
|
||||
of sections 4 and 5, provided that you also convey the
|
||||
machine-readable Corresponding Source under the terms of this License,
|
||||
in one of these ways:
|
||||
|
||||
a) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by the
|
||||
Corresponding Source fixed on a durable physical medium
|
||||
customarily used for software interchange.
|
||||
|
||||
b) Convey the object code in, or embodied in, a physical product
|
||||
(including a physical distribution medium), accompanied by a
|
||||
written offer, valid for at least three years and valid for as
|
||||
long as you offer spare parts or customer support for that product
|
||||
model, to give anyone who possesses the object code either (1) a
|
||||
copy of the Corresponding Source for all the software in the
|
||||
product that is covered by this License, on a durable physical
|
||||
medium customarily used for software interchange, for a price no
|
||||
more than your reasonable cost of physically performing this
|
||||
conveying of source, or (2) access to copy the
|
||||
Corresponding Source from a network server at no charge.
|
||||
|
||||
c) Convey individual copies of the object code with a copy of the
|
||||
written offer to provide the Corresponding Source. This
|
||||
alternative is allowed only occasionally and noncommercially, and
|
||||
only if you received the object code with such an offer, in accord
|
||||
with subsection 6b.
|
||||
|
||||
d) Convey the object code by offering access from a designated
|
||||
place (gratis or for a charge), and offer equivalent access to the
|
||||
Corresponding Source in the same way through the same place at no
|
||||
further charge. You need not require recipients to copy the
|
||||
Corresponding Source along with the object code. If the place to
|
||||
copy the object code is a network server, the Corresponding Source
|
||||
may be on a different server (operated by you or a third party)
|
||||
that supports equivalent copying facilities, provided you maintain
|
||||
clear directions next to the object code saying where to find the
|
||||
Corresponding Source. Regardless of what server hosts the
|
||||
Corresponding Source, you remain obligated to ensure that it is
|
||||
available for as long as needed to satisfy these requirements.
|
||||
|
||||
e) Convey the object code using peer-to-peer transmission, provided
|
||||
you inform other peers where the object code and Corresponding
|
||||
Source of the work are being offered to the general public at no
|
||||
charge under subsection 6d.
|
||||
|
||||
A separable portion of the object code, whose source code is excluded
|
||||
from the Corresponding Source as a System Library, need not be
|
||||
included in conveying the object code work.
|
||||
|
||||
A "User Product" is either (1) a "consumer product", which means any
|
||||
tangible personal property which is normally used for personal, family,
|
||||
or household purposes, or (2) anything designed or sold for incorporation
|
||||
into a dwelling. In determining whether a product is a consumer product,
|
||||
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||
product received by a particular user, "normally used" refers to a
|
||||
typical or common use of that class of product, regardless of the status
|
||||
of the particular user or of the way in which the particular user
|
||||
actually uses, or expects or is expected to use, the product. A product
|
||||
is a consumer product regardless of whether the product has substantial
|
||||
commercial, industrial or non-consumer uses, unless such uses represent
|
||||
the only significant mode of use of the product.
|
||||
|
||||
"Installation Information" for a User Product means any methods,
|
||||
procedures, authorization keys, or other information required to install
|
||||
and execute modified versions of a covered work in that User Product from
|
||||
a modified version of its Corresponding Source. The information must
|
||||
suffice to ensure that the continued functioning of the modified object
|
||||
code is in no case prevented or interfered with solely because
|
||||
modification has been made.
|
||||
|
||||
If you convey an object code work under this section in, or with, or
|
||||
specifically for use in, a User Product, and the conveying occurs as
|
||||
part of a transaction in which the right of possession and use of the
|
||||
User Product is transferred to the recipient in perpetuity or for a
|
||||
fixed term (regardless of how the transaction is characterized), the
|
||||
Corresponding Source conveyed under this section must be accompanied
|
||||
by the Installation Information. But this requirement does not apply
|
||||
if neither you nor any third party retains the ability to install
|
||||
modified object code on the User Product (for example, the work has
|
||||
been installed in ROM).
|
||||
|
||||
The requirement to provide Installation Information does not include a
|
||||
requirement to continue to provide support service, warranty, or updates
|
||||
for a work that has been modified or installed by the recipient, or for
|
||||
the User Product in which it has been modified or installed. Access to a
|
||||
network may be denied when the modification itself materially and
|
||||
adversely affects the operation of the network or violates the rules and
|
||||
protocols for communication across the network.
|
||||
|
||||
Corresponding Source conveyed, and Installation Information provided,
|
||||
in accord with this section must be in a format that is publicly
|
||||
documented (and with an implementation available to the public in
|
||||
source code form), and must require no special password or key for
|
||||
unpacking, reading or copying.
|
||||
|
||||
7. Additional Terms.
|
||||
|
||||
"Additional permissions" are terms that supplement the terms of this
|
||||
License by making exceptions from one or more of its conditions.
|
||||
Additional permissions that are applicable to the entire Program shall
|
||||
be treated as though they were included in this License, to the extent
|
||||
that they are valid under applicable law. If additional permissions
|
||||
apply only to part of the Program, that part may be used separately
|
||||
under those permissions, but the entire Program remains governed by
|
||||
this License without regard to the additional permissions.
|
||||
|
||||
When you convey a copy of a covered work, you may at your option
|
||||
remove any additional permissions from that copy, or from any part of
|
||||
it. (Additional permissions may be written to require their own
|
||||
removal in certain cases when you modify the work.) You may place
|
||||
additional permissions on material, added by you to a covered work,
|
||||
for which you have or can give appropriate copyright permission.
|
||||
|
||||
Notwithstanding any other provision of this License, for material you
|
||||
add to a covered work, you may (if authorized by the copyright holders of
|
||||
that material) supplement the terms of this License with terms:
|
||||
|
||||
a) Disclaiming warranty or limiting liability differently from the
|
||||
terms of sections 15 and 16 of this License; or
|
||||
|
||||
b) Requiring preservation of specified reasonable legal notices or
|
||||
author attributions in that material or in the Appropriate Legal
|
||||
Notices displayed by works containing it; or
|
||||
|
||||
c) Prohibiting misrepresentation of the origin of that material, or
|
||||
requiring that modified versions of such material be marked in
|
||||
reasonable ways as different from the original version; or
|
||||
|
||||
d) Limiting the use for publicity purposes of names of licensors or
|
||||
authors of the material; or
|
||||
|
||||
e) Declining to grant rights under trademark law for use of some
|
||||
trade names, trademarks, or service marks; or
|
||||
|
||||
f) Requiring indemnification of licensors and authors of that
|
||||
material by anyone who conveys the material (or modified versions of
|
||||
it) with contractual assumptions of liability to the recipient, for
|
||||
any liability that these contractual assumptions directly impose on
|
||||
those licensors and authors.
|
||||
|
||||
All other non-permissive additional terms are considered "further
|
||||
restrictions" within the meaning of section 10. If the Program as you
|
||||
received it, or any part of it, contains a notice stating that it is
|
||||
governed by this License along with a term that is a further
|
||||
restriction, you may remove that term. If a license document contains
|
||||
a further restriction but permits relicensing or conveying under this
|
||||
License, you may add to a covered work material governed by the terms
|
||||
of that license document, provided that the further restriction does
|
||||
not survive such relicensing or conveying.
|
||||
|
||||
If you add terms to a covered work in accord with this section, you
|
||||
must place, in the relevant source files, a statement of the
|
||||
additional terms that apply to those files, or a notice indicating
|
||||
where to find the applicable terms.
|
||||
|
||||
Additional terms, permissive or non-permissive, may be stated in the
|
||||
form of a separately written license, or stated as exceptions;
|
||||
the above requirements apply either way.
|
||||
|
||||
8. Termination.
|
||||
|
||||
You may not propagate or modify a covered work except as expressly
|
||||
provided under this License. Any attempt otherwise to propagate or
|
||||
modify it is void, and will automatically terminate your rights under
|
||||
this License (including any patent licenses granted under the third
|
||||
paragraph of section 11).
|
||||
|
||||
However, if you cease all violation of this License, then your
|
||||
license from a particular copyright holder is reinstated (a)
|
||||
provisionally, unless and until the copyright holder explicitly and
|
||||
finally terminates your license, and (b) permanently, if the copyright
|
||||
holder fails to notify you of the violation by some reasonable means
|
||||
prior to 60 days after the cessation.
|
||||
|
||||
Moreover, your license from a particular copyright holder is
|
||||
reinstated permanently if the copyright holder notifies you of the
|
||||
violation by some reasonable means, this is the first time you have
|
||||
received notice of violation of this License (for any work) from that
|
||||
copyright holder, and you cure the violation prior to 30 days after
|
||||
your receipt of the notice.
|
||||
|
||||
Termination of your rights under this section does not terminate the
|
||||
licenses of parties who have received copies or rights from you under
|
||||
this License. If your rights have been terminated and not permanently
|
||||
reinstated, you do not qualify to receive new licenses for the same
|
||||
material under section 10.
|
||||
|
||||
9. Acceptance Not Required for Having Copies.
|
||||
|
||||
You are not required to accept this License in order to receive or
|
||||
run a copy of the Program. Ancillary propagation of a covered work
|
||||
occurring solely as a consequence of using peer-to-peer transmission
|
||||
to receive a copy likewise does not require acceptance. However,
|
||||
nothing other than this License grants you permission to propagate or
|
||||
modify any covered work. These actions infringe copyright if you do
|
||||
not accept this License. Therefore, by modifying or propagating a
|
||||
covered work, you indicate your acceptance of this License to do so.
|
||||
|
||||
10. Automatic Licensing of Downstream Recipients.
|
||||
|
||||
Each time you convey a covered work, the recipient automatically
|
||||
receives a license from the original licensors, to run, modify and
|
||||
propagate that work, subject to this License. You are not responsible
|
||||
for enforcing compliance by third parties with this License.
|
||||
|
||||
An "entity transaction" is a transaction transferring control of an
|
||||
organization, or substantially all assets of one, or subdividing an
|
||||
organization, or merging organizations. If propagation of a covered
|
||||
work results from an entity transaction, each party to that
|
||||
transaction who receives a copy of the work also receives whatever
|
||||
licenses to the work the party's predecessor in interest had or could
|
||||
give under the previous paragraph, plus a right to possession of the
|
||||
Corresponding Source of the work from the predecessor in interest, if
|
||||
the predecessor has it or can get it with reasonable efforts.
|
||||
|
||||
You may not impose any further restrictions on the exercise of the
|
||||
rights granted or affirmed under this License. For example, you may
|
||||
not impose a license fee, royalty, or other charge for exercise of
|
||||
rights granted under this License, and you may not initiate litigation
|
||||
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||
any patent claim is infringed by making, using, selling, offering for
|
||||
sale, or importing the Program or any portion of it.
|
||||
|
||||
11. Patents.
|
||||
|
||||
A "contributor" is a copyright holder who authorizes use under this
|
||||
License of the Program or a work on which the Program is based. The
|
||||
work thus licensed is called the contributor's "contributor version".
|
||||
|
||||
A contributor's "essential patent claims" are all patent claims
|
||||
owned or controlled by the contributor, whether already acquired or
|
||||
hereafter acquired, that would be infringed by some manner, permitted
|
||||
by this License, of making, using, or selling its contributor version,
|
||||
but do not include claims that would be infringed only as a
|
||||
consequence of further modification of the contributor version. For
|
||||
purposes of this definition, "control" includes the right to grant
|
||||
patent sublicenses in a manner consistent with the requirements of
|
||||
this License.
|
||||
|
||||
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||
patent license under the contributor's essential patent claims, to
|
||||
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||
propagate the contents of its contributor version.
|
||||
|
||||
In the following three paragraphs, a "patent license" is any express
|
||||
agreement or commitment, however denominated, not to enforce a patent
|
||||
(such as an express permission to practice a patent or covenant not to
|
||||
sue for patent infringement). To "grant" such a patent license to a
|
||||
party means to make such an agreement or commitment not to enforce a
|
||||
patent against the party.
|
||||
|
||||
If you convey a covered work, knowingly relying on a patent license,
|
||||
and the Corresponding Source of the work is not available for anyone
|
||||
to copy, free of charge and under the terms of this License, through a
|
||||
publicly available network server or other readily accessible means,
|
||||
then you must either (1) cause the Corresponding Source to be so
|
||||
available, or (2) arrange to deprive yourself of the benefit of the
|
||||
patent license for this particular work, or (3) arrange, in a manner
|
||||
consistent with the requirements of this License, to extend the patent
|
||||
license to downstream recipients. "Knowingly relying" means you have
|
||||
actual knowledge that, but for the patent license, your conveying the
|
||||
covered work in a country, or your recipient's use of the covered work
|
||||
in a country, would infringe one or more identifiable patents in that
|
||||
country that you have reason to believe are valid.
|
||||
|
||||
If, pursuant to or in connection with a single transaction or
|
||||
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||
covered work, and grant a patent license to some of the parties
|
||||
receiving the covered work authorizing them to use, propagate, modify
|
||||
or convey a specific copy of the covered work, then the patent license
|
||||
you grant is automatically extended to all recipients of the covered
|
||||
work and works based on it.
|
||||
|
||||
A patent license is "discriminatory" if it does not include within
|
||||
the scope of its coverage, prohibits the exercise of, or is
|
||||
conditioned on the non-exercise of one or more of the rights that are
|
||||
specifically granted under this License. You may not convey a covered
|
||||
work if you are a party to an arrangement with a third party that is
|
||||
in the business of distributing software, under which you make payment
|
||||
to the third party based on the extent of your activity of conveying
|
||||
the work, and under which the third party grants, to any of the
|
||||
parties who would receive the covered work from you, a discriminatory
|
||||
patent license (a) in connection with copies of the covered work
|
||||
conveyed by you (or copies made from those copies), or (b) primarily
|
||||
for and in connection with specific products or compilations that
|
||||
contain the covered work, unless you entered into that arrangement,
|
||||
or that patent license was granted, prior to 28 March 2007.
|
||||
|
||||
Nothing in this License shall be construed as excluding or limiting
|
||||
any implied license or other defenses to infringement that may
|
||||
otherwise be available to you under applicable patent law.
|
||||
|
||||
12. No Surrender of Others' Freedom.
|
||||
|
||||
If conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot convey a
|
||||
covered work so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you may
|
||||
not convey it at all. For example, if you agree to terms that obligate you
|
||||
to collect a royalty for further conveying from those to whom you convey
|
||||
the Program, the only way you could satisfy both those terms and this
|
||||
License would be to refrain entirely from conveying the Program.
|
||||
|
||||
13. Use with the GNU Affero General Public License.
|
||||
|
||||
Notwithstanding any other provision of this License, you have
|
||||
permission to link or combine any covered work with a work licensed
|
||||
under version 3 of the GNU Affero General Public License into a single
|
||||
combined work, and to convey the resulting work. The terms of this
|
||||
License will continue to apply to the part which is the covered work,
|
||||
but the special requirements of the GNU Affero General Public License,
|
||||
section 13, concerning interaction through a network will apply to the
|
||||
combination as such.
|
||||
|
||||
14. Revised Versions of this License.
|
||||
|
||||
The Free Software Foundation may publish revised and/or new versions of
|
||||
the GNU General Public License from time to time. Such new versions will
|
||||
be similar in spirit to the present version, but may differ in detail to
|
||||
address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the
|
||||
Program specifies that a certain numbered version of the GNU General
|
||||
Public License "or any later version" applies to it, you have the
|
||||
option of following the terms and conditions either of that numbered
|
||||
version or of any later version published by the Free Software
|
||||
Foundation. If the Program does not specify a version number of the
|
||||
GNU General Public License, you may choose any version ever published
|
||||
by the Free Software Foundation.
|
||||
|
||||
If the Program specifies that a proxy can decide which future
|
||||
versions of the GNU General Public License can be used, that proxy's
|
||||
public statement of acceptance of a version permanently authorizes you
|
||||
to choose that version for the Program.
|
||||
|
||||
Later license versions may give you additional or different
|
||||
permissions. However, no additional obligations are imposed on any
|
||||
author or copyright holder as a result of your choosing to follow a
|
||||
later version.
|
||||
|
||||
15. Disclaimer of Warranty.
|
||||
|
||||
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. Limitation of Liability.
|
||||
|
||||
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||
SUCH DAMAGES.
|
||||
|
||||
17. Interpretation of Sections 15 and 16.
|
||||
|
||||
If the disclaimer of warranty and limitation of liability provided
|
||||
above cannot be given local legal effect according to their terms,
|
||||
reviewing courts shall apply local law that most closely approximates
|
||||
an absolute waiver of all civil liability in connection with the
|
||||
Program, unless a warranty or assumption of liability accompanies a
|
||||
copy of the Program in return for a fee.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Programs
|
||||
|
||||
If you develop a new program, and you want it to be of the greatest
|
||||
possible use to the public, the best way to achieve this is to make it
|
||||
free software which everyone can redistribute and change under these terms.
|
||||
|
||||
To do so, attach the following notices to the program. It is safest
|
||||
to attach them to the start of each source file to most effectively
|
||||
state the exclusion of warranty; and each file should have at least
|
||||
the "copyright" line and a pointer to where the full notice is found.
|
||||
|
||||
<one line to give the program's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This program is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
This program is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU General Public License
|
||||
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
If the program does terminal interaction, make it output a short
|
||||
notice like this when it starts in an interactive mode:
|
||||
|
||||
<program> Copyright (C) <year> <name of author>
|
||||
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||
This is free software, and you are welcome to redistribute it
|
||||
under certain conditions; type `show c' for details.
|
||||
|
||||
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||
parts of the General Public License. Of course, your program's commands
|
||||
might be different; for a GUI interface, you would use an "about box".
|
||||
|
||||
You should also get your employer (if you work as a programmer) or school,
|
||||
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||
For more information on this, and how to apply and follow the GNU GPL, see
|
||||
<https://www.gnu.org/licenses/>.
|
||||
|
||||
The GNU General Public License does not permit incorporating your program
|
||||
into proprietary programs. If your program is a subroutine library, you
|
||||
may consider it more useful to permit linking proprietary applications with
|
||||
the library. If this is what you want to do, use the GNU Lesser General
|
||||
Public License instead of this License. But first, please read
|
||||
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
61
Dockerfile
Normal file
61
Dockerfile
Normal file
|
@ -0,0 +1,61 @@
|
|||
# This dockerfile is for the API + Frontend production image
|
||||
|
||||
#############################################
|
||||
# Build the frontend AS builder
|
||||
#############################################
|
||||
|
||||
FROM node:18 as frontend-builder
|
||||
|
||||
WORKDIR /opt/obs/frontend
|
||||
ADD frontend/package.json frontend/package-lock.json /opt/obs/frontend/
|
||||
RUN echo update-notifier=false >> ~/.npmrc
|
||||
RUN npm ci
|
||||
|
||||
ADD frontend/tsconfig.json frontend/webpack.config.js /opt/obs/frontend/
|
||||
ADD frontend/src /opt/obs/frontend/src/
|
||||
ADD frontend/public /opt/obs/frontend/public/
|
||||
|
||||
RUN npm run build
|
||||
|
||||
#############################################
|
||||
# Build the API and add the built frontend to it
|
||||
#############################################
|
||||
|
||||
FROM python:3.11.3-bullseye
|
||||
|
||||
RUN apt-get update &&\
|
||||
apt-get install -y \
|
||||
libboost-dev \
|
||||
libboost-system-dev \
|
||||
libboost-filesystem-dev \
|
||||
libexpat1-dev \
|
||||
zlib1g-dev \
|
||||
libbz2-dev \
|
||||
libpq-dev \
|
||||
libproj-dev \
|
||||
lua5.3 \
|
||||
liblua5.3-dev &&\
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
WORKDIR /opt/obs/api
|
||||
|
||||
ADD api/requirements.txt /opt/obs/api/
|
||||
RUN pip install -r requirements.txt
|
||||
|
||||
ADD tile-generator /opt/obs/tile-generator
|
||||
|
||||
ADD api/scripts /opt/obs/scripts
|
||||
RUN pip install -e /opt/obs/scripts
|
||||
|
||||
ADD api/setup.py /opt/obs/api/
|
||||
ADD api/alembic.ini /opt/obs/api/
|
||||
ADD api/migrations /opt/obs/api/migrations/
|
||||
ADD api/obs /opt/obs/api/obs/
|
||||
ADD api/tools /opt/obs/api/tools/
|
||||
RUN pip install -e /opt/obs/api/
|
||||
|
||||
COPY --from=frontend-builder /opt/obs/frontend/build /opt/obs/frontend/build
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
CMD ["openbikesensor-api"]
|
296
README.md
296
README.md
|
@ -4,121 +4,227 @@ This repository contains the source code required to run the
|
|||
[OpenBikeSensor](https://openbikesensor.org) data collection portal. It is
|
||||
separated into components:
|
||||
|
||||
* **api**: The backend service, written in JavaScript for Node.js, using
|
||||
express.js, and a MongoDB for metadata storage.
|
||||
* **api**: The backend service, written in Python 3 with
|
||||
[Sanic](https://sanicframework.org/),
|
||||
[SQLAlchemy](https://www.sqlalchemy.org/), and a PostgreSQL/PostGIS database
|
||||
for storage. It also depends highly on
|
||||
[OpenMapTiles](https://openmaptiles.org) to generate vector tiles of the
|
||||
data.
|
||||
* **frontend**: A React single-page application that allows access to the data,
|
||||
provides summaries and visualizations, and lets users adjust settings and
|
||||
manage and publish their tracks.
|
||||
|
||||
Check out the [Architecture Documentation](docs/architecture.md) for more
|
||||
details on what parts the whole application is made of.
|
||||
|
||||
This project follows [semantic versioning](https://semver.org). Refer to [issue
|
||||
#44](https://github.com/openbikesensor/portal/issues/44) for a description of
|
||||
what that means for our project and what is considered the public interface.
|
||||
|
||||
## Clone the Project
|
||||
|
||||
First of all, you must clone this project. This project uses submodules,
|
||||
thus ensure that they are cloned as well:
|
||||
|
||||
```bash
|
||||
git clone --recursive https://github.com/openbikesensor/portal
|
||||
|
||||
# ... or if you forgot the --recursive argument, you can run this in the
|
||||
# repository's directory later:
|
||||
git submodule update --init --recursive
|
||||
```
|
||||
|
||||
## Production setup
|
||||
|
||||
There is a guide for a deployment based on docker at
|
||||
[docs/production-deployment.md](docs/production-deployment.md). Lots of
|
||||
non-docker deployment strategies are possible, but they are not "officially"
|
||||
supported, so please do not expect the authors of the software to assist in
|
||||
troubleshooting.
|
||||
|
||||
This is a rather complex application, and it is expected that you know the
|
||||
basics of deploying a modern web application securely onto a production server.
|
||||
We are sorry that we cannot guide you through all the details of that, as we
|
||||
just don't have the capacities to do so. Please research the respective topics
|
||||
first. If you struggle with application-specific issues, please let us know, we
|
||||
might be able to assist with those.
|
||||
|
||||
Please note that you will always need to install your own reverse proxy that
|
||||
terminates TLS for you and handles certificates. We do not support TLS directly
|
||||
in the application; instead, please use this preferred method.
|
||||
|
||||
Upgrading and migrating is described in [UPGRADING.md](./UPGRADING.md) for each
|
||||
version.
|
||||
|
||||
### Migrating (Production)
|
||||
|
||||
Migrations are done with
|
||||
[Alembic](https://alembic.sqlalchemy.org/en/latest/index.html), please refer to
|
||||
its documentation for help. Most of the time, running this command will do all
|
||||
the migrations you need:
|
||||
|
||||
```bash
|
||||
docker-compose run --rm api tools/upgrade.py
|
||||
```
|
||||
|
||||
This command is equivalent to running migrations through *alembic*, then
|
||||
regenerating the SQL functions that compute vector tiles directly in the
|
||||
database:
|
||||
|
||||
```bash
|
||||
# equivalent to the above command, you don't usually run these
|
||||
docker-compose run --rm api alembic upgrade head
|
||||
docker-compose run --rm api tools/prepare_sql_tiles
|
||||
```
|
||||
|
||||
## Development setup
|
||||
|
||||
We've moved the whole development setup into Docker to make it easy for
|
||||
everyone to get involved. After successfully [installing Docker
|
||||
Engine](https://docs.docker.com/engine/install/) as well as [Docker
|
||||
Compose](https://docs.docker.com/compose/install/) onto your machine, and
|
||||
cloning the repository, all you need to do is:
|
||||
everyone to get involved.
|
||||
|
||||
### Install docker
|
||||
|
||||
Please [install Docker Engine](https://docs.docker.com/engine/install/) as well as
|
||||
[Docker Compose](https://docs.docker.com/compose/install/) onto your machine.
|
||||
|
||||
Then clone the repository as described above.
|
||||
|
||||
### Configure Keycloak
|
||||
|
||||
Login will not be possible until you configure the keycloak realm correctly. Boot your keycloak instance:
|
||||
|
||||
```bash
|
||||
docker-compose up -d
|
||||
docker-compose up -d keycloak
|
||||
```
|
||||
|
||||
If this does not work, please open an issue and describe the problem you're
|
||||
having, as it is important to us that onboarding is super easy :)
|
||||
Now navigate to http://localhost:3003/ and follow these steps:
|
||||
|
||||
- Click *Administration Console* and log in with `admin` / `admin`.
|
||||
- Hover over the realm name on the top left and click *Add realm*.
|
||||
- Name the Realm `obs-dev` (spelling matters) and create it.
|
||||
- In the sidebar, navigate to *Configure* → *Clients*, and click *Create* on the top right.
|
||||
- *Client ID* should be `portal`. Click *Save*.
|
||||
- In the Tab *Settings*, edit the new client's *Access Type* to *confidential*
|
||||
and enter as *Valid Redirect URIs*: `http://localhost:3000/login/redirect`,
|
||||
then *Save*
|
||||
- Under *Credentials*, copy the *Secret*. Create a file at `api/config.overrides.py` with the secret in it:
|
||||
|
||||
```python
|
||||
KEYCLOAK_CLIENT_SECRET="your secret here"
|
||||
```
|
||||
|
||||
You can use this file in development mode to change settings without editing
|
||||
the git-controlled default file at `api/config.dev.py`. Options in this file
|
||||
take precedence.
|
||||
- In the sidebar, navigate to *Manage* → *Users*, and click *Add user* on the top right.
|
||||
- Give the user a name (e.g. `test`), leave the rest as-is.
|
||||
- Under the tab *Credentials*, choose a new password, and make it
|
||||
non-temporary. Click *Set Password*.
|
||||
|
||||
We are going to automate this process. For now, you will have to repeat it
|
||||
every time you reset your keycloak settings, which are stored inside the
|
||||
PostgreSQL as well. Luckily, the script `api/tools/reset_database.py` does
|
||||
*not* affect the state of the keycloak database, so this should be rather rare.
|
||||
|
||||
### Prepare database
|
||||
|
||||
Start the PostgreSQL database:
|
||||
|
||||
```bash
|
||||
docker-compose up -d postgres
|
||||
```
|
||||
|
||||
The first time you start postgres, a lot of extensions will be installed. This
|
||||
takes a while, so check the logs of the docker container until you see:
|
||||
|
||||
> PostgreSQL init process complete; ready for start up.
|
||||
|
||||
If you don't wait long enough, the following commands might fail. In this case,
|
||||
you can always stop the container, remove the data directory (`local/postgres`)
|
||||
and restart the process.
|
||||
|
||||
Next, run the upgrade command to generate the database schema:
|
||||
|
||||
```bash
|
||||
docker-compose run --rm api tools/upgrade.py
|
||||
```
|
||||
|
||||
You will need to re-run this command after updates, to migrate the database and
|
||||
(re-)create the functions in the SQL database that are used when generating
|
||||
vector tiles.
|
||||
|
||||
You should also [import OpenStreetMap data](docs/osm-import.md) now.
|
||||
|
||||
### Boot the application
|
||||
|
||||
Now you can run the remaining parts of the application:
|
||||
|
||||
```bash
|
||||
docker-compose up -d --build api worker frontend
|
||||
```
|
||||
|
||||
Your frontend should be running at http://localhost:3001 and the API at
|
||||
http://localhost:3000 -- but you probably only need to access the frontend for
|
||||
testing. The frontend dev server also proxies all unknown requests to the API,
|
||||
so the frontend always just requests data at its own URL.
|
||||
testing.
|
||||
|
||||
## Running without docker
|
||||
### Migrating (Development)
|
||||
|
||||
If you don't like docker, or want to run this in production without it, you can
|
||||
do so as well. Our Docker setup is simply a slight wrapper around very simple
|
||||
JavaScript packages that you can install yourself as usual, with `npm install`.
|
||||
The API can be started with `npm start` inside its folder. The frontend
|
||||
development server uses `npm start` as well, while building a production
|
||||
version of the frontend happens with `npm run build`.
|
||||
|
||||
To connect the parts together, please have a look at what we're doing in the
|
||||
"official" setup of docker, i.e. in `docker-compose.yaml`, the `Dockerfile`s
|
||||
and in the respective `package.json` of the service. If you've done this kind
|
||||
of thing before, it's not that hard. Otherwise, ask on Slack and there will be
|
||||
somebody to help you ;)
|
||||
|
||||
## Running in production
|
||||
|
||||
You are advised not to use the dockerized mongodb service and instead do a
|
||||
proper MongoDB setup on a server that is backed up and secured.
|
||||
|
||||
You can run the API in docker, but it is prefered to run it as a restricted
|
||||
user in its own directory somewhere where it cannot escape ;)
|
||||
|
||||
The frontend should be built using `npm run build` and then served from a
|
||||
proper web server, such as nginx or apache. See the instructions at
|
||||
create-react-app concerning [deployment of an app](http://cra.link/deployment).
|
||||
|
||||
You are advised to virtualize your server for security reasons, and to separate
|
||||
this whole application from other parts of your server system.
|
||||
|
||||
Also please install a reverse proxy that terminates TLS for you and handles
|
||||
certificates. We do not support TLS directly in the application, instead,
|
||||
please use this preferred method. This reverse proxy can also handle static file
|
||||
serving for the frontend, no need for two separate server processes.
|
||||
|
||||
## Migrating
|
||||
|
||||
Sometimes your database will have to be migrated. The docker setup should do
|
||||
this automatically, but if it does not work, you can run the following
|
||||
commands:
|
||||
Migrations are done with
|
||||
[Alembic](https://alembic.sqlalchemy.org/en/latest/index.html), please refer to
|
||||
its documentation for help. Most of the time, running this command will do all
|
||||
the migrations you need:
|
||||
|
||||
```bash
|
||||
# if running locally
|
||||
(cd api/; npm run migrate:up)
|
||||
|
||||
# if running in docker
|
||||
docker-compose run --rm api npm run migrate:up
|
||||
```
|
||||
|
||||
## Custom MongoDB installation
|
||||
|
||||
If you have your own MongoDB instance running somewhere, you can set the
|
||||
environment variable `MONGODB_URL` when starting the server, and it will read
|
||||
that URL for connecting.
|
||||
|
||||
export MONGODB_URL=mongodb://user:password@mongodb.example.com/obs-app-database
|
||||
|
||||
This does not work when using docker-compose, in that case, you will have to
|
||||
modify the `docker-compose.yaml` to include that URL.
|
||||
|
||||
|
||||
## E-Mail Setup
|
||||
|
||||
By default in development mode mails are not sent, but instead the mail data is
|
||||
logged to the console. This can be overridden with the `--devSendMails` flag if
|
||||
you start the application like so: `npm run dev -- --devSendMails`.
|
||||
|
||||
Mails are also always sent in production mode!
|
||||
|
||||
For actually sending e-mails the mailserver, sender, user and password for the
|
||||
SMTP server need to be specified as environment variables:
|
||||
|
||||
* `MAILUSER` -- the smtp mailbox login name
|
||||
* `MAILPW` -- password for the mailbox
|
||||
* `MAILSERVER` -- the hostname of the SMTP server, e.g. `mail.example.com`
|
||||
* `MAILSENDER` -- sender name, e.g. `noreply@example.com`
|
||||
|
||||
Full command example:
|
||||
|
||||
```bash
|
||||
MAILSERVER=mail.example.com MAILSENDER=noreply@example.com \
|
||||
MAILUSER=my_mail_login MAILPW=hunter2 \
|
||||
npm run dev -- --devSendMails
|
||||
docker-compose run --rm api alembic upgrade head
|
||||
```
|
||||
|
||||
All of this of course is not too important if you're developing locally. To get
|
||||
to the logged email content that *would* have been sent, check your docker log:
|
||||
|
||||
```bash
|
||||
docker-compose logs -f api
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If any step of the instructions does not work for you, please open an issue and
|
||||
describe the problem you're having, as it is important to us that onboarding is
|
||||
super easy :)
|
||||
|
||||
### Connecting to the PostgreSQL database
|
||||
|
||||
If you need to connect to your development PostgreSQL database, you should
|
||||
install `psql` locally. The port 5432 is already forwarded, so you can connect with:
|
||||
|
||||
```
|
||||
psql -h localhost -U obs -d obs
|
||||
```
|
||||
|
||||
The password is `obs` as well.
|
||||
|
||||
## License
|
||||
|
||||
Copyright (C) 2020-2021 OpenBikeSensor Contributors
|
||||
Contact: https://openbikesensor.org
|
||||
|
||||
The OpenBikeSensor Portal is free software: you can redistribute it
|
||||
and/or modify it under the terms of the GNU Lesser General Public License
|
||||
as published by the Free Software Foundation, either version 3 of the
|
||||
License, or (at your option) any later version.
|
||||
|
||||
The OpenBikeSensor Portal is distributed in the hope that it will be
|
||||
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
|
||||
General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public License
|
||||
along with the OpenBikeSensor Portal. If not, see
|
||||
<http://www.gnu.org/licenses/>.
|
||||
|
||||
See also [`COPYING`](./COPYING) and [`COPYING.LESSER`](./COPYING.LESSER).
|
||||
|
||||
The above does not apply to the files listed below, their respective licenses
|
||||
are included in a file next to each of them, named accordingly:
|
||||
|
||||
* `frontend/src/mapstyles/bright.json`
|
||||
* `frontend/src/mapstyles/positron.json`
|
||||
|
||||
There are lots of other licenses to consider when using this software,
|
||||
especially in conjunction with imported data and other tools. Check out the
|
||||
[Licenses Documentation](docs/licenses.md) for an (unofficial) overview of the
|
||||
license landscape surrounding this project.
|
||||
|
|
170
UPGRADING.md
Normal file
170
UPGRADING.md
Normal file
|
@ -0,0 +1,170 @@
|
|||
# Upgrading
|
||||
This document describes the general steps to upgrade between major changes.
|
||||
Simple migrations, e.g. for adding schema changes, are not documented
|
||||
explicitly. Their general usage is described in the [README](./README.md) (for
|
||||
development) and [docs/production-deployment.md](docs/production-deployment.md) (for production).
|
||||
|
||||
|
||||
## 0.8.1
|
||||
|
||||
- Get the release in your source folder (``git pull; git checkout 0.8.1`` and update submodules ``git submodule update --recursive``)
|
||||
- Rebuild images ``docker-compose build``
|
||||
- No database upgrade is required, but tile functions need an update:
|
||||
```bash
|
||||
docker-compose run --rm portal tools/prepare_sql_tiles.py
|
||||
```
|
||||
- Start your portal and worker services. ``docker-compose up -d worker portal``
|
||||
|
||||
|
||||
## 0.8.0
|
||||
Upgrade to `0.7.x` first. See below for details. Then follow these steps:
|
||||
|
||||
> **Warning** The update includes a reprocessing of tracks after import. Depending on the number of tracks this can take a few hours. The portal is reachable during that time but events disappear and incrementally reappear during reimport.
|
||||
|
||||
> **Info** With this version the import process for OpenStreetMap data has changed: the [new process](docs/osm-import.md) is easier on resources and finally permits importing a full country on a low-end VM.
|
||||
|
||||
- Do your [usual backup](docs/production-deployment.md)
|
||||
- get the release in your source folder (``git pull; git checkout 0.8.0`` and update submodules ``git submodule update --recursive``)
|
||||
- Rebuild images ``docker-compose build``
|
||||
- Stop your portal and worker services ``docker-compose stop worker portal``
|
||||
- run upgrade
|
||||
```bash
|
||||
docker-compose run --rm portal tools/upgrade.py
|
||||
```
|
||||
this automatically does the following
|
||||
- Migration of database schema using alembic.
|
||||
- Upgrade of SQL tile schema to new schema.
|
||||
- Import the nuts-regions from the web into the database.
|
||||
- Trigger a re-import of all tracks.
|
||||
- Start your portal and worker services. ``docker-compose up -d worker portal``
|
||||
|
||||
|
||||
## 0.7.0
|
||||
|
||||
Upgrade to `0.6.x` first. See below for details. Then follow these steps:
|
||||
|
||||
- Rebuild images
|
||||
- Stop your portal and worker services.
|
||||
- **Migration with alembic**: required
|
||||
- **Prepare SQL Tiles**: required
|
||||
- Start your portal and worker services.
|
||||
- **Reimport tracks**: no action required
|
||||
- **OSM Import**: required
|
||||
- **Config changes**: add `POSTGRES_MAX_OVERFLOW` and `POSTGRES_POOL_SIZE`
|
||||
variables, see `api/config.py.example`
|
||||
|
||||
## 0.6.0
|
||||
|
||||
**Make sure to upgrade to `0.5.1` first, by checking out that version tag and
|
||||
running migrations, then coming back to this version.** This is required
|
||||
because the migrations have been edited to create the initial database schema,
|
||||
but if you run the 0.5.1 migrations first, your database will remember that it
|
||||
already has all the tables created. This is not required if you set up a new
|
||||
installation.
|
||||
|
||||
For this update, run these steps:
|
||||
|
||||
- Build new images
|
||||
- Stop portal and worker services
|
||||
- Run the new upgrade tool:
|
||||
```bash
|
||||
docker-compose run --rm portal tools/upgrade.py
|
||||
```
|
||||
- Start portal and worker services
|
||||
|
||||
## 0.5.0
|
||||
|
||||
The upgrade requires the following steps in the given order
|
||||
|
||||
- Rebuild images
|
||||
- Stop your portal and worker services.
|
||||
- **Migration with alembic**: required
|
||||
- **Prepare SQL Tiles**: required
|
||||
- Start your portal and worker services.
|
||||
- **Reimport tracks**: required
|
||||
- **OSM Import**: no action required
|
||||
- **Config changes**: none
|
||||
|
||||
## 0.4.1
|
||||
|
||||
You can, but do not have to, reimport all tracks. This will generate a GPX file
|
||||
for each track and allow the users to download those. If a GPX file has not yet
|
||||
been created, the download will fail. To reimport all tracks, log in to your
|
||||
PostgreSQL database (instructions are in [README.md](./README.md) for
|
||||
development and [docs/production-deployment.md](./docs/production-deployment.md) for production)
|
||||
and run:
|
||||
|
||||
```sql
|
||||
UPDATE track SET processing_status = 'queued';
|
||||
```
|
||||
|
||||
You can do this selectively with `WHERE` statements.
|
||||
|
||||
Make sure your worker is running to process the queue.
|
||||
|
||||
## 0.4.0
|
||||
|
||||
* Rebuild your image, this may take longer than usual, as it will compile
|
||||
`osm2pgsql` for you. Next time, it should be in your docker build cache and
|
||||
be fast again.
|
||||
* Add new config flags: `VERBOSE`, `LEAN_MODE`, `POSTGRES_POOL_SIZE`,
|
||||
`POSTGRES_MAX_OVERFLOW`. Check the example config for sane default values.
|
||||
* Re-run `tools/prepare_sql_tiles.py` again (see README)
|
||||
* It has been made easier to import OSM data, check
|
||||
[docs/production-deployment.md](./docs/production-deployment.md) for the sections "Download
|
||||
OpenStreetMap maps" and "Import OpenStreetMap data". You can now download
|
||||
multiple .pbf files and then import them at once, using the docker image
|
||||
built with the `Dockerfile`. Alternatively, you can choose to enable [lean
|
||||
mode](docs/lean-mode.md). You do not need to reimport data, but setting this
|
||||
up now will make your life easier in the long run ;)
|
||||
|
||||
## v0.2 to v0.3 (MongoDB to PostgreSQL)
|
||||
|
||||
* Shut down all services
|
||||
* Obviously, now is a good time to perform a full backup ;)
|
||||
* Update the codebase (`git pull`, `git submodule update`).
|
||||
* Update your ``docker-compose.yaml`` with the one from the ``deployment/examples``
|
||||
folder.
|
||||
* Leave the MongoDB service in place for now.
|
||||
* Update all other service descriptions.
|
||||
* You can remove `redis` already.
|
||||
* Generate a better password than the default for your
|
||||
postgres user.
|
||||
* Traefik rules have been simplified as all routes are handled
|
||||
by the portal service now.
|
||||
* Start up the `mongo` and `postgres` services. Wait for postgres to finish
|
||||
initializing (see [README](README.md)).
|
||||
* Build the new image (e.g. with `docker-compose build portal`)
|
||||
* Configure your API. The example config file is `api/config.py.example`, and
|
||||
it will need to be mounted to `api/config.py` in the container. Ignore the
|
||||
Keycloak options for now.
|
||||
* Prepare the database:
|
||||
|
||||
```bash
|
||||
docker-compose run --rm portal python tools/reset_database.py
|
||||
docker-compose run --rm portal python tools/prepare_sql_tiles.py
|
||||
```
|
||||
* Import OSM data (see [README](README.md)).
|
||||
* Run the database migration script:
|
||||
|
||||
```bash
|
||||
docker-compose run --rm \
|
||||
-v $PWD/export:/export \
|
||||
portal \
|
||||
python tools/import_from_mongodb.py mongodb://mongo/obs \
|
||||
--keycloak-users-file /export/users.json
|
||||
```
|
||||
There is an option `--keep-api-keys` which means the users won't have to
|
||||
reconfigure the devices they used their API key in. **However**, please try
|
||||
to avoid this option if at all possible, as the old keys are *very* insecure.
|
||||
The default without this option is to generate a new, secure API key for each
|
||||
user.
|
||||
* Shut down the `mongo` service, you can now remove it from docker-compose.yaml
|
||||
* Start `keycloak` and configure it, similarly to how it was configured in the
|
||||
development setup (but choose more secure options). Update the API config
|
||||
file to match your keycloak configuration. Import the file
|
||||
`export/users.json` into your realm, it will re-add all the users from the
|
||||
old installation. You should delete the file and `export/` folder afterwards.
|
||||
* Start `portal`.
|
||||
* Consider configuring a worker service. See [docs/production-deployment.md](./docs/production-deployment.md).
|
||||
|
|
@ -1,2 +1,3 @@
|
|||
local/
|
||||
node_modules/
|
||||
node_modules
|
||||
|
|
2
api/.gitignore
vendored
2
api/.gitignore
vendored
|
@ -43,3 +43,5 @@ local/
|
|||
# both, because then developers will only update one of them and they'll
|
||||
# contradict. For now, npm shall be the canonical default (compare README.md).
|
||||
yarn.lock
|
||||
|
||||
config.overrides.py
|
||||
|
|
|
@ -1,11 +1,16 @@
|
|||
FROM node:14
|
||||
FROM python:3.11.3-bullseye
|
||||
|
||||
WORKDIR /opt/obs/api
|
||||
ADD package.json package-lock.json /opt/obs/api/
|
||||
RUN npm ci
|
||||
|
||||
ADD src /opt/obs/api/src/
|
||||
ADD scripts /opt/obs/scripts
|
||||
RUN pip install -e /opt/obs/scripts
|
||||
|
||||
EXPOSE 3000
|
||||
ENV PORT=3000
|
||||
CMD ["npm", "start"]
|
||||
ADD requirements.txt /opt/obs/api/
|
||||
RUN pip install -r requirements.txt
|
||||
ADD setup.py /opt/obs/api/
|
||||
ADD obs /opt/obs/api/obs/
|
||||
RUN pip install -e .
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
CMD ["openbikesensor-api"]
|
||||
|
|
102
api/alembic.ini
Normal file
102
api/alembic.ini
Normal file
|
@ -0,0 +1,102 @@
|
|||
# A generic, single database configuration.
|
||||
|
||||
[alembic]
|
||||
# path to migration scripts
|
||||
script_location = migrations
|
||||
|
||||
# template used to generate migration files
|
||||
# file_template = %%(rev)s_%%(slug)s
|
||||
|
||||
# sys.path path, will be prepended to sys.path if present.
|
||||
# defaults to the current working directory.
|
||||
prepend_sys_path = .
|
||||
|
||||
# timezone to use when rendering the date within the migration file
|
||||
# as well as the filename.
|
||||
# If specified, requires the python-dateutil library that can be
|
||||
# installed by adding `alembic[tz]` to the pip requirements
|
||||
# string value is passed to dateutil.tz.gettz()
|
||||
# leave blank for localtime
|
||||
# timezone =
|
||||
|
||||
# max length of characters to apply to the
|
||||
# "slug" field
|
||||
# truncate_slug_length = 40
|
||||
|
||||
# set to 'true' to run the environment during
|
||||
# the 'revision' command, regardless of autogenerate
|
||||
# revision_environment = false
|
||||
|
||||
# set to 'true' to allow .pyc and .pyo files without
|
||||
# a source .py file to be detected as revisions in the
|
||||
# versions/ directory
|
||||
# sourceless = false
|
||||
|
||||
# version location specification; This defaults
|
||||
# to api/migrations/versions. When using multiple version
|
||||
# directories, initial revisions must be specified with --version-path.
|
||||
# The path separator used here should be the separator specified by "version_path_separator" below.
|
||||
# version_locations = %(here)s/bar:%(here)s/bat:api/migrations/versions
|
||||
|
||||
# version path separator; As mentioned above, this is the character used to split
|
||||
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
|
||||
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
|
||||
# Valid values for version_path_separator are:
|
||||
#
|
||||
# version_path_separator = :
|
||||
# version_path_separator = ;
|
||||
# version_path_separator = space
|
||||
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.
|
||||
|
||||
# the output encoding used when revision files
|
||||
# are written from script.py.mako
|
||||
# output_encoding = utf-8
|
||||
|
||||
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
|
||||
[post_write_hooks]
|
||||
# post_write_hooks defines scripts or Python functions that are run
|
||||
# on newly generated revision scripts. See the documentation for further
|
||||
# detail and examples
|
||||
|
||||
# format using "black" - use the console_scripts runner, against the "black" entrypoint
|
||||
# hooks = black
|
||||
# black.type = console_scripts
|
||||
# black.entrypoint = black
|
||||
# black.options = -l 79 REVISION_SCRIPT_FILENAME
|
||||
|
||||
# Logging configuration
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
35
api/config.dev.py
Normal file
35
api/config.dev.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
HOST = "0.0.0.0"
|
||||
PORT = 3000
|
||||
DEBUG = True
|
||||
VERBOSE = False
|
||||
AUTO_RELOAD = True
|
||||
SECRET = "!!!!!!!!!!!!CHANGE ME!!!!!!!!!!!!"
|
||||
POSTGRES_URL = "postgresql+asyncpg://obs:obs@postgres/obs"
|
||||
POSTGRES_POOL_SIZE = 20
|
||||
POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE
|
||||
KEYCLOAK_URL = "http://keycloak:8080/auth/realms/obs-dev/"
|
||||
KEYCLOAK_CLIENT_ID = "portal"
|
||||
KEYCLOAK_CLIENT_SECRET = "c385278e-bd2e-4f13-9937-34b0c0f44c2d"
|
||||
DEDICATED_WORKER = True
|
||||
API_URL = "http://localhost:3000/"
|
||||
FRONTEND_URL = "http://localhost:3001/"
|
||||
FRONTEND_HTTPS = False
|
||||
FRONTEND_DIR = None
|
||||
FRONTEND_CONFIG = {
|
||||
"imprintUrl": "https://example.com/imprint",
|
||||
"privacyPolicyUrl": "https://example.com/privacy",
|
||||
# "termsUrl": "https://example.com/terms", # Link is only shown when set
|
||||
"mapHome": {"zoom": 6, "longitude": 10.2, "latitude": 51.3},
|
||||
# "banner": {"text": "This is a development installation.", "style": "info"},
|
||||
}
|
||||
|
||||
TILES_FILE = None # "/tiles/tiles.mbtiles"
|
||||
DATA_DIR = "/data"
|
||||
ADDITIONAL_CORS_ORIGINS = [
|
||||
"http://localhost:8880/", # for maputnik on 8880
|
||||
"http://localhost:8888/", # for maputnik on 8888
|
||||
]
|
||||
TILE_SEMAPHORE_SIZE = 4
|
||||
EXPORT_SEMAPHORE_SIZE = 4
|
||||
|
||||
# vim: set ft=python :
|
73
api/config.py.example
Normal file
73
api/config.py.example
Normal file
|
@ -0,0 +1,73 @@
|
|||
# Bind address of the server
|
||||
HOST = "127.0.0.1"
|
||||
PORT = 3000
|
||||
|
||||
# Extended log output, but slower
|
||||
DEBUG = False
|
||||
VERBOSE = DEBUG
|
||||
AUTO_RELOAD = DEBUG
|
||||
|
||||
# Required to encrypt or sign sessions, cookies, tokens, etc.
|
||||
SECRET = "!!!<<<CHANGEME>>>!!!"
|
||||
|
||||
# Connection to the database
|
||||
POSTGRES_URL = "postgresql+asyncpg://user:pass@host/dbname"
|
||||
POSTGRES_POOL_SIZE = 20
|
||||
POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE
|
||||
|
||||
# URL to the keycloak realm, as reachable by the API service. This is not
|
||||
# necessarily its publicly reachable URL, keycloak advertises that itself.
|
||||
KEYCLOAK_URL = "http://localhost:1234/auth/realms/obs/"
|
||||
|
||||
# Auth client credentials
|
||||
KEYCLOAK_CLIENT_ID = "portal"
|
||||
KEYCLOAK_CLIENT_SECRET = "00000000-0000-0000-0000-000000000000"
|
||||
|
||||
# Whether the API should run the worker loop, or a dedicated worker is used
|
||||
DEDICATED_WORKER = True
|
||||
|
||||
# The root of the frontend. Needed for redirecting after login, and for CORS.
|
||||
# Set to None if frontend is served by the API.
|
||||
FRONTEND_URL = None
|
||||
FRONTEND_HTTPS = True
|
||||
|
||||
# Where to find the compiled frontend assets (must include index.html), or None
|
||||
# to disable serving the frontend.
|
||||
FRONTEND_DIR = "../frontend/build/"
|
||||
|
||||
# Can be an object or a JSON string
|
||||
FRONTEND_CONFIG = {
|
||||
"imprintUrl": "https://example.com/imprint",
|
||||
"privacyPolicyUrl": "https://example.com/privacy",
|
||||
# "termsUrl": "https://example.com/user_terms_and_conditions", # Link is only shown when set
|
||||
"mapHome": {"zoom": 6, "longitude": 10.2, "latitude": 51.3},
|
||||
"banner": {"text": "This is a test installation.", "style": "warning"},
|
||||
}
|
||||
|
||||
# If the API should serve generated tiles, this is the path where the tiles are
|
||||
# built. This is an experimental option and probably very inefficient, a proper
|
||||
# tileserver should be preferred. Set to None to disable.
|
||||
TILES_FILE = None
|
||||
|
||||
# Path overrides:
|
||||
# API_ROOT_DIR = "??" # default: api/ inside repository
|
||||
# DATA_DIR = "??" # default: $API_ROOT_DIR/..
|
||||
# PROCESSING_DIR = "??" # default: DATA_DIR/processing
|
||||
# PROCESSING_OUTPUT_DIR = "??" # default: DATA_DIR/processing-output
|
||||
# TRACKS_DIR = "??" # default: DATA_DIR/tracks
|
||||
# OBS_FACE_CACHE_DIR = "??" # default: DATA_DIR/obs-face-cache
|
||||
|
||||
# Additional allowed origins for CORS headers. The FRONTEND_URL is included by
|
||||
# default. Python list, or whitespace separated string.
|
||||
ADDITIONAL_CORS_ORIGINS = None
|
||||
|
||||
# How many asynchronous requests may be sent to the database to generate tile
|
||||
# information. Should be less than POSTGRES_POOL_SIZE to leave some connections
|
||||
# to the other features of the API ;)
|
||||
TILE_SEMAPHORE_SIZE = 4
|
||||
|
||||
# How many asynchronous requests may generate exported data simultaneously.
|
||||
# Keep this small.
|
||||
EXPORT_SEMAPHORE_SIZE = 1
|
||||
|
||||
# vim: set ft=python :
|
|
@ -1,61 +0,0 @@
|
|||
const Track = require('../src/models/Track');
|
||||
const { replaceDollarNewlinesHack, detectFormat, buildObsver1 } = require('../src/logic/tracks');
|
||||
|
||||
function shouldRebuildBody(track) {
|
||||
if (!track.trackData || !track.trackData.points.length) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!track.body) {
|
||||
return true;
|
||||
}
|
||||
const body = track.body.trim();
|
||||
if (!body) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const actualBody = replaceDollarNewlinesHack(body).trim();
|
||||
if (body !== actualBody) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const lineCount = (actualBody.match(/\n/g) || []).length + 1;
|
||||
|
||||
const format = detectFormat(body);
|
||||
if (format === 'invalid') {
|
||||
return true;
|
||||
}
|
||||
|
||||
// never reconstruct body of version 2
|
||||
if (format > 1) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// not enough data in the file
|
||||
if (lineCount < track.trackData.points.length + 1) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
async function up(next) {
|
||||
const query = Track.find().populate('trackData');
|
||||
for await (const track of query) {
|
||||
const rebuild = shouldRebuildBody(track);
|
||||
if (rebuild) {
|
||||
track.body = buildObsver1(track.trackData.points);
|
||||
}
|
||||
|
||||
await track.save();
|
||||
}
|
||||
|
||||
next();
|
||||
}
|
||||
|
||||
async function down(next) {
|
||||
// nothing to do
|
||||
next();
|
||||
}
|
||||
|
||||
module.exports = { up, down };
|
|
@ -1,15 +0,0 @@
|
|||
const Track = require('../src/models/Track');
|
||||
|
||||
module.exports = {
|
||||
async up(next) {
|
||||
for await (const track of Track.find()) {
|
||||
await track.rebuildTrackDataAndSave();
|
||||
}
|
||||
|
||||
next();
|
||||
},
|
||||
|
||||
async down(next) {
|
||||
next();
|
||||
},
|
||||
};
|
|
@ -1,21 +0,0 @@
|
|||
const Track = require('../src/models/Track');
|
||||
|
||||
module.exports = {
|
||||
async up(next) {
|
||||
try {
|
||||
for await (const track of Track.find()) {
|
||||
track.originalFileName = track.slug + '.csv'
|
||||
await track.generateOriginalFilePath();
|
||||
await track.save()
|
||||
}
|
||||
next();
|
||||
} catch(err) {
|
||||
next(err)
|
||||
}
|
||||
},
|
||||
|
||||
async down(next) {
|
||||
next();
|
||||
},
|
||||
};
|
||||
|
|
@ -1,25 +0,0 @@
|
|||
|
||||
const Track = require('../src/models/Track');
|
||||
|
||||
module.exports = {
|
||||
async up(next) {
|
||||
try {
|
||||
for await (const track of Track.find()) {
|
||||
if (!track.body) {
|
||||
continue
|
||||
}
|
||||
|
||||
await track.writeToOriginalFile(track.body)
|
||||
delete track.body;
|
||||
await track.save()
|
||||
}
|
||||
next();
|
||||
} catch(err) {
|
||||
next(err)
|
||||
}
|
||||
},
|
||||
|
||||
async down(next) {
|
||||
next();
|
||||
},
|
||||
};
|
1
api/migrations/README
Normal file
1
api/migrations/README
Normal file
|
@ -0,0 +1 @@
|
|||
Generic single-database configuration.
|
83
api/migrations/env.py
Normal file
83
api/migrations/env.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
import asyncio
|
||||
from logging.config import fileConfig
|
||||
|
||||
from sqlalchemy import engine_from_config
|
||||
from sqlalchemy import pool
|
||||
|
||||
from alembic import context
|
||||
|
||||
# this is the Alembic Config object, which provides
|
||||
# access to the values within the .ini file in use.
|
||||
config = context.config
|
||||
|
||||
# Interpret the config file for Python logging.
|
||||
# This line sets up loggers basically.
|
||||
if config.config_file_name is not None:
|
||||
fileConfig(config.config_file_name)
|
||||
|
||||
# add your model's MetaData object here
|
||||
# for 'autogenerate' support
|
||||
# from myapp import mymodel
|
||||
# target_metadata = mymodel.Base.metadata
|
||||
target_metadata = None
|
||||
|
||||
# other values from the config, defined by the needs of env.py,
|
||||
# can be acquired:
|
||||
# my_important_option = config.get_main_option("my_important_option")
|
||||
# ... etc.
|
||||
|
||||
|
||||
def do_run_migrations(connection):
|
||||
context.configure(connection=connection, target_metadata=target_metadata)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
def run_migrations_offline():
|
||||
"""Run migrations in 'offline' mode.
|
||||
|
||||
This configures the context with just a URL
|
||||
and not an Engine, though an Engine is acceptable
|
||||
here as well. By skipping the Engine creation
|
||||
we don't even need a DBAPI to be available.
|
||||
|
||||
Calls to context.execute() here emit the given string to the
|
||||
script output.
|
||||
|
||||
"""
|
||||
from obs.api.app import app
|
||||
|
||||
url = app.config.POSTGRES_URL
|
||||
context.configure(
|
||||
url=url,
|
||||
target_metadata=target_metadata,
|
||||
literal_binds=True,
|
||||
dialect_opts={"paramstyle": "named"},
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
||||
|
||||
|
||||
async def run_migrations_online():
|
||||
"""Run migrations in 'online' mode.
|
||||
|
||||
In this scenario we need to create an Engine
|
||||
and associate a connection with the context.
|
||||
|
||||
"""
|
||||
from obs.api.app import app, connect_db
|
||||
|
||||
url = app.config.POSTGRES_URL
|
||||
async with connect_db(url) as engine:
|
||||
async with engine.connect() as connection:
|
||||
await connection.run_sync(do_run_migrations)
|
||||
|
||||
await engine.dispose()
|
||||
|
||||
|
||||
if context.is_offline_mode():
|
||||
run_migrations_offline()
|
||||
else:
|
||||
asyncio.run(run_migrations_online())
|
24
api/migrations/script.py.mako
Normal file
24
api/migrations/script.py.mako
Normal file
|
@ -0,0 +1,24 @@
|
|||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade():
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
${downgrades if downgrades else "pass"}
|
16
api/migrations/utils.py
Normal file
16
api/migrations/utils.py
Normal file
|
@ -0,0 +1,16 @@
|
|||
import sqlalchemy as sa
|
||||
|
||||
|
||||
def dbtype(name):
    """
    Build a SQLAlchemy ``UserDefinedType`` subclass for a database type that
    already exists in the database (e.g. created by an extension or raw DDL)
    but is not available as a proper sqlalchemy type.

    Intended for use in migrations as the type of a column.
    """

    class CustomType(sa.types.UserDefinedType):
        # The returned string is emitted verbatim as the column's type in DDL.
        def get_col_spec(self):
            return name

    # Expose the database type name as the class name for nicer reprs.
    CustomType.__name__ = name
    return CustomType
|
39
api/migrations/versions/35e7f1768f9b_create_table_road.py
Normal file
39
api/migrations/versions/35e7f1768f9b_create_table_road.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
"""create table road
|
||||
|
||||
Revision ID: 35e7f1768f9b
|
||||
Revises: 5d75febe2d59
|
||||
Create Date: 2022-03-30 21:36:48.157457
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
from migrations.utils import dbtype
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "35e7f1768f9b"
|
||||
down_revision = "920aed1450c9"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.create_table(
|
||||
"road",
|
||||
sa.Column(
|
||||
"way_id", sa.BIGINT, primary_key=True, index=True, autoincrement=False
|
||||
),
|
||||
sa.Column("zone", dbtype("zone_type")),
|
||||
sa.Column("name", sa.Text),
|
||||
sa.Column("geometry", dbtype("geometry(LINESTRING,3857)")),
|
||||
sa.Column("directionality", sa.Integer),
|
||||
sa.Column("oneway", sa.Boolean),
|
||||
)
|
||||
op.execute(
|
||||
"CREATE INDEX road_geometry_idx ON road USING GIST (geometry) WITH (FILLFACTOR=100);"
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.drop_table("road")
|
28
api/migrations/versions/3856f240bb6d_create_extensions.py
Normal file
28
api/migrations/versions/3856f240bb6d_create_extensions.py
Normal file
|
@ -0,0 +1,28 @@
|
|||
"""create extensions
|
||||
|
||||
Revision ID: 3856f240bb6d
|
||||
Revises: a9627f63fbed
|
||||
Create Date: 2022-03-30 21:31:06.282725
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "3856f240bb6d"
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Enable the PostgreSQL extensions required by the schema."""
    # hstore for key/value tag storage, postgis for geometry columns,
    # uuid-ossp for UUID generation.
    for extension in ("hstore", "postgis", "uuid-ossp"):
        op.execute(f'CREATE EXTENSION IF NOT EXISTS "{extension}";')
|
||||
|
||||
|
||||
def downgrade():
    """Drop the extensions created by upgrade(), in the same order."""
    # NOTE(review): DROP EXTENSION errors if dependent objects still exist;
    # this assumes the dependent tables were dropped by earlier downgrades.
    for extension in ("hstore", "postgis", "uuid-ossp"):
        op.execute(f'DROP EXTENSION "{extension}";')
|
|
@ -0,0 +1,30 @@
|
|||
"""transform overtaking_event geometry to 3857
|
||||
|
||||
Revision ID: 587e69ecb466
|
||||
Revises: f4b0f460254d
|
||||
Create Date: 2023-04-01 14:30:49.927505
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "587e69ecb466"
|
||||
down_revision = "f4b0f460254d"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
|
||||
op.execute("UPDATE overtaking_event SET geometry = ST_Transform(geometry, 3857);")
|
||||
op.execute(
|
||||
"ALTER TABLE overtaking_event ALTER COLUMN geometry TYPE geometry(POINT, 3857);"
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
op.execute(
|
||||
"ALTER TABLE overtaking_event ALTER COLUMN geometry TYPE geometry;"
|
||||
)
|
||||
op.execute("UPDATE overtaking_event SET geometry = ST_Transform(geometry, 4326);")
|
|
@ -0,0 +1,43 @@
|
|||
"""create table overtaking_event
|
||||
|
||||
Revision ID: 5d75febe2d59
|
||||
Revises: 920aed1450c9
|
||||
Create Date: 2022-03-30 21:36:37.687080
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from migrations.utils import dbtype
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "5d75febe2d59"
|
||||
down_revision = "9336eef458e7"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the ``overtaking_event`` table storing individual overtake measurements."""
    op.create_table(
        "overtaking_event",
        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True, index=True),
        sa.Column(
            "track_id", sa.Integer, sa.ForeignKey("track.id", ondelete="CASCADE")
        ),
        # presumably a content hash used for deduplication (unique) -- confirm
        sa.Column("hex_hash", sa.String, unique=True, index=True),
        sa.Column("way_id", sa.BIGINT, index=True),
        sa.Column("direction_reversed", sa.Boolean),
        sa.Column("geometry", dbtype("GEOMETRY")),
        sa.Column("latitude", sa.Float),
        sa.Column("longitude", sa.Float),
        sa.Column("time", sa.DateTime),
        sa.Column("distance_overtaker", sa.Float),
        sa.Column("distance_stationary", sa.Float),
        sa.Column("course", sa.Float),
        sa.Column("speed", sa.Float),
        # composite index to look events up by way and travel direction
        sa.Index("road_segment", "way_id", "direction_reversed"),
    )


def downgrade():
    """Drop the ``overtaking_event`` table."""
    op.drop_table("overtaking_event")
|
|
@ -0,0 +1,26 @@
|
|||
"""add_overtaking_event_index
|
||||
|
||||
|
||||
Revision ID: 7868aed76122
|
||||
Revises: 587e69ecb466
|
||||
Create Date: 2023-07-16 13:37:17.694079
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = '7868aed76122'
|
||||
down_revision = '587e69ecb466'
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Add a GIST index on overtaking_event.geometry for spatial queries."""
    op.execute("CREATE INDEX IF NOT EXISTS ix_overtaking_event_geometry ON overtaking_event using GIST(geometry);")


def downgrade():
    """Remove the spatial index.

    Uses raw SQL with IF EXISTS so the downgrade is idempotent, mirroring
    the IF NOT EXISTS used by upgrade().
    """
    op.execute("DROP INDEX IF EXISTS ix_overtaking_event_geometry;")
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
"""create enum processing_status
|
||||
|
||||
Revision ID: 920aed1450c9
|
||||
Revises: 986c6953e431
|
||||
Create Date: 2022-03-30 21:36:25.896192
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "920aed1450c9"
|
||||
down_revision = "986c6953e431"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def _get_enum_type():
    """Build the processing_status ENUM shared by upgrade() and downgrade()."""
    return postgresql.ENUM(
        "created", "queued", "processing", "complete", "error", name="processing_status"
    )


def upgrade():
    """Create the processing_status enum type (no-op if it already exists)."""
    _get_enum_type().create(op.get_bind(), checkfirst=True)


def downgrade():
    """Drop the processing_status enum type."""
    _get_enum_type().drop(op.get_bind())
|
42
api/migrations/versions/9336eef458e7_create_table_comment.py
Normal file
42
api/migrations/versions/9336eef458e7_create_table_comment.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
"""create table comment
|
||||
|
||||
Revision ID: 9336eef458e7
|
||||
Revises: d66baafab5ec
|
||||
Create Date: 2022-03-30 21:37:02.080429
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "9336eef458e7"
|
||||
down_revision = "d66baafab5ec"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the ``comment`` table for user comments on tracks."""
    NOW = sa.text("NOW()")

    op.create_table(
        "comment",
        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
        # public identifier, generated server-side by the uuid-ossp extension
        sa.Column("uid", UUID, server_default=sa.func.uuid_generate_v4()),
        sa.Column("created_at", sa.DateTime, nullable=False, server_default=NOW),
        sa.Column(
            "updated_at", sa.DateTime, nullable=False, server_default=NOW, onupdate=NOW
        ),
        sa.Column("body", sa.TEXT),
        sa.Column(
            "author_id", sa.Integer, sa.ForeignKey("user.id", ondelete="CASCADE")
        ),
        sa.Column(
            "track_id", sa.Integer, sa.ForeignKey("track.id", ondelete="CASCADE")
        ),
    )


def downgrade():
    """Drop the ``comment`` table."""
    op.drop_table("comment")
|
|
@ -0,0 +1,29 @@
|
|||
"""create enum zone_type
|
||||
|
||||
Revision ID: 986c6953e431
|
||||
Revises: 3856f240bb6d
|
||||
Create Date: 2022-03-30 21:36:19.888268
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "986c6953e431"
|
||||
down_revision = "3856f240bb6d"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def _get_enum_type():
    """Build the zone_type ENUM shared by upgrade() and downgrade()."""
    return postgresql.ENUM("rural", "urban", "motorway", name="zone_type")


def upgrade():
    """Create the zone_type enum (no-op if it already exists)."""
    _get_enum_type().create(op.get_bind(), checkfirst=True)


def downgrade():
    """Drop the zone_type enum."""
    _get_enum_type().drop(op.get_bind())
|
|
@ -0,0 +1,26 @@
|
|||
"""add user display_name
|
||||
|
||||
Revision ID: 99a3d2eb08f9
|
||||
Revises: a9627f63fbed
|
||||
Create Date: 2022-09-13 07:30:18.747880
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "99a3d2eb08f9"
|
||||
down_revision = "a9627f63fbed"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Add the nullable user.display_name column."""
    op.add_column(
        "user", sa.Column("display_name", sa.String, nullable=True), schema="public"
    )


def downgrade():
    """Remove user.display_name again."""
    op.drop_column("user", "display_name", schema="public")
|
45
api/migrations/versions/9d8c8c38a1d0_create_table_user.py
Normal file
45
api/migrations/versions/9d8c8c38a1d0_create_table_user.py
Normal file
|
@ -0,0 +1,45 @@
|
|||
"""create table user
|
||||
|
||||
Revision ID: 9d8c8c38a1d0
|
||||
Revises: 35e7f1768f9b
|
||||
Create Date: 2022-03-30 21:36:59.375149
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "9d8c8c38a1d0"
|
||||
down_revision = "35e7f1768f9b"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the ``user`` table."""
    NOW = sa.text("NOW()")

    op.create_table(
        "user",
        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
        sa.Column("created_at", sa.DateTime, nullable=False, server_default=NOW),
        sa.Column(
            "updated_at", sa.DateTime, nullable=False, server_default=NOW, onupdate=NOW
        ),
        # "sub" is presumably the OpenID-Connect subject id -- confirm with auth flow
        sa.Column("sub", sa.String, unique=True, nullable=False),
        sa.Column("username", sa.String, unique=True, nullable=False),
        sa.Column("email", sa.String, nullable=False),
        sa.Column("bio", sa.TEXT),
        sa.Column("image", sa.String),
        sa.Column(
            "are_tracks_visible_for_all",
            sa.Boolean,
            server_default=sa.false(),
            nullable=False,
        ),
        sa.Column("api_key", sa.String),
        sa.Column("match_by_username_email", sa.Boolean, server_default=sa.false()),
    )


def downgrade():
    """Drop the ``user`` table."""
    op.drop_table("user")
|
35
api/migrations/versions/a049e5eb24dd_create_table_region.py
Normal file
35
api/migrations/versions/a049e5eb24dd_create_table_region.py
Normal file
|
@ -0,0 +1,35 @@
|
|||
"""create table region
|
||||
|
||||
Revision ID: a049e5eb24dd
|
||||
Revises: 99a3d2eb08f9
|
||||
Create Date: 2022-04-02 21:28:43.124521
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
from migrations.utils import dbtype
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "a049e5eb24dd"
|
||||
down_revision = "99a3d2eb08f9"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the ``region`` table plus a GIST index on its geometry."""
    op.create_table(
        "region",
        sa.Column("id", sa.String(24), primary_key=True, index=True),
        sa.Column("name", sa.Text),
        sa.Column("geometry", dbtype("GEOMETRY(GEOMETRY,3857)"), index=False),
        sa.Column("admin_level", sa.Integer, index=True),
    )
    # FILLFACTOR=100 packs index pages fully -- presumably regions are
    # written once and rarely updated; confirm with the import pipeline
    op.execute(
        "CREATE INDEX region_geometry_idx ON region USING GIST (geometry) WITH (FILLFACTOR=100);"
    )


def downgrade():
    """Drop the ``region`` table (its index is dropped with it)."""
    op.drop_table("region")
|
|
@ -0,0 +1,34 @@
|
|||
"""create table road_usage
|
||||
|
||||
Revision ID: a9627f63fbed
|
||||
Revises: 5d75febe2d59
|
||||
Create Date: 2022-03-16 20:26:17.449569
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "a9627f63fbed"
|
||||
down_revision = "5d75febe2d59"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the ``road_usage`` table recording track traversals of ways."""
    op.create_table(
        "road_usage",
        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True, index=True),
        sa.Column(
            "track_id", sa.Integer, sa.ForeignKey("track.id", ondelete="CASCADE")
        ),
        # presumably a content hash used for deduplication (unique) -- confirm
        sa.Column("hex_hash", sa.String, unique=True, index=True),
        sa.Column("way_id", sa.BIGINT, index=True),
        sa.Column("time", sa.DateTime),
        sa.Column("direction_reversed", sa.Boolean),
        # composite index to look usages up by way and travel direction
        sa.Index("road_usage_segment", "way_id", "direction_reversed"),
    )


def downgrade():
    """Drop the ``road_usage`` table."""
    op.drop_table("road_usage")
|
39
api/migrations/versions/b8b0fbae50a4_add_import_groups.py
Normal file
39
api/migrations/versions/b8b0fbae50a4_add_import_groups.py
Normal file
|
@ -0,0 +1,39 @@
|
|||
"""add import groups
|
||||
|
||||
Revision ID: b8b0fbae50a4
|
||||
Revises: f7b21148126a
|
||||
Create Date: 2023-03-26 09:41:36.621203
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "b8b0fbae50a4"
|
||||
down_revision = "f7b21148126a"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Add a nullable import_group column to road and region.

    Existing rows are tagged "osm2pgsql".
    """
    op.add_column(
        "road",
        sa.Column("import_group", sa.String(), nullable=True),
    )
    op.add_column(
        "region",
        sa.Column("import_group", sa.String(), nullable=True),
    )

    # Set existing to "osm2pgsql"
    road = sa.table("road", sa.column("import_group", sa.String))
    op.execute(road.update().values(import_group="osm2pgsql"))

    region = sa.table("region", sa.column("import_group", sa.String))
    op.execute(region.update().values(import_group="osm2pgsql"))


def downgrade():
    """Drop the import_group columns again."""
    op.drop_column("road", "import_group")
    op.drop_column("region", "import_group")
|
66
api/migrations/versions/d66baafab5ec_create_table_track.py
Normal file
66
api/migrations/versions/d66baafab5ec_create_table_track.py
Normal file
|
@ -0,0 +1,66 @@
|
|||
"""create table track
|
||||
|
||||
Revision ID: d66baafab5ec
|
||||
Revises: 35e7f1768f9b
|
||||
Create Date: 2022-03-30 21:36:54.848452
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from migrations.utils import dbtype
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "d66baafab5ec"
|
||||
down_revision = "9d8c8c38a1d0"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create the ``track`` table, the central record of an uploaded ride."""
    NOW = sa.text("NOW()")

    op.create_table(
        "track",
        sa.Column("id", sa.Integer, primary_key=True, autoincrement=True),
        # URL-friendly public identifier
        sa.Column("slug", sa.String, unique=True, nullable=False, index=True),
        sa.Column("created_at", sa.DateTime, nullable=False, server_default=NOW),
        sa.Column(
            "updated_at", sa.DateTime, nullable=False, server_default=NOW, onupdate=NOW
        ),
        sa.Column("title", sa.String),
        # uses the processing_status enum created in an earlier migration
        sa.Column(
            "processing_status",
            dbtype("processing_status"),
            server_default=sa.literal("created"),
        ),
        sa.Column("processing_queued_at", sa.DateTime),
        sa.Column("processed_at", sa.DateTime),
        sa.Column("processing_log", sa.TEXT),
        sa.Column(
            "customized_title", sa.Boolean, server_default=sa.false(), nullable=False
        ),
        sa.Column("description", sa.TEXT),
        # tracks are private by default
        sa.Column("public", sa.Boolean, server_default=sa.false()),
        sa.Column("uploaded_by_user_agent", sa.String),
        sa.Column("original_file_name", sa.String),
        sa.Column("original_file_hash", sa.String, nullable=False),
        sa.Column(
            "author_id",
            sa.Integer,
            sa.ForeignKey("user.id", ondelete="CASCADE"),
            nullable=False,
        ),
        sa.Column("recorded_at", sa.DateTime),
        sa.Column("recorded_until", sa.DateTime),
        sa.Column("duration", sa.Float),
        sa.Column("length", sa.Float),
        sa.Column("segments", sa.Integer),
        sa.Column("num_events", sa.Integer),
        sa.Column("num_measurements", sa.Integer),
        sa.Column("num_valid", sa.Integer),
    )


def downgrade():
    """Drop the ``track`` table."""
    op.drop_table("track")
|
24
api/migrations/versions/f4b0f460254d_add_osm_id_indexes.py
Normal file
24
api/migrations/versions/f4b0f460254d_add_osm_id_indexes.py
Normal file
|
@ -0,0 +1,24 @@
|
|||
"""add osm id indexes
|
||||
|
||||
Revision ID: f4b0f460254d
|
||||
Revises: b8b0fbae50a4
|
||||
Create Date: 2023-03-30 10:56:22.066768
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "f4b0f460254d"
|
||||
down_revision = "b8b0fbae50a4"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Index road.way_id to speed up looking roads up by OSM way id."""
    op.execute("CREATE INDEX IF NOT EXISTS ix_road_way_id ON road (way_id);")


def downgrade():
    """Drop the index.

    Uses raw SQL with IF EXISTS so the downgrade is idempotent, mirroring
    the IF NOT EXISTS used by upgrade().
    """
    op.execute("DROP INDEX IF EXISTS ix_road_way_id;")
|
41
api/migrations/versions/f7b21148126a_add_user_device.py
Normal file
41
api/migrations/versions/f7b21148126a_add_user_device.py
Normal file
|
@ -0,0 +1,41 @@
|
|||
"""add user_device
|
||||
|
||||
Revision ID: f7b21148126a
|
||||
Revises: a049e5eb24dd
|
||||
Create Date: 2022-09-15 17:48:06.764342
|
||||
|
||||
"""
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision = "f7b21148126a"
|
||||
down_revision = "a049e5eb24dd"
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
def upgrade():
    """Create ``user_device`` and link tracks to the device they came from."""
    op.create_table(
        "user_device",
        sa.Column("id", sa.Integer, autoincrement=True, primary_key=True),
        sa.Column("user_id", sa.Integer, sa.ForeignKey("user.id", ondelete="CASCADE")),
        sa.Column("identifier", sa.String, nullable=False),
        sa.Column("display_name", sa.String, nullable=True),
        # each identifier may appear only once per user
        sa.Index("user_id_identifier", "user_id", "identifier", unique=True),
    )
    op.add_column(
        "track",
        sa.Column(
            "user_device_id",
            sa.Integer,
            # RESTRICT: a device that still has tracks cannot be deleted
            sa.ForeignKey("user_device.id", ondelete="RESTRICT"),
            nullable=True,
        ),
    )


def downgrade():
    """Remove the track link first, then the table it references."""
    op.drop_column("track", "user_device_id")
    op.drop_table("user_device")
|
1
api/obs/__init__.py
Normal file
1
api/obs/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
1
api/obs/api/__init__.py
Normal file
1
api/obs/api/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
__version__ = "0.8.1"
|
363
api/obs/api/app.py
Normal file
363
api/obs/api/app.py
Normal file
|
@ -0,0 +1,363 @@
|
|||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
|
||||
from json import JSONEncoder, dumps
|
||||
from functools import wraps, partial
|
||||
from urllib.parse import urlparse
|
||||
from os.path import dirname, join, normpath, abspath, isfile
|
||||
from datetime import datetime, date
|
||||
|
||||
from sanic import Sanic, Blueprint
|
||||
from sanic.response import (
|
||||
text,
|
||||
json as json_response,
|
||||
file as file_response,
|
||||
html as html_response,
|
||||
)
|
||||
from sanic.exceptions import Unauthorized, SanicException
|
||||
from sanic_session import Session, InMemorySessionInterface
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from obs.api.db import User, make_session, connect_db
|
||||
from obs.api.cors import setup_options, add_cors_headers
|
||||
from obs.api.utils import get_single_arg
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SanicAccessMessageFilter(logging.Filter):
    """
    A filter that modifies the log message of a sanic.access log entry to
    include useful information.
    """

    def filter(self, record):
        # sanic attaches `request` and `status` attributes to access records
        record.msg = f"{record.request} -> {record.status}"
        return True


def configure_sanic_logging():
    """Replace sanic's default logging setup.

    Removes all handlers from the sanic loggers (so records propagate to the
    root logger's handlers), installs the access-message filter, and quiets
    sanic.root to WARNING.
    """
    for logger_name in ["sanic.root", "sanic.access", "sanic.error"]:
        logger = logging.getLogger(logger_name)
        # iterate over a copy: removing from the live list while iterating
        # it would skip every other handler
        for handler in list(logger.handlers):
            logger.removeHandler(handler)

    logger = logging.getLogger("sanic.access")
    for filter_ in list(logger.filters):
        logger.removeFilter(filter_)
    logger.addFilter(SanicAccessMessageFilter())
    logging.getLogger("sanic.root").setLevel(logging.WARNING)
|
||||
|
||||
|
||||
app = Sanic(
|
||||
"openbikesensor-api",
|
||||
env_prefix="OBS_",
|
||||
)
|
||||
configure_sanic_logging()
|
||||
|
||||
app.config.update(
|
||||
dict(
|
||||
DEBUG=False,
|
||||
VERBOSE=False,
|
||||
AUTO_RELOAD=False,
|
||||
POSTGRES_POOL_SIZE=20,
|
||||
POSTGRES_MAX_OVERFLOW=40,
|
||||
DEDICATED_WORKER=True,
|
||||
FRONTEND_URL=None,
|
||||
FRONTEND_HTTPS=True,
|
||||
TILES_FILE=None,
|
||||
TILE_SEMAPHORE_SIZE=4,
|
||||
EXPORT_SEMAPHORE_SIZE=1,
|
||||
)
|
||||
)
|
||||
|
||||
# overwrite from defaults again
|
||||
app.config.load_environment_vars("OBS_")
|
||||
|
||||
if isfile("./config.py"):
|
||||
app.update_config("./config.py")
|
||||
|
||||
# For developers to override the config without committing it
|
||||
if isfile("./config.overrides.py"):
|
||||
app.update_config("./config.overrides.py")
|
||||
|
||||
c = app.config
|
||||
|
||||
api = Blueprint("api", url_prefix="/api")
|
||||
auth = Blueprint("auth", url_prefix="")
|
||||
|
||||
# NOTE: `re` is already imported at the top of this module; the duplicate
# import that used to live here was removed.
TILE_REQUEST_CANCELLED = re.compile(
    r"Connection lost before response written.*GET /tiles"
)


class NoConnectionLostFilter(logging.Filter):
    """Suppress the noisy "connection lost" errors logged when a client
    cancels a tile request before the response is written."""

    # a proper instance method (the original omitted `self` and registered
    # the class object itself, which only worked by accident)
    def filter(self, record):
        return not TILE_REQUEST_CANCELLED.match(record.getMessage())


logging.getLogger("sanic.error").addFilter(NoConnectionLostFilter())
|
||||
|
||||
|
||||
def setup_cors(app):
    """Configure CORS from FRONTEND_URL and ADDITIONAL_CORS_ORIGINS.

    Does nothing when neither setting is present. Otherwise collects the
    allowed origins on app.ctx.cors_origins, registers OPTIONS handlers for
    preflight requests, and installs a response middleware that fills in the
    CORS headers.
    """
    frontend_url = app.config.get("FRONTEND_URL")
    additional_origins = app.config.get("ADDITIONAL_CORS_ORIGINS")
    if not frontend_url and not additional_origins:
        # No CORS configured
        return

    origins = []
    if frontend_url:
        # normalize to scheme://host[:port], dropping any path component
        u = urlparse(frontend_url)
        origins.append(f"{u.scheme}://{u.netloc}")

    # ADDITIONAL_CORS_ORIGINS may be a space separated string or a list
    if isinstance(additional_origins, str):
        origins += re.split(r"\s+", additional_origins)
    elif isinstance(additional_origins, list):
        origins += additional_origins
    elif additional_origins is not None:
        raise ValueError(
            "invalid option type for ADDITIONAL_CORS_ORIGINS, must be list or space separated str"
        )

    app.ctx.cors_origins = origins

    # Add OPTIONS handlers to any route that is missing it
    app.register_listener(setup_options, "before_server_start")

    # Fill in CORS headers
    app.register_middleware(add_cors_headers, "response")
|
||||
|
||||
|
||||
setup_cors(app)
|
||||
|
||||
|
||||
@app.exception(SanicException, BaseException)
async def _handle_sanic_errors(_request, exception):
    """Render unhandled exceptions as a JSON error response.

    CancelledError is passed through silently (the client went away);
    everything else is logged and mapped to the exception's status_code,
    defaulting to 500.
    """
    if isinstance(exception, asyncio.CancelledError):
        return None

    log.error("Exception in handler: %s", exception, exc_info=True)
    return json_response(
        {
            "errors": {
                type(exception).__name__: str(exception),
            },
        },
        status=exception.status_code if hasattr(exception, "status_code") else 500,
    )
|
||||
|
||||
|
||||
# Configure paths
|
||||
def configure_paths(c):
    """Derive filesystem paths on the config, filling in defaults.

    Every value may be overridden via config; defaults are laid out relative
    to the API root (two directories above this module).
    """
    c.API_ROOT_DIR = c.get("API_ROOT_DIR") or abspath(
        join(dirname(__file__), "..", "..")
    )
    c.DATA_DIR = c.get("DATA_DIR") or normpath(join(c.API_ROOT_DIR, "../data"))
    c.PROCESSING_DIR = c.get("PROCESSING_DIR") or join(c.DATA_DIR, "processing")
    c.PROCESSING_OUTPUT_DIR = c.get("PROCESSING_OUTPUT_DIR") or join(
        c.DATA_DIR, "processing-output"
    )
    c.TRACKS_DIR = c.get("TRACKS_DIR") or join(c.DATA_DIR, "tracks")
    c.OBS_FACE_CACHE_DIR = c.get("OBS_FACE_CACHE_DIR") or join(
        c.DATA_DIR, "obs-face-cache"
    )
    # no default: a falsy FRONTEND_DIR disables frontend serving (see inject_urls)
    c.FRONTEND_DIR = c.get("FRONTEND_DIR")
|
||||
|
||||
|
||||
configure_paths(app.config)
|
||||
|
||||
|
||||
# TODO: use a different interface, maybe backed by the PostgreSQL, to allow
|
||||
# scaling the API
|
||||
Session(app, interface=InMemorySessionInterface())
|
||||
|
||||
|
||||
@app.before_server_start
async def app_connect_db(app, loop):
    """Open the database engine and create the throttling semaphores."""
    # keep the context manager itself so app_disconnect_db can exit it
    app.ctx._db_engine_ctx = connect_db(
        app.config.POSTGRES_URL,
        app.config.POSTGRES_POOL_SIZE,
        app.config.POSTGRES_MAX_OVERFLOW,
    )
    app.ctx._db_engine = await app.ctx._db_engine_ctx.__aenter__()

    # semaphores limit concurrent tile / export work; a falsy size means the
    # attribute is never created (callers must handle its absence)
    if app.config.TILE_SEMAPHORE_SIZE:
        app.ctx.tile_semaphore = asyncio.Semaphore(app.config.TILE_SEMAPHORE_SIZE)

    if app.config.EXPORT_SEMAPHORE_SIZE:
        app.ctx.export_semaphore = asyncio.Semaphore(app.config.EXPORT_SEMAPHORE_SIZE)
|
||||
|
||||
|
||||
@app.after_server_stop
async def app_disconnect_db(app, loop):
    """Close the database engine opened in app_connect_db (if any)."""
    if hasattr(app.ctx, "_db_engine_ctx"):
        await app.ctx._db_engine_ctx.__aexit__(None, None, None)
|
||||
|
||||
|
||||
def remove_right(l, r):
    """Return *l* with the suffix *r* stripped once, or *l* unchanged.

    Guards against an empty suffix: every string ends with "", and
    ``l[:-len("")]`` is ``l[:0]``, which would wrongly return "".
    """
    if r and l.endswith(r):
        return l[: -len(r)]
    return l
|
||||
|
||||
|
||||
@app.middleware("request")
async def inject_arg_getter(req):
    """Expose get_single_arg as req.ctx.get_single_arg(...) for handlers."""
    req.ctx.get_single_arg = partial(get_single_arg, req)
|
||||
|
||||
|
||||
@app.middleware("request")
async def inject_urls(req):
    """Derive frontend and API URLs/base paths for this request.

    Populates req.ctx with frontend_scheme, api_url, api_scheme,
    api_base_path, frontend_base_path and frontend_url, preferring explicit
    configuration (API_URL, FRONTEND_URL) and falling back to values derived
    from the incoming request.
    """
    if req.app.config.FRONTEND_HTTPS:
        req.ctx.frontend_scheme = "https"
    elif req.app.config.FRONTEND_URL:
        req.ctx.frontend_scheme = (
            "http" if req.app.config.FRONTEND_URL.startswith("http://") else "https"
        )
    else:
        req.ctx.frontend_scheme = req.scheme

    if req.app.config.get("API_URL"):
        req.ctx.api_url = req.app.config.API_URL.rstrip("/")
        api_url_parsed = urlparse(req.ctx.api_url)
        req.ctx.api_scheme = api_url_parsed.scheme  # just use the same for now
        req.ctx.api_base_path = api_url_parsed.path
    else:
        req.ctx.api_scheme = req.ctx.frontend_scheme  # just use the same for now
        # strip the route path from the full server path to get the mount point
        req.ctx.api_base_path = remove_right(req.server_path, req.path)
        req.ctx.api_url = (
            f"{req.ctx.frontend_scheme}://{req.host}{req.ctx.api_base_path}"
        )

    if req.app.config.FRONTEND_URL:
        req.ctx.frontend_base_path = "/" + urlparse(
            req.app.config.FRONTEND_URL
        ).path.strip("/")
        req.ctx.frontend_url = req.app.config.FRONTEND_URL.rstrip("/")
    elif app.config.FRONTEND_DIR:
        # frontend is served by this API instance, so it shares its URLs
        req.ctx.frontend_base_path = req.ctx.api_base_path
        req.ctx.frontend_url = req.ctx.api_url

    else:
        req.ctx.frontend_base_path = "/"
        req.ctx.frontend_url = (
            f"{req.ctx.frontend_scheme}://{req.host}{req.ctx.frontend_base_path}"
        )
|
||||
|
||||
|
||||
@app.middleware("request")
async def inject_session(req):
    """Open a per-request database session; closed again in close_session."""
    req.ctx._session_ctx = make_session()
    req.ctx.db = await req.ctx._session_ctx.__aenter__()
|
||||
|
||||
|
||||
@app.middleware("response")
async def close_session(req, response):
    """Close the per-request database session opened by inject_session."""
    if hasattr(req.ctx, "_session_ctx"):
        await req.ctx.db.close()
        await req.ctx._session_ctx.__aexit__(None, None, None)
|
||||
|
||||
|
||||
@app.middleware("request")
async def load_user(req):
    """Resolve the session's user_id to a User on req.ctx.user.

    req.ctx.user is None for anonymous requests or when the stored id no
    longer matches a user row.
    """
    user_id = req.ctx.session.get("user_id")
    user = None
    if user_id:
        user = (
            await req.ctx.db.execute(select(User).where(User.id == user_id))
        ).scalar()

    req.ctx.user = user
|
||||
|
||||
|
||||
def require_auth(fn):
    """Decorator rejecting requests that have no authenticated user."""

    @wraps(fn)
    def wrapper(req, *args, **kwargs):
        if req.ctx.user:
            return fn(req, *args, **kwargs)
        raise Unauthorized("Login required")

    return wrapper
|
||||
|
||||
|
||||
def read_api_key(fn):
    """
    A middleware decorator to read the API Key of a user. It is an opt-in to
    allow usage with API Keys on certain urls. Combine with require_auth to
    actually check whether a user was authenticated through this. If a login
    session exists, the api key is ignored.
    """

    @wraps(fn)
    async def wrapper(req, *args, **kwargs):
        # try to parse a token if one exists, unless a user is already authenticated
        if (
            not req.ctx.user
            and isinstance(req.token, str)
            and req.token.lower().startswith("obsuserid ")
        ):
            try:
                # token format: "OBSUserId <api_key>"
                api_key = req.token.split()[1]
            except LookupError:
                api_key = None

            if api_key:
                user = (
                    await req.ctx.db.execute(
                        select(User).where(User.api_key == api_key.strip())
                    )
                ).scalar()

                # an explicitly supplied but unknown key is an error,
                # not an anonymous request
                if not user:
                    raise Unauthorized("invalid OBSUserId token")

                req.ctx.user = user

        return await fn(req, *args, **kwargs)

    return wrapper
|
||||
|
||||
|
||||
class CustomJsonEncoder(JSONEncoder):
    """JSON encoder serializing datetimes/dates as ISO strings with an
    explicit UTC offset suffix (for javascript consumers)."""

    def default(self, obj):
        if not isinstance(obj, (datetime, date)):
            # let the base class raise the usual TypeError
            return super().default(obj)
        return obj.isoformat() + "+0000"  # explicit UTC for javascript <3
|
||||
|
||||
|
||||
def json(*args, **kwargs):
    """Like sanic's json response, but using CustomJsonEncoder for datetimes."""
    return json_response(*args, **kwargs, dumps=partial(dumps, cls=CustomJsonEncoder))
|
||||
|
||||
|
||||
from .routes import (
|
||||
info,
|
||||
login,
|
||||
stats,
|
||||
tracks,
|
||||
users,
|
||||
exports,
|
||||
)
|
||||
|
||||
from .routes import tiles, mapdetails
|
||||
from .routes import frontend
|
||||
|
||||
|
||||
app.blueprint(api)
|
||||
app.blueprint(auth)
|
||||
|
||||
if not app.config.DEDICATED_WORKER:
|
||||
|
||||
async def worker():
|
||||
from obs.api.process import process_tracks_loop
|
||||
|
||||
# run forever
|
||||
await process_tracks_loop(10)
|
||||
|
||||
app.add_task(worker())
|
68
api/obs/api/cors.py
Normal file
68
api/obs/api/cors.py
Normal file
|
@ -0,0 +1,68 @@
|
|||
from collections import defaultdict
|
||||
from typing import Dict, FrozenSet, Iterable
|
||||
|
||||
from sanic import Sanic, response
|
||||
from sanic_routing.router import Route
|
||||
|
||||
|
||||
def _add_cors_headers(request, response, methods: Iterable[str]) -> None:
    """Attach CORS response headers when the request's Origin is allowed.

    Responses to requests with a missing or disallowed Origin are left
    untouched.
    """
    allow_methods = list(set(methods))

    # preflight responses must always advertise OPTIONS
    if "OPTIONS" not in allow_methods:
        allow_methods.append("OPTIONS")

    origin = request.headers.get("origin")
    if origin in request.app.ctx.cors_origins:
        headers = {
            "Access-Control-Allow-Methods": ",".join(allow_methods),
            "Access-Control-Allow-Origin": origin,
            "Access-Control-Allow-Credentials": "true",
            "Access-Control-Allow-Headers": (
                "origin, content-type, accept, "
                "authorization, x-xsrf-token, x-request-id"
            ),
            "Access-Control-Expose-Headers": "content-disposition",
        }
        response.headers.extend(headers)
|
||||
|
||||
|
||||
def add_cors_headers(request, response):
    """Response middleware: attach CORS headers to non-preflight responses."""
    if request.method == "OPTIONS":
        # preflight responses get their headers in options_handler
        return
    _add_cors_headers(request, response, list(request.route.methods))
|
||||
|
||||
|
||||
def _compile_routes_needing_options(routes: Dict[str, Route]) -> Dict[str, FrozenSet]:
    """Map each route URI lacking an OPTIONS method to its existing methods."""
    needs_options = defaultdict(list)
    # This is 21.12 and later. You will need to change this for older versions.
    for route in routes.values():
        if "OPTIONS" not in route.methods:
            needs_options[route.uri].extend(route.methods)

    return {uri: frozenset(methods) for uri, methods in dict(needs_options).items()}
|
||||
|
||||
|
||||
def _options_wrapper(handler, methods):
|
||||
def wrapped_handler(request, *args, **kwargs):
|
||||
nonlocal methods
|
||||
return handler(request, methods)
|
||||
|
||||
return wrapped_handler
|
||||
|
||||
|
||||
async def options_handler(request, methods) -> response.HTTPResponse:
    """Generic preflight handler: empty response carrying the CORS headers."""
    resp = response.empty()
    _add_cors_headers(request, resp, methods)
    return resp
|
||||
|
||||
|
||||
def setup_options(app: Sanic, _):
    """Register an OPTIONS route for every route that lacks one.

    Runs as a before_server_start listener; the router is reset and
    re-finalized so routes can still be added at that point.
    """
    app.router.reset()
    needs_options = _compile_routes_needing_options(app.router.routes_all)
    for uri, methods in needs_options.items():
        app.add_route(
            _options_wrapper(options_handler, methods),
            uri,
            methods=["OPTIONS"],
        )
    app.router.finalize()
|
578
api/obs/api/db.py
Normal file
578
api/obs/api/db.py
Normal file
|
@ -0,0 +1,578 @@
|
|||
import hashlib
|
||||
from contextvars import ContextVar
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime
|
||||
import os
|
||||
from os.path import exists, join, dirname
|
||||
from json import loads
|
||||
import re
|
||||
import math
|
||||
import aiofiles
|
||||
import random
|
||||
import string
|
||||
import secrets
|
||||
from slugify import slugify
|
||||
import logging
|
||||
|
||||
from sqlalchemy.ext.declarative import declarative_base
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
from sqlalchemy.ext.asyncio import create_async_engine
|
||||
from sqlalchemy.orm import sessionmaker as SessionMaker, relationship
|
||||
from sqlalchemy.types import UserDefinedType, BIGINT, TEXT
|
||||
from sqlalchemy import (
|
||||
Boolean,
|
||||
Column,
|
||||
DateTime,
|
||||
Enum as SqlEnum,
|
||||
Float,
|
||||
ForeignKey,
|
||||
Index,
|
||||
Integer,
|
||||
String,
|
||||
false,
|
||||
func,
|
||||
select,
|
||||
text,
|
||||
literal,
|
||||
Text,
|
||||
)
|
||||
from sqlalchemy.dialects.postgresql import UUID
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
Base = declarative_base()
|
||||
|
||||
|
||||
engine = None
|
||||
sessionmaker: SessionMaker
|
||||
|
||||
|
||||
@asynccontextmanager
async def make_session():
    """Yield an AsyncSession from the module-level sessionmaker.

    Requires connect_db() to have been entered first.
    """
    async with sessionmaker(autoflush=True) as session:
        yield session
|
||||
|
||||
|
||||
async def drop_all():
    """Drop every table known to the ORM metadata."""
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.drop_all)
|
||||
|
||||
|
||||
async def init_models():
    """Create the required PostgreSQL extensions and all ORM-defined tables."""
    async with engine.begin() as conn:
        await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "hstore";'))
        await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "postgis";'))
        await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";'))
        await conn.run_sync(Base.metadata.create_all)
|
||||
|
||||
|
||||
def random_string(length):
    """Return *length* random characters drawn from [a-z0-9].

    NOTE(review): uses the non-cryptographic ``random`` module; if the
    result is ever used as a secret, switch to ``secrets.choice``
    (``secrets`` is already imported in this module).
    """
    alphabet = string.ascii_lowercase + string.digits
    return "".join(random.choice(alphabet) for _ in range(length))
|
||||
|
||||
|
||||
@asynccontextmanager
async def connect_db(url, pool_size=10, max_overflow=20):
    """Context manager binding the module-level engine and sessionmaker.

    Yields the engine; on exit the engine is disposed and both globals are
    reset to None.
    """
    global engine, sessionmaker

    engine = create_async_engine(
        url, echo=False, pool_size=pool_size, max_overflow=max_overflow
    )
    sessionmaker = SessionMaker(engine, class_=AsyncSession, expire_on_commit=False)

    yield engine

    # for AsyncEngine created in function scope, close and
    # clean-up pooled connections
    await engine.dispose()

    engine = None
    sessionmaker = None
|
||||
|
||||
|
||||
ZoneType = SqlEnum("rural", "urban", "motorway", name="zone_type")
|
||||
ProcessingStatus = SqlEnum(
|
||||
"created", "queued", "processing", "complete", "error", name="processing_status"
|
||||
)
|
||||
|
||||
|
||||
class Geometry(UserDefinedType):
    """PostGIS GEOMETRY column that round-trips values as GeoJSON strings.

    Values are bound through ST_GeomFromGeoJSON and read back as GeoJSON
    after transforming to WGS 84 (SRID 4326).
    """

    def get_col_spec(self):
        return "GEOMETRY"

    def bind_expression(self, bindvalue):
        return func.ST_GeomFromGeoJSON(bindvalue, type_=self)

    def column_expression(self, col):
        return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)
|
||||
|
||||
|
||||
class LineString(UserDefinedType):
    """PostGIS LineString column stored in web mercator (SRID 3857).

    Bound from GeoJSON; read back as WGS 84 (SRID 4326) GeoJSON.
    """

    def get_col_spec(self):
        return "geometry(LineString, 3857)"

    def bind_expression(self, bindvalue):
        return func.ST_GeomFromGeoJSON(bindvalue, type_=self)

    def column_expression(self, col):
        return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)
|
||||
|
||||
|
||||
class GeometryGeometry(UserDefinedType):
    """Generic PostGIS geometry column in web mercator (SRID 3857).

    Bound from GeoJSON; read back as WGS 84 (SRID 4326) GeoJSON.
    """

    def get_col_spec(self):
        return "geometry(GEOMETRY, 3857)"

    def bind_expression(self, bindvalue):
        return func.ST_GeomFromGeoJSON(bindvalue, type_=self)

    def column_expression(self, col):
        return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)
|
||||
|
||||
|
||||
class OvertakingEvent(Base):
    """A single confirmed overtaking measurement extracted from a track."""

    __tablename__ = "overtaking_event"
    # Composite index for per-road-segment queries (way + travel direction).
    __table_args__ = (Index("road_segment", "way_id", "direction_reversed"),)

    id = Column(Integer, autoincrement=True, primary_key=True, index=True)
    track_id = Column(Integer, ForeignKey("track.id", ondelete="CASCADE"))
    # Deduplication hash over (lat, lon, time); unique across all events.
    hex_hash = Column(String, unique=True, index=True)
    way_id = Column(BIGINT, index=True)

    # whether we were traveling along the way in reverse direction
    direction_reversed = Column(Boolean)

    geometry = Column(Geometry)
    latitude = Column(Float)
    longitude = Column(Float)
    time = Column(DateTime)
    distance_overtaker = Column(Float)
    distance_stationary = Column(Float)
    course = Column(Float)
    speed = Column(Float)

    def __repr__(self):
        return f"<OvertakingEvent {self.id}>"
|
||||
|
||||
|
||||
class Road(Base):
    """An OSM way with the attributes needed for event aggregation."""

    __tablename__ = "road"
    # OSM way id is the primary key; rows are imported, never autogenerated.
    way_id = Column(BIGINT, primary_key=True, index=True, autoincrement=False)
    zone = Column(ZoneType)
    name = Column(Text)
    geometry = Column(LineString)
    directionality = Column(Integer)
    oneway = Column(Boolean)
    # Identifies the import batch this row came from.
    import_group = Column(String)

    __table_args__ = (
        # We keep the index name as osm2pgsql created it, way back when.
        Index(
            "road_geometry_idx",
            "geometry",
            postgresql_using="gist",
            postgresql_with={"fillfactor": 100},
        ),
    )

    def to_dict(self):
        """Serialize for the API; `geometry` is parsed from its GeoJSON
        string representation."""
        return {
            "way_id": self.way_id,
            "zone": self.zone,
            "name": self.name,
            "directionality": self.directionality,
            "oneway": self.oneway,
            "geometry": loads(self.geometry),
        }
|
||||
|
||||
|
||||
class RoadUsage(Base):
    """Records that a track traversed a road segment in some direction."""

    __tablename__ = "road_usage"
    __table_args__ = (Index("road_usage_segment", "way_id", "direction_reversed"),)

    id = Column(Integer, autoincrement=True, primary_key=True, index=True)
    track_id = Column(Integer, ForeignKey("track.id", ondelete="CASCADE"))
    # Deduplication hash (see process.py); unique across all usages.
    hex_hash = Column(String, unique=True, index=True)
    way_id = Column(BIGINT, index=True)
    time = Column(DateTime)
    direction_reversed = Column(Boolean)

    def __repr__(self):
        return f"<RoadUsage {self.id}>"

    # Identity is derived from hex_hash so instances deduplicate in sets
    # (used by import_road_usages in process.py).
    def __hash__(self):
        return int(self.hex_hash, 16)

    def __eq__(self, other):
        # NOTE(review): assumes `other` is a RoadUsage; comparing against an
        # unrelated type would raise AttributeError rather than return False.
        return self.hex_hash == other.hex_hash
|
||||
|
||||
|
||||
# Server-side default expression for timestamp columns.
NOW = text("NOW()")


class DuplicateTrackFileError(ValueError):
    """Raised when an uploaded file's hash matches another track of the
    same author."""

    pass
|
||||
|
||||
|
||||
class Track(Base):
    """A single uploaded ride recording and its processing state."""

    __tablename__ = "track"
    id = Column(Integer, primary_key=True, autoincrement=True)
    slug = Column(String, unique=True, nullable=False, index=True)

    created_at = Column(DateTime, nullable=False, server_default=NOW)
    updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)

    title = Column(String)

    processing_status = Column(ProcessingStatus, server_default=literal("created"))
    processing_queued_at = Column(DateTime)
    processed_at = Column(DateTime)

    processing_log = Column(TEXT)

    # Set to true if the user customized the title. Disables auto-generating
    # an updated title when the track is (re-)processed.
    customized_title = Column(Boolean, server_default=false(), nullable=False)

    # A user-provided description of the track. May contain markdown.
    description = Column(TEXT)

    # Whether this track is visible (anonymized) in the public track list or not.
    public = Column(Boolean, server_default=false())

    # Whether this track should be exported to the public track database
    # (after anonymization).
    # include_in_public_database = Column(Boolean, server_default=false())

    # The user agent string, or a part thereof, that was used to upload this
    # track. Usually contains only the OBS version, other user agents are
    # discarded due to being irrelevant.
    uploaded_by_user_agent = Column(String)

    # The name of the original file, as provided during upload. Used for
    # providing a download with the same name, and for display in the
    # frontend.
    original_file_name = Column(String)

    # A hash of the original file's contents. Nobody can upload the same track twice.
    original_file_hash = Column(String, nullable=False)

    author_id = Column(
        Integer, ForeignKey("user.id", ondelete="CASCADE"), nullable=False
    )

    user_device_id = Column(
        Integer,
        ForeignKey("user_device.id", ondelete="RESTRICT"),
        nullable=True,
    )

    # Statistics... maybe we'll drop some of this if we can easily compute them from SQL
    recorded_at = Column(DateTime)
    recorded_until = Column(DateTime)
    duration = Column(Float)
    length = Column(Float)
    segments = Column(Integer)
    num_events = Column(Integer)
    num_measurements = Column(Integer)
    num_valid = Column(Integer)

    def to_dict(self, for_user_id=None):
        """Serialize for the API. Owner-only fields (user agent, original
        file name, device id) are included only when `for_user_id` matches
        the author."""
        result = {
            "id": self.id,
            "slug": self.slug,
            "title": self.title,
            "description": self.description,
            "createdAt": self.created_at,
            "updatedAt": self.updated_at,
            "public": self.public,
            "processingStatus": self.processing_status,
            "recordedAt": self.recorded_at,
            "recordedUntil": self.recorded_until,
            "duration": self.duration,
            "length": self.length,
            "numEvents": self.num_events,
            "numValid": self.num_valid,
            "numMeasurements": self.num_measurements,
        }

        if for_user_id is not None and for_user_id == self.author_id:
            result["uploadedByUserAgent"] = self.uploaded_by_user_agent
            result["originalFileName"] = self.original_file_name
            result["userDeviceId"] = self.user_device_id

        if self.author:
            result["author"] = self.author.to_dict(for_user_id=for_user_id)

        return result

    def is_visible_to_private(self, user):
        """True if `user` is the author of this track."""
        return user is not None and user.id == self.author_id

    def is_visible_to(self, user):
        """True if `user` may view this track (author, or the track is public)."""
        return self.is_visible_to_private(user) or self.public

    def generate_slug(self, new_title_or_filename=None):
        """Derive a URL slug from the given text, the title, or the original
        file name, then append a random suffix to guarantee uniqueness."""
        input_text = new_title_or_filename or self.title or self.original_file_name

        if input_text is not None:
            self.slug = slugify(input_text, separator="_") + "-"
        else:
            self.slug = ""

        # make unique
        self.slug += random_string(8)

    async def prevent_duplicates(self, session, file_body):
        """Raise DuplicateTrackFileError if this author already has a
        different track with the same file hash, else store the hash.

        Bugfix: the conditions were previously combined with Python's `and`
        inside `.where(...)`, which does not build a SQL conjunction (and
        `bool()` on a column comparison is undefined in SQLAlchemy). They are
        now passed as separate criteria to `.where()`, which ANDs them. The
        self-exclusion is skipped for unsaved tracks, where `self.id` is None
        and `Track.id != NULL` would match nothing.
        """
        hex_hash = hashlib.sha512(file_body).hexdigest()

        filters = [
            Track.original_file_hash == hex_hash,
            Track.author_id == self.author_id,
        ]
        if self.id is not None:
            # Exclude this very track when re-checking an existing record.
            filters.append(Track.id != self.id)

        duplicate_count = await session.scalar(
            select(func.count()).select_from(Track).where(*filters)
        )

        if duplicate_count:
            raise DuplicateTrackFileError()

        self.original_file_hash = hex_hash

    async def write_to_original_file(self, config, body):
        """Write the uploaded `body` (bytes or str) to this track's original
        file location, creating parent directories as needed."""
        mode = "wb" if isinstance(body, bytes) else "wt"

        target = self.get_original_file_path(config)
        os.makedirs(dirname(target), exist_ok=True)
        async with aiofiles.open(target, mode=mode) as f:
            await f.write(body)

    def queue_processing(self):
        """Mark this track as queued for the background processing worker."""
        self.processing_status = "queued"
        self.processing_queued_at = datetime.utcnow()

    def auto_generate_title(self):
        """Generate a human-readable title unless the user customized it."""
        if self.customized_title:
            return

        # Try to figure out when this file was recorded. Either we have it in then
        # statistics, e.g. after parsing and processing the track, or we can maybe
        # derive it from the filename.
        recorded_at = self.recorded_at

        if not recorded_at and self.original_file_name:
            match = re.match(
                r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}\.[0-9]{2}\.[0-9]{2}",
                self.original_file_name,
            )
            if match:
                try:
                    recorded_at = datetime.fromisoformat(match[0])
                except ValueError:
                    pass

        if recorded_at:
            daytime = _get_daytime(recorded_at)
            self.title = f"{daytime} ride on {recorded_at.strftime('%a, %x')}"
            return

        # Detecting recording date failed, use filename
        if self.original_file_name:
            words = self.original_file_name
            words = re.sub(r"(\.obsdata)?\.csv$", "", words)
            words = re.split(r"\W+", words)
            words[0] = words[0][0].upper() + words[0][1:]
            self.title = " ".join(words)

    @property
    def file_path(self):
        """Relative storage path for this track: <username>/<slug>."""
        return join(self.author.username, self.slug)

    def get_original_file_path(self, config):
        """Absolute path of the uploaded CSV under the configured tracks dir."""
        return join(config.TRACKS_DIR, self.file_path, "original.csv")
|
||||
|
||||
|
||||
class User(Base):
    """A registered user, matched against the auth service by `sub`."""

    __tablename__ = "user"
    id = Column(Integer, autoincrement=True, primary_key=True)
    created_at = Column(DateTime, nullable=False, server_default=NOW)
    updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)
    # Subject identifier issued by the external auth provider.
    sub = Column(String, unique=True, nullable=False)
    username = Column(String, unique=True, nullable=False)
    display_name = Column(String, nullable=True)
    email = Column(String, nullable=False)
    bio = Column(TEXT)
    image = Column(String)
    are_tracks_visible_for_all = Column(Boolean, server_default=false(), nullable=False)
    api_key = Column(String)

    # This user can be matched by the email address from the auth service
    # instead of having to match by `sub`. If a matching user logs in, the
    # `sub` is updated to the new sub and this flag is disabled. This is for
    # migrating *to* the external authentication scheme.
    match_by_username_email = Column(Boolean, server_default=false())

    def generate_api_key(self):
        """
        Generates a new :py:obj:`api_key` into this instance. The new key is
        sourced from a secure random source and is urlsafe.
        """
        self.api_key = secrets.token_urlsafe(24)

    def to_dict(self, for_user_id=None):
        """Public profile dict; the username is revealed only to the user
        themselves."""
        result = {
            "id": self.id,
            "displayName": self.display_name or self.username,
            "bio": self.bio,
            "image": self.image,
        }
        if for_user_id == self.id:
            result["username"] = self.username
        return result

    async def rename(self, config, new_name):
        """Rename the user, moving their output and tracks directories.

        All destinations are checked first so a half-finished move is less
        likely; raises FileExistsError if any destination already exists.
        """
        old_name = self.username

        renames = [
            (join(basedir, old_name), join(basedir, new_name))
            for basedir in [config.PROCESSING_OUTPUT_DIR, config.TRACKS_DIR]
        ]

        for src, dst in renames:
            if exists(dst):
                raise FileExistsError(
                    f"cannot move {src!r} to {dst!r}, destination exists"
                )

        for src, dst in renames:
            if not exists(src):
                log.debug("Rename user %s: Not moving %s, not found", self.id, src)
            else:
                log.info("Rename user %s: Moving %s to %s", self.id, src, dst)
                os.rename(src, dst)

        self.username = new_name
|
||||
|
||||
|
||||
class UserDevice(Base):
    """A recording device of a user, identified by the device identifier
    found in uploaded track metadata."""

    __tablename__ = "user_device"
    id = Column(Integer, autoincrement=True, primary_key=True)
    user_id = Column(Integer, ForeignKey("user.id", ondelete="CASCADE"))
    identifier = Column(String, nullable=False)
    display_name = Column(String, nullable=True)

    # A user cannot have two devices with the same identifier.
    __table_args__ = (
        Index("user_id_identifier", "user_id", "identifier", unique=True),
    )

    def to_dict(self, for_user_id=None):
        """Serialize for the API; devices are visible only to their owner."""
        if for_user_id != self.user_id:
            return {}

        return {
            "id": self.id,
            "identifier": self.identifier,
            "displayName": self.display_name,
        }
|
||||
|
||||
|
||||
class Comment(Base):
    """A user comment on a track."""

    __tablename__ = "comment"
    id = Column(Integer, autoincrement=True, primary_key=True)
    # Public identifier exposed through the API (see to_dict); generated
    # server-side via uuid-ossp.
    uid = Column(UUID, server_default=func.uuid_generate_v4())

    created_at = Column(DateTime, nullable=False, server_default=NOW)
    updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)

    body = Column(TEXT)

    author_id = Column(Integer, ForeignKey("user.id", ondelete="CASCADE"))

    track_id = Column(Integer, ForeignKey("track.id", ondelete="CASCADE"))

    def to_dict(self, for_user_id=None):
        """Serialize for the API; the internal integer id stays private, the
        uid is used instead."""
        return {
            "id": self.uid,
            "body": self.body,
            "author": self.author.to_dict(for_user_id=for_user_id),
            "createdAt": self.created_at,
        }
|
||||
|
||||
|
||||
class Region(Base):
    """An administrative region (imported boundary polygon) used for
    regional aggregation."""

    __tablename__ = "region"

    id = Column(String(24), primary_key=True, index=True)
    name = Column(Text)
    geometry = Column(GeometryGeometry)
    admin_level = Column(Integer, index=True)
    # Identifies the import batch this row came from.
    import_group = Column(String)

    __table_args__ = (
        # We keep the index name as osm2pgsql created it, way back when.
        Index(
            "region_geometry_idx",
            "geometry",
            postgresql_using="gist",
            postgresql_with={"fillfactor": 100},
        ),
    )
|
||||
|
||||
|
||||
# Bidirectional ORM relationships are attached after all classes are defined
# to avoid forward references between the model classes.

Comment.author = relationship("User", back_populates="authored_comments")
User.authored_comments = relationship(
    "Comment",
    order_by=Comment.created_at,
    back_populates="author",
    passive_deletes=True,
)

Track.author = relationship("User", back_populates="authored_tracks")
User.authored_tracks = relationship(
    "Track", order_by=Track.created_at, back_populates="author", passive_deletes=True
)

Comment.track = relationship("Track", back_populates="comments")
Track.comments = relationship(
    "Comment", order_by=Comment.created_at, back_populates="track", passive_deletes=True
)

OvertakingEvent.track = relationship("Track", back_populates="overtaking_events")
Track.overtaking_events = relationship(
    "OvertakingEvent",
    order_by=OvertakingEvent.time,
    back_populates="track",
    passive_deletes=True,
)

Track.user_device = relationship("UserDevice", back_populates="tracks")
UserDevice.tracks = relationship(
    "Track",
    order_by=Track.created_at,
    back_populates="user_device",
    # passive_deletes=False: the FK uses ondelete=RESTRICT, so deleting a
    # device must fail while tracks still reference it.
    passive_deletes=False,
)
|
||||
|
||||
|
||||
# 0..4 Night, 4..10 Morning, 10..14 Noon, 14..18 Afternoon, 18..22 Evening, 22..00 Night
|
||||
# Two hour intervals
|
||||
_DAYTIMES = [
|
||||
"Night", # 0h - 2h
|
||||
"Night", # 2h - 4h
|
||||
"Morning", # 4h - 6h
|
||||
"Morning", # 6h - 8h
|
||||
"Morning", # 8h - 10h
|
||||
"Noon", # 10h - 12h
|
||||
"Noon", # 12h - 14h
|
||||
"Afternoon", # 14h - 16h
|
||||
"Afternoon", # 16h - 18h
|
||||
"Evening", # 18h - 20h
|
||||
"Evening", # 20h - 22h
|
||||
"Night", # 22h - 24h
|
||||
]
|
||||
|
||||
|
||||
def _get_daytime(d):
|
||||
return _DAYTIMES[math.floor((d.hour % 24) / 2)]
|
377
api/obs/api/process.py
Normal file
377
api/obs/api/process.py
Normal file
|
@ -0,0 +1,377 @@
|
|||
import logging
|
||||
import os
|
||||
import json
|
||||
import asyncio
|
||||
import hashlib
|
||||
import struct
|
||||
import pytz
|
||||
from os.path import join
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import delete, func, select, and_
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from obs.face.importer import ImportMeasurementsCsv
|
||||
from obs.face.geojson import ExportMeasurements
|
||||
from obs.face.annotate import AnnotateMeasurements
|
||||
from obs.face.filter import (
|
||||
AnonymizationMode,
|
||||
ChainFilter,
|
||||
ConfirmedFilter,
|
||||
DistanceMeasuredFilter,
|
||||
PrivacyFilter,
|
||||
PrivacyZone,
|
||||
PrivacyZonesFilter,
|
||||
RequiredFieldsFilter,
|
||||
)
|
||||
|
||||
from obs.face.osm import DataSource, DatabaseTileSource
|
||||
|
||||
from obs.api.db import OvertakingEvent, RoadUsage, Track, UserDevice, make_session
|
||||
from obs.api.app import app
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_data_source():
    """
    Creates the road data source used for annotating tracks, backed by the
    roads database (DatabaseTileSource).
    """
    # NOTE(review): the original docstring described a *lean* mode using an
    # OverpassTileSource fetching data on demand; no such code path exists
    # here — confirm whether it was removed intentionally.
    return DataSource(DatabaseTileSource())
|
||||
|
||||
|
||||
async def process_tracks_loop(delay):
    """Poll the database forever for queued tracks and process them.

    Sleeps `delay` seconds when the queue is empty. Processing failures are
    logged and the loop continues with the next track.

    Bugfix: the loop previously caught `BaseException`, which also swallowed
    `asyncio.CancelledError` and `KeyboardInterrupt`, making the worker
    impossible to shut down cleanly. Only `Exception` is caught now, so
    cancellation propagates.
    """
    while True:
        try:
            async with make_session() as session:
                # Pick the oldest queued track first.
                track = (
                    await session.execute(
                        select(Track)
                        .where(Track.processing_status == "queued")
                        .order_by(Track.processing_queued_at)
                        .options(joinedload(Track.author))
                    )
                ).scalar()

                if track is None:
                    await asyncio.sleep(delay)
                    continue

                data_source = get_data_source()
                await process_track(session, track, data_source)
        except Exception:
            log.exception("Failed to process track. Will continue.")
            await asyncio.sleep(1)
            continue
|
||||
|
||||
|
||||
async def process_tracks(tracks):
    """
    Processes the tracks and writes event data to the database.

    :param tracks: an iterable of track identifiers, each either an integer
        track id or a string slug.
    :raises ValueError: if any identifier does not match an existing track.
    """
    data_source = get_data_source()

    async with make_session() as session:
        for track_id_or_slug in tracks:
            track = (
                await session.execute(
                    select(Track)
                    .where(
                        Track.id == track_id_or_slug
                        if isinstance(track_id_or_slug, int)
                        else Track.slug == track_id_or_slug
                    )
                    .options(joinedload(Track.author))
                )
            ).scalar()

            if not track:
                raise ValueError(f"Track {track_id_or_slug!r} not found.")

            await process_track(session, track, data_source)
|
||||
|
||||
|
||||
def to_naive_utc(t):
    """Convert an aware datetime to naive UTC; `None` passes through."""
    if t is None:
        return None
    utc_time = t.astimezone(pytz.UTC)
    return utc_time.replace(tzinfo=None)
|
||||
|
||||
|
||||
async def export_gpx(track, filename, name):
    """Write `track` — a sequence of points with "latitude", "longitude" and
    "time" keys — to `filename` as a minimal GPX file with one segment."""
    import xml.etree.ElementTree as ET

    root = ET.Element("gpx")

    meta = ET.SubElement(root, "metadata")
    ET.SubElement(meta, "name").text = name

    trk = ET.SubElement(root, "trk")
    ET.SubElement(trk, "name").text = name
    ET.SubElement(trk, "type").text = "Cycling"

    segment = ET.SubElement(trk, "trkseg")
    for point in track:
        lat = str(point["latitude"])
        lon = str(point["longitude"])
        trkpt = ET.SubElement(segment, "trkpt", lat=lat, lon=lon)
        ET.SubElement(trkpt, "time").text = point["time"].isoformat()

    ET.ElementTree(root).write(filename, encoding="utf-8", xml_declaration=True)
|
||||
|
||||
|
||||
async def process_track(session, track, data_source):
    """Run the full processing pipeline for one track.

    Annotates and filters the measurements, writes derived GeoJSON/GPX
    artifacts to the processing output directory, replaces the track's
    events and road usages in the database and updates its statistics.
    On failure the derived data is cleared, the track is marked "error"
    with the exception text as log, and the exception is re-raised.
    """
    try:
        # Bugfix: this used to set "complete" before processing even
        # started, misreporting in-progress tracks. Use "processing".
        track.processing_status = "processing"
        track.processed_at = datetime.utcnow()
        await session.commit()

        original_file_path = track.get_original_file_path(app.config)

        output_dir = join(
            app.config.PROCESSING_OUTPUT_DIR, track.author.username, track.slug
        )
        os.makedirs(output_dir, exist_ok=True)

        log.info("Annotating and filtering CSV file")
        imported_data, statistics, track_metadata = ImportMeasurementsCsv().read(
            original_file_path,
            user_id="dummy",  # TODO: user username or id or nothing?
            # Bugfix: was `Track.slug` — the SQLAlchemy column object, not
            # this track's slug value.
            dataset_id=track.slug,  # TODO: use track id or slug or nothing?
            return_metadata=True,
        )

        annotator = AnnotateMeasurements(
            data_source,
            cache_dir=app.config.OBS_FACE_CACHE_DIR,
            fully_annotate_unconfirmed=True,
        )
        input_data = await annotator.annotate(imported_data)

        track_filter = ChainFilter(
            RequiredFieldsFilter(),
            PrivacyFilter(
                user_id_mode=AnonymizationMode.REMOVE,
                measurement_id_mode=AnonymizationMode.REMOVE,
            ),
            # TODO: load user privacy zones and create a PrivacyZonesFilter() from them
        )
        measurements_filter = DistanceMeasuredFilter()
        overtaking_events_filter = ConfirmedFilter()

        track_points = track_filter.filter(input_data, log=log)
        measurements = measurements_filter.filter(track_points, log=log)
        overtaking_events = overtaking_events_filter.filter(measurements, log=log)

        exporter = ExportMeasurements("measurements.dummy")
        await exporter.add_measurements(measurements)
        measurements_json = exporter.get_data()
        del exporter

        exporter = ExportMeasurements("overtaking_events.dummy")
        await exporter.add_measurements(overtaking_events)
        overtaking_events_json = exporter.get_data()
        del exporter

        track_json = {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": [[m["longitude"], m["latitude"]] for m in track_points],
            },
        }

        # The "raw" variant uses the unprocessed GPS coordinates.
        track_raw_json = {
            "type": "Feature",
            "geometry": {
                "type": "LineString",
                "coordinates": [
                    [m["longitude_GPS"], m["latitude_GPS"]] for m in track_points
                ],
            },
        }

        for output_filename, data in [
            ("measurements.json", measurements_json),
            ("overtakingEvents.json", overtaking_events_json),
            ("track.json", track_json),
            ("trackRaw.json", track_raw_json),
        ]:
            target = join(output_dir, output_filename)
            log.debug("Writing file %s", target)
            with open(target, "w") as fp:
                json.dump(data, fp, indent=4)

        await export_gpx(track_points, join(output_dir, "track.gpx"), track.slug)

        log.info("Clearing old track data...")
        await clear_track_data(session, track)
        await session.commit()

        device_identifier = track_metadata.get("DeviceId")
        if device_identifier:
            if isinstance(device_identifier, list):
                device_identifier = device_identifier[0]

            log.info("Finding or creating device %s", device_identifier)
            user_device = (
                await session.execute(
                    select(UserDevice).where(
                        and_(
                            UserDevice.user_id == track.author_id,
                            UserDevice.identifier == device_identifier,
                        )
                    )
                )
            ).scalar()

            log.debug("user_device is %s", user_device)

            if not user_device:
                user_device = UserDevice(
                    user_id=track.author_id, identifier=device_identifier
                )
                log.debug("Create new device for this user")
                session.add(user_device)

            track.user_device = user_device
        else:
            log.info("No DeviceId in track metadata.")

        log.info("Import events into database...")
        await import_overtaking_events(session, track, overtaking_events)

        log.info("import road usages...")
        await import_road_usages(session, track, track_points)

        log.info("Write track statistics and update status...")
        track.recorded_at = to_naive_utc(statistics["t_min"])
        track.recorded_until = to_naive_utc(statistics["t_max"])
        track.duration = statistics["t"]
        track.length = statistics["d"]
        track.segments = statistics["n_segments"]
        track.num_events = statistics["n_confirmed"]
        track.num_measurements = statistics["n_measurements"]
        track.num_valid = statistics["n_valid"]
        track.processing_status = "complete"
        track.processed_at = datetime.utcnow()
        await session.commit()

        log.info("Track %s imported.", track.slug)
    except BaseException as e:
        # Intentionally broad: even on cancellation the track must not be
        # left in "processing" state. The exception is re-raised below.
        await clear_track_data(session, track)
        track.processing_status = "error"
        track.processing_log = str(e)
        track.processed_at = datetime.utcnow()

        await session.commit()
        raise
|
||||
|
||||
|
||||
async def clear_track_data(session, track):
    """Reset the track's derived statistics and delete its imported events
    and road usages from the database."""
    for field in (
        "recorded_at",
        "recorded_until",
        "duration",
        "length",
        "segments",
        "num_events",
        "num_measurements",
        "num_valid",
    ):
        setattr(track, field, None)

    await session.execute(
        delete(OvertakingEvent).where(OvertakingEvent.track_id == track.id)
    )
    await session.execute(delete(RoadUsage).where(RoadUsage.track_id == track.id))
|
||||
|
||||
|
||||
async def import_overtaking_events(session, track, overtaking_events):
    """Insert one OvertakingEvent row per unique event of this track.

    Events are keyed by a SHA-256 over (latitude, longitude, unix seconds);
    keeping only the first event per hash avoids violating the unique
    constraint on `hex_hash` when inserting.
    """
    # We use a dictionary to prevent per-track hash collisions, ignoring all
    # but the first event of the same hash
    event_models = {}

    for m in overtaking_events:
        # "ddQ" = two doubles (lat, lon) + one unsigned 64-bit int (seconds).
        hex_hash = hashlib.sha256(
            struct.pack(
                "ddQ", m["latitude"], m["longitude"], int(m["time"].timestamp())
            )
        ).hexdigest()

        event_models[hex_hash] = OvertakingEvent(
            track_id=track.id,
            hex_hash=hex_hash,
            way_id=m.get("OSM_way_id"),
            direction_reversed=m.get("OSM_way_orientation", 0) < 0,
            # Store the point in web mercator (SRID 3857) to match the other
            # geometry columns in this schema.
            geometry=func.ST_Transform(
                func.ST_GeomFromGeoJSON(
                    json.dumps(
                        {
                            "type": "Point",
                            "coordinates": [m["longitude"], m["latitude"]],
                        }
                    )
                ),
                3857,
            ),
            latitude=m["latitude"],
            longitude=m["longitude"],
            # Timestamps are stored as naive UTC, like all DateTime columns.
            time=m["time"].astimezone(pytz.utc).replace(tzinfo=None),
            distance_overtaker=m["distance_overtaker"],
            distance_stationary=m["distance_stationary"],
            course=m["course"],
            speed=m["speed"],
        )

    session.add_all(event_models.values())
|
||||
|
||||
|
||||
def get_road_usages(track_points):
    """Yield one representative point per contiguous run of identical
    (way_id, direction) along the track.

    Points without an OSM way id separate runs but are never yielded
    themselves. The yielded point is the first point of each run.
    """
    current_key = None
    segment_start = None

    for point in track_points:
        key = (
            point.get("OSM_way_id"),
            point.get("OSM_way_orientation", 0) < 0,
        )

        if current_key is None or current_key[0] is None:
            # No usable segment yet (start of track, or we were on a point
            # without a way id) — begin tracking from here.
            segment_start = point
            current_key = key
        elif current_key != key:
            # Segment boundary: emit the previous segment's first point.
            yield segment_start
            current_key = key
            segment_start = point

    if segment_start is not None and current_key[0] is not None:
        yield segment_start
|
||||
|
||||
|
||||
async def import_road_usages(session, track, track_points):
    """Insert one RoadUsage row per road segment traversal of this track.

    Segments come from `get_road_usages`; a set (RoadUsage hashes/compares
    by `hex_hash`) drops duplicates before inserting.
    """
    usages = set()
    for p in get_road_usages(track_points):
        direction_reversed = p.get("OSM_way_orientation", 0) < 0
        way_id = p.get("OSM_way_id")
        time = p["time"]

        # "dQ" packs way_id as a double (exact only up to 2**53) plus unix
        # seconds. NOTE(review): the hash excludes direction_reversed, so two
        # opposite-direction usages of the same way in the same second would
        # collide — confirm this is intended.
        hex_hash = hashlib.sha256(
            struct.pack("dQ", way_id, int(time.timestamp()))
        ).hexdigest()

        usages.add(
            RoadUsage(
                track_id=track.id,
                hex_hash=hex_hash,
                way_id=way_id,
                # Stored as naive UTC, like all DateTime columns.
                time=time.astimezone(pytz.utc).replace(tzinfo=None),
                direction_reversed=direction_reversed,
            )
        )
    session.add_all(usages)
|
261
api/obs/api/routes/exports.py
Normal file
261
api/obs/api/routes/exports.py
Normal file
|
@ -0,0 +1,261 @@
|
|||
import json
|
||||
from enum import Enum
|
||||
from contextlib import contextmanager
|
||||
import zipfile
|
||||
import io
|
||||
import re
|
||||
import math
|
||||
from sqlite3 import connect
|
||||
|
||||
import shapefile
|
||||
from obs.api.db import OvertakingEvent
|
||||
from sqlalchemy import select, func, text
|
||||
from sanic.response import raw
|
||||
from sanic.exceptions import InvalidUsage
|
||||
|
||||
from obs.api.app import api, json as json_response
|
||||
from obs.api.utils import use_request_semaphore
|
||||
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ExportFormat(str, Enum):
    """Supported output formats for the export endpoints."""

    SHAPEFILE = "shapefile"
    GEOJSON = "geojson"
|
||||
|
||||
|
||||
def parse_bounding_box(input_string):
    """Parse a "left,bottom,right,top" string into a PostGIS box expression
    with SRID 4326 (WGS 84).

    Raises ValueError if the string does not contain four floats.
    """
    left, bottom, right, top = map(float, input_string.split(","))
    lower_corner = func.ST_Point(left, bottom)
    upper_corner = func.ST_Point(right, top)
    return func.ST_SetSRID(func.ST_MakeBox2D(lower_corner, upper_corner), 4326)
|
||||
|
||||
|
||||
# WKT definition of EPSG:4326 (WGS 84), written to the .prj member of
# exported shapefile zips so GIS tools know the coordinate reference system.
PROJECTION_4326 = (
    'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],'
    'AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],'
    'UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]]'
)
|
||||
|
||||
|
||||
@contextmanager
def shapefile_zip(shape_type=shapefile.POINT, basename="events"):
    """Yield a (shapefile writer, zip buffer) pair for in-memory export.

    On normal exit the .shp/.shx/.dbf parts plus a .prj with the WGS 84
    projection are assembled into a zip archive inside `zip_buffer`.

    NOTE(review): the yield is not wrapped in try/finally, so if the body
    raises, the writer stays open and no zip is assembled — the exception
    propagates and the buffers are simply discarded.
    """
    zip_buffer = io.BytesIO()
    shp, shx, dbf = (io.BytesIO() for _ in range(3))
    writer = shapefile.Writer(
        shp=shp, shx=shx, dbf=dbf, shapeType=shape_type, encoding="utf8"
    )

    yield writer, zip_buffer

    # balance() pads records so geometry and attribute counts match.
    writer.balance()
    writer.close()

    zip_file = zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False)
    zip_file.writestr(f"{basename}.shp", shp.getbuffer())
    zip_file.writestr(f"{basename}.shx", shx.getbuffer())
    zip_file.writestr(f"{basename}.dbf", dbf.getbuffer())
    zip_file.writestr(f"{basename}.prj", PROJECTION_4326)
    zip_file.close()
|
||||
|
||||
|
||||
@api.get(r"/export/events")
async def export_events(req):
    """Export overtaking events inside a bounding box.

    Query args: `bbox` as "left,bottom,right,top" in WGS 84 (defaults to the
    whole world) and `fmt` ("shapefile" or "geojson"). Concurrency is
    limited by the export semaphore.
    """
    async with use_request_semaphore(req, "export_semaphore", timeout=30):
        bbox = req.ctx.get_single_arg("bbox", default="-180,-90,180,90")
        # Bugfix: was `assert re.match(...)` — assert is stripped under -O
        # and produced a 500 instead of a client error. The pattern is now
        # anchored (no trailing garbage) and accepts single-digit
        # coordinates such as "1,2,3,4", which the old pattern rejected.
        if not re.match(r"^-?\d+(\.\d+)?(,-?\d+(\.\d+)?){3}$", bbox):
            raise InvalidUsage("invalid bbox parameter")
        bbox = list(map(float, bbox.split(",")))

        fmt = req.ctx.get_single_arg("fmt", convert=ExportFormat)

        events = await req.ctx.db.stream(
            text(
                """
                SELECT
                    ST_AsGeoJSON(ST_Transform(geometry, 4326)) AS geometry,
                    distance_overtaker,
                    distance_stationary,
                    way_id,
                    direction,
                    speed,
                    time_stamp,
                    course,
                    zone
                FROM
                    layer_obs_events(
                        ST_Transform(ST_MakeEnvelope(:bbox0, :bbox1, :bbox2, :bbox3, 4326), 3857),
                        19,
                        NULL,
                        '1900-01-01'::timestamp,
                        '2100-01-01'::timestamp
                    )
                """
            ).bindparams(bbox0=bbox[0], bbox1=bbox[1], bbox2=bbox[2], bbox3=bbox[3])
        )

        if fmt == ExportFormat.SHAPEFILE:
            with shapefile_zip(basename="events") as (writer, zip_buffer):
                writer.field("distance_overtaker", "N", decimal=4)
                writer.field("distance_stationary", "N", decimal=4)
                writer.field("way_id", "N", decimal=0)
                writer.field("direction", "N", decimal=0)
                writer.field("course", "N", decimal=4)
                writer.field("speed", "N", decimal=4)
                writer.field("zone", "C")

                async for event in events:
                    coords = json.loads(event.geometry)["coordinates"]
                    writer.point(*coords)
                    writer.record(
                        distance_overtaker=event.distance_overtaker,
                        distance_stationary=event.distance_stationary,
                        direction=event.direction,
                        way_id=event.way_id,
                        course=event.course,
                        speed=event.speed,
                        zone=event.zone
                        # "time"=event.time,
                    )

            return raw(zip_buffer.getbuffer())

        if fmt == ExportFormat.GEOJSON:

            def scrub(value):
                # JSON has no NaN; map missing/NaN numeric fields to null.
                return None if value is None or math.isnan(value) else value

            features = []
            async for event in events:
                geom = json.loads(event.geometry)
                features.append(
                    {
                        "type": "Feature",
                        "geometry": geom,
                        "properties": {
                            "distance_overtaker": scrub(event.distance_overtaker),
                            "distance_stationary": scrub(event.distance_stationary),
                            "direction": scrub(event.direction),
                            "way_id": event.way_id,
                            "course": scrub(event.course),
                            "speed": scrub(event.speed),
                            "time": event.time_stamp,
                            "zone": event.zone,
                        },
                    }
                )

            geojson = {"type": "FeatureCollection", "features": features}
            return json_response(geojson)

        raise InvalidUsage("unknown export format")
|
||||
|
||||
|
||||
@api.get(r"/export/segments")
async def export_segments(req):
    """
    Export aggregated per-road-segment statistics inside a bounding box.

    Query args:
      bbox: "minlon,minlat,maxlon,maxlat" (default covers the whole world)
      fmt:  ExportFormat (shapefile or geojson)
    """
    async with use_request_semaphore(req, "export_semaphore", timeout=30):
        bbox = req.ctx.get_single_arg("bbox", default="-180,-90,180,90")
        # Explicit validation instead of `assert`: assertions are stripped
        # when Python runs with -O, which would disable this check entirely.
        if not re.match(r"(-?\d+\.?\d+,?){4}", bbox):
            raise InvalidUsage("invalid bbox")
        bbox = list(map(float, bbox.split(",")))

        fmt = req.ctx.get_single_arg("fmt", convert=ExportFormat)
        segments = await req.ctx.db.stream(
            text(
                """
                SELECT
                    ST_AsGeoJSON(ST_Transform(geometry, 4326)) AS geometry,
                    way_id,
                    distance_overtaker_mean,
                    distance_overtaker_min,
                    distance_overtaker_max,
                    distance_overtaker_median,
                    overtaking_event_count,
                    usage_count,
                    direction,
                    zone,
                    offset_direction,
                    distance_overtaker_array
                FROM
                    layer_obs_roads(
                        ST_Transform(ST_MakeEnvelope(:bbox0, :bbox1, :bbox2, :bbox3, 4326), 3857),
                        11,
                        NULL,
                        '1900-01-01'::timestamp,
                        '2100-01-01'::timestamp
                    )
                WHERE usage_count > 0
                """
            ).bindparams(bbox0=bbox[0], bbox1=bbox[1], bbox2=bbox[2], bbox3=bbox[3])
        )

        if fmt == ExportFormat.SHAPEFILE:
            with shapefile_zip(shape_type=3, basename="segments") as (
                writer,
                zip_buffer,
            ):
                writer.field("distance_overtaker_mean", "N", decimal=4)
                writer.field("distance_overtaker_max", "N", decimal=4)
                writer.field("distance_overtaker_min", "N", decimal=4)
                writer.field("distance_overtaker_median", "N", decimal=4)
                writer.field("overtaking_event_count", "N", decimal=4)
                writer.field("usage_count", "N", decimal=4)
                writer.field("way_id", "N", decimal=0)
                writer.field("direction", "N", decimal=0)
                writer.field("zone", "C")

                async for segment in segments:
                    # The SELECT aliases the geometry column `AS geometry`;
                    # the original read `segment.st_asgeojson` here, which
                    # does not exist on the row and raised AttributeError.
                    geom = json.loads(segment.geometry)
                    writer.line([geom["coordinates"]])
                    writer.record(
                        distance_overtaker_mean=segment.distance_overtaker_mean,
                        distance_overtaker_median=segment.distance_overtaker_median,
                        distance_overtaker_max=segment.distance_overtaker_max,
                        distance_overtaker_min=segment.distance_overtaker_min,
                        usage_count=segment.usage_count,
                        overtaking_event_count=segment.overtaking_event_count,
                        direction=segment.direction,
                        way_id=segment.way_id,
                        zone=segment.zone,
                    )

            return raw(zip_buffer.getbuffer())

        if fmt == ExportFormat.GEOJSON:
            features = []
            async for segment in segments:
                features.append(
                    {
                        "type": "Feature",
                        "geometry": json.loads(segment.geometry),
                        "properties": {
                            "distance_overtaker_mean": segment.distance_overtaker_mean,
                            "distance_overtaker_max": segment.distance_overtaker_max,
                            "distance_overtaker_median": segment.distance_overtaker_median,
                            "overtaking_event_count": segment.overtaking_event_count,
                            "usage_count": segment.usage_count,
                            "distance_overtaker_array": segment.distance_overtaker_array,
                            "direction": segment.direction,
                            "way_id": segment.way_id,
                            "zone": segment.zone,
                        },
                    }
                )

            geojson = {"type": "FeatureCollection", "features": features}
            return json_response(geojson)

        raise InvalidUsage("unknown export format")
|
57
api/obs/api/routes/frontend.py
Normal file
57
api/obs/api/routes/frontend.py
Normal file
|
@ -0,0 +1,57 @@
|
|||
from os.path import join, exists, isfile, abspath
|
||||
|
||||
import sanic.response as response
|
||||
from sanic.exceptions import NotFound
|
||||
|
||||
from obs.api.app import app
|
||||
|
||||
if app.config.FRONTEND_CONFIG:

    @app.get("/config.json")
    def get_frontend_config(req):
        # Frontend runtime configuration: static values from FRONTEND_CONFIG
        # merged with server-derived URLs. `apiUrl`/`loginUrl`/`obsMapSource`
        # override same-named keys from the static config, while `basename`
        # can be overridden by it (it is listed before the unpacking).
        result = {
            "basename": req.ctx.frontend_base_path,
            **req.app.config.FRONTEND_CONFIG,
            "apiUrl": f"{req.ctx.api_url}/api",
            "loginUrl": f"{req.ctx.api_url}/login",
            "obsMapSource": {
                "type": "vector",
                # Build the tile URL template by generating a concrete URL
                # with placeholder values and substituting {z}/{x}/{y} back in.
                "tiles": [
                    req.ctx.api_url
                    + req.app.url_for("tiles", zoom="000", x="111", y="222.pbf")
                    .replace("000", "{z}")
                    .replace("111", "{x}")
                    .replace("222", "{y}")
                ],
                "minzoom": 0,
                "maxzoom": 14,
            },
        }

        return response.json(result)
|
||||
|
||||
|
||||
# Path to the SPA entry point, or None when no frontend is bundled.
INDEX_HTML = (
    join(app.config.FRONTEND_DIR, "index.html")
    if app.config.get("FRONTEND_DIR")
    else None
)
if INDEX_HTML and exists(INDEX_HTML):
    # Read once at startup; the placeholder is substituted per request below.
    with open(INDEX_HTML, "rt") as f:
        index_file_contents = f.read()

    @app.get("/<path:path>")
    def get_frontend_static(req, path):
        # API routes are never served from the static frontend directory.
        if path.startswith("api/"):
            raise NotFound()

        # Path-traversal guard: the resolved file must stay inside FRONTEND_DIR.
        file = join(app.config.FRONTEND_DIR, path)
        if not abspath(file).startswith(abspath(app.config.FRONTEND_DIR)):
            raise NotFound()

        # Unknown paths fall back to the SPA entry point (client-side routing),
        # with the base href injected for the deployment's URL prefix.
        if not exists(file) or not path or not isfile(file):
            return response.html(
                index_file_contents.replace("__BASE_HREF__", req.ctx.frontend_url + "/")
            )

        return response.file(file)
|
18
api/obs/api/routes/info.py
Normal file
18
api/obs/api/routes/info.py
Normal file
|
@ -0,0 +1,18 @@
|
|||
import logging
|
||||
|
||||
from obs.api.app import api
|
||||
|
||||
from sanic.response import json
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
from obs.api import __version__ as version
|
||||
|
||||
|
||||
@api.route("/info")
async def info(req):
    """Report the running API version."""
    return json({"version": version})
|
173
api/obs/api/routes/login.py
Normal file
173
api/obs/api/routes/login.py
Normal file
|
@ -0,0 +1,173 @@
|
|||
import asyncio
|
||||
import logging
|
||||
import re
|
||||
|
||||
from requests.exceptions import RequestException
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
from oic import rndstr
|
||||
from oic.oic import Client
|
||||
from oic.oic.message import AuthorizationResponse, RegistrationResponse
|
||||
from oic.utils.authn.client import CLIENT_AUTHN_METHOD
|
||||
|
||||
from obs.api.app import auth, api
|
||||
from obs.api.db import User
|
||||
|
||||
from sanic.response import json, redirect
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
client = Client(client_authn_method=CLIENT_AUTHN_METHOD)
|
||||
|
||||
# Do not show verbose library output, even when the appliaction is in debug mode
|
||||
logging.getLogger("oic").setLevel(logging.INFO)
|
||||
|
||||
|
||||
@auth.before_server_start
async def connect_auth_client(app, loop):
    """Configure the OpenID Connect client from the Keycloak provider.

    Retries with a short delay until the provider is reachable, since
    Keycloak may still be starting up alongside this service.
    """
    client.allow["issuer_mismatch"] = True
    while True:
        try:
            client.provider_config(app.config.KEYCLOAK_URL)
            client.store_registration_info(
                RegistrationResponse(
                    client_id=app.config.KEYCLOAK_CLIENT_ID,
                    client_secret=app.config.KEYCLOAK_CLIENT_SECRET,
                )
            )
            return
        except RequestException:
            # The original retried by awaiting itself recursively, growing
            # the call stack on every failed attempt; a loop retries safely.
            log.exception(f"could not connect to {app.config.KEYCLOAK_URL}")
            log.info("will retry")
            await asyncio.sleep(2)
            log.info("retrying")
|
||||
|
||||
|
||||
@auth.route("/login")
async def login(req):
    """Start an OpenID Connect authorization-code flow.

    Stores fresh `state` and `nonce` values (and the post-login target URL)
    in the session, then redirects the browser to the provider's
    authorization endpoint.
    """
    next_url = req.ctx.get_single_arg("next", default=None)

    session = req.ctx.session
    # Random state/nonce are checked again in the redirect handler to
    # protect against CSRF and replay.
    session["state"] = rndstr()
    session["nonce"] = rndstr()
    session["next"] = next_url
    args = {
        "client_id": client.client_id,
        "response_type": "code",
        "scope": ["openid"],
        "nonce": session["nonce"],
        "redirect_uri": req.ctx.api_url + "/login/redirect",
        "state": session["state"],
    }

    auth_req = client.construct_AuthorizationRequest(request_args=args)
    login_url = auth_req.request(client.authorization_endpoint)

    return redirect(login_url)
|
||||
|
||||
|
||||
@auth.route("/login/redirect")
async def login_redirect(req):
    """Complete the OpenID Connect flow and create or update the local user.

    Validates `state` against the session, exchanges the authorization code
    for tokens, fetches userinfo, then matches it to a local account (by
    `sub`, or once by email+username for opted-in legacy accounts), creating
    a new account if necessary.
    """
    session = req.ctx.session

    auth_response = client.parse_response(
        AuthorizationResponse, info=dict(req.query_args), sformat="dict"
    )
    code = auth_response["code"]
    state = auth_response["state"]

    # CSRF protection: the state must match the one issued in /login.
    assert "state" in session
    assert state == session["state"]

    client.do_access_token_request(
        state=state,
        request_args={"code": code},
        authn_method="client_secret_basic",
    )

    userinfo = client.do_user_info_request(state=state)

    # {'sub': '3798e2da-b208-4a1a-98c0-08fecfea1345', 'email_verified': True, 'preferred_username': 'test', 'email': 'test@example.com'}
    sub = userinfo["sub"]
    preferred_username = userinfo["preferred_username"]
    email = userinfo.get("email")

    clean_username = re.sub(r"[^a-zA-Z0-9_.-]", "", preferred_username)
    if clean_username != preferred_username:
        log.warning(
            "Username %r contained invalid characters and was changed to %r",
            preferred_username,
            clean_username,
        )
        preferred_username = clean_username

    if email is None:
        raise ValueError(
            "user has no email set, please configure keycloak to require emails"
        )

    user = (await req.ctx.db.execute(select(User).where(User.sub == sub))).scalar()

    if user is None:
        # Legacy migration: match once by email+username for accounts that
        # opted in via `match_by_username_email`.
        #
        # The original chained these conditions with Python's `and`, which
        # cannot combine SQLAlchemy expressions into a SQL conjunction.
        # Passing them as separate `where()` criteria ANDs them in SQL.
        user = (
            await req.ctx.db.execute(
                select(User).where(
                    User.email == email,
                    User.username == preferred_username,
                    User.match_by_username_email,
                )
            )
        ).scalar()

        if user:
            log.info(
                "Re-matched existing user %s (sub: %s) based on email and username (%s)",
                user.id,
                user.sub,
                preferred_username,
            )
            # Only match once; afterwards the account is bound to this sub.
            user.match_by_username_email = False
            user.sub = sub

    if user is None:
        log.info(
            "Registering new user with sub %r (preferred username: %s)",
            sub,
            preferred_username,
        )
        user = User(sub=sub, username=preferred_username, email=email)
        req.ctx.db.add(user)
    else:
        log.info(
            "Logged in known user (id: %s, sub: %s, %s).",
            user.id,
            user.sub,
            preferred_username,
        )

        # Keep local profile data in sync with the auth system.
        if email != user.email:
            log.debug("Updating user (id: %s) email from auth system.", user.id)
            user.email = email

        if preferred_username != user.username:
            log.debug("Updating user (id: %s) username from auth system.", user.id)
            await user.rename(req.app.config, preferred_username)

    await req.ctx.db.commit()

    session["user_id"] = user.id

    next_ = session.pop("next", "/") or "/"
    return redirect(next_)
|
||||
|
||||
|
||||
@api.route("/logout")
async def logout(req):
    """Log the user out locally and at the OpenID Connect provider."""
    session = req.ctx.session
    if "user_id" in session:
        del session["user_id"]

    # NOTE(review): this raises KeyError when the session has no "state"
    # (e.g. logout without a prior /login in this session) — confirm whether
    # that can happen in practice.
    auth_req = client.construct_EndSessionRequest(state=session["state"])
    logout_url = auth_req.request(client.end_session_endpoint)

    return redirect(logout_url + f"&post_logout_redirect_uri={req.ctx.api_url}/logout")
|
147
api/obs/api/routes/mapdetails.py
Normal file
147
api/obs/api/routes/mapdetails.py
Normal file
|
@ -0,0 +1,147 @@
|
|||
import json
|
||||
from functools import partial
|
||||
import logging
|
||||
import numpy
|
||||
import math
|
||||
|
||||
from sqlalchemy import select, func, column
|
||||
|
||||
import sanic.response as response
|
||||
from sanic.exceptions import InvalidUsage
|
||||
|
||||
from obs.api.app import api
|
||||
from obs.api.db import Road, OvertakingEvent, Track
|
||||
from obs.api.utils import round_to
|
||||
|
||||
round_distance = partial(round_to, multiples=0.001)
|
||||
round_speed = partial(round_to, multiples=0.1)
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def get_bearing(b, a):
    """Bearing (radians) of the direction from point *a* to point *b*.

    Points are (longitude, latitude) pairs.

    NOTE(review): the trigonometry is applied to the coordinate values
    directly; if callers pass degrees (as GeoJSON coordinates are) rather
    than radians, the result is only approximate — confirm the intended
    input unit.
    """
    # longitude, latitude
    dL = b[0] - a[0]
    X = numpy.cos(b[1]) * numpy.sin(dL)
    Y = numpy.cos(a[1]) * numpy.sin(b[1]) - numpy.sin(a[1]) * numpy.cos(
        b[1]
    ) * numpy.cos(dL)
    return numpy.arctan2(Y, X) + 0.5 * math.pi
|
||||
|
||||
|
||||
# Bins for histogram on overtaker distances. 0, 0.25, ... 2.25, infinity
DISTANCE_BINS = numpy.arange(0, 2.5, 0.25).tolist() + [float('inf')]


@api.route("/mapdetails/road", methods=["GET"])
async def mapdetails_road(req):
    """Per-direction overtaking statistics for the road nearest to a point.

    Query args: `longitude`, `latitude` (the point of interest) and `radius`
    (search distance in meters, 1..1000, default 100). Returns an empty JSON
    object when no road is within the radius.
    """
    longitude = req.ctx.get_single_arg("longitude", convert=float)
    latitude = req.ctx.get_single_arg("latitude", convert=float)
    radius = req.ctx.get_single_arg("radius", default=100, convert=float)

    if not (1 <= radius <= 1000):
        raise InvalidUsage("`radius` parameter must be between 1 and 1000")

    road_geometry = Road.geometry
    # The point arrives as WGS84 lon/lat and is transformed to EPSG:3857 to
    # match the stored road geometries for the distance search.
    point = func.ST_Transform(
        func.ST_GeomFromGeoJSON(
            json.dumps(
                {
                    "type": "point",
                    "coordinates": [longitude, latitude],
                }
            )
        ),
        3857,
    )

    # Nearest road within the search radius.
    road = (
        await req.ctx.db.execute(
            select(Road)
            .where(func.ST_DWithin(road_geometry, point, radius))
            .order_by(func.ST_Distance(road_geometry, point))
            .limit(1)
        )
    ).scalar()

    if road is None:
        return response.json({})

    # All overtaking events for this road, one row per event.
    arrays = (
        await req.ctx.db.execute(
            select(
                [
                    OvertakingEvent.distance_overtaker,
                    OvertakingEvent.distance_stationary,
                    OvertakingEvent.speed,
                    # Keep this as the last entry always for numpy.partition
                    # below to work.
                    OvertakingEvent.direction_reversed,
                ]
            ).where(OvertakingEvent.way_id == road.way_id)
        )
    ).all()

    # Transpose to one row per column (4 x N).
    arrays = numpy.array(arrays).T

    if len(arrays) == 0:
        arrays = numpy.array([[], [], [], []], dtype=float)

    # Split the measurement rows from the direction flag (last row).
    data, mask = arrays[:-1], arrays[-1]
    data = data.astype(numpy.float64)
    mask = mask.astype(bool)

    def partition(arr, cond):
        # Split the columns of `arr` by the boolean vector `cond`.
        return arr[:, cond], arr[:, ~cond]

    forwards, backwards = partition(data, ~mask)

    def array_stats(arr, rounder, bins=30):
        # Summary statistics + histogram for one measurement column,
        # ignoring NaN entries. `rounder` coarsens values for privacy.
        if len(arr):
            arr = arr[~numpy.isnan(arr)]

        n = len(arr)

        hist, bins = numpy.histogram(arr, bins=bins)

        return {
            "statistics": {
                "count": n,
                "mean": rounder(numpy.mean(arr)) if n else None,
                "min": rounder(numpy.min(arr)) if n else None,
                "max": rounder(numpy.max(arr)) if n else None,
                "median": rounder(numpy.median(arr)) if n else None,
            },
            "histogram": {
                # Infinity is not valid JSON; the open-ended last bin edge is
                # encoded as null.
                "bins": [None if math.isinf(b) else b for b in bins.tolist()],
                "counts": hist.tolist(),
                "zone": road.zone
            },
            "values": list(map(rounder, arr.tolist())),
        }

    bearing = None

    geom = json.loads(road.geometry)
    if geom["type"] == "LineString":
        coordinates = geom["coordinates"]
        bearing = get_bearing(coordinates[0], coordinates[-1])
        # convert to degrees, as this is more natural to understand for consumers
        bearing = round_to((bearing / math.pi * 180 + 360) % 360, 1)

    def get_direction_stats(direction_arrays, backwards=False):
        # The backwards direction reports the opposite bearing.
        return {
            "bearing": ((bearing + 180) % 360 if backwards else bearing)
            if bearing is not None
            else None,
            "distanceOvertaker": array_stats(direction_arrays[0], round_distance, bins=DISTANCE_BINS),
            "distanceStationary": array_stats(direction_arrays[1], round_distance, bins=DISTANCE_BINS),
            "speed": array_stats(direction_arrays[2], round_speed),
        }

    return response.json(
        {
            "road": road.to_dict(),
            "forwards": get_direction_stats(forwards),
            "backwards": get_direction_stats(backwards, True),
        }
    )
|
206
api/obs/api/routes/stats.py
Normal file
206
api/obs/api/routes/stats.py
Normal file
|
@ -0,0 +1,206 @@
|
|||
import logging
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from operator import and_
|
||||
from functools import reduce
|
||||
|
||||
from sqlalchemy import distinct, select, func, desc
|
||||
|
||||
from sanic.response import json
|
||||
|
||||
from obs.api.app import api
|
||||
from obs.api.db import Track, OvertakingEvent, User, Region, UserDevice
|
||||
from obs.api.utils import round_to
|
||||
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
# round to this number of meters for privacy reasons
|
||||
TRACK_LENGTH_ROUNDING = 1000
|
||||
|
||||
# round to this number of seconds for privacy reasons
|
||||
TRACK_DURATION_ROUNDING = 120
|
||||
|
||||
# Everything before this date is probably parsed incorrectly
|
||||
MINUMUM_RECORDING_DATE = datetime(2010, 1, 1)
|
||||
|
||||
|
||||
@api.route("/stats")
async def stats(req):
    """Global (or per-user) usage statistics.

    Optional query args: `user` (id; honored only for the requesting user
    themselves), `start` and `end` (restrict by recording date). Lengths and
    durations are rounded for privacy.
    """
    user = req.ctx.get_single_arg("user", default=None)
    start = req.ctx.get_single_arg("start", default=None, convert=datetime)
    end = req.ctx.get_single_arg("end", default=None, convert=datetime)

    conditions = [
        Track.recorded_at != None,
        Track.recorded_at > MINUMUM_RECORDING_DATE,
    ]

    if start is not None:
        conditions.append(Track.recorded_at >= start)

    if end is not None:
        conditions.append(Track.recorded_at < end)

    # Only the user can look for their own stats, for now
    by_user = (
        user is not None and req.ctx.user is not None and req.ctx.user.id == int(user)
    )
    if by_user:
        conditions.append(Track.author_id == req.ctx.user.id)

    track_condition = reduce(and_, conditions)
    # `operator.and_` builds a SQL conjunction via `&`. The original used the
    # Python keyword `and` here, which cannot combine SQL expressions and
    # silently dropped the `Track.public` filter from the public-track query.
    public_track_condition = and_(Track.public, track_condition)

    query = (
        select(
            [
                func.count().label("publicTrackCount"),
                func.sum(Track.duration).label("trackDuration"),
                func.sum(Track.length).label("trackLength"),
            ]
        )
        .select_from(Track)
        .where(public_track_condition)
    )

    public_track_count, track_duration, track_length = (
        await req.ctx.db.execute(query)
    ).first()

    # This is required because SQL returns NULL when the input set to a
    # SUM() aggregation is empty.
    track_duration = track_duration or 0
    track_length = track_length or 0

    user_count = (
        1
        if by_user
        else (await req.ctx.db.execute(select(func.count()).select_from(User))).scalar()
    )
    track_count = (
        await req.ctx.db.execute(
            select(func.count()).select_from(Track).where(track_condition)
        )
    ).scalar()
    event_count = (
        await req.ctx.db.execute(
            select(func.count())
            .select_from(OvertakingEvent)
            .join(OvertakingEvent.track)
            .where(track_condition)
        )
    ).scalar()
    device_count = (
        await req.ctx.db.execute(
            select(func.count(distinct(UserDevice.id)))
            .select_from(UserDevice)
            .join(Track.user_device)
            .where(track_condition)
        )
    ).scalar()

    result = {
        "numEvents": event_count,
        "userCount": user_count,
        # Rounded for privacy (see module constants).
        "trackLength": round_to(track_length or 0, TRACK_LENGTH_ROUNDING),
        "trackDuration": round_to(track_duration or 0, TRACK_DURATION_ROUNDING),
        "publicTrackCount": public_track_count,
        "trackCount": track_count,
        "deviceCount": device_count,
    }

    return json(result)
|
||||
|
||||
|
||||
# const trackCount = await Track.find(trackFilter).count();
|
||||
#
|
||||
# const publicTrackCount = await Track.find({
|
||||
# ...trackFilter,
|
||||
# public: true,
|
||||
# }).count();
|
||||
#
|
||||
# const userCount = await User.find({
|
||||
# ...(userFilter
|
||||
# ? { _id: userFilter }
|
||||
# : {
|
||||
# createdAt: dateFilter,
|
||||
# }),
|
||||
# }).count();
|
||||
#
|
||||
# const trackStats = await Track.aggregate([
|
||||
# { $match: trackFilter },
|
||||
# {
|
||||
# $addFields: {
|
||||
# trackLength: {
|
||||
# $cond: [{ $lt: ['$statistics.length', 500000] }, '$statistics.length', 0],
|
||||
# },
|
||||
# numEvents: '$statistics.numEvents',
|
||||
# trackDuration: {
|
||||
# $cond: [
|
||||
# { $and: ['$statistics.recordedUntil', { $gt: ['$statistics.recordedAt', new Date('2010-01-01')] }] },
|
||||
# { $subtract: ['$statistics.recordedUntil', '$statistics.recordedAt'] },
|
||||
# 0,
|
||||
# ],
|
||||
# },
|
||||
# },
|
||||
# },
|
||||
# { $project: { trackLength: true, numEvents: true, trackDuration: true } },
|
||||
# {
|
||||
# $group: {
|
||||
# _id: 'sum',
|
||||
# trackLength: { $sum: '$trackLength' },
|
||||
# numEvents: { $sum: '$numEvents' },
|
||||
# trackDuration: { $sum: '$trackDuration' },
|
||||
# },
|
||||
# },
|
||||
# ]);
|
||||
#
|
||||
# const [trackLength, numEvents, trackDuration] =
|
||||
# trackStats.length > 0
|
||||
# ? [trackStats[0].trackLength, trackStats[0].numEvents, trackStats[0].trackDuration]
|
||||
# : [0, 0, 0];
|
||||
#
|
||||
# const trackLengthPrivatized = Math.floor(trackLength / TRACK_LENGTH_ROUNDING) * TRACK_LENGTH_ROUNDING;
|
||||
# const trackDurationPrivatized =
|
||||
# Math.round(trackDuration / 1000 / TRACK_DURATION_ROUNDING) * TRACK_DURATION_ROUNDING;
|
||||
#
|
||||
# return res.json({
|
||||
# publicTrackCount,
|
||||
# trackLength: trackLengthPrivatized,
|
||||
# trackDuration: trackDurationPrivatized,
|
||||
# numEvents,
|
||||
# trackCount,
|
||||
# userCount,
|
||||
# });
|
||||
# }),
|
||||
# );
|
||||
|
||||
|
||||
@api.route("/stats/regions")
async def stats_regions(req):
    """Overtaking event counts per region, most events first.

    Renamed from `stats`: the original redefined the module-level name of
    the `/stats` handler above, shadowing it.
    """
    query = (
        select(
            [
                Region.id,
                Region.name,
                func.count(OvertakingEvent.id).label("overtaking_event_count"),
            ]
        )
        .select_from(Region)
        # Spatial join: count every event that lies inside the region.
        .join(
            OvertakingEvent,
            func.ST_Within(OvertakingEvent.geometry, Region.geometry),
        )
        .group_by(
            Region.id,
            Region.name,
            Region.geometry,
        )
        .having(func.count(OvertakingEvent.id) > 0)
        .order_by(desc("overtaking_event_count"))
    )

    regions = list(map(dict, (await req.ctx.db.execute(query)).all()))
    return json(regions)
|
129
api/obs/api/routes/tiles.py
Normal file
129
api/obs/api/routes/tiles.py
Normal file
|
@ -0,0 +1,129 @@
|
|||
from gzip import decompress
|
||||
from sqlite3 import connect
|
||||
from datetime import datetime, time, timedelta
|
||||
from typing import Optional, Tuple
|
||||
|
||||
import dateutil.parser
|
||||
from sanic.exceptions import Forbidden, InvalidUsage
|
||||
from sanic.response import raw
|
||||
|
||||
from sqlalchemy import text
|
||||
|
||||
from obs.api.app import app
|
||||
from obs.api.utils import use_request_semaphore
|
||||
|
||||
|
||||
def get_tile(filename, zoom, x, y):
    """
    Fetch a single tile blob from an MBTiles (sqlite) file.

    Returns the raw (still gzip-compressed) PBF tile data, or None when the
    tile does not exist. Raises ValueError if the file's metadata does not
    declare the `pbf` format.

    Inspired by:
    https://github.com/TileStache/TileStache/blob/master/TileStache/MBTiles.py
    """
    db = connect(filename)
    try:
        db.text_factory = bytes

        fmt = db.execute("SELECT value FROM metadata WHERE name='format'").fetchone()[0]
        if fmt != b"pbf":
            raise ValueError("mbtiles file is in wrong format: %s" % fmt)

        # MBTiles stores rows in TMS order (origin bottom-left), so flip y.
        content = db.execute(
            "SELECT tile_data FROM tiles WHERE zoom_level=? AND tile_column=? AND tile_row=?",
            (zoom, x, (2**zoom - 1) - y),
        ).fetchone()
        return content and content[0] or None
    finally:
        # The original leaked the sqlite connection on every call.
        db.close()
|
||||
|
||||
|
||||
def round_date(date, to="weeks", up=False):
    """Round a datetime down (or up, with `up=True`) to Monday midnight.

    Only rounding to whole weeks is supported; any other `to` raises
    ValueError. A datetime already on a week boundary is returned unchanged.
    """
    if to != "weeks":
        raise ValueError(f"cannot round to {to}")

    midnight = time(0, 0, 0, 0)
    day = date.date()
    weekday = date.weekday()

    # Already exactly on a Monday-midnight boundary?
    if weekday == 0 and date.time() == midnight:
        return date

    shift = timedelta(days=(7 - weekday) if up else -weekday)
    return datetime.combine(day + shift, midnight)
|
||||
|
||||
|
||||
# regenerate approx. once each day
TILE_CACHE_MAX_AGE = 3600 * 24


def get_filter_options(
    req,
) -> Tuple[Optional[str], Optional[datetime], Optional[datetime]]:
    """
    Returns parsed, validated and normalized options for filtering map data, a
    tuple of

    * user_id (str|None)
    * start (datetime|None)
    * end (datetime|None)
    """
    user_id = req.ctx.get_single_arg("user", default=None, convert=int)
    # Users may only filter by themselves.
    if user_id is not None and (req.ctx.user is None or req.ctx.user.id != user_id):
        raise Forbidden()

    parse_date = lambda s: dateutil.parser.parse(s)
    start = req.ctx.get_single_arg("start", default=None, convert=parse_date)
    end = req.ctx.get_single_arg("end", default=None, convert=parse_date)

    # Round to whole weeks so tile URLs (and thus caches) only vary weekly.
    start = round_date(start, to="weeks", up=False) if start else None
    end = round_date(end, to="weeks", up=True) if end else None

    if start is not None and end is not None and start >= end:
        raise InvalidUsage(
            "end date must be later than start date (note: dates are rounded to weeks)"
        )

    return user_id, start, end
|
||||
|
||||
|
||||
@app.route(r"/tiles/<zoom:int>/<x:int>/<y:(\d+)\.pbf>")
async def tiles(req, zoom: int, x: int, y: str):
    """Serve one vector tile, from an MBTiles file or live from the database."""
    async with use_request_semaphore(req, "tile_semaphore"):
        if app.config.get("TILES_FILE"):
            tile = get_tile(req.app.config.TILES_FILE, int(zoom), int(x), int(y))

        else:
            user_id, start, end = get_filter_options(req)

            tile = await req.ctx.db.scalar(
                text(
                    "select data from getmvt(:zoom, :x, :y, :user_id, :min_time, :max_time) as b(data, key);"
                ).bindparams(
                    zoom=int(zoom),
                    x=int(x),
                    y=int(y),
                    user_id=user_id,
                    min_time=start,
                    max_time=end,
                )
            )

        # Use .get() so a request without an Accept-Encoding header works;
        # the original indexed req.headers directly and raised KeyError.
        gzip = "gzip" in req.headers.get("accept-encoding", "")

        headers = {}
        headers["Vary"] = "Accept-Encoding"

        if req.app.config.DEBUG:
            headers["Cache-Control"] = "no-cache"
        else:
            headers["Cache-Control"] = f"public, max-age={TILE_CACHE_MAX_AGE}"

        # The tiles in the mbtiles file are gzip-compressed already, so we
        # serve them actually as-is, and only decompress them if the browser
        # doesn't accept gzip
        if gzip:
            headers["Content-Encoding"] = "gzip"
        else:
            tile = decompress(tile)

        return raw(tile, content_type="application/x-protobuf", headers=headers)
|
473
api/obs/api/routes/tracks.py
Normal file
473
api/obs/api/routes/tracks.py
Normal file
|
@ -0,0 +1,473 @@
|
|||
import logging
|
||||
import re
|
||||
from datetime import date
|
||||
from json import load as jsonload
|
||||
from os.path import join, exists, isfile
|
||||
|
||||
from sanic.exceptions import InvalidUsage, NotFound, Forbidden
|
||||
from sanic.response import file_stream, empty
|
||||
from slugify import slugify
|
||||
from sqlalchemy import select, func, and_
|
||||
from sqlalchemy.orm import joinedload
|
||||
|
||||
from obs.api.app import api, require_auth, read_api_key, json
|
||||
from obs.api.db import Track, Comment, DuplicateTrackFileError
|
||||
from obs.api.utils import tar_of_tracks
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def normalize_user_agent(user_agent):
    """Reduce a user agent string to its leading "OBS/<version>" token.

    Returns None for empty/missing input or when the string does not begin
    with an OBS token.
    """
    if not user_agent:
        return None

    found = re.match(r"\bOBS\/[^\s]+", user_agent)
    if found:
        return found[0]
    return None
|
||||
|
||||
|
||||
async def _return_tracks(req, extend_query, limit, offset, order_by=None):
    """Shared pagination helper for the track listing endpoints.

    `extend_query` is applied to both the count query and the data query so
    the filters always agree. Returns a JSON response with `trackCount` and
    one page of serialized `tracks`.
    """
    if limit <= 0 or limit > 1000:
        raise InvalidUsage("invalid limit")

    if offset < 0:
        raise InvalidUsage("offset must be positive")

    count_query = extend_query(
        select(func.count()).select_from(Track).join(Track.author)
    )
    track_count = await req.ctx.db.scalar(count_query)

    query = (
        extend_query(select(Track).options(joinedload(Track.author)))
        .limit(limit)
        .offset(offset)
        .order_by(order_by if order_by is not None else Track.created_at)
    )

    tracks = (await req.ctx.db.execute(query)).scalars()

    return json(
        {
            "trackCount": track_count,
            "tracks": list(
                map(
                    lambda t: t.to_dict(
                        # Owner-only fields are serialized only for the owner.
                        for_user_id=req.ctx.user.id if req.ctx.user else None
                    ),
                    tracks,
                )
            ),
        },
    )
|
||||
|
||||
|
||||
@api.get("/tracks")
async def get_tracks(req):
    """List public tracks, paginated via `limit` and `offset`."""
    limit = req.ctx.get_single_arg("limit", default=20, convert=int)
    offset = req.ctx.get_single_arg("offset", default=0, convert=int)
    # author = req.ctx.get_single_arg("author", default=None, convert=int)

    def extend_query(q):
        # Only public tracks appear in this listing.
        # if author is not None:
        #     q = q.where(Track.author_id == author)
        return q.where(Track.public)

    return await _return_tracks(req, extend_query, limit, offset)
|
||||
|
||||
|
||||
def parse_boolean(s):
    """Parse a truthy/falsy query-string value.

    None passes through unchanged; unrecognized strings raise ValueError.
    """
    if s is None:
        return None

    normalized = s.lower()
    if normalized in {"true", "1", "yes", "y", "t"}:
        return True
    if normalized in {"false", "0", "no", "n", "f"}:
        return False

    raise ValueError("invalid value for boolean")
|
||||
|
||||
|
||||
@api.get("/tracks/feed")
@require_auth
async def get_feed(req):
    """List the authenticated user's own tracks, with sorting and filters.

    Query args: limit, offset, user_device_id, order_by (one of recordedAt,
    title, visibility, length, duration, user_device_id), reversed, public.
    """
    limit = req.ctx.get_single_arg("limit", default=20, convert=int)
    offset = req.ctx.get_single_arg("offset", default=0, convert=int)
    user_device_id = req.ctx.get_single_arg("user_device_id", default=None, convert=int)

    order_by_columns = {
        "recordedAt": Track.recorded_at,
        "title": Track.title,
        "visibility": Track.public,
        "length": Track.length,
        "duration": Track.duration,
        "user_device_id": Track.user_device_id,
    }
    # Unknown values map to None via dict.get, i.e. default ordering.
    order_by = req.ctx.get_single_arg(
        "order_by", default=None, convert=order_by_columns.get
    )

    reversed_ = req.ctx.get_single_arg("reversed", convert=parse_boolean, default=False)
    # Guard against `reversed=true` without a valid `order_by`: the original
    # called `.desc()` on None and crashed with AttributeError.
    if reversed_ and order_by is not None:
        order_by = order_by.desc()

    public = req.ctx.get_single_arg("public", convert=parse_boolean, default=None)

    def extend_query(q):
        q = q.where(Track.author_id == req.ctx.user.id)

        if user_device_id is not None:
            q = q.where(Track.user_device_id == user_device_id)

        if public is not None:
            q = q.where(Track.public == public)

        return q

    return await _return_tracks(req, extend_query, limit, offset, order_by)
|
||||
|
||||
|
||||
@api.post("/tracks/bulk")
@require_auth
async def tracks_bulk_action(req):
    """Apply one action to several of the requesting user's tracks.

    Request body: ``{"action": <name>, "tracks": [<slug>, ...]}``.
    Supported actions: delete, makePublic, makePrivate, reprocess, download.
    For "download" the response is a streamed tar archive; otherwise empty.
    """
    body = req.json
    action = body["action"]
    track_slugs = body["tracks"]

    if action not in ("delete", "makePublic", "makePrivate", "reprocess", "download"):
        raise InvalidUsage("invalid action")

    # Restricted to the user's own tracks; slugs owned by others are
    # silently excluded by this filter.
    query = select(Track).where(
        and_(Track.author_id == req.ctx.user.id, Track.slug.in_(track_slugs))
    )

    files = set()

    for track in (await req.ctx.db.execute(query)).scalars():
        if action == "delete":
            await req.ctx.db.delete(track)
        elif action == "makePublic":
            if not track.public:
                # Visibility changed -> derived artifacts must be rebuilt.
                track.queue_processing()
            track.public = True
        elif action == "makePrivate":
            if track.public:
                track.queue_processing()
            track.public = False
        elif action == "reprocess":
            track.queue_processing()
        elif action == "download":
            files.add(track.get_original_file_path(req.app.config))

    await req.ctx.db.commit()

    if action == "download":
        username_slug = slugify(req.ctx.user.username, separator="-")
        date_str = date.today().isoformat()
        file_basename = f"tracks_{username_slug}_{date_str}"

        # tar_of_tracks streams the archive as the response itself,
        # hence the bare return afterwards.
        await tar_of_tracks(req, files, file_basename)
        return

    return empty()
|
||||
|
||||
|
||||
@api.post("/tracks")
@read_api_key
@require_auth
async def post_track(req):
    """Create a new track from an uploaded recording file.

    Expects exactly one file in the "body" multipart field, plus an optional
    JSON "track" object with "title" and "public". Returns the created track
    (same payload as GET /tracks/<slug>).
    """
    try:
        file = req.files["body"][0]
    except LookupError as e:
        raise InvalidUsage(
            'Track upload needs a single file in "body" multipart field'
        ) from e

    # Metadata is optional; fall back to defaults when absent or unparseable.
    try:
        body = req.json["track"]
    except (LookupError, InvalidUsage):
        body = {}

    title = body.get("title")
    public = body.get("public")

    track = Track(
        title=title,
        customized_title=bool(title),
        author=req.ctx.user,
        # Fall back to the user's default visibility when not specified.
        public=public
        if public is not None
        else req.ctx.user.are_tracks_visible_for_all,
    )
    track.generate_slug()
    try:
        await track.prevent_duplicates(req.ctx.db, file.body)
    except DuplicateTrackFileError as e:
        # Chain the original error for easier debugging (was raised bare).
        raise InvalidUsage("Track file is not unique") from e

    track.uploaded_by_user_agent = normalize_user_agent(req.headers["user-agent"])
    track.original_file_name = file.name
    await track.write_to_original_file(req.app.config, file.body)
    track.queue_processing()
    track.auto_generate_title()

    req.ctx.db.add(track)
    await req.ctx.db.commit()

    return await get_track(req, track.slug)
|
||||
|
||||
|
||||
async def _load_track(req, slug, raise_not_found=True):
    """Load a track by slug with its author eagerly joined.

    Raises NotFound when the track does not exist (or returns None when
    *raise_not_found* is False) and Forbidden when the requesting user may
    not see it.
    """
    track = (
        await req.ctx.db.execute(
            select(Track)
            .where(Track.slug == slug)
            .options(joinedload(Track.author))
            .limit(1)
        )
    ).scalar()

    if track is None:
        # Previously, with raise_not_found=False, execution fell through to
        # `track.is_visible_to(...)` on None and raised AttributeError.
        if raise_not_found:
            raise NotFound()
        return None

    if not track.is_visible_to(req.ctx.user):
        raise Forbidden()

    return track
|
||||
|
||||
|
||||
@api.get("/tracks/<slug:str>")
async def get_track(req, slug: str):
    """Return a single track as JSON, subject to visibility checks."""
    track = await _load_track(req, slug)
    viewer_id = req.ctx.user.id if req.ctx.user else None
    return json({"track": track.to_dict(for_user_id=viewer_id)})
|
||||
|
||||
|
||||
@api.delete("/tracks/<slug:str>")
@require_auth
async def delete_track(req, slug: str):
    """Delete a track; requires private-level access to it."""
    track = await _load_track(req, slug)

    if not track.is_visible_to_private(req.ctx.user):
        raise Forbidden()

    await req.ctx.db.delete(track)
    await req.ctx.db.commit()
    return empty()
|
||||
|
||||
|
||||
@api.get("/tracks/<slug:str>/data")
async def get_track_data(req, slug: str):
    """Return the processed JSON artifacts of a track that exist on disk."""
    track = await _load_track(req, slug)

    available_files = {
        "measurements": "measurements.json",
        "overtakingEvents": "overtakingEvents.json",
        "track": "track.json",
        "trackRaw": "trackRaw.json",
    }

    result = {}
    for key, filename in available_files.items():
        file_path = join(
            req.app.config.PROCESSING_OUTPUT_DIR, track.file_path, filename
        )
        # Processing may not have produced every artifact (yet); skip gaps.
        if exists(file_path) and isfile(file_path):
            with open(file_path) as f:
                result[key] = jsonload(f)

    return json(result)
|
||||
|
||||
|
||||
@api.get("/tracks/<slug:str>/download/original.csv")
async def download_original_file(req, slug: str):
    """Stream the originally uploaded CSV; requires private-level access."""
    track = await _load_track(req, slug)

    if not track.is_visible_to_private(req.ctx.user):
        raise Forbidden()

    original_path = track.get_original_file_path(req.app.config)
    return await file_stream(
        original_path,
        mime_type="text/csv",
        filename=f"{slug}.csv",
    )
|
||||
|
||||
|
||||
@api.get("/tracks/<slug:str>/download/track.gpx")
async def download_track_gpx(req, slug: str):
    """Stream the processed GPX export of a visible track."""
    track = await _load_track(req, slug)

    if not track.is_visible_to(req.ctx.user):
        raise Forbidden()

    gpx_path = join(req.app.config.PROCESSING_OUTPUT_DIR, track.file_path, "track.gpx")
    # The GPX is a processing artifact and may not exist yet.
    if not (exists(gpx_path) and isfile(gpx_path)):
        raise NotFound()

    return await file_stream(
        gpx_path,
        mime_type="application/gpx+xml",
        filename=f"{slug}.gpx",
    )
|
||||
|
||||
|
||||
@api.put("/tracks/<slug:str>")
@require_auth
async def put_track(req, slug: str):
    """Update a track's metadata and/or replace its uploaded file.

    Only the author may update a track. Changing visibility or replacing the
    file queues the track for reprocessing. Returns the updated track.
    """
    track = await _load_track(req, slug)

    if track.author_id != req.ctx.user.id:
        raise Forbidden()

    # Metadata is optional. Catch only the expected failure modes (missing
    # key / unparseable JSON) like post_track does, instead of the previous
    # bare `except BaseException`, which also swallowed KeyboardInterrupt
    # and SystemExit.
    try:
        body = req.json["track"]
    except (LookupError, InvalidUsage):
        body = {}

    if "title" in body:
        track.title = (body["title"] or "").strip() or None
        track.customized_title = track.title is not None

    if "description" in body:
        track.description = (body["description"] or "").strip() or None

    process = False

    if "public" in body:
        public = bool(body["public"])
        process = process or (public != track.public)  # if changed
        track.public = public

    if "body" in req.files:
        try:
            file = req.files["body"][0]
        except LookupError as e:
            raise InvalidUsage(
                'Track upload needs a single file in "body" multipart field'
            ) from e

        await track.prevent_duplicates(req.ctx.db, file.body)
        track.uploaded_by_user_agent = normalize_user_agent(req.headers["user-agent"])
        track.original_file_name = file.name or (track.slug + ".csv")
        await track.write_to_original_file(req.app.config, file.body)
        process = True

    if process:
        track.queue_processing()

    track.auto_generate_title()
    await req.ctx.db.commit()

    # Reload so the response reflects the freshly committed state with the
    # author relationship eagerly loaded.
    track = await _load_track(req, track.slug)
    return json(
        {"track": track.to_dict(for_user_id=req.ctx.user.id)},
    )
|
||||
|
||||
|
||||
@api.get("/tracks/<slug:str>/comments")
async def get_track_comments(req, slug: str):
    """List a track's comments, newest first, with pagination."""
    limit = req.ctx.get_single_arg("limit", default=20, convert=int)
    offset = req.ctx.get_single_arg("offset", default=0, convert=int)

    track = await _load_track(req, slug)

    comment_count = await req.ctx.db.scalar(
        select(func.count()).select_from(Comment).where(Comment.track_id == track.id)
    )

    comments = (
        await req.ctx.db.execute(
            select(Comment)
            .options(joinedload(Comment.author))
            .where(Comment.track_id == track.id)
            .order_by(Comment.created_at.desc())
            .limit(limit)
            .offset(offset)
        )
    ).scalars()

    viewer_id = req.ctx.user.id if req.ctx.user else None
    return json(
        {
            "commentCount": comment_count,
            "comments": [c.to_dict(for_user_id=viewer_id) for c in comments],
        },
    )
|
||||
|
||||
|
||||
@api.post("/tracks/<slug:str>/comments")
@require_auth
async def post_track_comment(req, slug: str):
    """Add a comment to a track and return it with its author serialized."""
    track = await _load_track(req, slug)

    body = req.json.get("comment", {}).get("body")
    if not isinstance(body, str):
        raise InvalidUsage("no comment given")

    # Reject comments that are empty after trimming whitespace.
    body = body.strip()
    if not body:
        raise InvalidUsage("empty comment")

    comment = Comment(
        body=body,
        track_id=track.id,
        author_id=req.ctx.user.id,
    )

    req.ctx.db.add(comment)
    await req.ctx.db.commit()

    await req.ctx.db.refresh(comment)

    # Re-select with the author relationship eagerly loaded so to_dict()
    # does not trigger a lazy load.
    query = (
        select(Comment)
        .options(joinedload(Comment.author))
        .where(Comment.id == comment.id)
        .limit(1)
    )
    comment = (await req.ctx.db.execute(query)).scalar()

    return json({"comment": comment.to_dict(for_user_id=req.ctx.user.id)})
|
||||
|
||||
|
||||
@api.delete("/tracks/<slug:str>/comments/<uid:str>")
@require_auth
async def delete_track_comment(req, slug: str, uid: str):
    """Delete one of the requesting user's own comments from a track."""
    track = await _load_track(req, slug)

    comment = (
        await req.ctx.db.execute(
            select(Comment)
            .options(joinedload(Comment.author))
            # Combine the filters with SQL AND via and_(). The previous
            # Python `and` evaluated to only the second expression, silently
            # dropping the track_id condition and matching a comment with
            # this uid on ANY track.
            .where(and_(Comment.track_id == track.id, Comment.uid == uid))
            .limit(1)
        )
    ).scalar()

    if not comment:
        raise NotFound()

    if comment.author_id != req.ctx.user.id:
        raise Forbidden()

    await req.ctx.db.delete(comment)
    await req.ctx.db.commit()

    return empty()
|
95
api/obs/api/routes/users.py
Normal file
95
api/obs/api/routes/users.py
Normal file
|
@ -0,0 +1,95 @@
|
|||
import logging
|
||||
|
||||
from sanic.response import json
|
||||
from sanic.exceptions import InvalidUsage, Forbidden, NotFound
|
||||
from sqlalchemy import and_, select
|
||||
|
||||
from obs.api.app import api, require_auth
|
||||
from obs.api.db import UserDevice
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
from obs.api import __version__ as version
|
||||
|
||||
|
||||
def user_to_json(user):
    """Serialize the profile fields of *user* into a JSON-ready dict."""
    field_map = (
        ("id", "id"),
        ("username", "username"),
        ("displayName", "display_name"),
        ("email", "email"),
        ("bio", "bio"),
        ("image", "image"),
        ("areTracksVisibleForAll", "are_tracks_visible_for_all"),
        ("apiKey", "api_key"),
    )
    return {json_key: getattr(user, attr) for json_key, attr in field_map}
|
||||
|
||||
|
||||
@api.get("/user")
async def get_user(req):
    """Return the logged-in user's profile as JSON, or null when anonymous."""
    current = req.ctx.user
    if not current:
        return json(None)
    return json(user_to_json(current))
|
||||
|
||||
|
||||
@api.get("/user/devices")
async def get_user_devices(req):
    """List the logged-in user's recording devices, ordered by id."""
    if not req.ctx.user:
        raise Forbidden()

    devices = (
        await req.ctx.db.execute(
            select(UserDevice)
            .where(UserDevice.user_id == req.ctx.user.id)
            .order_by(UserDevice.id)
        )
    ).scalars()

    return json([d.to_dict(req.ctx.user.id) for d in devices])
|
||||
|
||||
|
||||
@api.put("/user/devices/<device_id:int>")
async def put_user_device(req, device_id):
    """Rename one of the logged-in user's devices.

    Body: ``{"displayName": <str>}``. A missing, empty, or null name leaves
    the device unchanged.
    """
    if not req.ctx.user:
        raise Forbidden()

    # Tolerate an absent JSON body and an explicit `"displayName": null`,
    # both of which previously crashed with AttributeError.
    body = req.json or {}

    query = (
        select(UserDevice)
        .where(and_(UserDevice.user_id == req.ctx.user.id, UserDevice.id == device_id))
        .limit(1)
    )

    device = (await req.ctx.db.execute(query)).scalar()

    if device is None:
        raise NotFound()

    new_name = (body.get("displayName") or "").strip()
    if new_name and device.display_name != new_name:
        device.display_name = new_name
        await req.ctx.db.commit()

    return json(device.to_dict())
|
||||
|
||||
|
||||
@api.put("/user")
@require_auth
async def put_user(req):
    """Update the authenticated user's profile fields from the JSON body."""
    user = req.ctx.user
    data = req.json

    # Free-text profile fields: accept strings or an explicit null.
    for field in ("email", "bio", "image"):
        if field in data and isinstance(data[field], (str, type(None))):
            setattr(user, field, data[field])

    if "displayName" in data:
        user.display_name = data["displayName"] or None

    if "areTracksVisibleForAll" in data:
        user.are_tracks_visible_for_all = bool(data["areTracksVisibleForAll"])

    # The API key is only regenerated when explicitly requested.
    if data.get("updateApiKey"):
        user.generate_api_key()

    await req.ctx.db.commit()
    return json(user_to_json(user))
|
162
api/obs/api/utils.py
Normal file
162
api/obs/api/utils.py
Normal file
|
@ -0,0 +1,162 @@
|
|||
import asyncio
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime
|
||||
import logging
|
||||
from os.path import commonpath, join, relpath
|
||||
import queue
|
||||
import tarfile
|
||||
|
||||
import dateutil.parser
|
||||
from sanic.exceptions import InvalidUsage, ServiceUnavailable
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
RAISE = object()  # sentinel: "no default given, raise on a missing argument"


def get_single_arg(req, name, default=RAISE, convert=None):
    """Return one value of the query-string argument *name*.

    Raises InvalidUsage when the argument is missing and no *default* was
    given, or when *convert* rejects the value. Passing ``convert=datetime``
    (or the strings "date"/"datetime") parses the value with dateutil.
    """
    try:
        value = req.args[name][0]
    except LookupError as e:
        if default is RAISE:
            raise InvalidUsage(f"missing `{name}`") from e
        value = default

    # A None value (absent optional argument) is never converted.
    if convert is None or value is None:
        return value

    if convert is datetime or convert in ("date", "datetime"):
        convert = dateutil.parser.parse

    try:
        return convert(value)
    except (ValueError, TypeError) as e:
        raise InvalidUsage(f"invalid `{name}`: {str(e)}") from e
|
||||
|
||||
|
||||
def round_to(value: float, multiples: float) -> float:
    """Round *value* to the nearest multiple of *multiples*.

    None passes through unchanged (absent optional values).
    """
    if value is None:
        return None
    quotient = round(value / multiples)
    return quotient * multiples
|
||||
|
||||
|
||||
def chunk_list(lst, n):
    """Yield successive slices of *lst*, each of length at most *n*."""
    start = 0
    while start < len(lst):
        yield lst[start : start + n]
        start += n
|
||||
|
||||
|
||||
class chunk:
    """Group an iterable (sync or async) into lists of at most ``n`` items."""

    def __init__(self, iterable, n):
        self.iterable = iterable
        self.n = n

    def __iter__(self):
        # Lists can be sliced directly, which is cheaper than item-by-item.
        if isinstance(self.iterable, list):
            yield from chunk_list(self.iterable, self.n)
            return

        source = iter(self.iterable)
        while True:
            batch = []
            for _ in range(self.n):
                try:
                    batch.append(next(source))
                except StopIteration:
                    # Source exhausted: emit any partial batch, then stop.
                    if batch:
                        yield batch
                    return
            yield batch

    async def __aiter__(self):
        # Synchronous iterables are delegated to __iter__.
        if hasattr(self.iterable, "__iter__"):
            for batch in self:
                yield batch
            return

        source = self.iterable.__aiter__()
        while True:
            batch = []
            for _ in range(self.n):
                try:
                    batch.append(await source.__anext__())
                except StopAsyncIteration:
                    if batch:
                        yield batch
                    return
            yield batch
|
||||
|
||||
|
||||
async def tar_of_tracks(req, files, file_basename="tracks"):
    """Stream a .tar.bz2 archive of *files* as the HTTP response.

    Entries are stored under ``<file_basename>/`` relative to the common
    path prefix of *files*. The response is fully sent here (including EOF),
    so the caller must not return another response afterwards.
    """
    response = await req.respond(
        content_type="application/x-gtar",
        headers={
            "content-disposition": f'attachment; filename="{file_basename}.tar.bz2"'
        },
    )

    # tarfile writes synchronously into the helper, which queues the chunks
    # until we forward them to the client with send_all().
    helper = StreamerHelper(response)

    tar = tarfile.open(name=None, fileobj=helper, mode="w|bz2", bufsize=256 * 512)

    root = commonpath(list(files))
    for fname in files:
        log.info("Write file to tar: %s", fname)
        with open(fname, "rb") as fobj:
            tarinfo = tar.gettarinfo(fname)
            tarinfo.name = join(file_basename, relpath(fname, root))
            tar.addfile(tarinfo, fobj)
            # Flush queued chunks after each file to keep memory bounded.
            await helper.send_all()
    tar.close()
    # close() flushes the final archive blocks; forward them too.
    await helper.send_all()

    await response.eof()
|
||||
|
||||
|
||||
class StreamerHelper:
    """File-like adapter between a synchronous writer and an async response.

    Synchronous ``write()`` calls (e.g. from tarfile) are buffered in a
    queue; ``send_all()`` drains the buffer into the async Sanic response.
    """

    def __init__(self, response):
        self.response = response
        self.towrite = queue.Queue()

    def write(self, data):
        # Called synchronously by the producer; just buffer the chunk.
        self.towrite.put(data)

    async def send_all(self):
        # Forward everything buffered so far to the client.
        while True:
            try:
                pending = self.towrite.get_nowait()
            except queue.Empty:
                return
            await self.response.send(pending)
|
||||
|
||||
|
||||
@asynccontextmanager
async def use_request_semaphore(req, semaphore_name, timeout=10):
    """
    If configured, acquire a semaphore for the map tile request and release it
    after the context has finished.

    If the semaphore cannot be acquired within the timeout, issue a 503 Service
    Unavailable error response that describes that the database is overloaded,
    so users know what the problem is.

    Operates as a noop when the tile semaphore is not enabled.
    """
    semaphore = getattr(req.app.ctx, semaphore_name, None)

    if semaphore is None:
        yield
        return

    try:
        await asyncio.wait_for(semaphore.acquire(), timeout)
    except asyncio.TimeoutError:
        # Translate only *acquisition* timeouts into 503. Previously the
        # wrapped request body was inside this try as well, so a
        # TimeoutError raised by the body itself was misreported as
        # "database overloaded".
        raise ServiceUnavailable(
            "Too many requests, database overloaded. Please retry later."
        )

    try:
        yield
    finally:
        semaphore.release()
|
1
api/obs/bin/__init__.py
Normal file
1
api/obs/bin/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
|
66
api/obs/bin/openbikesensor_api.py
Executable file
66
api/obs/bin/openbikesensor_api.py
Executable file
|
@ -0,0 +1,66 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import math
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
import asyncio
|
||||
import logging
|
||||
|
||||
import coloredlogs
|
||||
|
||||
from obs.api.app import app
|
||||
from obs.api.db import connect_db
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def format_size(n, b=1024):
    """Format a byte count *n* human-readably using base *b*.

    Base 1024 yields binary prefixes (KiB, MiB, ...); any other base yields
    decimal ones (KB, MB, ...). Negative values keep their sign.
    """
    if n == 0:
        return "0 B"
    if n < 0:
        # Format the magnitude and re-attach the sign. The previous code
        # recursed on the unchanged negative value and never terminated.
        return "-" + format_size(-n, b)
    e = math.floor(math.log(n, b))
    prefixes = ["", "Ki", "Mi", "Gi", "Ti"] if b == 1024 else ["", "K", "M", "G", "T"]
    # Clamp to the largest known prefix for astronomically large values.
    e = min(e, len(prefixes) - 1)
    r = n / b**e
    s = f"{r:0.2f}" if e > 0 else str(n)
    return f"{s} {prefixes[e]}B"
|
||||
|
||||
|
||||
class AccessLogFilter(logging.Filter):
    """Synthesize an access-log message when sanic logs with an empty one.

    Sanic's access logger emits records whose ``msg`` is empty; this filter
    fills in "<request> - <status> (<size>)" for those. Records that already
    carry a message pass through unchanged.
    """

    def filter(self, record):
        if record.msg:
            return True
        record.msg = (
            f"{record.request} - {record.status} ({format_size(record.byte)})"
        )
        return True
|
||||
|
||||
|
||||
def main():
    """Entry point: configure colored logging and launch the Sanic app."""
    debug = app.config.DEBUG

    coloredlogs.install(
        level=logging.DEBUG if app.config.get("VERBOSE", debug) else logging.INFO,
        milliseconds=True,
        isatty=True,
    )

    # Remove sanic's own handlers so coloredlogs' root handler formats
    # all log output consistently.
    for logger_name in ("sanic.root", "sanic.error", "sanic.access"):
        sanic_logger = logging.getLogger(logger_name)
        for handler in list(sanic_logger.handlers):
            sanic_logger.removeHandler(handler)

    logging.getLogger("sanic.access").addFilter(AccessLogFilter())

    app.run(
        host=app.config.HOST,
        port=app.config.PORT,
        debug=debug,
        auto_reload=app.config.get("AUTO_RELOAD", debug),
        access_log=True,
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
191
api/obs/bin/openbikesensor_transform_osm.py
Executable file
191
api/obs/bin/openbikesensor_transform_osm.py
Executable file
|
@ -0,0 +1,191 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import sys
|
||||
import re
|
||||
import msgpack
|
||||
|
||||
import osmium
|
||||
import shapely.wkb as wkb
|
||||
from shapely.ops import transform
|
||||
|
||||
HIGHWAY_TYPES = {
|
||||
"trunk",
|
||||
"primary",
|
||||
"secondary",
|
||||
"tertiary",
|
||||
"unclassified",
|
||||
"residential",
|
||||
"trunk_link",
|
||||
"primary_link",
|
||||
"secondary_link",
|
||||
"tertiary_link",
|
||||
"living_street",
|
||||
"service",
|
||||
"track",
|
||||
"road",
|
||||
}
|
||||
ZONE_TYPES = {
|
||||
"urban",
|
||||
"rural",
|
||||
"motorway",
|
||||
}
|
||||
URBAN_TYPES = {
|
||||
"residential",
|
||||
"living_street",
|
||||
"road",
|
||||
}
|
||||
MOTORWAY_TYPES = {
|
||||
"motorway",
|
||||
"motorway_link",
|
||||
}
|
||||
|
||||
ADMIN_LEVEL_MIN = 2
|
||||
ADMIN_LEVEL_MAX = 8
|
||||
MINSPEED_RURAL = 60
|
||||
|
||||
ONEWAY_YES = {"yes", "true", "1"}
|
||||
ONEWAY_REVERSE = {"reverse", "-1"}
|
||||
|
||||
|
||||
def parse_number(tag):
    """Extract the first run of digits in *tag* as an int, or None.

    Handles None/empty input and tags without any digits gracefully.
    """
    if not tag:
        return None

    match = re.search(r"[0-9]+", tag)
    if match is None:
        return None

    try:
        return int(match.group(0))
    except ValueError:
        return None
|
||||
|
||||
|
||||
def determine_zone(tags):
    """Classify a way as "urban", "rural" or "motorway" from its OSM tags."""
    zone = tags.get("zone:traffic")

    # An explicit zone:traffic tag wins over all heuristics.
    if zone is not None:
        if "rural" in zone:
            return "rural"
        if "motorway" in zone:
            return "motorway"
        return "urban"

    # From here on we are guessing based on other tags.
    highway = tags.get("highway")
    if highway in URBAN_TYPES:
        return "urban"
    if highway in MOTORWAY_TYPES:
        return "motorway"

    maxspeed_source = tags.get("source:maxspeed")
    if maxspeed_source:
        if "rural" in maxspeed_source:
            return "rural"
        if "urban" in maxspeed_source:
            return "urban"

    # Speed limits above the threshold suggest a rural road.
    for key in ("maxspeed", "maxspeed:forward", "maxspeed:backward"):
        limit = parse_number(tags.get(key))
        if limit is not None and limit > MINSPEED_RURAL:
            return "rural"

    # Default to urban if we have no idea.
    return "urban"
|
||||
|
||||
|
||||
def determine_direction(tags, zone):
    """Return (directionality, oneway) for a way.

    directionality is 1 (forward only), -1 (reverse only) or 0 (both);
    oneway is True when traffic flows in a single direction. Roundabouts
    and motorways are treated as forward-only oneways.
    """
    oneway_tag = tags.get("oneway")

    forward_only = (
        oneway_tag in ONEWAY_YES
        or tags.get("junction") == "roundabout"
        or zone == "motorway"
    )
    if forward_only:
        return 1, True

    if oneway_tag in ONEWAY_REVERSE:
        return -1, True

    return 0, False
|
||||
|
||||
|
||||
class StreamPacker:
    """msgpack Packer that flushes every packed value straight to a stream."""

    def __init__(self, stream, *args, **kwargs):
        self.stream = stream
        # autoreset=False keeps packed bytes in the Packer's internal buffer
        # so we can hand them to the stream ourselves in _write_out().
        self.packer = msgpack.Packer(*args, autoreset=False, **kwargs)

    def _write_out(self):
        # Flush the packer's buffer to the stream and reset it for the next
        # value. Newer msgpack exposes getbuffer(); older versions bytes().
        if hasattr(self.packer, "getbuffer"):
            buffered = self.packer.getbuffer()
        else:
            buffered = self.packer.bytes()
        self.stream.write(buffered)
        self.packer.reset()

    def _pack_and_flush(self, method_name, args, kwargs):
        # Delegate to the underlying packer method, then flush immediately.
        getattr(self.packer, method_name)(*args, **kwargs)
        self._write_out()

    def pack(self, *args, **kwargs):
        self._pack_and_flush("pack", args, kwargs)

    def pack_array_header(self, *args, **kwargs):
        self._pack_and_flush("pack_array_header", args, kwargs)

    def pack_map_header(self, *args, **kwargs):
        self._pack_and_flush("pack_map_header", args, kwargs)

    def pack_map_pairs(self, *args, **kwargs):
        self._pack_and_flush("pack_map_pairs", args, kwargs)
|
||||
|
||||
|
||||
# A global factory that creates WKB from a osmium geometry
|
||||
wkbfab = osmium.geom.WKBFactory()
|
||||
|
||||
from pyproj import Transformer
|
||||
|
||||
project = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True).transform
|
||||
|
||||
|
||||
class OSMHandler(osmium.SimpleHandler):
    """osmium handler that emits relevant highway ways as msgpack records."""

    def __init__(self, packer):
        # StreamPacker that receives one record per accepted way.
        self.packer = packer
        super().__init__()

    def way(self, way):
        """Called by osmium for every way; pack a record for rideable roads."""
        tags = way.tags

        # Only road types listed in HIGHWAY_TYPES are of interest.
        highway = tags.get("highway")
        if not highway or highway not in HIGHWAY_TYPES:
            return

        # Skip ways with access=no unless bicycles are explicitly allowed.
        access = tags.get("access", None)
        bicycle = tags.get("bicycle", None)
        if access == "no" and bicycle not in ["designated", "yes", "permissive", "destination"]:
            return

        zone = determine_zone(tags)
        directionality, oneway = determine_direction(tags, zone)
        name = tags.get("name")

        # Build WKB geometry, reproject from lon/lat (EPSG:4326) to web
        # mercator (EPSG:3857) via the module-level `project` transformer,
        # then serialize back to WKB bytes.
        geometry = wkb.loads(wkbfab.create_linestring(way), hex=True)
        geometry = transform(project, geometry)
        geometry = wkb.dumps(geometry)
        # b"\x01" appears to be a record-type marker for road entries --
        # confirm against the consumer of this msgpack stream.
        self.packer.pack(
            [b"\x01", way.id, name, zone, directionality, oneway, geometry]
        )
|
||||
|
||||
|
||||
def main():
    """CLI entry point: transform an OSM file (argv[1]) into msgpack (argv[2])."""
    # Validate arguments up front instead of failing with a bare IndexError.
    if len(sys.argv) < 3:
        print(
            f"usage: {sys.argv[0]} <input.osm(.pbf)> <output.msgpack>",
            file=sys.stderr,
        )
        sys.exit(1)

    with open(sys.argv[2], "wb") as fout:
        packer = StreamPacker(fout)
        osmhandler = OSMHandler(packer)
        # locations=True makes osmium resolve node coordinates for way geometry.
        osmhandler.apply_file(sys.argv[1], locations=True)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
9014
api/package-lock.json
generated
9014
api/package-lock.json
generated
File diff suppressed because it is too large
Load diff
105
api/package.json
105
api/package.json
|
@ -1,105 +0,0 @@
|
|||
{
|
||||
"name": "open-bike-sensor-web-api",
|
||||
"version": "1.0.0",
|
||||
"description": "Backend API for the OpenBikeSensor web app",
|
||||
"main": "app.js",
|
||||
"scripts": {
|
||||
"mongo:start": "docker run --name realworld-mongo -p 27017:27017 mongo & sleep 5",
|
||||
"start": "node src/",
|
||||
"dev": "nodemon src/",
|
||||
"mongo:stop": "docker stop realworld-mongo && docker rm realworld-mongo",
|
||||
"autoformat": "eslint --fix .",
|
||||
"lint": "eslint .",
|
||||
"test": "jest",
|
||||
"migrate": "mongoose-data-migrate -c .migrations.js",
|
||||
"migrate:up": "npm run migrate -- up",
|
||||
"migrate:down": "npm run migrate -- down"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/openbikesensor/obsAPI.git"
|
||||
},
|
||||
"license": "LGPLv3",
|
||||
"dependencies": {
|
||||
"body-parser": "1.19.0",
|
||||
"connect-busboy": "0.0.2",
|
||||
"cors": "2.8.5",
|
||||
"csv-parse": "^4.15.1",
|
||||
"csv-stringify": "^5.6.1",
|
||||
"ejs": "^3.1.6",
|
||||
"errorhandler": "1.5.1",
|
||||
"express": "4.17.1",
|
||||
"express-jwt": "^6.0.0",
|
||||
"express-session": "1.17.1",
|
||||
"jest": "^26.6.3",
|
||||
"joi": "^17.4.0",
|
||||
"jsonwebtoken": "8.5.1",
|
||||
"method-override": "3.0.0",
|
||||
"methods": "1.1.2",
|
||||
"mongoose": "^5.11.17",
|
||||
"mongoose-data-migrate": "flashstockinc/mongoose-data-migrate",
|
||||
"mongoose-unique-validator": "2.0.3",
|
||||
"morgan": "1.10.0",
|
||||
"nodemailer": "^6.4.18",
|
||||
"passport": "0.4.1",
|
||||
"passport-local": "1.0.0",
|
||||
"request": "2.88.2",
|
||||
"sanitize-filename": "^1.6.3",
|
||||
"slug": "^3.5.2",
|
||||
"turf": "^3.0.14",
|
||||
"underscore": "^1.12.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
"eslint": "^7.20.0",
|
||||
"eslint-config-prettier": "^6.15.0",
|
||||
"eslint-config-standard": "^16.0.2",
|
||||
"eslint-plugin-import": "^2.22.1",
|
||||
"eslint-plugin-jest": "^24.1.5",
|
||||
"eslint-plugin-node": "^11.1.0",
|
||||
"eslint-plugin-prettier": "^3.3.1",
|
||||
"eslint-plugin-promise": "^4.3.1",
|
||||
"nodemon": "^2.0.7",
|
||||
"prettier": "^2.2.1"
|
||||
},
|
||||
"jest": {
|
||||
"modulePathIgnorePatterns": [
|
||||
"local"
|
||||
]
|
||||
},
|
||||
"prettier": {
|
||||
"useTabs": false,
|
||||
"trailingComma": "all",
|
||||
"tabWidth": 2,
|
||||
"semi": true,
|
||||
"singleQuote": true,
|
||||
"printWidth": 120
|
||||
},
|
||||
"eslintConfig": {
|
||||
"extends": [
|
||||
"standard",
|
||||
"prettier"
|
||||
],
|
||||
"plugins": [
|
||||
"jest",
|
||||
"prettier"
|
||||
],
|
||||
"env": {
|
||||
"browser": false,
|
||||
"node": true,
|
||||
"jest/globals": true
|
||||
},
|
||||
"rules": {
|
||||
"prettier/prettier": "error",
|
||||
"standard/array-bracket-even-spacing": 0,
|
||||
"standard/computed-property-even-spacing": 0,
|
||||
"standard/object-curly-even-spacing": 0
|
||||
},
|
||||
"root": true,
|
||||
"ignorePatterns": [
|
||||
"postman-examples/**",
|
||||
"public/**",
|
||||
"node_modules",
|
||||
"local"
|
||||
]
|
||||
}
|
||||
}
|
22
api/requirements.txt
Normal file
22
api/requirements.txt
Normal file
|
@ -0,0 +1,22 @@
|
|||
coloredlogs~=15.0.1
|
||||
sanic==22.6.2
|
||||
oic~=1.5.0
|
||||
sanic-session~=0.8.0
|
||||
python-slugify~=6.1.2
|
||||
motor~=3.1.1
|
||||
pyyaml~=5.3.1
|
||||
-e git+https://github.com/openmaptiles/openmaptiles-tools#egg=openmaptiles-tools
|
||||
sqlparse~=0.4.3
|
||||
sqlalchemy[asyncio]~=1.4.46
|
||||
asyncpg~=0.27.0
|
||||
pyshp~=2.3.1
|
||||
alembic~=1.9.4
|
||||
stream-zip~=0.0.50
|
||||
msgpack~=1.0.5
|
||||
osmium~=3.6.0
|
||||
psycopg~=3.1.8
|
||||
shapely~=2.0.1
|
||||
pyproj~=3.4.1
|
||||
aiohttp~=3.8.1
|
||||
# sanic requires websockets and chokes on >=10 in 22.6.2
|
||||
websockets<11
|
1
api/scripts
Submodule
1
api/scripts
Submodule
|
@ -0,0 +1 @@
|
|||
Subproject commit 664e4d606416417c0651ea1748d32dd36209be6a
|
34
api/setup.py
Normal file
34
api/setup.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
from setuptools import setup, find_packages

# Packaging metadata for the OpenBikeSensor Portal API.
setup(
    name="openbikesensor-api",
    version="0.0.1",
    author="OpenBikeSensor Contributors",
    license="LGPL-3.0",
    description="OpenBikeSensor Portal API",
    url="https://github.com/openbikesensor/portal",
    packages=find_packages(),
    package_data={},
    # Runtime dependencies; keep version ranges in sync with requirements.txt.
    install_requires=[
        "coloredlogs~=15.0.1",
        "sanic==22.6.2",
        "oic>=1.3.0, <2",
        "sanic-session~=0.8.0",
        "python-slugify>=5.0.2,<6.2.0",
        "motor>=2.5.1,<3.1.2",
        "pyyaml<6",
        "sqlparse~=0.4.3",
        "openmaptiles-tools",  # install from git
        "pyshp>=2.2,<2.4",
        "sqlalchemy[asyncio]~=1.4.46",
        "asyncpg~=0.27.0",
        "alembic~=1.9.4",
        "stream-zip~=0.0.50",
    ],
    # Console commands exposed by this package.
    entry_points={
        "console_scripts": [
            "openbikesensor-api=obs.bin.openbikesensor_api:main",
            "openbikesensor-transform-osm=obs.bin.openbikesensor_transform_osm:main",
        ]
    },
)
|
|
@ -1,65 +0,0 @@
|
|||
/**
 * Yield consecutive pairs [previous, current] of `iter`.
 * An iterable of n items yields n-1 pairs; 0 or 1 items yield nothing.
 */
function* pairwise(iter) {
  let last;
  let firstLoop = true;
  for (const it of iter) {
    if (firstLoop) {
      firstLoop = false;
    } else {
      yield [last, it];
    }
    last = it;
  }
}

/** Yield [index, item] pairs for `iter`, like Python's enumerate(). */
function* enumerate(iter) {
  let i = 0;
  for (const it of iter) {
    yield [i, it];
    i++;
  }
}

/** Curried lazy map: map(fn) returns a generator transform; fn receives (item, index). */
const map = (fn) =>
  function* (iter) {
    for (const [i, it] of enumerate(iter)) {
      yield fn(it, i);
    }
  };

/** Curried lazy filter: keeps items for which fn(it) is truthy. */
const filter = (fn) =>
  function* (iter) {
    for (const it of iter) {
      if (fn(it)) {
        yield it;
      }
    }
  };

/** Curried eager reduce over an iterable with an initial accumulator. */
const reduce = (fn, init) => (iter) => {
  let acc = init;
  for (const it of iter) {
    acc = fn(acc, it);
  }
  return acc;
};

/** Like reduce, but lazily yields every intermediate accumulator value. */
const scan = (fn) =>
  function* (iter, init) {
    let acc = init;
    for (const it of iter) {
      acc = fn(acc, it);
      yield acc;
    }
  };

/** Left-to-right function composition: flow(f, g)(x) === g(f(x)). */
const flow = (...reducers) => (input) => reducers.reduce((c, fn) => fn(c), input);

module.exports = {
  filter,
  map,
  enumerate,
  pairwise,
  flow,
  reduce,
  scan,
};
|
|
@ -1,18 +0,0 @@
|
|||
const nodemailer = require('nodemailer');
const config = require('../config/email');

module.exports = sendEmail;

/**
 * Send an email through the configured SMTP transport.
 *
 * When `config.sendMails` is disabled (e.g. in development), the message is
 * logged to the console instead of being sent.
 *
 * @param {Object} options
 * @param {string} options.to - recipient address
 * @param {string} options.subject - message subject
 * @param {string} options.html - HTML body
 * @param {string} [options.from] - sender; defaults to config.emailFrom
 */
async function sendEmail({ to, subject, html, from = config.emailFrom }) {
  if (config.sendMails) {
    const transporter = nodemailer.createTransport(config.smtpOptions);
    await transporter.sendMail({ from, to, subject, html });
  } else {
    console.log({
      to,
      subject,
      html,
      from,
    });
  }
}
|
|
@ -1,9 +0,0 @@
|
|||
const wrapRoute = (fn) => async (req, res, next) => {
|
||||
try {
|
||||
return await fn(req, res);
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = wrapRoute;
|
|
@ -1,23 +0,0 @@
|
|||
module.exports = errorHandler;
|
||||
|
||||
function errorHandler(err, req, res, next) {
|
||||
switch (true) {
|
||||
case typeof err === 'string': {
|
||||
// custom application error
|
||||
const is404 = err.toLowerCase().endsWith('not found');
|
||||
const statusCode = is404 ? 404 : 400;
|
||||
return res.status(statusCode).json({ message: err });
|
||||
}
|
||||
|
||||
case err.name === 'ValidationError':
|
||||
// mongoose validation error
|
||||
return res.status(400).json({ message: err.message });
|
||||
|
||||
case err.name === 'UnauthorizedError':
|
||||
// jwt authentication error
|
||||
return res.status(401).json({ message: 'Unauthorized' });
|
||||
|
||||
default:
|
||||
return res.status(500).json({ message: err.message });
|
||||
}
|
||||
}
|
|
@ -1,19 +0,0 @@
|
|||
const validateRequest = (schema) => (req, res, next) => {
|
||||
console.log('validateRequest');
|
||||
|
||||
const options = {
|
||||
abortEarly: false, // include all errors
|
||||
allowUnknown: true, // ignore unknown props
|
||||
stripUnknown: true, // remove unknown props
|
||||
};
|
||||
const { error, value } = schema.validate(req.body, options);
|
||||
if (error) {
|
||||
console.log('error: ', error);
|
||||
next(`Validation error: ${error.details.map((x) => x.message).join(', ')}`);
|
||||
} else {
|
||||
req.body = value;
|
||||
next();
|
||||
}
|
||||
};
|
||||
|
||||
module.exports = validateRequest;
|
|
@ -1,154 +0,0 @@
|
|||
const crypto = require('crypto');
|
||||
const mongoose = require('mongoose');
|
||||
const sendEmail = require('../_helpers/send-email');
|
||||
const User = mongoose.model('User');
|
||||
|
||||
// Account workflows exposed to the route layer: registration, email
// verification, and the password-reset flow.
module.exports = {
  register,
  verifyEmail,
  forgotPassword,
  validateResetToken,
  resetPassword,
};
|
||||
|
||||
// Create a new account and mail out a verification token. If the email
// address is already taken, an "already registered" notice is mailed
// instead of returning an error, so the API response does not reveal
// which addresses exist (anti-enumeration).
async function register(params, origin) {
  const existing = await User.findOne({ email: params.email });
  if (existing) {
    // send already registered error in email to prevent account enumeration
    return await sendAlreadyRegisteredEmail(params.email, origin);
  }

  const account = new User();
  account.username = params.username;
  account.email = params.email;
  account.setPassword(params.password);
  account.verificationToken = randomTokenString();
  account.needsEmailValidation = true;
  await account.save();

  // send email
  await sendVerificationEmail(account, origin);
}
|
||||
|
||||
// Mark the account matching this verification token as confirmed and
// drop the token so it cannot be replayed.
async function verifyEmail({ token }) {
  const user = await User.findOne({ verificationToken: token });
  if (!user) {
    throw Error('Verification failed');
  }

  user.verificationToken = undefined;
  user.needsEmailValidation = false;
  await user.save();
}
|
||||
|
||||
// Start a password reset: attach a 24-hour reset token to the matching
// account and mail it out. Resolves silently when no account matches, to
// prevent email enumeration.
// Fix: removed debug console.log calls that dumped the account document
// — including the freshly generated reset token — into the server logs.
async function forgotPassword({ email }, origin) {
  const account = await User.findOne({ email });

  // always return ok response to prevent email enumeration
  if (!account) return;

  // create reset token that expires after 24 hours
  account.resetToken = {
    token: randomTokenString(),
    expires: new Date(Date.now() + 24 * 60 * 60 * 1000),
  };
  await account.save();

  // send email
  await sendPasswordResetEmail(account, origin);
}
|
||||
|
||||
// Throw unless the given reset token exists and has not expired.
// Used by the /validate-reset-token route to pre-check a token without
// consuming it.
async function validateResetToken({ token }) {
  const match = await User.findOne({
    'resetToken.token': token,
    'resetToken.expires': { $gt: Date.now() },
  });

  if (!match) {
    throw Error('Invalid token');
  }
}
|
||||
|
||||
// Set a new password for the account holding a valid, unexpired reset
// token, then invalidate the token.
async function resetPassword({ token, password }) {
  const match = await User.findOne({
    'resetToken.token': token,
    'resetToken.expires': { $gt: Date.now() },
  });

  if (!match) {
    throw Error('Invalid token');
  }

  // update password and remove reset token
  match.setPassword(password);
  match.resetToken = undefined;
  await match.save();
}
|
||||
|
||||
function randomTokenString() {
|
||||
return crypto.randomBytes(40).toString('hex');
|
||||
}
|
||||
|
||||
// Mail the email-verification token to a freshly registered account.
// When a frontend origin is known the mail carries a clickable link;
// otherwise it carries the raw token for manual use against the API.
async function sendVerificationEmail(account, origin) {
  let message;
  if (origin) {
    const verifyUrl = `${origin}/account/verify-email?token=${account.verificationToken}`;
    message = `<p>Please click the below link to verify your email address:</p>
               <p><a href="${verifyUrl}">${verifyUrl}</a></p>`;
  } else {
    message = `<p>Please use the below token to verify your email address with the <code>/account/verify-email</code> api route:</p>
               <p><code>${account.verificationToken}</code></p>`;
  }

  await sendEmail({
    to: account.email,
    subject: 'Sign-up Verification API - Verify Email',
    html: `<h4>Verify Email</h4>
           <p>Thanks for registering!</p>
           ${message}`,
  });
}
|
||||
|
||||
// Tell an existing address that someone tried to register it again.
// Sent instead of an API error so registration responses never reveal
// whether an address is taken (see register()).
async function sendAlreadyRegisteredEmail(email, origin) {
  let message;
  if (origin) {
    message = `<p>If you don't know your password please visit the <a href="${origin}/account/forgot-password">forgot password</a> page.</p>`;
  } else {
    message = `<p>If you don't know your password you can reset it via the <code>/account/forgot-password</code> api route.</p>`;
  }

  await sendEmail({
    to: email,
    subject: 'Sign-up Verification API - Email Already Registered',
    html: `<h4>Email Already Registered</h4>
           <p>Your email <strong>${email}</strong> is already registered.</p>
           ${message}`,
  });
}
|
||||
|
||||
// Mail the password-reset token created by forgotPassword(). With a
// known frontend origin the mail carries a reset link; otherwise the raw
// token for manual use against the API. The token expires after 24h
// (set by the caller).
async function sendPasswordResetEmail(account, origin) {
  let message;
  if (origin) {
    const resetUrl = `${origin}/account/reset-password?token=${account.resetToken.token}`;
    message = `<p>Please click the below link to reset your password, the link will be valid for 1 day:</p>
               <p><a href="${resetUrl}">${resetUrl}</a></p>`;
  } else {
    message = `<p>Please use the below token to reset your password with the <code>/account/reset-password</code> api route:</p>
               <p><code>${account.resetToken.token}</code></p>`;
  }

  await sendEmail({
    to: account.email,
    subject: 'Sign-up Verification API - Reset Password',
    html: `<h4>Reset Password Email</h4>
           ${message}`,
  });
}
|
|
@ -1,78 +0,0 @@
|
|||
const express = require('express');
|
||||
const router = express.Router();
|
||||
const Joi = require('joi');
|
||||
const wrapRoute = require('../_helpers/wrapRoute');
|
||||
const validateRequest = require('../_middleware/validate-request');
|
||||
const accountService = require('./account.service');
|
||||
|
||||
// Account routes: registration, email verification and the password
// reset flow. Request bodies are validated with named Joi schemas; the
// actual work happens in account.service.

// Schema for POST /register.
const registerSchema = Joi.object({
  username: Joi.string().required(),
  email: Joi.string().email().required(),
  password: Joi.string().min(6).required(),
  confirmPassword: Joi.string().valid(Joi.ref('password')).required(),
});

// Shared schema for routes that take only a token.
const tokenSchema = Joi.object({
  token: Joi.string().required(),
});

// Schema for POST /forgot-password.
const forgotPasswordSchema = Joi.object({
  email: Joi.string().email().required(),
});

// Schema for POST /reset-password.
const resetPasswordSchema = Joi.object({
  token: Joi.string().required(),
  password: Joi.string().min(6).required(),
  confirmPassword: Joi.string().valid(Joi.ref('password')).required(),
});

router.post(
  '/register',
  validateRequest(registerSchema),
  wrapRoute(async (req, res) => {
    await accountService.register(req.body, req.get('origin'));
    res.json({ message: 'Registration successful, please check your email for verification instructions' });
  }),
);

router.post(
  '/verify-email',
  validateRequest(tokenSchema),
  wrapRoute(async (req, res) => {
    await accountService.verifyEmail(req.body);
    res.json({ message: 'Verification successful, you can now login' });
  }),
);

router.post(
  '/forgot-password',
  validateRequest(forgotPasswordSchema),
  wrapRoute(async (req, res) => {
    await accountService.forgotPassword(req.body, req.get('origin'));
    res.json({ message: 'Please check your email for password reset instructions' });
  }),
);

router.post(
  '/validate-reset-token',
  validateRequest(tokenSchema),
  wrapRoute(async (req, res) => {
    await accountService.validateResetToken(req.body);
    res.json({ message: 'Token is valid' });
  }),
);

router.post(
  '/reset-password',
  validateRequest(resetPasswordSchema),
  wrapRoute(async (req, res) => {
    await accountService.resetPassword(req.body);
    res.json({ message: 'Password reset successful, you can now login' });
  }),
);

module.exports = router;
|
|
@ -1,15 +0,0 @@
|
|||
const isProduction = process.env.NODE_ENV === 'production';
|
||||
const forcedMail = process.argv.findIndex((s) => s === '--devSendMails') !== -1;
|
||||
|
||||
module.exports = {
|
||||
sendMails: isProduction || forcedMail,
|
||||
emailFrom: process.env.MAILSENDER,
|
||||
smtpOptions: {
|
||||
host: process.env.MAILSERVER,
|
||||
port: 587,
|
||||
auth: {
|
||||
user: process.env.MAILUSER,
|
||||
pass: process.env.MAILPW,
|
||||
},
|
||||
},
|
||||
};
|
|
@ -1,3 +0,0 @@
|
|||
// Signing secret for tokens/sessions. Production reads it from the
// SECRET environment variable; every other environment uses a fixed
// development value.
// NOTE(review): if SECRET is unset in production this exports undefined —
// consider failing fast at startup instead.
module.exports = {
  secret: process.env.NODE_ENV === 'production' ? process.env.SECRET : 'secret',
};
|
|
@ -1,29 +0,0 @@
|
|||
const passport = require('passport');
|
||||
const LocalStrategy = require('passport-local').Strategy;
|
||||
const mongoose = require('mongoose');
|
||||
const User = mongoose.model('User');
|
||||
|
||||
// Verify callback for the local strategy: check the credentials and make
// sure the address has been confirmed before letting the user in.
async function verifyLogin(email, password, done) {
  try {
    const user = await User.findOne({ email: email });
    if (!user || !user.validPassword(password)) {
      return done(null, false, { errors: { 'email or password': 'is invalid' } });
    }

    if (user.needsEmailValidation) {
      return done(null, false, { errors: { 'E-Mail-Bestätigung': 'noch nicht erfolgt' } });
    }

    return done(null, user);
  } catch (err) {
    done(err);
  }
}

// Credentials arrive nested under user[...] in the request body.
passport.use(
  new LocalStrategy(
    {
      usernameField: 'user[email]',
      passwordField: 'user[password]',
    },
    verifyLogin,
  ),
);
|
|
@ -1,15 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
|
||||
// Database bootstrap: connect mongoose and register all schemas.
// Pick the database: MONGODB_URL wins; otherwise use a production or a
// test database on localhost depending on NODE_ENV.
const isProduction = process.env.NODE_ENV === 'production';
const mongodbUrl =
  process.env.MONGODB_URL || (isProduction ? 'mongodb://localhost/obs' : 'mongodb://localhost/obsTest');
mongoose.connect(mongodbUrl);
// Verbose query logging everywhere except production.
mongoose.set('debug', !isProduction);

// Register every model before config/passport, which looks up the User
// model via mongoose.model() — the require order matters here.
require('./models/TrackData');
require('./models/User');
require('./models/Track');
require('./models/Comment');
require('./config/passport');

module.exports = mongoose;
|
|
@ -1,78 +0,0 @@
|
|||
const path = require('path');
|
||||
const express = require('express');
|
||||
const bodyParser = require('body-parser');
|
||||
const session = require('express-session');
|
||||
const cors = require('cors');
|
||||
const errorhandler = require('errorhandler');
|
||||
const auth = require('./routes/auth');
|
||||
|
||||
// Application bootstrap: middleware stack, error handlers, HTTP server.
// Middleware registration order below is load-bearing.
const isProduction = process.env.NODE_ENV === 'production';

// Create global app object
const app = express();

app.use(cors());
// Attach authentication info to every request: first the user id from
// the token, then the loaded user document.
app.use(auth.getUserIdMiddleware);
app.use(auth.loadUserMiddleware);

// Normal express config defaults
app.use(require('morgan')('dev'));
// Generous body limit — presumably for large track uploads; TODO confirm.
app.use(bodyParser.json({ limit: '50mb' }));
app.use(bodyParser.urlencoded({ limit: '50mb', extended: false }));

app.use(require('method-override')());
app.use(express.static(path.join(__dirname, 'public')));

// NOTE(review): session secret is hard-coded here while other code reads
// config/secret — consider sourcing this from configuration as well.
app.use(session({ secret: 'obsobs', cookie: { maxAge: 60000 }, resave: false, saveUninitialized: false }));

if (!isProduction) {
  app.use(errorhandler());
}

// Side-effectful: connects mongoose and registers all models/passport.
require('./db');

app.use(require('./routes'));

/// catch 404 and forward to error handler
app.use(function (req, res, next) {
  const err = new Error('Not Found');
  err.status = 404;
  next(err);
});

/// error handlers

// development error handler
// will print stacktrace
if (!isProduction) {
  app.use(function (err, req, res, next) {
    console.log(err.stack);

    res.status(err.status || 500);

    res.json({
      errors: {
        message: err.message,
        error: err,
      },
    });
  });
}

// production error handler
// no stacktraces leaked to user
app.use(function (err, req, res, next) {
  res.status(err.status || 500);
  res.json({
    errors: {
      message: err.message,
      error: {},
    },
  });
});

// finally, let's start our server...
const port = process.env.PORT || 3000;
app.listen(port, () => {
  console.log('Listening on port ' + port);
});
|
|
@ -1,357 +0,0 @@
|
|||
const TEST_ROWS = [
|
||||
'Date;Time;Latitude;Longitude;Course;Speed;Right;Left;Confirmed;insidePrivacyArea;',
|
||||
'12.07.2020;09:02:59;0.000000;0.000000;0.000;0.0000;255;255;0;0;',
|
||||
'12.07.2020;09:02:59;0.000000;0.000000;0.000;0.0000;255;255;0;0;',
|
||||
'12.07.2020;09:03:00;0.000000;0.000000;0.000;0.0000;255;255;0;0;',
|
||||
'12.07.2020;09:03:01;48.722205;9.270218;0.000;0.4260;255;255;0;0;',
|
||||
'12.07.2020;09:03:02;48.722206;9.270219;0.000;0.5741;255;255;0;0;',
|
||||
'12.07.2020;09:03:03;48.722204;9.270221;0.000;0.5371;255;255;0;0;',
|
||||
'12.07.2020;09:03:04;48.722198;9.270229;0.000;0.7593;255;255;0;0;',
|
||||
'12.07.2020;09:03:05;48.722188;9.270241;0.000;0.5556;255;255;0;0;',
|
||||
'12.07.2020;09:03:06;48.722174;9.270259;0.000;0.4815;255;255;0;0;',
|
||||
'12.07.2020;09:03:07;48.722158;9.270278;0.000;0.3704;255;255;0;0;',
|
||||
'12.07.2020;09:03:08;48.722146;9.270293;0.000;0.5741;255;255;0;0;',
|
||||
'12.07.2020;09:03:09;48.722138;9.270305;0.000;1.2594;255;255;0;0;',
|
||||
'12.07.2020;09:03:10;48.722129;9.270318;0.000;1.5557;255;255;0;0;',
|
||||
'12.07.2020;09:03:11;48.722122;9.270329;0.000;1.5372;255;255;0;0;',
|
||||
'12.07.2020;09:03:12;48.722115;9.270339;0.000;0.4630;255;255;0;0;',
|
||||
'12.07.2020;09:03:13;48.722107;9.270350;0.000;0.2963;255;255;0;0;',
|
||||
'12.07.2020;09:03:14;48.722101;9.270357;0.000;0.2963;255;255;0;0;',
|
||||
'12.07.2020;09:03:15;48.722092;9.270367;0.000;0.8149;255;255;0;0;',
|
||||
'12.07.2020;09:03:16;48.722084;9.270377;0.000;1.2223;255;255;0;0;',
|
||||
'12.07.2020;09:03:17;48.722076;9.270385;0.000;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:03:18;48.722070;9.270391;0.000;1.4816;255;255;0;0;',
|
||||
'12.07.2020;09:03:19;48.722070;9.270392;0.000;1.0927;255;255;0;0;',
|
||||
'12.07.2020;09:03:20;48.722066;9.270395;0.000;1.6668;255;255;0;0;',
|
||||
'12.07.2020;09:03:21;48.722068;9.270391;0.000;2.0742;255;255;0;0;',
|
||||
'12.07.2020;09:03:22;48.722064;9.270396;0.000;1.6853;255;255;0;0;',
|
||||
'12.07.2020;09:03:23;48.722060;9.270401;0.000;1.0927;255;255;0;0;',
|
||||
'12.07.2020;09:03:24;48.722056;9.270406;0.000;0.9445;255;255;0;0;',
|
||||
'12.07.2020;09:03:25;48.722052;9.270411;0.000;0.7964;255;255;0;0;',
|
||||
'12.07.2020;09:03:26;48.722047;9.270416;0.000;0.6482;255;255;0;0;',
|
||||
'12.07.2020;09:03:27;48.722042;9.270419;0.000;1.0556;255;255;0;0;',
|
||||
'12.07.2020;09:03:28;48.722031;9.270433;0.000;2.0372;255;255;0;0;',
|
||||
'12.07.2020;09:03:29;48.722031;9.270432;0.000;2.4261;255;255;0;0;',
|
||||
'12.07.2020;09:03:30;48.722029;9.270433;0.000;0.8704;255;255;0;0;',
|
||||
'12.07.2020;09:03:31;48.722029;9.270433;0.000;1.8150;255;255;0;0;',
|
||||
'12.07.2020;09:03:32;48.722024;9.270439;0.000;1.2223;255;255;0;0;',
|
||||
'12.07.2020;09:03:33;48.722025;9.270439;0.000;0.3889;255;255;0;0;',
|
||||
'12.07.2020;09:03:34;48.722022;9.270440;0.000;0.3519;255;255;0;0;',
|
||||
'12.07.2020;09:03:35;48.722020;9.270445;0.000;0.9445;255;255;0;0;',
|
||||
'12.07.2020;09:03:36;48.722018;9.270447;0.000;0.9260;255;255;0;0;',
|
||||
'12.07.2020;09:03:37;48.722020;9.270444;0.000;0.9075;255;255;0;0;',
|
||||
'12.07.2020;09:03:38;48.722021;9.270443;0.000;1.9261;255;255;0;0;',
|
||||
'12.07.2020;09:03:39;48.722018;9.270447;0.000;0.3334;255;255;0;0;',
|
||||
'12.07.2020;09:03:40;48.722020;9.270445;0.000;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:03:41;48.722023;9.270440;0.000;1.2594;255;255;0;0;',
|
||||
'12.07.2020;09:03:42;48.722023;9.270442;0.000;0.5000;255;255;0;0;',
|
||||
'12.07.2020;09:03:43;48.722025;9.270440;0.000;0.6852;220;255;0;0;',
|
||||
'12.07.2020;09:03:44;48.722023;9.270441;0.000;0.8519;199;255;0;0;',
|
||||
'12.07.2020;09:03:45;48.722026;9.270438;0.000;1.4075;255;255;0;0;',
|
||||
'12.07.2020;09:03:46;48.722029;9.270436;0.000;0.5371;255;255;0;0;',
|
||||
'12.07.2020;09:03:47;48.722028;9.270435;0.000;0.8334;97;255;0;0;',
|
||||
'12.07.2020;09:03:48;48.722029;9.270435;0.000;0.3704;255;255;0;0;',
|
||||
'12.07.2020;09:03:49;48.722029;9.270436;0.000;1.1112;96;255;0;0;',
|
||||
'12.07.2020;09:03:50;48.722029;9.270435;0.000;1.8890;255;255;0;0;',
|
||||
'12.07.2020;09:03:51;48.722034;9.270429;0.000;1.0186;255;255;0;0;',
|
||||
'12.07.2020;09:03:52;48.721942;9.270529;128.450;5.2226;255;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;79;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;178;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;89;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;156;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;168;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;255;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;181;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;176;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;186;0;0;',
|
||||
'12.07.2020;09:03:53;48.721929;9.270546;128.450;1.3520;255;255;0;0;',
|
||||
'12.07.2020;09:04:10;48.721896;9.270602;916.230;0.0556;255;255;0;0;',
|
||||
'12.07.2020;09:04:11;48.721894;9.270609;916.230;0.0926;255;192;0;0;',
|
||||
'12.07.2020;09:04:12;48.721892;9.270616;916.230;0.0556;255;255;0;0;',
|
||||
'12.07.2020;09:04:13;48.721890;9.270623;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:04:14;48.721888;9.270629;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:04:15;48.721886;9.270635;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:04:16;48.721883;9.270640;916.230;0.0556;255;255;0;0;',
|
||||
'12.07.2020;09:04:17;48.721881;9.270644;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:04:18;48.721879;9.270649;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:04:19;48.721877;9.270653;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:04:20;48.721876;9.270657;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:04:21;48.721874;9.270658;916.230;0.3148;255;255;0;0;',
|
||||
'12.07.2020;09:04:22;48.721873;9.270659;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:04:23;48.721872;9.270661;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:04:24;48.721871;9.270661;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:04:25;48.721870;9.270660;916.230;0.3334;255;255;0;0;',
|
||||
'12.07.2020;09:04:26;48.721869;9.270658;916.230;0.5000;255;255;0;0;',
|
||||
'12.07.2020;09:04:27;48.721866;9.270660;916.230;1.6853;255;255;0;0;',
|
||||
'12.07.2020;09:04:28;48.721866;9.270659;916.230;0.8704;255;198;0;0;',
|
||||
'12.07.2020;09:04:29;48.721867;9.270659;916.230;0.5741;255;196;0;0;',
|
||||
'12.07.2020;09:04:30;48.721867;9.270660;916.230;0.3148;255;196;0;0;',
|
||||
'12.07.2020;09:04:31;48.721867;9.270659;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:04:32;48.721866;9.270659;916.230;0.0556;255;199;0;0;',
|
||||
'12.07.2020;09:04:33;48.721867;9.270656;916.230;0.1482;255;199;0;0;',
|
||||
'12.07.2020;09:04:34;48.721867;9.270654;916.230;0.0370;255;198;0;0;',
|
||||
'12.07.2020;09:04:35;48.721867;9.270653;916.230;0.1296;255;198;0;0;',
|
||||
'12.07.2020;09:04:36;48.721867;9.270651;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:04:37;48.721867;9.270650;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:04:38;48.721868;9.270650;916.230;0.1852;255;255;0;0;',
|
||||
'12.07.2020;09:04:39;48.721868;9.270649;916.230;0.1667;255;201;0;0;',
|
||||
'12.07.2020;09:04:40;48.721868;9.270647;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:04:41;48.721869;9.270644;916.230;0.0185;255;255;0;0;',
|
||||
'12.07.2020;09:04:42;48.721869;9.270641;916.230;0.0185;255;198;0;0;',
|
||||
'12.07.2020;09:04:43;48.721870;9.270638;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:04:44;48.721870;9.270635;916.230;0.0370;255;199;0;0;',
|
||||
'12.07.2020;09:04:45;48.721871;9.270632;916.230;0.1482;255;204;0;0;',
|
||||
'12.07.2020;09:04:46;48.721871;9.270630;916.230;0.0185;255;201;0;0;',
|
||||
'12.07.2020;09:04:47;48.721873;9.270630;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:04:48;48.721873;9.270629;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:04:49;48.721874;9.270628;916.230;0.4074;255;255;0;0;',
|
||||
'12.07.2020;09:04:50;48.721875;9.270627;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:04:51;48.721876;9.270625;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:04:52;48.721877;9.270623;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:04:53;48.721877;9.270622;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:04:54;48.721879;9.270621;916.230;0.3148;255;255;0;0;',
|
||||
'12.07.2020;09:04:55;48.721881;9.270618;916.230;0.2408;255;255;0;0;',
|
||||
'12.07.2020;09:04:56;48.721883;9.270615;916.230;0.3148;255;255;0;0;',
|
||||
'12.07.2020;09:04:57;48.721884;9.270612;916.230;0.2778;255;255;0;0;',
|
||||
'12.07.2020;09:04:58;48.721885;9.270609;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:04:59;48.721886;9.270606;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:05:00;48.721888;9.270602;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:05:01;48.721889;9.270598;916.230;0.1111;255;191;0;0;',
|
||||
'12.07.2020;09:05:02;48.721890;9.270595;916.230;0.1482;255;193;0;0;',
|
||||
'12.07.2020;09:05:03;48.721891;9.270593;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:05:04;48.721891;9.270591;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:05:05;48.721891;9.270589;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:05:06;48.721891;9.270587;916.230;0.3519;255;199;0;0;',
|
||||
'12.07.2020;09:05:07;48.721891;9.270586;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:08;48.721891;9.270588;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:05:09;48.721890;9.270589;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:05:10;48.721889;9.270589;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:11;48.721888;9.270589;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:05:12;48.721887;9.270589;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:05:13;48.721886;9.270590;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:05:14;48.721885;9.270591;916.230;0.3148;255;255;0;0;',
|
||||
'12.07.2020;09:05:15;48.721885;9.270592;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:05:16;48.721885;9.270596;916.230;0.5556;255;255;0;0;',
|
||||
'12.07.2020;09:05:17;48.721885;9.270598;916.230;0.3519;255;255;0;0;',
|
||||
'12.07.2020;09:05:18;48.721884;9.270600;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:05:19;48.721882;9.270600;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:05:20;48.721881;9.270602;916.230;0.0556;255;255;0;0;',
|
||||
'12.07.2020;09:05:21;48.721879;9.270603;916.230;0.0185;255;206;0;0;',
|
||||
'12.07.2020;09:05:22;48.721878;9.270605;916.230;0.0556;255;203;0;0;',
|
||||
'12.07.2020;09:05:23;48.721876;9.270606;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:05:24;48.721874;9.270605;916.230;0.0185;255;255;0;0;',
|
||||
'12.07.2020;09:05:25;48.721873;9.270605;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:05:26;48.721872;9.270605;916.230;0.1296;255;209;0;0;',
|
||||
'12.07.2020;09:05:27;48.721870;9.270606;916.230;0.0556;255;255;0;0;',
|
||||
'12.07.2020;09:05:28;48.721869;9.270608;916.230;0.1111;255;206;0;0;',
|
||||
'12.07.2020;09:05:29;48.721868;9.270610;916.230;0.3148;255;209;0;0;',
|
||||
'12.07.2020;09:05:30;48.721867;9.270610;916.230;0.2593;255;208;0;0;',
|
||||
'12.07.2020;09:05:31;48.721866;9.270611;916.230;0.0556;255;210;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:32;48.721866;9.270612;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:44;48.721855;9.270602;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:44;48.721855;9.270602;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:05:46;48.721854;9.270602;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:05:46;48.721854;9.270602;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:05:48;48.721852;9.270606;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:05:49;48.721851;9.270611;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:05:50;48.721851;9.270615;916.230;0.1852;255;255;0;0;',
|
||||
'12.07.2020;09:05:51;48.721851;9.270616;916.230;0.0185;255;255;0;0;',
|
||||
'12.07.2020;09:05:52;48.721851;9.270617;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:05:53;48.721852;9.270617;916.230;0.0185;255;255;0;0;',
|
||||
'12.07.2020;09:05:54;48.721853;9.270616;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:05:55;48.721855;9.270613;916.230;0.0556;255;255;0;0;',
|
||||
'12.07.2020;09:05:56;48.721858;9.270609;916.230;0.6482;255;255;0;0;',
|
||||
'12.07.2020;09:05:57;48.721860;9.270606;916.230;0.4260;255;255;0;0;',
|
||||
'12.07.2020;09:05:58;48.721864;9.270601;916.230;0.6297;255;255;0;0;',
|
||||
'12.07.2020;09:05:59;48.721867;9.270595;916.230;0.4260;255;255;0;0;',
|
||||
'12.07.2020;09:06:00;48.721872;9.270589;916.230;0.5000;255;255;0;0;',
|
||||
'12.07.2020;09:06:01;48.721875;9.270584;916.230;0.2593;255;255;0;0;',
|
||||
'12.07.2020;09:06:02;48.721880;9.270578;916.230;0.5186;255;255;0;0;',
|
||||
'12.07.2020;09:06:03;48.721883;9.270574;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:06:04;48.721886;9.270570;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:06:05;48.721890;9.270565;916.230;0.2408;255;255;0;0;',
|
||||
'12.07.2020;09:06:06;48.721893;9.270562;916.230;0.2593;255;255;0;0;',
|
||||
'12.07.2020;09:06:07;48.721893;9.270560;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:06:08;48.721894;9.270559;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:06:09;48.721894;9.270557;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:06:07;48.721896;9.270556;916.230;0.2778;255;255;0;0;',
|
||||
'12.07.2020;09:06:08;48.721896;9.270556;916.230;0.2408;255;255;0;0;',
|
||||
'12.07.2020;09:06:09;48.721895;9.270557;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:06:10;48.721894;9.270559;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:06:11;48.721892;9.270560;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:06:12;48.721891;9.270561;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:06:13;48.721892;9.270562;916.230;0.1852;255;255;0;0;',
|
||||
'12.07.2020;09:06:14;48.721891;9.270564;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:06:15;48.721889;9.270566;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:06:16;48.721888;9.270568;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:06:17;48.721888;9.270570;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:06:18;48.721888;9.270572;916.230;0.4630;255;255;0;0;',
|
||||
'12.07.2020;09:06:19;48.721887;9.270573;916.230;0.4815;255;255;0;0;',
|
||||
'12.07.2020;09:06:20;48.721886;9.270574;916.230;0.3334;255;255;0;0;',
|
||||
'12.07.2020;09:06:21;48.721885;9.270576;916.230;0.1852;255;255;0;0;',
|
||||
'12.07.2020;09:06:22;48.721884;9.270579;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:06:23;48.721882;9.270581;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:06:24;48.721881;9.270584;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:06:25;48.721880;9.270589;916.230;0.2963;255;255;0;0;',
|
||||
'12.07.2020;09:06:26;48.721879;9.270596;916.230;0.3519;255;255;0;0;',
|
||||
'12.07.2020;09:06:27;48.721878;9.270602;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:06:28;48.721876;9.270601;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:06:29;48.721874;9.270603;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:06:30;48.721873;9.270607;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:06:31;48.721872;9.270614;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:06:32;48.721870;9.270613;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:06:33;48.721869;9.270614;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:06:34;48.721868;9.270616;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:06:35;48.721867;9.270617;916.230;0.2593;255;255;0;0;',
|
||||
'12.07.2020;09:06:36;48.721867;9.270618;916.230;0.1852;255;255;0;0;',
|
||||
'12.07.2020;09:06:37;48.721867;9.270618;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:06:38;48.721867;9.270616;916.230;0.2963;255;255;0;0;',
|
||||
'12.07.2020;09:06:39;48.721867;9.270613;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:06:40;48.721867;9.270607;916.230;0.0185;255;255;0;0;',
|
||||
'12.07.2020;09:06:41;48.721866;9.270601;916.230;0.5186;255;255;0;0;',
|
||||
'12.07.2020;09:06:42;48.721866;9.270593;916.230;0.2963;255;255;0;0;',
|
||||
'12.07.2020;09:06:43;48.721866;9.270587;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:06:44;48.721866;9.270581;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:06:45;48.721866;9.270576;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:06:46;48.721866;9.270567;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:06:47;48.721866;9.270558;916.230;0.4074;255;255;0;0;',
|
||||
'12.07.2020;09:06:48;48.721866;9.270550;916.230;0.4260;255;255;0;0;',
|
||||
'12.07.2020;09:06:49;48.721866;9.270543;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:06:50;48.721867;9.270537;916.230;0.2778;255;255;0;0;',
|
||||
'12.07.2020;09:06:51;48.721867;9.270532;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:06:52;48.721867;9.270526;916.230;0.3148;255;255;0;0;',
|
||||
'12.07.2020;09:06:53;48.721868;9.270522;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:06:54;48.721868;9.270517;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:06:55;48.721869;9.270512;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:06:56;48.721869;9.270506;916.230;0.2408;255;255;0;0;',
|
||||
'12.07.2020;09:06:57;48.721869;9.270503;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:06:58;48.721870;9.270500;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:06:59;48.721870;9.270497;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:07:00;48.721871;9.270494;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:07:01;48.721871;9.270493;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:07:02;48.721871;9.270492;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:07:03;48.721872;9.270490;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:07:04;48.721873;9.270489;916.230;0.6667;255;255;0;0;',
|
||||
'12.07.2020;09:07:05;48.721873;9.270487;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:07:06;48.721873;9.270486;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:07:07;48.721873;9.270486;916.230;0.2408;255;255;0;0;',
|
||||
'12.07.2020;09:07:08;48.721873;9.270485;916.230;0.1852;255;255;0;0;',
|
||||
'12.07.2020;09:07:09;48.721873;9.270485;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:07:10;48.721872;9.270485;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:07:11;48.721870;9.270486;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:07:12;48.721869;9.270489;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:07:13;48.721867;9.270492;916.230;0.3148;255;255;0;0;',
|
||||
'12.07.2020;09:07:14;48.721865;9.270494;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:07:15;48.721863;9.270495;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:07:16;48.721861;9.270497;916.230;0.0000;255;255;0;0;',
|
||||
'12.07.2020;09:07:17;48.721860;9.270496;916.230;0.4074;255;255;0;0;',
|
||||
'12.07.2020;09:07:18;48.721859;9.270495;916.230;0.4445;255;255;0;0;',
|
||||
'12.07.2020;09:07:19;48.721857;9.270496;916.230;0.3889;255;255;0;0;',
|
||||
'12.07.2020;09:07:20;48.721856;9.270496;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:07:21;48.721854;9.270494;916.230;0.7593;255;255;0;0;',
|
||||
'12.07.2020;09:07:22;48.721851;9.270496;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:07:23;48.721850;9.270497;916.230;0.1667;255;255;0;0;',
|
||||
'12.07.2020;09:07:24;48.721848;9.270501;916.230;0.4074;255;255;0;0;',
|
||||
'12.07.2020;09:07:25;48.721847;9.270504;916.230;0.4074;255;255;0;0;',
|
||||
'12.07.2020;09:07:26;48.721846;9.270505;916.230;0.2037;255;255;0;0;',
|
||||
'12.07.2020;09:07:27;48.721844;9.270508;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:07:28;48.721843;9.270507;916.230;0.4630;255;255;0;0;',
|
||||
'12.07.2020;09:07:29;48.721842;9.270509;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:07:30;48.721841;9.270512;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:07:31;48.721840;9.270515;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:07:32;48.721839;9.270517;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:07:33;48.721838;9.270522;916.230;0.0556;255;255;0;0;',
|
||||
'12.07.2020;09:07:34;48.721838;9.270527;916.230;0.3889;255;255;0;0;',
|
||||
'12.07.2020;09:07:35;48.721837;9.270530;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:07:36;48.721836;9.270532;916.230;0.1111;255;255;0;0;',
|
||||
'12.07.2020;09:07:37;48.721835;9.270536;916.230;0.6112;255;255;0;0;',
|
||||
'12.07.2020;09:07:38;48.721835;9.270541;916.230;1.1668;255;255;0;0;',
|
||||
'12.07.2020;09:07:39;48.721835;9.270543;916.230;0.3889;255;255;0;0;',
|
||||
'12.07.2020;09:07:40;48.721834;9.270545;916.230;0.5000;255;255;0;0;',
|
||||
'12.07.2020;09:07:41;48.721834;9.270544;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:07:42;48.721834;9.270545;916.230;0.7593;255;255;0;0;',
|
||||
'12.07.2020;09:07:43;48.721834;9.270545;916.230;0.8890;255;255;0;0;',
|
||||
'12.07.2020;09:07:44;48.721834;9.270543;916.230;0.4260;255;255;0;0;',
|
||||
'12.07.2020;09:07:45;48.721834;9.270541;916.230;0.2408;255;255;0;0;',
|
||||
'12.07.2020;09:07:46;48.721834;9.270540;916.230;0.3148;255;255;0;0;',
|
||||
'12.07.2020;09:07:47;48.721835;9.270538;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:07:48;48.721835;9.270535;916.230;0.0556;255;255;0;0;',
|
||||
'12.07.2020;09:07:49;48.721835;9.270534;916.230;0.8890;255;255;0;0;',
|
||||
'12.07.2020;09:07:50;48.721835;9.270534;916.230;0.5926;255;255;0;0;',
|
||||
'12.07.2020;09:07:51;48.721835;9.270534;916.230;0.7593;255;255;0;0;',
|
||||
'12.07.2020;09:07:52;48.721836;9.270533;916.230;0.2408;255;255;0;0;',
|
||||
'12.07.2020;09:07:53;48.721836;9.270531;916.230;0.0741;255;255;0;0;',
|
||||
'12.07.2020;09:07:54;48.721836;9.270529;916.230;0.3889;255;255;0;0;',
|
||||
'12.07.2020;09:07:55;48.721836;9.270530;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:07:56;48.721836;9.270530;916.230;0.0185;255;255;0;0;',
|
||||
'12.07.2020;09:07:57;48.721837;9.270531;916.230;0.0185;255;255;0;0;',
|
||||
'12.07.2020;09:07:58;48.721837;9.270530;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:07:59;48.721838;9.270526;916.230;0.3519;255;255;0;0;',
|
||||
'12.07.2020;09:08:00;48.721838;9.270521;916.230;0.4260;255;255;0;0;',
|
||||
'12.07.2020;09:08:01;48.721839;9.270522;916.230;0.5556;255;255;0;0;',
|
||||
'12.07.2020;09:08:02;48.721840;9.270524;916.230;0.3519;255;255;0;0;',
|
||||
'12.07.2020;09:08:03;48.721842;9.270525;916.230;0.2963;255;255;0;0;',
|
||||
'12.07.2020;09:08:04;48.721843;9.270525;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:08:05;48.721844;9.270524;916.230;0.2222;255;255;0;0;',
|
||||
'12.07.2020;09:08:06;48.721846;9.270522;916.230;0.3704;255;255;0;0;',
|
||||
'12.07.2020;09:08:07;48.721847;9.270519;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:08:08;48.721848;9.270516;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:08:09;48.721849;9.270514;916.230;0.1296;255;255;0;0;',
|
||||
'12.07.2020;09:08:10;48.721850;9.270512;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:08:11;48.721851;9.270513;916.230;0.3334;255;255;0;0;',
|
||||
'12.07.2020;09:08:12;48.721851;9.270512;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:08:13;48.721851;9.270512;916.230;0.2593;255;255;0;0;',
|
||||
'12.07.2020;09:08:14;48.721852;9.270511;916.230;0.0926;255;255;0;0;',
|
||||
'12.07.2020;09:08:15;48.721853;9.270512;916.230;0.0370;255;255;0;0;',
|
||||
'12.07.2020;09:08:16;48.721852;9.270515;916.230;0.5371;255;255;0;0;',
|
||||
'12.07.2020;09:08:17;48.721853;9.270517;916.230;0.1482;255;255;0;0;',
|
||||
'12.07.2020;09:08:18;48.721854;9.270519;916.230;0.3148;255;255;0;0;',
|
||||
'12.07.2020;09:08:19;48.721855;9.270520;916.230;0.2408;255;255;0;0;',
|
||||
'12.07.2020;09:08:20;48.721856;9.270523;916.230;0.3704;255;255;0;0;',
|
||||
];
|
||||
|
||||
// Old firmware uploads used '$' instead of newlines; joining the fixture rows
// with '$' reproduces exactly what such a device would have sent.
const test1 = TEST_ROWS.join('$');
|
||||
|
||||
const test2 = `OBSFirmwareVersion=v0.3.999&OBSDataFormat=2&DataPerMeasurement=3&MaximumMeasurementsPerLine=60&OffsetLeft=30&OffsetRight=30&NumberOfDefinedPrivacyAreas=3&PrivacyLevelApplied=AbsolutePrivacy&MaximumValidFlightTimeMicroseconds=18560&DistanceSensorsUsed=HC-SR04/JSN-SR04T&DeviceId=ECEC&OBSUserID=32423432342234
|
||||
Date;Time;Millis;Comment;Latitude;Longitude;Altitude;Course;Speed;HDOP;Satellites;BatteryLevel;Left;Right;Confirmed;Marked;Invalid;InsidePrivacyArea;Factor;Measurements;Tms1;Lus1;Rus1;Tms2;Lus2;Rus2;Tms3;Lus3;Rus3;Tms4;Lus4;Rus4;Tms5;Lus5;Rus5;Tms6;Lus6;Rus6;Tms7;Lus7;Rus7;Tms8;Lus8;Rus8;Tms9;Lus9;Rus9;Tms10;Lus10;Rus10;Tms11;Lus11;Rus11;Tms12;Lus12;Rus12;Tms13;Lus13;Rus13;Tms14;Lus14;Rus14;Tms15;Lus15;Rus15;Tms16;Lus16;Rus16;Tms17;Lus17;Rus17;Tms18;Lus18;Rus18;Tms19;Lus19;Rus19;Tms20;Lus20;Rus20;Tms21;Lus21;Rus21;Tms22;Lus22;Rus22;Tms23;Lus23;Rus23;Tms24;Lus24;Rus24;Tms25;Lus25;Rus25;Tms26;Lus26;Rus26;Tms27;Lus27;Rus27;Tms28;Lus28;Rus28;Tms29;Lus29;Rus29;Tms30;Lus30;Rus30;Tms31;Lus31;Rus31;Tms32;Lus32;Rus32;Tms33;Lus33;Rus33;Tms34;Lus34;Rus34;Tms35;Lus35;Rus35;Tms36;Lus36;Rus36;Tms37;Lus37;Rus37;Tms38;Lus38;Rus38;Tms39;Lus39;Rus39;Tms40;Lus40;Rus40;Tms41;Lus41;Rus41;Tms42;Lus42;Rus42;Tms43;Lus43;Rus43;Tms44;Lus44;Rus44;Tms45;Lus45;Rus45;Tms46;Lus46;Rus46;Tms47;Lus47;Rus47;Tms48;Lus48;Rus48;Tms49;Lus49;Rus49;Tms50;Lus50;Rus50;Tms51;Lus51;Rus51;Tms52;Lus52;Rus52;Tms53;Lus53;Rus53;Tms54;Lus54;Rus54;Tms55;Lus55;Rus55;Tms56;Lus56;Rus56;Tms57;Lus57;Rus57;Tms58;Lus58;Rus58;Tms59;Lus59;Rus59;Tms60;Lus60;Rus60
|
||||
18.11.2020;16:05:59;1265034;;48.723224;9.094103;495.3;189.86;3.2;1.01;7;3.74;770;;0;0;58;54;0;6231;;16;;;36;6350;;52;;;72;6263;;87;;;107;6828;;122;;;143;6836;;158;;;178;6936;;193;;;213;7094;;228;;;248;6822;;263;;;284;7019;;299;;;319;6942;;334;;;354;7110;;370;;;390;7203;;405;;;425;7758;;440;;;461;7266;;476;;;496;7499;;511;;;531;7328;;546;;;567;7354;;582;;;602;7397;;617;;;637;;;664;;;684;16615;;708;;;728;9161;;745;;;765;10238;;783;;;802;8525;;818;;;839;7756;;854;;;875;7580;;890;;;910;7926;;925;;;945;7624;;960;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:00;1266041;DEVELOP: GPSMessages: 2587 GPS crc errors: 0;48.723205;9.0941;495.4;189.86;2.87;1.01;7;3.74;1020;;0;0;58;53;0;8012;;27;;;47;7999;;62;;;83;7660;;98;;;118;7698;;133;;;158;1252;;169;;;194;1146;;204;;;229;1173;;239;;;264;1173;;274;;;300;1147;;310;;;335;7943;;352;;;371;8713;;387;;;407;8005;;423;;;443;8021;;458;;;478;;;505;;;525;8111;;541;;;560;8074;;576;;;596;8254;;612;;;632;8514;;647;;;667;8195;;682;;;703;8094;;718;;;738;8123;;754;;;774;8330;;789;;;810;8966;;826;;;846;9066;;862;;;882;10553;;899;;;920;8345;;935;;;955;9219;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:01;1267037;DEVELOP: Mem: 45k Buffer: 4k last write time: 58;48.723197;9.094089;495.7;189.86;2.93;1.01;7;3.74;1090;;0;0;58;53;0;8164;;18;;;39;8184;;53;;;74;16305;;98;;;118;8658;;135;;;155;8198;;170;;;190;8133;;205;;;226;8536;;241;;;261;8676;;276;;;296;8516;;314;;;334;8114;;350;;;370;8294;;385;;;405;8751;;422;;;441;8163;;457;;;478;8062;;493;;;513;8093;;528;;;549;8060;;564;;;584;8085;;599;;;619;8071;;634;;;655;8262;;671;;;690;8746;;707;;;726;9116;;742;;;762;;;789;;;808;8121;;825;;;845;8113;;860;;;881;8129;;896;;;916;8096;;932;;;952;10617;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:02;1268027;;48.723185;9.094076;496.1;189.86;3.02;1.01;7;3.74;980;;0;0;58;55;0;8173;;18;;;37;8535;;53;;;73;8435;;88;;;109;8592;;124;;;144;8012;;159;;;180;8037;;195;;;215;7975;;230;;;250;7970;;265;;;286;7850;;301;;;321;7861;;336;;;356;7826;;371;;;392;8097;;407;;;427;8467;;443;;;463;7763;;478;;;498;7687;;513;;;534;7950;;549;;;569;7806;;584;;;604;8253;;620;;;640;7753;;656;;;676;8188;;692;;;711;7533;;727;;;747;7791;;763;;;783;7460;;798;;;825;9827;;843;;;863;7432;;878;;;904;7646;;919;;;939;7538;;955;;;974;7508;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:03;1269096;;48.723177;9.094068;496.2;189.86;3;1.01;7;3.74;920;;0;0;58;51;0;7218;;19;;;38;8144;;54;;;74;7463;;89;;;110;7856;;125;;;145;7869;;161;;;181;7422;;196;;;216;7934;;232;;;252;7363;;267;;;293;7297;;307;;;332;8105;;348;;;367;7468;;383;;;403;7213;;418;;;439;7172;;454;;;478;7184;;489;;;514;7312;;528;;;550;7175;;565;;;585;7180;;600;;;620;7013;;635;;;655;7154;;670;;;691;7240;;706;;;726;7075;;741;;;761;7133;;776;;;801;7511;;815;;;836;7639;;851;;;872;8891;;888;;;908;7070;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:04;1270033;;48.723167;9.094056;496.6;189.86;3.19;1.01;7;3.74;870;;0;0;58;53;0;7617;;19;;;39;6812;;55;;;80;1173;;90;;;116;8173;;133;;;152;7431;;168;;;188;7197;;203;;;223;6984;;238;;;259;7218;;274;;;294;6881;;309;;;329;7111;;344;;;365;7500;;380;;;400;7462;;415;;;435;7094;;450;;;471;6820;;486;;;506;7147;;521;;;541;9156;;558;;;578;6961;;594;;;614;;;641;;;660;7176;;676;;;696;7177;;712;;;732;7199;;747;;;767;7218;;782;;;802;7360;;817;;;838;;;865;;;884;;;904;;;924;;;943;;;962;7252;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:05;1271032;;48.723153;9.094046;496.5;189.86;3.48;1.01;7;3.74;940;;0;0;58;54;0;7295;;14;;;35;7183;;50;;;71;7283;;85;;;106;8957;;122;;;142;8178;;158;;;178;7814;;194;;;213;7495;;229;;;249;7713;;265;;;285;7305;;300;;;320;7654;;335;;;356;7687;;371;;;391;7634;;406;;;426;7167;;441;;;461;;;488;;;508;7245;;524;;;544;7283;;559;;;580;7150;;595;;;615;7194;;630;;;650;7410;;665;;;686;7670;;702;;;721;7421;;737;;;757;7588;;772;;;792;7452;;809;;;828;8162;;844;;;865;9078;;881;;;901;7563;;917;;;936;7775;;952;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:06;1272031;;48.723146;9.094036;496.5;189.86;2.44;1.01;7;3.74;1000;;0;0;58;54;0;8193;;18;;;39;7629;;54;;;74;;;102;;;121;7778;;137;;;157;7773;;172;;;193;7922;;208;;;228;7706;;243;;;263;8881;;280;;;299;7776;;315;;;334;7797;;350;;;370;8683;;386;;;406;7863;;422;;;441;7901;;457;;;477;7747;;492;;;513;8246;;529;;;549;7756;;564;;;585;7667;;600;;;620;7657;;635;;;655;;;682;;;702;8193;;719;;;738;7751;;754;;;774;7731;;789;;;809;8109;;825;;;845;7623;;860;;;880;7883;;895;;;916;7579;;931;;;951;7514;;966;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:07;1273044;;48.723134;9.094026;496.7;189.86;3.44;1.01;7;3.74;990;;0;0;58;53;0;7543;;15;;;37;7535;;52;;;72;9628;;90;;;109;8166;;125;;;146;7469;;161;;;181;7923;;197;;;216;7651;;232;;;252;7594;;267;;;288;7796;;303;;;323;7960;;338;;;359;7862;;373;;;394;7633;;409;;;429;7926;;444;;;465;7661;;479;;;500;7546;;515;;;535;7522;;550;;;570;8461;;587;;;606;7520;;622;;;643;;;668;;;688;7495;;704;;;723;7672;;739;;;759;7964;;774;;;795;8725;;811;;;831;7366;;847;;;867;7586;;882;;;902;8634;;919;;;938;;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:08;1274030;;48.723126;9.094013;496.7;230.11;3.54;1.01;7;3.74;850;;0;0;58;53;0;7452;;29;;;49;7446;;65;;;91;1147;;102;;;127;7517;;143;;;163;7411;;178;;;204;1148;;214;;;240;7282;;256;;;281;1201;;291;;;316;1144;;326;;;351;1173;;362;;;386;6718;;401;;;422;7303;;437;;;461;7621;;476;;;497;7557;;511;;;532;7451;;547;;;567;7658;;583;;;603;7534;;618;;;638;7306;;653;;;673;7222;;688;;;709;7169;;724;;;744;7115;;759;;;779;7277;;794;;;815;;;841;;;861;;;881;;;900;7403;;916;;;936;7356;;951;;;972;7030;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:09;1275038;;48.723121;9.093994;496.9;237.39;3.33;1.01;7;3.74;730;;0;0;58;54;0;7327;;16;;;36;6876;;52;;;72;6953;;87;;;107;7261;;122;;;142;6702;;158;;;178;7286;;193;;;213;6605;;228;;;249;7168;;264;;;284;6641;;299;;;324;7059;;339;;;359;7568;;374;;;394;6476;;409;;;430;6589;;445;;;470;1174;;480;;;505;1173;;515;;;541;1175;;551;;;576;1149;;586;;;611;6222;;626;;;647;6722;;661;;;687;5939;;700;;;723;5989;;735;;;760;6144;;773;;;795;6019;;808;;;830;6306;;844;;;866;6169;;879;;;901;6288;;914;;;936;9882;;954;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:10;1276038;;48.723117;9.093979;497.4;247.62;2.96;1.01;7;3.74;7;69;0;;0;0;58;52;0;;;30;;;50;;;69;;;89;;;109;;6187;124;9730;;144;;6203;160;14558;;182;;6178;195;;;222;;6233;235;;;257;;6323;275;;;295;;6379;311;;;331;;6371;346;8588;;366;;6330;381;2150;;401;;6275;417;1200;;437;;6184;461;;;488;;6033;505;;;525;;5943;543;2550;;561;;5872;579;2563;;596;;5844;614;1225;;631;;5835;650;1173;;667;;5799;685;2243;;702;;5804;720;2275;;737;;5798;759;;;785;;5854;805;;;825;;5984;840;;;860;;5979;875;;;895;;6027;911;7850;;931;;6001;946;13531;;969;;5957;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:11;1277041;;48.723117;9.093965;497.5;247.62;2.74;1.01;7;3.74;143;72;0;;0;0;58;48;0;;;30;;5966;43;;;65;;5940;78;12209;;101;;5923;113;;;140;;7918;155;18175;;182;;6159;199;;;226;;6047;239;;;261;;6283;274;;;297;;;328;;;348;;6015;364;;;384;;;418;1174;;428;;6166;453;;;480;;6265;498;;;518;;6241;536;9449;;553;;6311;571;17498;;597;;6394;611;;;638;;6380;652;;;673;;6408;687;;;708;;7059;722;7897;;744;;7059;759;10810;;779;;6459;794;;;822;;6680;841;15140;;864;;;891;6403;;906;;9053;930;10084;;948;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:12;1278045;;48.723109;9.093963;498;247.62;2.17;1.79;7;3.74;143;76;0;;0;0;58;48;0;;;30;;6485;52;;;71;;6321;93;;;113;;6283;128;;;148;;6319;164;10355;;184;;6232;199;17561;;225;;6259;238;;;265;;;292;16478;;317;;;344;8916;;361;;6160;379;;;406;;6242;426;;;445;;6318;461;;;481;;6172;496;;;516;;6271;534;;;554;;6184;571;10174;;590;;6204;607;14878;;630;;6333;643;;;670;;6332;683;;;705;;6231;718;;;740;;6227;753;;;776;;;803;;;822;;6469;844;;;864;;6215;879;;;899;;;927;;;946;;6326;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:13;1279029;;48.723109;9.093963;498;247.62;0;1.79;7;3.74;116;79;0;;0;0;58;47;0;;;20;;6783;41;;;61;;6343;81;;;101;;6506;121;11871;;142;;6364;162;16368;;185;;6365;197;;;224;;;250;15312;;274;;6698;295;12786;;315;;6428;330;;;357;;6556;375;17429;;401;;6426;418;16587;;444;;6539;462;;;488;;;515;18278;;542;;6507;556;;;584;;6506;593;;;619;;6774;632;;;654;;6775;668;;;690;;;717;;;737;;;764;;;784;;6708;799;;;819;;6908;840;8503;;856;;;882;1202;;893;;6729;917;13601;;939;;;974;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:14;1280049;;48.723109;9.093963;498;247.62;0;1.1;7;3.74;121;92;0;;0;0;58;45;0;;;33;;7087;56;;;75;;7170;94;;;113;;7158;129;;;149;;7575;164;;;184;;7233;204;13424;;226;;7616;248;18289;;282;;;315;;;342;;7353;357;;;377;;7521;400;;;419;;7510;435;;;455;;;481;;;500;;7484;516;12940;;537;;;571;8777;;588;;;615;11659;;634;;;660;1174;;670;;;696;;;723;;;742;;;762;;7640;778;;;797;;7980;819;;;839;;8759;855;;;875;;7752;890;11740;;910;;7612;929;12291;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:15;1281028;;48.723109;9.093963;498;247.62;0;1.01;7;3.74;133;95;0;;0;0;58;48;0;;;30;;7564;49;12590;;70;;7576;84;;;111;;7584;120;;;146;;7794;161;;;182;;7826;196;;;217;;;244;;;264;;8057;286;;;305;;7525;321;8033;;340;;7657;360;11295;;380;;;407;9480;;423;;;451;12842;;472;;7540;487;16790;;512;;;538;1175;;549;;;574;1175;;584;;7788;609;1175;;619;;;647;;;673;;7354;697;;;716;;7287;738;;;758;;7454;773;;;793;;7286;812;9244;;829;;7388;848;17471;;874;;7246;893;;;919;;;952;8724;;968;;7324;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
18.11.2020;16:06:16;1282037;;48.723109;9.093963;498;247.62;0;1.01;7;3.74;5;89;20;;0;0;58;49;0;;;31;;7290;44;;;66;;7277;80;;;101;;;128;;;148;;7213;164;;;183;;6901;202;10902;;223;;7060;242;2257;;258;;7057;277;2124;;293;;7045;313;1201;;328;;;361;2137;;371;;6931;396;2055;;407;;6910;432;1201;;442;;;468;2042;;478;;6961;503;1201;;513;;;548;12669;;568;;6909;590;;;617;;7063;636;;;656;;7148;672;;;691;;6777;707;;;727;;6903;747;11631;;767;;;793;1174;;803;;7283;828;;;856;;;889;9154;;908;;7489;929;9129;;943;;7430;965;14679;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
`;
|
||||
|
||||
const test3 = `Date;Time;Millis;Comment;Latitude;Longitude;Altitude;Course;Speed;HDOP;Satellites;BatteryLevel;Left;Right;Confirmed;Marked;Invalid;InsidePrivacyArea;Factor;Measurements;Tms1;Lus1;Rus1;Tms2;Lus2;Rus2;Tms3;Lus3;Rus3;Tms4;Lus4;Rus4;Tms5;Lus5;Rus5;Tms6;Lus6;Rus6;Tms7;Lus7;Rus7;Tms8;Lus8;Rus8;Tms9;Lus9;Rus9;Tms10;Lus10;Rus10;Tms11;Lus11;Rus11;Tms12;Lus12;Rus12;Tms13;Lus13;Rus13;Tms14;Lus14;Rus14;Tms15;Lus15;Rus15;Tms16;Lus16;Rus16;Tms17;Lus17;Rus17;Tms18;Lus18;Rus18;Tms19;Lus19;Rus19;Tms20;Lus20;Rus20;Tms21;Lus21;Rus21;Tms22;Lus22;Rus22;Tms23;Lus23;Rus23;Tms24;Lus24;Rus24;Tms25;Lus25;Rus25;Tms26;Lus26;Rus26;Tms27;Lus27;Rus27;Tms28;Lus28;Rus28;Tms29;Lus29;Rus29;Tms30;Lus30;Rus30;Tms31;Lus31;Rus31;Tms32;Lus32;Rus32;Tms33;Lus33;Rus33;Tms34;Lus34;Rus34;Tms35;Lus35;Rus35;Tms36;Lus36;Rus36;Tms37;Lus37;Rus37;Tms38;Lus38;Rus38;Tms39;Lus39;Rus39;Tms40;Lus40;Rus40;Tms41;Lus41;Rus41;Tms42;Lus42;Rus42;Tms43;Lus43;Rus43;Tms44;Lus44;Rus44;Tms45;Lus45;Rus45;Tms46;Lus46;Rus46;Tms47;Lus47;Rus47;Tms48;Lus48;Rus48;Tms49;Lus49;Rus49;Tms50;Lus50;Rus50;Tms51;Lus51;Rus51;Tms52;Lus52;Rus52;Tms53;Lus53;Rus53;Tms54;Lus54;Rus54;Tms55;Lus55;Rus55;Tms56;Lus56;Rus56;Tms57;Lus57;Rus57;Tms58;Lus58;Rus58;Tms59;Lus59;Rus59;Tms60;Lus60;Rus60;
|
||||
21.11.2020;14:27:00;66890;;;;;;;3.83;4;3.99;;286;0;;0;0;58;5;0;;;41;;18355;67;;;87;;18374;113;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
|
||||
`;
|
||||
|
||||
// Fixture exports: test1 is version 1 data using the '$' newline hack, test2
// is a version 2 file with an OBSDataFormat=2 metadata line, test3 is version
// 2 data starting directly with the CSV header (no metadata line).
module.exports = { test1, test2, test3 };
|
|
@ -1,312 +0,0 @@
|
|||
const csvParse = require('csv-parse/lib/sync');
|
||||
const csvStringify = require('csv-stringify/lib/sync');
|
||||
|
||||
function _parseFloat(token) {
|
||||
if (typeof token !== 'string') {
|
||||
return null;
|
||||
}
|
||||
|
||||
token = token.trim();
|
||||
|
||||
if (token === '') {
|
||||
return null;
|
||||
}
|
||||
|
||||
if (/^nan$/i.test(token)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
let f = parseFloat(token);
|
||||
|
||||
if (isNaN(f)) {
|
||||
f = parseFloat(token.substring(0, 10));
|
||||
}
|
||||
|
||||
if (isNaN(f)) {
|
||||
f = 0.0;
|
||||
}
|
||||
|
||||
return f;
|
||||
}
|
||||
|
||||
/**
 * Parse a single CSV token into an integer.
 *
 * Delegates to `_parseFloat` (so the same null rules apply) and floors the
 * result.
 */
function _parseInt(token) {
  const value = _parseFloat(token);
  return value === null ? null : Math.floor(value);
}
|
||||
|
||||
function _parseString(token) {
|
||||
if (typeof token !== 'string') {
|
||||
return null;
|
||||
}
|
||||
// This time we do not trim -- because we assume that the quoting mechanism
|
||||
// from CSV might have kicked in and we actually want the spacing around the
|
||||
// token.
|
||||
|
||||
if (token === '') {
|
||||
return null;
|
||||
}
|
||||
|
||||
return token;
|
||||
}
|
||||
|
||||
function replaceDollarNewlinesHack(body) {
|
||||
// see if we are using the hack with $ as newlines, replace them for the csv parser
|
||||
if (body.endsWith('$') || /insidePrivacyArea;\$/.test(body)) {
|
||||
return body.replace(/\$/g, '\n');
|
||||
}
|
||||
|
||||
return body;
|
||||
}
|
||||
|
||||
/**
 * Parse a track upload body into a stream of track point records.
 *
 * @param {Buffer|string} body - raw upload body; Buffers are decoded as UTF-8.
 * @param {number|null} format - explicit format version (1 or 2); when null,
 *   the format is auto-detected from the body.
 * @throws {Error} when the format cannot be detected or is unsupported.
 * @yields track point records in the version 1 storage shape.
 */
function* parseTrackPoints(body, format = null) {
  if (body instanceof Buffer) {
    body = body.toString('utf-8');
  }

  body = replaceDollarNewlinesHack(body);

  const detectedFormat = format != null ? format : detectFormat(body);

  let parser;
  switch (detectedFormat) {
    case 'invalid':
      throw new Error('track format cannot be detected');

    case 1:
      parser = parseObsver1;
      break;

    case 2:
      parser = parseObsver2;
      break;

    default:
      // Previously an unknown explicit `format` fell through with `parser`
      // undefined and crashed with an unhelpful "parser is not a function"
      // TypeError; fail with a clear message instead.
      throw new Error(`unsupported track format: ${detectedFormat}`);
  }

  yield* parser(body);
}
|
||||
|
||||
/**
 * Detect the format version of a track body.
 *
 * Returns 1 or 2 for known formats, or the string 'invalid' when no format
 * can be determined. The checks run in priority order on the first line only.
 */
function detectFormat(body) {
  body = replaceDollarNewlinesHack(body);

  if (!body.length) {
    return 'invalid';
  }

  // Header and data must be separated by at least one newline. A file with
  // no linebreak at all cannot contain both, so it is invalid.
  const newlineAt = body.indexOf('\n');
  if (newlineAt === -1) {
    return 'invalid';
  }

  const firstLine = body.substring(0, newlineAt);

  // A metadata line declares the format version explicitly.
  const versionMatch = firstLine.match(/(^|&)OBSDataFormat=([\d]+)($|&)/);
  if (versionMatch) {
    return Number(versionMatch[2]);
  }

  // No metadata line, but a CSV header that contains `;Rus` columns: that
  // header shape only exists in version 2.
  if (/^Date;Time.*;Rus/.test(firstLine)) {
    return 2;
  }

  // No metadata line and a plain CSV header: version 1.
  if (/^Date;Time/.test(firstLine)) {
    return 1;
  }

  // Data starts immediately with a DD.MM.YYYY date: an old OBS that sends no
  // header at all, which implies the old format as well.
  if (/^[0-9]{2}\.[0-9]{2}\.[0-9]{4};/.test(firstLine)) {
    return 1;
  }

  return 'invalid';
}
|
||||
|
||||
/**
 * Parse a version 1 CSV body into track point records.
 *
 * Column names are forced here instead of being read from the file: the
 * column order never changed across firmware versions, only the header
 * wording did, so fixing the names avoids any translation. The header line
 * itself (a line whose first field is "Date") is skipped during iteration.
 * Original header usually is:
 * Date;Time;Latitude;Longitude;Course;Speed;Right;Left;Confirmed;insidePrivacyArea
 */
function* parseObsver1(body) {
  const FLOAT_COLUMNS = ['latitude', 'longitude', 'course', 'speed'];
  const INT_COLUMNS = ['d1', 'd2', 'flag'];

  const records = csvParse(body, {
    delimiter: ';',
    encoding: 'utf8',
    columns: ['date', 'time', 'latitude', 'longitude', 'course', 'speed', 'd1', 'd2', 'flag', 'private'],
    relax_column_count: true,
    cast(value, { column }) {
      if (FLOAT_COLUMNS.includes(column)) {
        return _parseFloat(value);
      }
      if (INT_COLUMNS.includes(column)) {
        return _parseInt(value);
      }
      if (column === 'private') {
        return Boolean(_parseInt(value));
      }
      return _parseString(value);
    },
  });

  for (const record of records) {
    // Skip the header line.
    if (record.date === 'Date') {
      continue;
    }

    if (!record.latitude && !record.longitude) {
      // Invalid record: report lat/lng as `null` instead of `0`.
      record.latitude = null;
      record.longitude = null;
    }

    // In the old format, 255 or 999 means "no measurement".
    for (const key of ['d1', 'd2']) {
      if (record[key] === 255 || record[key] === 999) {
        record[key] = null;
      }
    }

    yield record;
  }
}
|
||||
|
||||
/**
 * Parse a version 2 CSV body into track point records.
 *
 * The version 2 file starts with a metadata line (skipped via `from_line: 2`)
 * followed by a CSV header; columns are typed by name. Each record is
 * converted back into the version 1 storage shape for now, until the storage
 * format carries all fields -- the obsApp has to be upgraded first.
 */
function* parseObsver2(body) {
  const INT_COLUMNS = new Set([
    'Millis',
    'Left',
    'Right',
    'Confirmed',
    'Invalid',
    'InsidePrivacyArea',
    'Measurements',
    'Satellites',
  ]);
  const FLOAT_COLUMNS = new Set([
    'Latitude',
    'Longitude',
    'Altitude',
    'Course',
    'Speed',
    'HDOP',
    'BatteryLevel',
    'Factor',
  ]);
  // The repeated per-measurement columns (Tms1, Lus1, Rus1, ...) are
  // integers; all remaining columns (Date, Time, Comment, Marked, ...) stay
  // strings.
  const MEASUREMENT_COLUMN = /^(Tms|Lus|Rus)/;

  const records = csvParse(body, {
    from_line: 2,
    trim: true,
    columns: true,
    skip_empty_lines: true,
    delimiter: ';',
    encoding: 'utf8',
    relax_column_count: true,
    cast(value, context) {
      if (value === '') {
        return null;
      }

      if (INT_COLUMNS.has(context.column) || MEASUREMENT_COLUMN.test(context.column)) {
        return _parseInt(value);
      }
      if (FLOAT_COLUMNS.has(context.column)) {
        return _parseFloat(value);
      }
      return _parseString(value);
    },
  });

  for (const record of records) {
    yield {
      date: record.Date,
      time: record.Time,
      latitude: record.Latitude,
      longitude: record.Longitude,
      course: record.Course,
      speed: record.Speed,
      d1: record.Left,
      d2: record.Right,
      flag: Boolean(record.Confirmed),
      private: Boolean(record.InsidePrivacyArea),
    };
  }
}
|
||||
|
||||
/**
|
||||
* This function normalizes a User-Agent header for storage in the database. It
|
||||
* make sure that we only store the user-agent if it matches the pattern
|
||||
* `OBS/*`, and extracts that part of the user agent, if it contains more
|
||||
* information. This is the only part we are interested in, the
|
||||
* remainder is too privacy sensitive to keep.
|
||||
*/
|
||||
function normalizeUserAgent(userAgent) {
|
||||
if (!userAgent) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const match = userAgent.match(/\bOBS\/[^\s]+/);
|
||||
if (match) {
|
||||
return match[0];
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Serialize track point records back into the version 1 CSV format,
 * including the header line.
 *
 * NOTE(review): `d1` is written under the "Right" header and `d2` under
 * "Left", matching the column order parseObsver1 reads for version 1 files --
 * confirm before changing, since the version 2 conversion maps Left to d1.
 */
function buildObsver1(points) {
  const columns = [
    { key: 'date', header: 'Date' },
    { key: 'time', header: 'Time' },
    { key: 'latitude', header: 'Latitude' },
    { key: 'longitude', header: 'Longitude' },
    { key: 'course', header: 'Course' },
    { key: 'speed', header: 'Speed' },
    { key: 'd1', header: 'Right' },
    { key: 'd2', header: 'Left' },
    { key: 'flag', header: 'Confirmed' },
    { key: 'private', header: 'insidePrivacyArea' },
  ];

  return csvStringify(points, {
    columns,
    cast: {
      boolean: (value) => (value ? '1' : '0'),
    },
    delimiter: ';',
    header: true,
  });
}
|
||||
|
||||
// Public API of the track parsing/serialization module.
module.exports = {
  detectFormat,
  normalizeUserAgent,
  parseObsver1,
  parseObsver2,
  parseTrackPoints,
  replaceDollarNewlinesHack,
  buildObsver1,
};
|
|
@ -1,171 +0,0 @@
|
|||
const {
|
||||
buildObsver1,
|
||||
detectFormat,
|
||||
normalizeUserAgent,
|
||||
parseObsver1,
|
||||
parseObsver2,
|
||||
parseTrackPoints,
|
||||
replaceDollarNewlinesHack,
|
||||
} = require('./tracks');
|
||||
|
||||
const { test1, test2, test3 } = require('./_tracks_testdata');
|
||||
|
||||
// End-to-end parsing: parseTrackPoints must auto-detect the format of test1
// and cope with its '$'-as-newline upload hack transparently.
describe('parseTrackPoints', () => {
  it('is a function', () => {
    expect(typeof parseTrackPoints).toBe('function');
  });

  it('works on the sample data with an empty track', () => {
    const points = Array.from(parseTrackPoints(test1));
    expect(points).toHaveLength(324);
    // First parsed point: latitude/longitude are null, not 0.
    expect(points[0]).toEqual({
      date: '12.07.2020',
      time: '09:02:59',
      latitude: null,
      longitude: null,
      course: 0,
      speed: 0,
      d1: null,
      d2: null,
      flag: 0,
      private: false,
    });
  });
});
|
||||
|
||||
// parseObsver1 expects real newlines, so the '$' hack is undone explicitly
// before parsing (parseTrackPoints would normally do this).
describe('parseObsver1', () => {
  it('can parse sample data', () => {
    const points = Array.from(parseObsver1(replaceDollarNewlinesHack(test1)));
    expect(points).toHaveLength(324);
    // Same first point as in the parseTrackPoints test above.
    expect(points[0]).toEqual({
      date: '12.07.2020',
      time: '09:02:59',
      latitude: null,
      longitude: null,
      course: 0,
      speed: 0,
      d1: null,
      d2: null,
      flag: 0,
      private: false,
    });
  });
});
|
||||
|
||||
// parseObsver2 converts version 2 records back into the version 1 storage
// shape; these expectations pin that conversion.
describe('parseObsver2', () => {
  it('can parse sample data', () => {
    const points = Array.from(parseObsver2(test2));
    expect(points).toHaveLength(18);
    // First row of test2 after conversion.
    expect(points[0]).toEqual({
      date: '18.11.2020',
      time: '16:05:59',
      latitude: 48.723224,
      longitude: 9.094103,
      course: 189.86,
      speed: 3.2,
      d1: 770,
      d2: null,
      flag: false,
      private: true,
    });

    // this is a non-private, flagged point (i.e. "Confirmed" overtaking)
    expect(points[17]).toEqual({
      date: '18.11.2020',
      time: '16:06:16',
      latitude: 48.723109,
      longitude: 9.093963,
      course: 247.62,
      speed: 0,
      d1: 5,
      d2: 89,
      flag: true,
      private: false,
    });
  });
});
|
||||
|
||||
describe('detectFormat', () => {
  it('detects format 1', () => {
    // test1 carries a version 1 header (and the '$' newline hack).
    expect(detectFormat(test1)).toBe(1);
  });

  it('detects format 2', () => {
    // test2 declares OBSDataFormat=2 in its metadata line; test3 has no
    // metadata line but its header contains ';Rus' columns.
    expect(detectFormat(test2)).toBe(2);
    expect(detectFormat(test3)).toBe(2);
  });

  it('detects invalid format', () => {
    expect(detectFormat('foobar\nbaz')).toBe('invalid');
    expect(detectFormat('')).toBe('invalid');
  });
});
|
||||
|
||||
describe('normalizeUserAgent', () => {
  it('is a function', () => {
    expect(typeof normalizeUserAgent).toBe('function');
  });

  it('ignores falsy values', () => {
    expect(normalizeUserAgent(null)).toBe(null);
    expect(normalizeUserAgent('')).toBe(null);
  });

  // Ordinary browser user agents contain no 'OBS/...' token and must be
  // dropped entirely (they are privacy sensitive).
  it('ignores normal browser agents', () => {
    const browserAgents = [
      'Mozilla/5.0 (Linux; Android 6.0.1; Nexus 6P Build/MMB29P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.83 Mobile Safari/537.36',
      'Mozilla/5.0 (Linux; Android 6.0; HTC One M9 Build/MRA58K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.98 Mobile Safari/537.3',
      'Mozilla/5.0 (Linux; Android 8.0.0; SM-G960F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.84 Mobile Safari/537.36',
      'Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A5370a Safari/604.1',
      'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0.1',
    ];

    for (const browserAgent of browserAgents) {
      expect(normalizeUserAgent(browserAgent)).toBe(null);
    }
  });

  it('detects OBS versions', () => {
    const agents = ['OBS/123', 'OBS/2', 'OBS/1.2.3.4.5-rc123'];

    for (const agent of agents) {
      expect(normalizeUserAgent(agent)).toBe(agent);
    }
  });

  // When the header contains more than the OBS token, only the token is kept.
  it('extracts OBS versions from extended formats', () => {
    const agents = ['foo OBS/123', 'OBS/123 bar', 'foo OBS/123 bar'];

    for (const agent of agents) {
      expect(normalizeUserAgent(agent)).toBe('OBS/123');
    }
  });
});
|
||||
|
||||
describe('buildObsver1', () => {
|
||||
it('is a function', () => {
|
||||
expect(typeof normalizeUserAgent).toBe('function');
|
||||
});
|
||||
|
||||
it('transforms properly back and forth', () => {
|
||||
const inputString = replaceDollarNewlinesHack(test1);
|
||||
|
||||
const points1 = Array.from(parseObsver1(inputString));
|
||||
const builtString = buildObsver1(points1);
|
||||
const points2 = Array.from(parseObsver1(builtString));
|
||||
|
||||
expect(points2).toEqual(points1);
|
||||
});
|
||||
|
||||
it('produces a header', () => {
|
||||
const builtString = buildObsver1([]);
|
||||
expect(builtString).toBe('Date;Time;Latitude;Longitude;Course;Speed;Right;Left;Confirmed;insidePrivacyArea\n');
|
||||
});
|
||||
|
||||
it('produces empty rows', () => {
|
||||
const builtString = buildObsver1([{}]);
|
||||
expect(builtString).toBe(
|
||||
'Date;Time;Latitude;Longitude;Course;Speed;Right;Left;Confirmed;insidePrivacyArea\n;;;;;;;;;\n',
|
||||
);
|
||||
});
|
||||
});
|
|
@ -1,25 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
|
||||
const schema = new mongoose.Schema(
|
||||
{
|
||||
body: String,
|
||||
author: { type: mongoose.Schema.Types.ObjectId, ref: 'User' },
|
||||
track: { type: mongoose.Schema.Types.ObjectId, ref: 'Track' },
|
||||
},
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
class Comment extends mongoose.Model {
|
||||
toJSONFor(user) {
|
||||
return {
|
||||
id: this._id,
|
||||
body: this.body,
|
||||
createdAt: this.createdAt,
|
||||
author: this.author.toProfileJSONFor(user),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
mongoose.model(Comment, schema);
|
||||
|
||||
module.exports = Comment;
|
|
@ -1,168 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
const uniqueValidator = require('mongoose-unique-validator');
|
||||
const slug = require('slug');
|
||||
const path = require('path');
|
||||
const sanitize = require('sanitize-filename');
|
||||
const fs = require('fs')
|
||||
|
||||
const { parseTrackPoints } = require('../logic/tracks');
|
||||
|
||||
const TrackData = require('./TrackData');
|
||||
|
||||
const DATA_DIR = process.env.DATA_DIR || path.resolve(__dirname, '../../data/')
|
||||
|
||||
const schema = new mongoose.Schema(
|
||||
{
|
||||
slug: { type: String, lowercase: true, unique: true },
|
||||
title: String,
|
||||
description: String,
|
||||
visible: Boolean,
|
||||
uploadedByUserAgent: String,
|
||||
body: String, // deprecated, remove after migration has read it
|
||||
comments: [{ type: mongoose.Schema.Types.ObjectId, ref: 'Comment' }],
|
||||
author: { type: mongoose.Schema.Types.ObjectId, ref: 'User' },
|
||||
trackData: { type: mongoose.Schema.Types.ObjectId, ref: 'TrackData' },
|
||||
publicTrackData: { type: mongoose.Schema.Types.ObjectId, ref: 'TrackData' },
|
||||
originalFileName: {
|
||||
type: String,
|
||||
required: true,
|
||||
validate: {
|
||||
validator: function (v) {
|
||||
// Must be a sane filename, i.e. not change when being sanitized
|
||||
return sanitize(v) === v && v.length > 0 && /.+\.csv$/i.test(v);
|
||||
},
|
||||
message: (props) => `${props.value} is not a valid filename`,
|
||||
},
|
||||
},
|
||||
originalFilePath: String,
|
||||
},
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
schema.plugin(uniqueValidator, { message: 'is already taken' });
|
||||
|
||||
schema.pre('validate', async function (next) {
|
||||
try {
|
||||
if (!this.slug) {
|
||||
this.slugify();
|
||||
}
|
||||
|
||||
if (!this.originalFilePath) {
|
||||
await this.generateOriginalFilePath();
|
||||
}
|
||||
|
||||
next();
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
});
|
||||
|
||||
class Track extends mongoose.Model {
|
||||
slugify() {
|
||||
this.slug = slug(this.title || 'track') + '-' + ((Math.random() * Math.pow(36, 6)) | 0).toString(36);
|
||||
}
|
||||
|
||||
async generateOriginalFilePath() {
|
||||
await this.populate('author').execPopulate();
|
||||
this.originalFilePath = path.join('uploads', 'originals', this.author.username, this.slug, 'original.csv');
|
||||
}
|
||||
|
||||
isVisibleTo(user) {
|
||||
if (this.visible) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (!user) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (user._id.equals(this.author._id)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
isVisibleToPrivate(user) {
|
||||
return user && user._id.equals(this.author._id);
|
||||
}
|
||||
|
||||
async _ensureDirectoryExists() {
|
||||
if (!this.originalFilePath) {
|
||||
await this.generateOriginalFilePath()
|
||||
}
|
||||
|
||||
const dir = path.join(DATA_DIR, path.dirname(this.originalFilePath))
|
||||
await fs.promises.mkdir(dir, {recursive: true})
|
||||
}
|
||||
|
||||
get fullOriginalFilePath() {
|
||||
return path.join(DATA_DIR, this.originalFilePath)
|
||||
}
|
||||
|
||||
async writeToOriginalFile(fileBody) {
|
||||
await this._ensureDirectoryExists()
|
||||
await fs.promises.writeFile(this.fullOriginalFilePath, fileBody)
|
||||
}
|
||||
|
||||
/**
|
||||
* Fills the trackData and publicTrackData with references to correct
|
||||
* TrackData objects. For now, this is either the same, or publicTrackData
|
||||
* is set to null, depending on the visibility of the track. At some point,
|
||||
* this will include the anonymisation step, and produce a distinct TrackData
|
||||
* object for the publicTrackData reference.
|
||||
*
|
||||
* Existing TrackData objects will be deleted by this function.
|
||||
*/
|
||||
async rebuildTrackDataAndSave() {
|
||||
// clean up existing track data, we want to actually fully delete it
|
||||
if (this.trackData) {
|
||||
await TrackData.findByIdAndDelete(this.trackData);
|
||||
}
|
||||
|
||||
if (this.publicTrackData && this.publicTrackData.equals(this.trackData)) {
|
||||
await TrackData.findByIdAndDelete(this.publicTrackData);
|
||||
}
|
||||
|
||||
// Parse the points from the body.
|
||||
// TODO: Stream file contents, if possible
|
||||
const body = await fs.promises.readFile(this.fullOriginalFilePath)
|
||||
const points = Array.from(parseTrackPoints(body));
|
||||
|
||||
const trackData = TrackData.createFromPoints(points);
|
||||
await trackData.save();
|
||||
|
||||
this.trackData = trackData._id;
|
||||
|
||||
if (this.visible) {
|
||||
// TODO: create a distinct object with filtered data
|
||||
this.publicTrackData = trackData._id;
|
||||
}
|
||||
|
||||
await this.save();
|
||||
}
|
||||
|
||||
toJSONFor(user) {
|
||||
const includePrivateFields = user && user._id.equals(this.author._id);
|
||||
|
||||
return {
|
||||
slug: this.slug,
|
||||
title: this.title,
|
||||
description: this.description,
|
||||
createdAt: this.createdAt,
|
||||
updatedAt: this.updatedAt,
|
||||
visible: this.visible,
|
||||
author: this.author.toProfileJSONFor(user),
|
||||
...(includePrivateFields
|
||||
? {
|
||||
uploadedByUserAgent: this.uploadedByUserAgent,
|
||||
originalFileName: this.originalFileName,
|
||||
}
|
||||
: {}),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
mongoose.model(Track, schema);
|
||||
|
||||
module.exports = Track;
|
|
@ -1,105 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
const uniqueValidator = require('mongoose-unique-validator');
|
||||
const turf = require('turf');
|
||||
|
||||
const { flow, filter, map, pairwise, reduce } = require('../_helpers/generators');
|
||||
|
||||
const schema = new mongoose.Schema(
|
||||
{
|
||||
slug: { type: String, lowercase: true, unique: true },
|
||||
numEvents: { type: Number, default: 0 },
|
||||
recordedAt: { type: Date },
|
||||
recordedUntil: { type: Date },
|
||||
trackLength: { type: Number },
|
||||
points: [
|
||||
{
|
||||
date: String,
|
||||
time: String,
|
||||
latitude: Number,
|
||||
longitude: Number,
|
||||
course: Number,
|
||||
speed: Number,
|
||||
d1: Number,
|
||||
d2: Number,
|
||||
flag: Number,
|
||||
private: Number,
|
||||
},
|
||||
],
|
||||
},
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
schema.plugin(uniqueValidator, { message: 'is already taken' });
|
||||
|
||||
schema.pre('validate', function (next) {
|
||||
if (!this.slug) {
|
||||
this.slugify();
|
||||
}
|
||||
next();
|
||||
});
|
||||
|
||||
schema.set('toJSON', { virtuals: true });
|
||||
|
||||
class TrackData extends mongoose.Model {
|
||||
slugify() {
|
||||
this.slug = 'td-' + String((Math.random() * Math.pow(36, 6)) | 0).toString(36);
|
||||
}
|
||||
|
||||
countEvents() {
|
||||
return this.points.filter((p) => p.flag).length;
|
||||
}
|
||||
|
||||
getRecoredAt(findEnd = false) {
|
||||
const pointsWithDate = this.points.filter((p) => p.date && p.time);
|
||||
|
||||
if (!pointsWithDate.length) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const point = pointsWithDate[findEnd ? pointsWithDate.length - 1 : 0];
|
||||
const [day, month, year] = point.date.split('.');
|
||||
const combinedString = `${year}-${month}-${day} ${point.time}.000+2000`;
|
||||
const parsedDate = new Date(combinedString);
|
||||
if (isNaN(parsedDate.getDate())) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return parsedDate;
|
||||
}
|
||||
|
||||
static createFromPoints(points) {
|
||||
const trackData = new TrackData();
|
||||
trackData.points = points;
|
||||
trackData.numEvents = trackData.countEvents();
|
||||
trackData.recordedAt = trackData.getRecoredAt();
|
||||
trackData.recordedUntil = trackData.getRecoredAt(true);
|
||||
trackData.trackLength = trackData.measureTrackLength();
|
||||
return trackData;
|
||||
}
|
||||
|
||||
measureTrackLength() {
|
||||
return flow(
|
||||
filter((p) => p.latitude != null && p.longitude != null),
|
||||
map((p) => turf.point([p.longitude, p.latitude])),
|
||||
pairwise,
|
||||
map(([a, b]) => turf.distance(a, b) * 1000),
|
||||
|
||||
// Ignore distances between two points that are bigger than 100m, this
|
||||
// must be a gap in the data or a mistake.
|
||||
filter((d) => d <= 100),
|
||||
reduce((c, d) => c + d, 0),
|
||||
)(this.points);
|
||||
}
|
||||
|
||||
get duration() {
|
||||
if (this.recordedAt == null || this.recordedUntil == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return (this.recordedUntil.getTime() - this.recordedAt.getTime()) / 1000;
|
||||
}
|
||||
}
|
||||
|
||||
mongoose.model(TrackData, schema);
|
||||
|
||||
module.exports = TrackData;
|
|
@ -1,91 +0,0 @@
|
|||
const mongoose = require('mongoose');
|
||||
const uniqueValidator = require('mongoose-unique-validator');
|
||||
const crypto = require('crypto');
|
||||
const jwt = require('jsonwebtoken');
|
||||
const secret = require('../config').secret;
|
||||
|
||||
const schema = new mongoose.Schema(
|
||||
{
|
||||
username: {
|
||||
type: String,
|
||||
lowercase: true,
|
||||
unique: true,
|
||||
required: [true, "can't be blank"],
|
||||
match: [/^[a-zA-Z0-9]+$/, 'is invalid'],
|
||||
index: true,
|
||||
},
|
||||
email: {
|
||||
type: String,
|
||||
lowercase: true,
|
||||
unique: true,
|
||||
required: [true, "can't be blank"],
|
||||
match: [/\S+@\S+\.\S+/, 'is invalid'],
|
||||
index: true,
|
||||
},
|
||||
bio: String,
|
||||
image: String,
|
||||
areTracksVisibleForAll: Boolean,
|
||||
hash: String,
|
||||
salt: String,
|
||||
needsEmailValidation: Boolean,
|
||||
verificationToken: String,
|
||||
resetToken: {
|
||||
token: String,
|
||||
expires: Date,
|
||||
},
|
||||
},
|
||||
{ timestamps: true },
|
||||
);
|
||||
|
||||
schema.plugin(uniqueValidator, { message: 'ist bereits vergeben. Sorry!' });
|
||||
|
||||
class User extends mongoose.Model {
|
||||
validPassword(password) {
|
||||
const hash = crypto.pbkdf2Sync(password, this.salt, 10000, 512, 'sha512').toString('hex');
|
||||
return this.hash === hash;
|
||||
}
|
||||
|
||||
setPassword(password) {
|
||||
this.salt = crypto.randomBytes(16).toString('hex');
|
||||
this.hash = crypto.pbkdf2Sync(password, this.salt, 10000, 512, 'sha512').toString('hex');
|
||||
}
|
||||
|
||||
generateJWT() {
|
||||
const today = new Date();
|
||||
const exp = new Date(today);
|
||||
exp.setDate(today.getDate() + 60);
|
||||
|
||||
return jwt.sign(
|
||||
{
|
||||
id: this._id,
|
||||
username: this.username,
|
||||
exp: parseInt(exp.getTime() / 1000),
|
||||
},
|
||||
secret,
|
||||
);
|
||||
}
|
||||
|
||||
toAuthJSON() {
|
||||
return {
|
||||
username: this.username,
|
||||
email: this.email,
|
||||
token: this.generateJWT(),
|
||||
bio: this.bio,
|
||||
image: this.image,
|
||||
areTracksVisibleForAll: this.areTracksVisibleForAll,
|
||||
apiKey: this._id,
|
||||
};
|
||||
}
|
||||
|
||||
toProfileJSONFor(user) {
|
||||
return {
|
||||
username: this.username,
|
||||
bio: this.bio,
|
||||
image: this.image || 'https://static.productionready.io/images/smiley-cyrus.jpg',
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
mongoose.model(User, schema);
|
||||
|
||||
module.exports = User;
|
|
@ -1,24 +0,0 @@
|
|||
const router = require('express').Router();
|
||||
|
||||
router.use('/', require('./users'));
|
||||
router.use('/profiles', require('./profiles'));
|
||||
router.use('/tracks', require('./tracks'));
|
||||
router.use('/tags', require('./tags'));
|
||||
router.use('/accounts', require('../../accounts/accounts.controller'));
|
||||
router.use('/stats', require('./stats'));
|
||||
|
||||
router.use(function (err, req, res, next) {
|
||||
if (err.name === 'ValidationError') {
|
||||
return res.status(422).json({
|
||||
errors: Object.keys(err.errors).reduce(function (errors, key) {
|
||||
errors[key] = err.errors[key].message;
|
||||
|
||||
return errors;
|
||||
}, {}),
|
||||
});
|
||||
}
|
||||
|
||||
return next(err);
|
||||
});
|
||||
|
||||
module.exports = router;
|
|
@ -1,31 +0,0 @@
|
|||
const router = require('express').Router();
|
||||
const mongoose = require('mongoose');
|
||||
const User = mongoose.model('User');
|
||||
const wrapRoute = require('../../_helpers/wrapRoute');
|
||||
const auth = require('../auth');
|
||||
|
||||
// Preload user profile on routes with ':username'
|
||||
router.param('username', async function (req, res, next, username) {
|
||||
try {
|
||||
const user = await User.findOne({ username: username });
|
||||
if (!user) {
|
||||
return res.sendStatus(404);
|
||||
}
|
||||
|
||||
req.profile = user;
|
||||
|
||||
return next();
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
});
|
||||
|
||||
router.get(
|
||||
'/:username',
|
||||
auth.optional,
|
||||
wrapRoute(async (req, res) => {
|
||||
return res.json({ profile: req.profile.toProfileJSONFor(req.user) });
|
||||
}),
|
||||
);
|
||||
|
||||
module.exports = router;
|
|
@ -1,66 +0,0 @@
|
|||
const router = require('express').Router();
|
||||
const mongoose = require('mongoose');
|
||||
const Track = mongoose.model('Track');
|
||||
const User = mongoose.model('User');
|
||||
const wrapRoute = require('../../_helpers/wrapRoute');
|
||||
|
||||
// round to this number of meters for privacy reasons
|
||||
const TRACK_LENGTH_ROUNDING = 1000;
|
||||
|
||||
router.get(
|
||||
'/',
|
||||
wrapRoute(async (req, res) => {
|
||||
const trackCount = await Track.find().count();
|
||||
const publicTrackCount = await Track.find({ visible: true }).count();
|
||||
const userCount = await User.find().count();
|
||||
|
||||
const [{ trackLength, publicTrackLength, numEvents, trackDuration }] = await Track.aggregate([
|
||||
{ $lookup: { from: 'trackdatas', localField: 'publicTrackData', foreignField: '_id', as: 'publicTrackDatas' } },
|
||||
{ $lookup: { from: 'trackdatas', localField: 'trackData', foreignField: '_id', as: 'trackDatas' } },
|
||||
{
|
||||
$addFields: {
|
||||
publicTrackData: { $arrayElemAt: ['$publicTrackDatas', 0] },
|
||||
trackData: { $arrayElemAt: ['$trackDatas', 0] },
|
||||
},
|
||||
},
|
||||
{
|
||||
$addFields: {
|
||||
publicTrackLength: '$publicTrackData.trackLength',
|
||||
trackLength: '$trackData.trackLength',
|
||||
numEvents: '$publicTrackData.numEvents',
|
||||
trackDuration: {
|
||||
$cond: [
|
||||
{ $and: ['$publicTrackData.recordedUntil', '$publicTrackData.recordedAt'] },
|
||||
{ $subtract: ['$publicTrackData.recordedUntil', '$publicTrackData.recordedAt'] },
|
||||
0,
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
{ $project: { publicTrackLength: true, trackLength: true, numEvents: true, trackDuration: true } },
|
||||
{
|
||||
$group: {
|
||||
_id: 'sum',
|
||||
trackLength: { $sum: '$trackLength' },
|
||||
publicTrackLength: { $sum: '$publicTrackLength' },
|
||||
numEvents: { $sum: '$numEvents' },
|
||||
trackDuration: { $sum: '$trackDuration' },
|
||||
},
|
||||
},
|
||||
]);
|
||||
|
||||
const trackLengthPrivatized = Math.floor(trackLength / TRACK_LENGTH_ROUNDING) * TRACK_LENGTH_ROUNDING;
|
||||
|
||||
return res.json({
|
||||
publicTrackCount,
|
||||
publicTrackLength,
|
||||
trackLength: trackLengthPrivatized,
|
||||
numEvents,
|
||||
trackCount,
|
||||
trackDuration: Math.round(trackDuration / 1000),
|
||||
userCount,
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
module.exports = router;
|
|
@ -1,15 +0,0 @@
|
|||
const router = require('express').Router();
|
||||
const mongoose = require('mongoose');
|
||||
const Track = mongoose.model('Track');
|
||||
const wrapRoute = require('../../_helpers/wrapRoute');
|
||||
|
||||
// return a list of tags
|
||||
router.get(
|
||||
'/',
|
||||
wrapRoute(async (req, res) => {
|
||||
const tags = await Track.find().distinct('tagList');
|
||||
return res.json({ tags: tags });
|
||||
}),
|
||||
);
|
||||
|
||||
module.exports = router;
|
|
@ -1,392 +0,0 @@
|
|||
const router = require('express').Router();
|
||||
const mongoose = require('mongoose');
|
||||
const TrackData = mongoose.model('TrackData');
|
||||
const Track = mongoose.model('Track');
|
||||
const Comment = mongoose.model('Comment');
|
||||
const User = mongoose.model('User');
|
||||
const busboy = require('connect-busboy');
|
||||
const auth = require('../auth');
|
||||
const { normalizeUserAgent, buildObsver1 } = require('../../logic/tracks');
|
||||
const wrapRoute = require('../../_helpers/wrapRoute');
|
||||
|
||||
function preloadByParam(target, getValueFromParam) {
|
||||
return async (req, res, next, paramValue) => {
|
||||
try {
|
||||
const value = await getValueFromParam(paramValue);
|
||||
|
||||
if (!value) {
|
||||
return res.sendStatus(404);
|
||||
}
|
||||
|
||||
req[target] = value;
|
||||
return next();
|
||||
} catch (err) {
|
||||
return next(err);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
router.param(
|
||||
'track',
|
||||
preloadByParam('track', (slug) => Track.findOne({ slug }).populate('author')),
|
||||
);
|
||||
router.param(
|
||||
'comment',
|
||||
preloadByParam('comment', (id) => Comment.findById(id)),
|
||||
);
|
||||
|
||||
router.param('comment', async (req, res, next, id) => {
|
||||
try {
|
||||
const comment = await Comment.findById(id);
|
||||
|
||||
if (!comment) {
|
||||
return res.sendStatus(404);
|
||||
}
|
||||
|
||||
req.comment = comment;
|
||||
|
||||
return next();
|
||||
} catch (err) {
|
||||
return next(err);
|
||||
}
|
||||
});
|
||||
|
||||
router.get(
|
||||
'/',
|
||||
auth.optional,
|
||||
wrapRoute(async (req, res) => {
|
||||
const query = { visible: true };
|
||||
let limit = 20;
|
||||
let offset = 0;
|
||||
|
||||
if (typeof req.query.limit !== 'undefined') {
|
||||
limit = req.query.limit;
|
||||
}
|
||||
|
||||
if (typeof req.query.offset !== 'undefined') {
|
||||
offset = req.query.offset;
|
||||
}
|
||||
|
||||
if (typeof req.query.tag !== 'undefined') {
|
||||
query.tagList = { $in: [req.query.tag] };
|
||||
}
|
||||
|
||||
const author = req.query.author ? await User.findOne({ username: req.query.author }) : null;
|
||||
|
||||
if (author) {
|
||||
query.author = author._id;
|
||||
}
|
||||
|
||||
const [tracks, tracksCount] = await Promise.all([
|
||||
Track.find(query).sort('-createdAt').limit(Number(limit)).skip(Number(offset)).sort({ createdAt: 'desc' }).populate('author').exec(),
|
||||
Track.countDocuments(query).exec(),
|
||||
]);
|
||||
|
||||
return res.json({
|
||||
tracks: tracks.map((track) => track.toJSONFor(req.user)),
|
||||
tracksCount,
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
router.get(
|
||||
'/feed',
|
||||
auth.required,
|
||||
wrapRoute(async (req, res) => {
|
||||
let limit = 20;
|
||||
let offset = 0;
|
||||
|
||||
if (typeof req.query.limit !== 'undefined') {
|
||||
limit = req.query.limit;
|
||||
}
|
||||
|
||||
if (typeof req.query.offset !== 'undefined') {
|
||||
offset = req.query.offset;
|
||||
}
|
||||
|
||||
const query = { author: req.user.id };
|
||||
const [tracks, tracksCount] = await Promise.all([
|
||||
Track.find(query).sort('-createdAt').limit(Number(limit)).skip(Number(offset)).populate('author').exec(),
|
||||
Track.countDocuments(query),
|
||||
]);
|
||||
|
||||
return res.json({
|
||||
tracks: tracks.map(function (track) {
|
||||
return track.toJSONFor(req.user);
|
||||
}),
|
||||
tracksCount: tracksCount,
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
async function readFile(file) {
|
||||
let fileContent = '';
|
||||
|
||||
file.on('data', function (data) {
|
||||
fileContent += data;
|
||||
});
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
file.on('end', resolve);
|
||||
file.on('error', reject);
|
||||
});
|
||||
|
||||
return fileContent;
|
||||
}
|
||||
|
||||
async function getMultipartOrJsonBody(req, mapJsonBody = (x) => x) {
|
||||
const fileInfo = {};
|
||||
let body;
|
||||
|
||||
if (req.busboy) {
|
||||
body = {};
|
||||
|
||||
req.busboy.on('file', async function (fieldname, file, filename, encoding, mimetype) {
|
||||
body[fieldname] = await readFile(file);
|
||||
fileInfo[fieldname] = { filename, encoding, mimetype };
|
||||
});
|
||||
|
||||
req.busboy.on('field', (key, value) => {
|
||||
body[key] = value;
|
||||
});
|
||||
|
||||
req.pipe(req.busboy);
|
||||
|
||||
await new Promise((resolve, reject) => {
|
||||
req.busboy.on('finish', resolve);
|
||||
req.busboy.on('error', reject);
|
||||
});
|
||||
} else if (req.headers['content-type'] === 'application/json') {
|
||||
body = mapJsonBody(req.body);
|
||||
} else {
|
||||
body = { body: await readFile(req), ...req.query };
|
||||
fileInfo.body = {
|
||||
mimetype: req.headers['content-type'],
|
||||
filename: req.headers['content-disposition'],
|
||||
encoding: req.headers['content-encoding'],
|
||||
};
|
||||
}
|
||||
|
||||
return { body, fileInfo };
|
||||
}
|
||||
|
||||
router.post(
|
||||
'/',
|
||||
auth.required,
|
||||
busboy(), // parse multipart body
|
||||
wrapRoute(async (req, res) => {
|
||||
// Read the whole file into memory. This is not optimal, instead, we should
|
||||
// write the file data directly to the target file. However, we first have
|
||||
// to parse the rest of the track data to know where to place the file.
|
||||
// TODO: Stream into temporary file, then move it later.
|
||||
const { body, fileInfo } = await getMultipartOrJsonBody(req, (body) => body.track);
|
||||
|
||||
const {body: fileBody, visible, ...trackBody} = body
|
||||
|
||||
const track = new Track({
|
||||
...trackBody,
|
||||
author: req.user,
|
||||
visible: visible == null ? req.user.areTracksVisibleForAll : Boolean(trackBody.visible)
|
||||
})
|
||||
track.slugify();
|
||||
|
||||
if (fileBody) {
|
||||
track.uploadedByUserAgent = normalizeUserAgent(req.headers['user-agent']);
|
||||
track.originalFileName = fileInfo.body ? fileInfo.body.filename : track.slug + '.csv';
|
||||
await track.writeToOriginalFile(fileBody)
|
||||
await track.rebuildTrackDataAndSave();
|
||||
} else {
|
||||
await track.save()
|
||||
}
|
||||
|
||||
// console.log(track.author);
|
||||
return res.json({ track: track.toJSONFor(req.user) });
|
||||
}),
|
||||
);
|
||||
|
||||
// return a track
|
||||
router.get(
|
||||
'/:track',
|
||||
auth.optional,
|
||||
wrapRoute(async (req, res) => {
|
||||
if (!req.track.isVisibleTo(req.user)) {
|
||||
return res.sendStatus(403);
|
||||
}
|
||||
|
||||
return res.json({ track: req.track.toJSONFor(req.user) });
|
||||
}),
|
||||
);
|
||||
|
||||
// update track
|
||||
router.put(
|
||||
'/:track',
|
||||
busboy(),
|
||||
auth.required,
|
||||
wrapRoute(async (req, res) => {
|
||||
const track = req.track;
|
||||
|
||||
if (!track.author._id.equals(req.user.id)) {
|
||||
return res.sendStatus(403);
|
||||
}
|
||||
|
||||
const { body: {body: fileBody, ...trackBody}, fileInfo } = await getMultipartOrJsonBody(req, (body) => body.track);
|
||||
|
||||
if (typeof trackBody.title !== 'undefined') {
|
||||
track.title = (trackBody.title || '').trim() || null;
|
||||
}
|
||||
|
||||
if (typeof trackBody.description !== 'undefined') {
|
||||
track.description = (trackBody.description || '').trim() || null;
|
||||
}
|
||||
|
||||
if (trackBody.visible != null) {
|
||||
track.visible = Boolean(trackBody.visible);
|
||||
}
|
||||
|
||||
if (fileBody) {
|
||||
track.originalFileName = fileInfo.body ? fileInfo.body.filename : track.slug + '.csv';
|
||||
track.uploadedByUserAgent = normalizeUserAgent(req.headers['user-agent']);
|
||||
await track.writeToOriginalFile(fileBody)
|
||||
|
||||
await track.rebuildTrackDataAndSave();
|
||||
} else if (track.visible && !track.publicTrackData) {
|
||||
await track.rebuildTrackDataAndSave();
|
||||
} else {
|
||||
await track.save();
|
||||
}
|
||||
|
||||
return res.json({ track: track.toJSONFor(req.user) });
|
||||
}),
|
||||
);
|
||||
|
||||
// delete track
|
||||
router.delete(
|
||||
'/:track',
|
||||
auth.required,
|
||||
wrapRoute(async (req, res) => {
|
||||
if (req.track.author._id.equals(req.user.id)) {
|
||||
await TrackData.findByIdAndDelete(req.track.trackData);
|
||||
await req.track.remove();
|
||||
return res.sendStatus(204);
|
||||
} else {
|
||||
return res.sendStatus(403);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
// return an track's comments
|
||||
router.get(
|
||||
'/:track/comments',
|
||||
auth.optional,
|
||||
wrapRoute(async (req, res) => {
|
||||
if (!req.track.isVisibleTo(req.user)) {
|
||||
return res.sendStatus(403);
|
||||
}
|
||||
|
||||
await req.track
|
||||
.populate({
|
||||
path: 'comments',
|
||||
populate: {
|
||||
path: 'author',
|
||||
},
|
||||
options: {
|
||||
sort: {
|
||||
createdAt: 'asc',
|
||||
},
|
||||
},
|
||||
})
|
||||
.execPopulate();
|
||||
|
||||
return res.json({
|
||||
comments: req.track.comments.map(function (comment) {
|
||||
return comment.toJSONFor(req.user);
|
||||
}),
|
||||
});
|
||||
}),
|
||||
);
|
||||
|
||||
// create a new comment
|
||||
router.post(
|
||||
'/:track/comments',
|
||||
auth.required,
|
||||
wrapRoute(async (req, res) => {
|
||||
const comment = new Comment(req.body.comment);
|
||||
comment.track = req.track;
|
||||
comment.author = req.user;
|
||||
|
||||
await comment.save();
|
||||
|
||||
req.track.comments.push(comment);
|
||||
|
||||
await req.track.save();
|
||||
return res.json({ comment: comment.toJSONFor(req.user) });
|
||||
}),
|
||||
);
|
||||
|
||||
router.delete(
|
||||
'/:track/comments/:comment',
|
||||
auth.required,
|
||||
wrapRoute(async (req, res) => {
|
||||
if (req.comment.author.equals(req.user.id)) {
|
||||
req.track.comments.remove(req.comment._id);
|
||||
await req.track.save();
|
||||
await Comment.find({ _id: req.comment._id }).remove();
|
||||
res.sendStatus(204);
|
||||
} else {
|
||||
res.sendStatus(403);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
// return an track's trackData
|
||||
router.get(
|
||||
['/:track/data', '/:track/TrackData'],
|
||||
auth.optional,
|
||||
wrapRoute(async (req, res) => {
|
||||
if (!req.track.isVisibleTo(req.user)) {
|
||||
return res.sendStatus(403);
|
||||
}
|
||||
|
||||
let trackData;
|
||||
|
||||
if (req.track.isVisibleToPrivate(req.user)) {
|
||||
trackData = await TrackData.findById(req.track.trackData);
|
||||
} else if (!req.track.publicTrackData) {
|
||||
return res.sendStatus(403);
|
||||
} else {
|
||||
trackData = await TrackData.findById(req.track.publicTrackData);
|
||||
}
|
||||
|
||||
return res.json({ trackData });
|
||||
}),
|
||||
);
|
||||
|
||||
// download the original file
|
||||
router.get(
|
||||
'/:track/download',
|
||||
auth.optional,
|
||||
wrapRoute(async (req, res) => {
|
||||
if (req.track.isVisibleToPrivate(req.user)) {
|
||||
return res.download(req.track.fullOriginalFilePath)
|
||||
} else if (req.track.isVisibleTo(req.user)) {
|
||||
await req.track.populate('publicTrackData').execPopulate()
|
||||
|
||||
if (!req.track.publicTrackData) {
|
||||
return res.sendStatus(403);
|
||||
}
|
||||
|
||||
const body = buildObsver1(req.track.publicTrackData.points)
|
||||
const fileName = req.track.slug + '_public.csv'
|
||||
|
||||
res.set({
|
||||
'Content-Disposition': 'attachment; filename=' + JSON.stringify(fileName),
|
||||
'Content-Type': 'text/csv',
|
||||
});
|
||||
return res.end(body)
|
||||
} else {
|
||||
return res.sendStatus(403);
|
||||
}
|
||||
}),
|
||||
);
|
||||
|
||||
module.exports = router;
|
|
@ -1,68 +0,0 @@
|
|||
const router = require('express').Router();
|
||||
const passport = require('passport');
|
||||
const wrapRoute = require('../../_helpers/wrapRoute');
|
||||
const auth = require('../auth');
|
||||
|
||||
router.get(
|
||||
'/user',
|
||||
auth.required,
|
||||
wrapRoute(async (req, res) => {
|
||||
return res.json({ user: req.user.toAuthJSON() });
|
||||
}),
|
||||
);
|
||||
|
||||
router.put(
|
||||
'/user',
|
||||
auth.required,
|
||||
wrapRoute(async (req, res) => {
|
||||
const user = req.user;
|
||||
|
||||
// only update fields that were actually passed...
|
||||
if (typeof req.body.user.username !== 'undefined') {
|
||||
user.username = req.body.user.username;
|
||||
}
|
||||
if (typeof req.body.user.email !== 'undefined') {
|
||||
user.email = req.body.user.email;
|
||||
}
|
||||
if (typeof req.body.user.bio !== 'undefined') {
|
||||
user.bio = req.body.user.bio;
|
||||
}
|
||||
if (typeof req.body.user.image !== 'undefined') {
|
||||
user.image = req.body.user.image;
|
||||
}
|
||||
if (typeof req.body.user.areTracksVisibleForAll !== 'undefined') {
|
||||
user.areTracksVisibleForAll = req.body.user.areTracksVisibleForAll;
|
||||
}
|
||||
if (typeof req.body.user.password === 'string' && req.body.user.password !== '') {
|
||||
user.setPassword(req.body.user.password);
|
||||
}
|
||||
|
||||
await user.save();
|
||||
return res.json({ user: user.toAuthJSON() });
|
||||
}),
|
||||
);
|
||||
|
||||
router.post('/users/login', function (req, res, next) {
|
||||
if (!req.body.user.email) {
|
||||
return res.status(422).json({ errors: { email: "can't be blank" } });
|
||||
}
|
||||
|
||||
if (!req.body.user.password) {
|
||||
return res.status(422).json({ errors: { password: "can't be blank" } });
|
||||
}
|
||||
|
||||
passport.authenticate('local', { session: false }, function (err, user, info) {
|
||||
if (err) {
|
||||
return next(err);
|
||||
}
|
||||
|
||||
if (user) {
|
||||
user.token = user.generateJWT();
|
||||
return res.json({ user: user.toAuthJSON() });
|
||||
} else {
|
||||
return res.status(422).json(info);
|
||||
}
|
||||
})(req, res, next);
|
||||
});
|
||||
|
||||
module.exports = router;
|
|
@ -1,80 +0,0 @@
|
|||
const jwt = require('express-jwt');
|
||||
const secret = require('../config').secret;
|
||||
const User = require('../models/User');
|
||||
|
||||
function getTokenFromHeader(req) {
|
||||
const authorization = req.headers.authorization;
|
||||
const [tokenType, token] = (authorization && authorization.split(' ')) || [];
|
||||
|
||||
if (tokenType === 'Token' || tokenType === 'Bearer') {
|
||||
return token;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
// Optional JWT middleware: when a token is present it is decoded into
// `req.authInfo` (userProperty), but unauthenticated requests pass through
// because credentialsRequired is false. Token extraction is delegated to
// getTokenFromHeader, so both "Token" and "Bearer" schemes work.
const jwtOptional = jwt({
  secret: secret,
  userProperty: 'authInfo',
  credentialsRequired: false,
  getToken: getTokenFromHeader,
  algorithms: ['HS256'],
});
|
||||
|
||||
// Establishes `req.authInfo` for the request, trying these sources in order:
//   1. A JWT in the Authorization header ("Token"/"Bearer" scheme),
//      delegated to the jwtOptional middleware.
//   2. An "OBSUserId <id>" Authorization header, whose id is used verbatim
//      (trimmed).
//   3. A 24-character `id` field in the request body (24 characters matches
//      a MongoDB ObjectId string -- presumably legacy upload clients); in
//      this case the user record is looked up and also attached as req.user.
// If none apply, `req.authInfo` is explicitly set to null (anonymous).
async function getUserIdMiddleware(req, res, next) {
  try {
    const authorization = req.headers.authorization;
    const [tokenType, token] = (authorization && authorization.split(' ')) || [];

    if (tokenType === 'Token' || tokenType === 'Bearer') {
      // only parse the token as jwt if it looks like one, otherwise we get an error
      return jwtOptional(req, res, next);
    } else if (tokenType === 'OBSUserId') {
      req.authInfo = { id: token.trim() };
      next();
    } else if (!authorization && req.body && req.body.id && req.body.id.length === 24) {
      const user = await User.findById(req.body.id);
      if (user) {
        req.authInfo = { id: user.id };
        req.user = user;
      }
      next();
    } else {
      req.authInfo = null;
      next();
    }
  } catch (err) {
    next(err);
  }
}
|
||||
|
||||
// Resolves `req.authInfo.id` (set by getUserIdMiddleware) into a full user
// record on `req.user`. Responds 401 when the referenced user does not exist;
// requests without authInfo pass through untouched.
async function loadUserMiddleware(req, res, next) {
  try {
    if (req.authInfo && req.authInfo.id) {
      req.user = await User.findById(req.authInfo.id);

      // A token for a deleted/unknown user is treated as unauthorized.
      if (!req.user) {
        return res.sendStatus(401);
      }
    }

    next();
  } catch (err) {
    next(err);
  }
}
|
||||
|
||||
module.exports = {
|
||||
required(req, res, next) {
|
||||
if (!req.authInfo) {
|
||||
return res.sendStatus(403);
|
||||
} else {
|
||||
return next();
|
||||
}
|
||||
},
|
||||
optional(req, res, next) {
|
||||
return next();
|
||||
},
|
||||
getUserIdMiddleware,
|
||||
loadUserMiddleware,
|
||||
};
|
|
@ -1,5 +0,0 @@
|
|||
// Top-level router: every route of this service is mounted under /api.
const router = require('express').Router();

router.use('/api', require('./api'));

module.exports = router;
|
188
api/tools/import_from_mongodb.py
Normal file
188
api/tools/import_from_mongodb.py
Normal file
|
@ -0,0 +1,188 @@
|
|||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import asyncio
|
||||
import logging
|
||||
import json
|
||||
from datetime import datetime
|
||||
from uuid import uuid4
|
||||
|
||||
from sqlalchemy import select
|
||||
|
||||
from motor.motor_asyncio import AsyncIOMotorClient
|
||||
|
||||
from obs.api.db import make_session, connect_db, User, Track, Comment
|
||||
from obs.api.app import app
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def main():
    """Parse CLI arguments and run the MongoDB-to-PostgreSQL import."""
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    parser = argparse.ArgumentParser(
        # Fix: the previous description was copy-pasted from process_track.py
        # and described track processing instead of this import tool.
        description="imports users, tracks and comments from an old "
        "MongoDB-based API instance into the PostgreSQL database"
    )

    parser.add_argument(
        "mongodb_url",
        metavar="MONGODB_URL",
        help="url to the mongodb, in format mongodb://user:pass@host/dbname",
    )

    parser.add_argument(
        "--keycloak-users-file",
        metavar="FILE",
        type=argparse.FileType("wt", encoding="utf-8"),
        help="a file to write a JSON of all old users to, for importing to keycloak",
        default=None,
    )

    parser.add_argument(
        "--keep-api-keys",
        action="store_true",
        help="keep the old API keys (very insecure!) instead of generating new ones",
        default=False,
    )

    args = parser.parse_args()

    if args.keep_api_keys:
        log.warning(
            "Importing users with their old API keys. These keys are very insecure and "
            "could provide access to user data to third parties. Consider to notify "
            "your users about the need to generate a new API key through their profile pages."
        )

    async with connect_db(app.config.POSTGRES_URL):
        async with make_session() as session:
            mongo = AsyncIOMotorClient(args.mongodb_url).get_default_database()

            log.debug("Connected to mongodb and postgres.")

            # Users must be imported first so tracks/comments can be remapped
            # onto the new user ids.
            user_id_map = await import_users(
                mongo, session, args.keycloak_users_file, args.keep_api_keys
            )

            await import_tracks(mongo, session, user_id_map)

            await session.commit()
|
||||
|
||||
|
||||
async def import_users(mongo, session, keycloak_users_file, keep_api_keys):
    """
    Copy all users from the mongo database into the postgres session.

    Returns a mapping from the old mongo ``_id`` to the new postgres user id,
    used later to rewire track and comment authorship.

    :param mongo: motor handle of the old mongo database
    :param session: SQLAlchemy async session for the new database
    :param keycloak_users_file: optional already-opened text file; when set, a
        JSON document of all users is written to it for a keycloak import
    :param keep_api_keys: when True, reuse the old mongo ``_id`` as API key
        instead of generating a fresh one
    """
    keycloak_users = []

    # Remember the old mongo id per email so the new postgres ids can be
    # matched back after the commit. NOTE(review): a duplicate email would
    # silently overwrite an earlier entry here -- assumed unique in mongo.
    old_id_by_email = {}
    async for user in mongo.users.find({}):
        old_id_by_email[user["email"]] = user["_id"]

        new_user = User(
            # Fresh random subject; match_by_username_email presumably lets
            # the login flow attach the real identity later -- TODO confirm.
            sub=str(uuid4()),
            email=user["email"],
            username=user["username"],
            bio=user.get("bio"),
            image=user.get("image"),
            are_tracks_visible_for_all=user.get("areTracksVisibleForAll") or False,
            created_at=user.get("createdAt") or datetime.utcnow(),
            updated_at=user.get("updatedAt") or datetime.utcnow(),
            match_by_username_email=True,
        )

        if keep_api_keys:
            # The old API key literally was the mongo object id.
            new_user.api_key = str(user["_id"])
        else:
            new_user.generate_api_key()

        if keycloak_users_file:
            needs_email_verification = user.get("needsEmailValidation", True)
            # Imported accounts carry no password, so keycloak must always
            # request a new one; email verification only when still pending.
            required_actions = ["UPDATE_PASSWORD"]
            if needs_email_verification:
                required_actions.append("VERIFY_EMAIL")

            keycloak_users.append(
                {
                    "username": new_user.username,
                    "email": new_user.email,
                    "enabled": True,
                    "requiredActions": required_actions,
                    "emailVerified": not needs_email_verification,
                }
            )

        session.add(new_user)
        log.info("Creating user %s", new_user.username)

    # Commit first so the database assigns ids, then read them back below.
    await session.commit()

    id_map = {}
    result = await session.scalars(select(User))
    for user in result:
        id_map[old_id_by_email[user.email]] = user.id

    if keycloak_users_file:
        json.dump({"users": keycloak_users}, keycloak_users_file, indent=4)
        log.info("Wrote keycloak users file to %s.", keycloak_users_file.name)

    return id_map
|
||||
|
||||
|
||||
def parse_datetime(s):
    """Parse an ISO-8601 string into a datetime; pass non-strings through.

    The mongo documents may hold timestamps either as native datetime objects
    or as strings; only strings need conversion. None (missing field) is
    returned unchanged.
    """
    return datetime.fromisoformat(s) if isinstance(s, str) else s
|
||||
|
||||
|
||||
async def import_tracks(mongo, session, user_id_map):
    """
    Copy all tracks and their comments from mongo into the postgres session.

    :param mongo: motor handle of the old mongo database
    :param session: SQLAlchemy async session for the new database
    :param user_id_map: mapping from old mongo user ids to new postgres user
        ids, as returned by import_users
    """
    track_count = 0

    async for track in mongo.tracks.find({}):
        # Old documents embed derived statistics in a sub-document.
        stats = track.get("statistics") or {}
        new_track = Track(
            created_at=parse_datetime(track.get("createdAt")) or datetime.utcnow(),
            updated_at=parse_datetime(track.get("updatedAt")) or datetime.utcnow(),
            slug=track["slug"],
            title=track.get("title"),
            processing_status=track.get("processingStatus") or "pending",
            processing_log=track.get("processingLog"),
            customized_title=bool(track.get("customizedTitle")),
            description=track.get("description"),
            public=track.get("public"),
            uploaded_by_user_agent=track.get("uploadedByUserAgent"),
            original_file_name=track.get("originalFileName"),
            original_file_hash=track.get("originalFileHash"),
            # statistics
            recorded_at=parse_datetime(stats.get("recordedAt")),
            recorded_until=parse_datetime(stats.get("recordedUntil")),
            duration=stats.get("duration"),
            length=stats.get("length"),
            segments=stats.get("segments"),
            num_events=stats.get("num_events"),
            num_measurements=stats.get("num_measurements"),
            num_valid=stats.get("numValid"),
            # Raises KeyError for a track whose author was not imported.
            author_id=user_id_map[track["author"]],
        )

        session.add(new_track)

        # Comments are stored as an id list on the track and fetched in bulk.
        comment_ids = track.get("comments") or []
        if comment_ids:
            async for comment in mongo.comments.find({"_id": {"$in": comment_ids}}):
                new_comment = Comment(
                    created_at=parse_datetime(comment.get("createdAt"))
                    or datetime.utcnow(),
                    updated_at=parse_datetime(comment.get("updatedAt"))
                    or datetime.utcnow(),
                    body=comment.get("body"),
                    author_id=user_id_map[comment["author"]],
                )
                new_track.comments.append(new_comment)
                session.add(new_comment)

        track_count += 1

    log.info("Created %s tracks", track_count)

    await session.commit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the async entry point when executed as a script.
    asyncio.run(main())
|
108
api/tools/import_osm.py
Executable file
108
api/tools/import_osm.py
Executable file
|
@ -0,0 +1,108 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
from dataclasses import dataclass
|
||||
import asyncio
|
||||
from os.path import basename, splitext
|
||||
import sys
|
||||
import logging
|
||||
|
||||
import msgpack
|
||||
import psycopg
|
||||
|
||||
from obs.api.app import app
|
||||
from obs.api.utils import chunk
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
ROAD_BUFFER = 1000
|
||||
AREA_BUFFER = 100
|
||||
|
||||
|
||||
@dataclass
class Road:
    # One road record as decoded from the msgpack input file; the fields
    # mirror the columns of the `road` table (see import_osm below).
    way_id: int  # OSM way id
    name: str
    zone: str  # zone classification string -- TODO confirm the value set
    directionality: int
    oneway: int
    geometry: bytes  # raw geometry bytes; hex-encoded before COPY
|
||||
|
||||
|
||||
def read_file(filename):
    """
    Reads a msgpack-encoded file iteratively, yielding Road records as they
    appear. Records of other types may be mixed in; they are skipped.

    Each record is a msgpack array whose first element is a type tag; only
    tag b"\\x01" (roads) is handled here.
    """

    with open(filename, "rb") as f:
        unpacker = msgpack.Unpacker(f)
        try:
            while True:
                type_id, *data = unpacker.unpack()

                if type_id == b"\x01":
                    yield Road(*data)

        except msgpack.OutOfData:
            # Clean end of input: no more complete objects in the buffer.
            pass
|
||||
|
||||
|
||||
async def import_osm(connection, filename, import_group=None):
    """
    Imports the roads from a msgpack extract file into the `road` table.

    Previously imported data for the same import group, and any rows with the
    same way ids, are deleted first, so re-running the import replaces the
    old data.

    :param connection: open psycopg async connection
    :param filename: path to the msgpack file (see read_file)
    :param import_group: marker stored with each row; defaults to the file
        name without its extension
    """
    if import_group is None:
        import_group = splitext(basename(filename))[0]

    # Pass 1: Find IDs only
    road_ids = []
    for item in read_file(filename):
        road_ids.append(item.way_id)

    async with connection.cursor() as cursor:
        log.info("Pass 1: Delete previously imported data")

        log.debug("Delete import group %s", import_group)
        await cursor.execute(
            "DELETE FROM road WHERE import_group = %s", (import_group,)
        )

        log.debug("Delete roads by way_id")
        for ids in chunk(road_ids, 10000):
            await cursor.execute("DELETE FROM road WHERE way_id = ANY(%s)", (ids,))

        # Pass 2: Import
        log.info("Pass 2: Import roads")
        amount = 0
        for items in chunk(read_file(filename), 10000):
            items = list(items)
            # Fix: count the actual chunk size -- the final chunk is usually
            # shorter than 10000, so the old `amount += 10000` made the
            # progress output overshoot the total.
            amount += len(items)
            log.info(
                "...%d/%d (%.1f%%)", amount, len(road_ids), 100 * amount / len(road_ids)
            )
            async with cursor.copy(
                "COPY road (way_id, name, zone, directionality, oneway, geometry, import_group) FROM STDIN"
            ) as copy:
                for item in items:
                    await copy.write_row(
                        (
                            item.way_id,
                            item.name,
                            item.zone,
                            item.directionality,
                            item.oneway,
                            bytes.hex(item.geometry),
                            import_group,
                        )
                    )
|
||||
|
||||
|
||||
async def main():
    """Import every OSM extract file given on the command line."""
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    # The configured URL targets asyncpg (SQLAlchemy); this tool talks to
    # PostgreSQL directly through psycopg, so the driver suffix is dropped.
    postgres_url = app.config.POSTGRES_URL.replace("+asyncpg", "")

    async with await psycopg.AsyncConnection.connect(postgres_url) as connection:
        for filename in sys.argv[1:]:
            log.debug("Loading file: %s", filename)
            await import_osm(connection, filename)


if __name__ == "__main__":
    asyncio.run(main())
|
93
api/tools/import_regions.py
Executable file
93
api/tools/import_regions.py
Executable file
|
@ -0,0 +1,93 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
"""
|
||||
This script downloads and/or imports regions for statistical analysis into the
|
||||
PostGIS database. The regions are sourced from:
|
||||
|
||||
* EU countries are covered by
|
||||
[NUTS](https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/administrative-units-statistical-units/nuts).
|
||||
"""
|
||||
|
||||
import tempfile
|
||||
from dataclasses import dataclass
|
||||
import json
|
||||
import asyncio
|
||||
from os.path import basename, splitext
|
||||
import sys
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
import aiohttp
|
||||
import psycopg
|
||||
|
||||
from obs.api.app import app
|
||||
from obs.api.utils import chunk
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
NUTS_URL = "https://gisco-services.ec.europa.eu/distribution/v2/nuts/geojson/NUTS_RG_01M_2021_3857.geojson"
|
||||
|
||||
from pyproj import Transformer
|
||||
|
||||
project = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True).transform
|
||||
from shapely.ops import transform
|
||||
from shapely.geometry import shape
|
||||
import shapely.wkb as wkb
|
||||
|
||||
|
||||
async def import_nuts(
    connection, filename=None, level: int = 3, import_group: Optional[str] = None
):
    """
    Imports NUTS regions of the given level into the `region` table.

    :param connection: open psycopg async connection
    :param filename: optional local GeoJSON file; when None, the dataset is
        downloaded from europa.eu (NUTS_URL) instead
    :param level: NUTS level to import; only features whose LEVL_CODE matches
        are used
    :param import_group: marker stored with each row so a later run replaces
        exactly this import; defaults to "nuts<level>"
    """
    if import_group is None:
        import_group = f"nuts{level}"

    if filename:
        log.info("Load NUTS from file")
        with open(filename) as f:
            data = json.load(f)
    else:
        log.info("Download NUTS regions from europa.eu")
        async with aiohttp.ClientSession() as session:
            async with session.get(NUTS_URL) as resp:
                # content_type=None: the server does not reply with a JSON
                # content type, so aiohttp's check must be disabled.
                data = await resp.json(content_type=None)

    async with connection.cursor() as cursor:
        log.info(
            "Delete previously imported regions with import group %s", import_group
        )
        await cursor.execute(
            "DELETE FROM region WHERE import_group = %s", (import_group,)
        )

        log.info("Import regions")
        async with cursor.copy(
            "COPY region (id, name, geometry, import_group) FROM STDIN"
        ) as copy:
            for feature in data["features"]:
                if feature["properties"]["LEVL_CODE"] == level:
                    geometry = shape(feature["geometry"])
                    # NOTE(review): reprojection is disabled -- the source
                    # file name (…_3857.geojson) suggests the data is already
                    # in EPSG:3857; confirm before re-enabling.
                    # geometry = transform(project, geometry)
                    geometry = wkb.dumps(geometry)
                    geometry = bytes.hex(geometry)
                    await copy.write_row(
                        (
                            feature["properties"]["NUTS_ID"],
                            feature["properties"]["NUTS_NAME"],
                            geometry,
                            import_group,
                        )
                    )
|
||||
|
||||
|
||||
async def main():
    """Download (or read) NUTS regions and import them into the database."""
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    # psycopg connects directly; strip the SQLAlchemy asyncpg driver suffix.
    database_url = app.config.POSTGRES_URL.replace("+asyncpg", "")

    async with await psycopg.AsyncConnection.connect(database_url) as connection:
        # An optional first argument names a local GeoJSON file; otherwise
        # the dataset is downloaded.
        filename = sys.argv[1] if len(sys.argv) > 1 else None
        await import_nuts(connection, filename)


if __name__ == "__main__":
    asyncio.run(main())
|
196
api/tools/prepare_sql_tiles.py
Executable file
196
api/tools/prepare_sql_tiles.py
Executable file
|
@ -0,0 +1,196 @@
|
|||
#!/usr/bin/env python3
|
||||
import logging
|
||||
import asyncio
|
||||
import tempfile
|
||||
import re
|
||||
import os
|
||||
import glob
|
||||
from os.path import normpath, abspath, join
|
||||
from typing import List, Tuple
|
||||
|
||||
|
||||
from sqlalchemy import text
|
||||
import sqlparse
|
||||
from openmaptiles.sqltomvt import MvtGenerator
|
||||
|
||||
from obs.api.app import app
|
||||
from obs.api.db import connect_db, make_session
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
TILE_GENERATOR = normpath(
|
||||
abspath(join(app.config.API_ROOT_DIR, "..", "tile-generator"))
|
||||
)
|
||||
TILESET_FILE = join(TILE_GENERATOR, "openbikesensor.yaml")
|
||||
|
||||
EXTRA_ARGS = [
|
||||
# name, type, default
|
||||
("user_id", "integer", "NULL"),
|
||||
("min_time", "timestamp", "NULL"),
|
||||
("max_time", "timestamp", "NULL"),
|
||||
]
|
||||
|
||||
|
||||
class CustomMvtGenerator(MvtGenerator):
    # Subclass of openmaptiles' MvtGenerator whose only purpose is to add
    # extra parameters (user/time filters, see EXTRA_ARGS) to the generated
    # SQL tile function.
    def generate_sqltomvt_func(self, fname, extra_args: List[Tuple[str, str, str]]) -> str:
        """
        Creates a SQL function that returns a single bytea value or null. This
        method is overridden to allow for custom arguments in the created function.

        Each extra_args entry is a (name, sql_type, default) triple.
        """
        # ", type" fragments for the DROP signature, and
        # ", name type DEFAULT value" fragments for CREATE.
        extra_args_types = "".join([f", {a[1]}" for a in extra_args])
        extra_args_definitions = "".join(
            [f", {a[0]} {a[1]} DEFAULT {a[2]}" for a in extra_args]
        )

        return f"""\
DROP FUNCTION IF EXISTS {fname}(integer, integer, integer{extra_args_types});
CREATE FUNCTION {fname}(zoom integer, x integer, y integer{extra_args_definitions})
RETURNS {'TABLE(mvt bytea, key text)' if self.key_column else 'bytea'} AS $$
{self.generate_sql()};
$$ LANGUAGE SQL STABLE CALLED ON NULL INPUT;"""
|
||||
|
||||
|
||||
def parse_pg_url(url=app.config.POSTGRES_URL):
    """
    Parses a `postgresql+asyncpg://user:password@host[:port]/database` URL
    into a (user, password, host, port, database) tuple of strings, with
    empty-string fallbacks for user/password and "5432" for the port.

    Fix: the host group previously used a greedy `.*`, which swallowed any
    `:port` suffix so the port group could never match; the host is now
    restricted to characters before an optional `:port`.
    """
    m = re.match(
        r"^postgresql\+asyncpg://(?P<user>.*):(?P<password>.*)@(?P<host>[^/:]*)(:(?P<port>\d+))?/(?P<database>[^/]+)$",
        url,
    )

    return (
        m["user"] or "",
        m["password"] or "",
        m["host"],
        m["port"] or "5432",
        m["database"],
    )
|
||||
|
||||
|
||||
async def main():
    """Configure logging, then run the full SQL tile preparation."""
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    await prepare_sql_tiles()
|
||||
|
||||
|
||||
async def prepare_sql_tiles():
    """Regenerate the tile SQL and install it into the database.

    All intermediate build artifacts live in a temporary directory that is
    removed afterwards.
    """
    with tempfile.TemporaryDirectory() as tmp_dir:
        await generate_data_yml(tmp_dir)
        snippets = await generate_sql(tmp_dir)
        await import_sql(snippets)
|
||||
|
||||
|
||||
async def _run(cmd):
|
||||
if isinstance(cmd, list):
|
||||
cmd = " ".join(cmd)
|
||||
proc = await asyncio.create_subprocess_shell(
|
||||
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
|
||||
)
|
||||
|
||||
stdout, stderr = await proc.communicate()
|
||||
|
||||
if proc.returncode != 0:
|
||||
log.error(stderr.decode("utf-8"))
|
||||
raise RuntimeError("external program failed: %s" % str(cmd))
|
||||
|
||||
return stdout.decode("utf-8")
|
||||
|
||||
|
||||
async def generate_data_yml(build_dir):
    """
    Generates the tm2source description for the tileset by running the
    openmaptiles `generate-tm2source` tool with our database credentials,
    writing the result to <build_dir>/openbikesensor.tm2source/data.yml.
    """
    stdout = await _run(
        [
            "python",
            "$(which generate-tm2source)",
            TILESET_FILE,
            # Interleave the option flags with the parsed URL parts:
            # --user U --password P --host H --port P --database D
            *sum(
                zip(
                    ["--user", "--password", "--host", "--port", "--database"],
                    parse_pg_url(),
                ),
                (),
            ),
        ]
    )

    tm2source = join(build_dir, "openbikesensor.tm2source")
    os.makedirs(tm2source, exist_ok=True)

    with open(join(tm2source, "data.yml"), "wt") as f:
        f.write(stdout)
|
||||
|
||||
|
||||
async def generate_sql(build_dir):
    """
    Runs the openmaptiles `generate-sql` tool for the tileset and collects the
    SQL snippets to execute, in order: extensions, the openmaptiles-tools
    helper SQL, the generated layer SQL, and finally a custom `getmvt`
    function with our extra filter arguments (EXTRA_ARGS).

    Returns the list of SQL snippet strings; nothing is executed here.
    """
    sql_dir = join(build_dir, "sql")

    await _run(f"python $(which generate-sql) {TILESET_FILE!r} --dir {sql_dir!r}")

    # Snippet files in the order they must run.
    sql_snippet_files = [
        *sorted(
            glob.glob(
                join(
                    app.config.API_ROOT_DIR, "src", "openmaptiles-tools", "sql", "*.sql"
                )
            )
        ),
        join(sql_dir, "run_first.sql"),
        *sorted(glob.glob(join(sql_dir, "parallel", "*.sql"))),
        join(sql_dir, "run_last.sql"),
    ]

    sql_snippets = [
        # NOTE: implicit string concatenation -- both CREATE EXTENSION
        # statements form one snippet; sqlparse splits them again later.
        "CREATE EXTENSION IF NOT EXISTS hstore;"
        "CREATE EXTENSION IF NOT EXISTS postgis;"
    ]
    for filename in sql_snippet_files:
        with open(filename, "rt") as f:
            sql_snippets.append(f.read())

    mvt = CustomMvtGenerator(
        tileset=TILESET_FILE,
        postgis_ver="3.0.1",
        zoom="zoom",
        x="x",
        y="y",
        gzip=True,
        test_geometry=False,  # ?
        key_column=True,
    )
    getmvt_sql = mvt.generate_sqltomvt_func("getmvt", EXTRA_ARGS)

    # drop old versions of the function
    sql_snippets.append("DROP FUNCTION IF EXISTS getmvt(integer, integer, integer);")
    sql_snippets.append(getmvt_sql)

    return sql_snippets
|
||||
|
||||
|
||||
async def import_sql(sql_snippets):
    """Execute every statement contained in the given SQL snippet strings.

    Each snippet may hold several statements; they are split with sqlparse
    and executed one at a time, each in its own session/transaction.
    """
    statements = []
    for snippet in sql_snippets:
        statements.extend(sqlparse.split(snippet))

    async with connect_db(
        app.config.POSTGRES_URL,
        app.config.POSTGRES_POOL_SIZE,
        app.config.POSTGRES_MAX_OVERFLOW,
    ):
        total = len(statements)
        for index, statement in enumerate(statements, start=1):
            # Normalized form, used only for the log preview and to detect
            # comment-only fragments (which format to an empty string).
            preview = sqlparse.format(
                statement,
                truncate_strings=20,
                strip_comments=True,
                keyword_case="upper",
            )

            if not preview:
                continue

            log.debug(
                "Running SQL statement %d of %d (%s...)",
                index,
                total,
                preview[:40],
            )

            async with make_session() as session:
                await session.execute(text(statement))
                await session.commit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the async entry point when executed as a script.
    asyncio.run(main())
|
46
api/tools/process_track.py
Executable file
46
api/tools/process_track.py
Executable file
|
@ -0,0 +1,46 @@
|
|||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import logging
|
||||
import asyncio
|
||||
|
||||
from obs.api.db import connect_db
|
||||
from obs.api.app import app
|
||||
from obs.api.process import process_tracks, process_tracks_loop
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def main():
    """CLI entry point: process the given tracks, or poll the queue forever."""
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    parser = argparse.ArgumentParser(
        description="processes a single track for use in the portal, "
        "using the obs.face algorithms"
    )

    parser.add_argument(
        "--loop-delay",
        action="store",
        type=int,
        default=10,
        help="delay between loops, if no track was found in the queue (polling)",
    )

    parser.add_argument(
        "tracks",
        metavar="ID_OR_SLUG",
        nargs="*",
        help="ID or slug of tracks to process, if not passed, the queue is processed in a loop",
    )

    args = parser.parse_args()

    async with connect_db(
        app.config.POSTGRES_URL,
        app.config.POSTGRES_POOL_SIZE,
        app.config.POSTGRES_MAX_OVERFLOW,
    ):
        # Explicit track arguments mean one-shot processing; without any,
        # the queue is polled indefinitely.
        if args.tracks:
            await process_tracks(args.tracks)
        else:
            await process_tracks_loop(args.loop_delay)


if __name__ == "__main__":
    asyncio.run(main())
|
30
api/tools/reimport_tracks.py
Executable file
30
api/tools/reimport_tracks.py
Executable file
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env python3
|
||||
import logging
|
||||
import asyncio
|
||||
|
||||
from sqlalchemy import text
|
||||
|
||||
from obs.api.app import app
|
||||
from obs.api.db import connect_db, make_session
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
async def main():
    """Configure logging, then queue all tracks for re-processing."""
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    await reimport_tracks()
|
||||
|
||||
|
||||
async def reimport_tracks():
    """Mark every track as 'queued' so the worker re-processes all of them."""
    async with connect_db(
        app.config.POSTGRES_URL,
        app.config.POSTGRES_POOL_SIZE,
        app.config.POSTGRES_MAX_OVERFLOW,
    ):
        async with make_session() as session:
            await session.execute(
                text("UPDATE track SET processing_status = 'queued';")
            )
            await session.commit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Run the async entry point when executed as a script.
    asyncio.run(main())
|
Some files were not shown because too many files have changed in this diff Show more
Loading…
Reference in a new issue