diff --git a/CHANGELOG.md b/CHANGELOG.md index ab435d1..187af5c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## 0.8.0 + +### Features + +* Bulk actions on users owned tracks (reprocess, download, make private, make public, delete) (#269, #38) +* Easy sorting by device for "multi-device users" (e.g. group lending out OBSes) +* Region display at higher zoom levels to easily find interesting areas (#112) +* Export of road statistics on top of the already-existing event statistics (#341) + +### Improvements + +* Refactored database access to hopefully combat portal crashes (#337) +* New infrastructure for map imports that makes import of larger maps possible on small VMs (#334) +* Reference current postgres and postgis versions in docker-compose.yaml files (#286) +* Configurable terms-and-conditions link (#320) +* French translation by @cbiteau (#303) + +### Bug Fixes + +* Logout not working (#285) +* Duplicate road usage hashes (#335, #253) +* cannot import name .... (#338) + ## 0.7.0 ### Features diff --git a/Dockerfile b/Dockerfile index df43f0c..f4db357 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,44 +1,5 @@ # This dockerfile is for the API + Frontend production image -############################################# -# Build osm2pgsql AS builder -############################################# - -# This image should be the same as final one, because of the lib versions -FROM python:3.9.7-bullseye as osm2pgsql-builder - -ARG DEBIAN_FRONTEND=noninteractive -ENV TZ=Europe/Berlin -ENV OSM2PGSQL_VERSION=1.5.1 - -# Dependencies -RUN apt-get update &&\ - apt-get install -y \ - make \ - cmake \ - g++ \ - libboost-dev \ - libboost-system-dev \ - libboost-filesystem-dev \ - libexpat1-dev \ - zlib1g-dev \ - libbz2-dev \ - libpq-dev \ - libproj-dev \ - lua5.3 \ - liblua5.3-dev \ - git &&\ - rm -rf /var/lib/apt/lists/* - -# Clone & Build -RUN git clone --branch $OSM2PGSQL_VERSION https://github.com/openstreetmap/osm2pgsql.git &&\ - cd osm2pgsql/ &&\ - mkdir build &&\ - cd build &&\ - cmake .. &&\ - make -j4 &&\ - make install - ############################################# # Build the frontend AS builder ############################################# @@ -60,7 +21,7 @@ RUN npm run build # Build the API and add the built frontend to it ############################################# -FROM python:3.9.7-bullseye +FROM python:3.11.3-bullseye RUN apt-get update &&\ apt-get install -y \ @@ -93,11 +54,7 @@ ADD api/obs /opt/obs/api/obs/ ADD api/tools /opt/obs/api/tools/ RUN pip install -e /opt/obs/api/ -ADD roads_import.lua /opt/obs/api/tools -ADD osm2pgsql.sh /opt/obs/api/tools - COPY --from=frontend-builder /opt/obs/frontend/build /opt/obs/frontend/build -COPY --from=osm2pgsql-builder /usr/local/bin/osm2pgsql /usr/local/bin/osm2pgsql EXPOSE 3000 diff --git a/README.md b/README.md index 1fbbd7f..ddc79ed 100644 --- a/README.md +++ b/README.md @@ -36,10 +36,11 @@ git submodule update --init --recursive ## Production setup -There is a guide for a deployment based on docker in the -[deployment](deployment) folder. Lots of non-docker deployment strategy are -possible, but they are not "officially" supported, so please do not expect the -authors of the software to assist in troubleshooting. +There is a guide for a deployment based on docker at +[docs/production-deployment.md](docs/production-deployment.md). Lots of +non-docker deployment strategies are possible, but they are not "officially" +supported, so please do not expect the authors of the software to assist in +troubleshooting. 
This is a rather complex application, and it is expected that you know the basics of deploying a modern web application securely onto a production server. @@ -52,7 +53,8 @@ Please note that you will always need to install your own reverse proxy that terminates TLS for you and handles certificates. We do not support TLS directly in the application, instead, please use this prefered method. -Upgrading and migrating is descrube +Upgrading and migrating is described in [UPGRADING.md](./UPGRADING.md) for each +version. ### Migrating (Production) @@ -75,18 +77,6 @@ docker-compose run --rm api alembic upgrade head docker-compose run --rm api tools/prepare_sql_tiles ``` - -docker-compose run --rm api alembic upgrade head - -### Upgrading from v0.2 to v0.3 - -After v0.2 we switched the underlying technology of the API and the database. -We now have no more MongoDB, instead, everything has moved to the PostgreSQL -installation. For development setups, it is advised to just reset the whole -state (remove the `local` folder) and start fresh. For production upgrades, -please follow the relevant section in [`UPGRADING.md`](./UPGRADING.md). - - ## Development setup We've moved the whole development setup into Docker to make it easy for @@ -101,7 +91,6 @@ Then clone the repository as described above. ### Configure Keycloak - Login will not be possible until you configure the keycloak realm correctly. Boot your keycloak instance: ```bash @@ -164,7 +153,7 @@ You will need to re-run this command after updates, to migrate the database and (re-)create the functions in the SQL database that are used when generating vector tiles. -You should also import OpenStreetMap data now, see below for instructions. +You should also [import OpenStreetMap data](docs/osm-import.md) now. ### Boot the application @@ -190,48 +179,6 @@ docker-compose run --rm api alembic upgrade head ``` -## Import OpenStreetMap data - -**Hint:** This step may be skipped if you are using [Lean mode](./docs/lean-mode.md). - -You need to import road information from OpenStreetMap for the portal to work. -This information is stored in your PostgreSQL database and used when processing -tracks (instead of querying the Overpass API), as well as for vector tile -generation. The process applies to both development and production setups. For -development, you should choose a small area for testing, such as your local -county or city, to keep the amount of data small. For production use you have -to import the whole region you are serving. - -* Install `osm2pgsql`. -* Download the area(s) you would like to import from [GeoFabrik](https://download.geofabrik.de). -* Import each file like this: - - ```bash - osm2pgsql --create --hstore --style roads_import.lua -O flex \ - -H localhost -d obs -U obs -W \ - path/to/downloaded/myarea-latest.osm.pbf - ``` - -You might need to adjust the host, database and username (`-H`, `-d`, `-U`) to -your setup, and also provide the correct password when queried. For the -development setup the password is `obs`. For production, you might need to -expose the containers port and/or create a TCP tunnel, for example with SSH, -such that you can run the import from your local host and write to the remote -database. - -The import process should take a few seconds to minutes, depending on the area -size. A whole country might even take one or more hours. You should probably -not try to import `planet.osm.pbf`. - -You can run the process multiple times, with the same or different area files, -to import or update the data. 
However, for this to work, the actual [command
-line arguments](https://osm2pgsql.org/doc/manual.html#running-osm2pgsql) are a
-bit different each time, including when first importing, and the disk space
-required is much higher.
-
-Refer to the documentation of `osm2pgsql` for assistance. We are using "flex
-mode", the provided script `roads_import.lua` describes the transformations
-and extractions to perform on the original data.
 
 ## Troubleshooting
 
diff --git a/UPGRADING.md b/UPGRADING.md
index 8189613..482d109 100644
--- a/UPGRADING.md
+++ b/UPGRADING.md
@@ -1,9 +1,32 @@
 # Upgrading
-
 This document describes the general steps to upgrade between major changes.
 Simple migrations, e.g. for adding schema changes, are not documented
 explicitly. Their general usage is described in the [README](./README.md) (for
-development) and [deployment/README.md](deployment/README.md) (for production).
+development) and [docs/production-deployment.md](docs/production-deployment.md) (for production).
+
+
+## 0.8.0
+
+Upgrade to `0.7.x` first. See below for details. Then follow these steps:
+
+> **Warning** The update includes a reprocessing of tracks after import. Depending on the number of tracks, this can take a few hours. The portal is reachable during that time, but events disappear and incrementally reappear during the reimport.
+
+> **Info** With this version the import process for OpenStreetMap data has changed: the [new process](docs/osm-import.md) is easier on resources and finally makes it possible to import a full country on a low-end VM.
+
+- Do your [usual backup](docs/production-deployment.md).
+- Get the release in your source folder (``git pull; git checkout 0.8.0``) and update the submodules (``git submodule update --recursive``).
+- Rebuild the images: ``docker-compose build``
+- Stop your portal and worker services: ``docker-compose stop worker portal``
+- Run the upgrade:
+  ```bash
+  docker-compose run --rm portal tools/upgrade.py
+  ```
+  This automatically does the following:
+  - Migrates the database schema using alembic.
+  - Upgrades the SQL tile schema to the new schema.
+  - Imports the NUTS regions from the web into the database.
+  - Triggers a re-import of all tracks.
+- Start your portal and worker services: ``docker-compose up -d worker portal``
+
 
 ## 0.7.0
 
@@ -57,7 +80,7 @@ You can, but do not have to, reimport all tracks. This will generate a GPX file
 for each track and allow the users to download those. If a GPX file has not
 yet been created, the download will fail. To reimport all tracks, log in to
 your PostgreSQL database (instructions are in [README.md](./README.md) for
-development and [deployment/README.md](./deployment/README.md) for production)
+development and [docs/production-deployment.md](./docs/production-deployment.md) for production)
 and run:
 
 ```sql
@@ -77,7 +100,7 @@ Make sure your worker is running to process the queue.
 `POSTGRES_MAX_OVERFLOW`. Check the example config for sane default values.
 * Re-run `tools/prepare_sql_tiles.py` again (see README)
 * It has been made easier to import OSM data, check
-  [deployment/README.md](deployment/README.md) for the sections "Download
+  [docs/production-deployment.md](./docs/production-deployment.md) for the sections "Download
   OpenStreetMap maps" and "Import OpenStreetMap data". You can now download
   multiple .pbf files and then import them at once, using the docker image
   built with the `Dockerfile`. Alternatively, you can choose to enable [lean
@@ -132,5 +155,5 @@
`export/users.json` into your realm, it will re-add all the users from the old installation. You should delete the file and `export/` folder afterwards. * Start `portal`. -* Consider configuring a worker service. See [deployment/README.md](deployment/README.md). +* Consider configuring a worker service. See [docs/production-deployment.md](./docs/production-deployment.md). diff --git a/api/Dockerfile b/api/Dockerfile index 18c4ecb..5b31147 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,4 +1,4 @@ -FROM python:3.9.7-bullseye +FROM python:3.11.3-bullseye WORKDIR /opt/obs/api diff --git a/api/config.dev.py b/api/config.dev.py index e146973..43a9ec7 100644 --- a/api/config.dev.py +++ b/api/config.dev.py @@ -2,9 +2,8 @@ HOST = "0.0.0.0" PORT = 3000 DEBUG = True VERBOSE = False -AUTO_RESTART = True +AUTO_RELOAD = True SECRET = "!!!!!!!!!!!!CHANGE ME!!!!!!!!!!!!" -LEAN_MODE = False POSTGRES_URL = "postgresql+asyncpg://obs:obs@postgres/obs" POSTGRES_POOL_SIZE = 20 POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE @@ -30,5 +29,7 @@ ADDITIONAL_CORS_ORIGINS = [ "http://localhost:8880/", # for maputnik on 8880 "http://localhost:8888/", # for maputnik on 8888 ] +TILE_SEMAPHORE_SIZE = 4 +EXPORT_SEMAPHORE_SIZE = 4 # vim: set ft=python : diff --git a/api/config.py.example b/api/config.py.example index 2310250..93be821 100644 --- a/api/config.py.example +++ b/api/config.py.example @@ -5,12 +5,7 @@ PORT = 3000 # Extended log output, but slower DEBUG = False VERBOSE = DEBUG -AUTO_RESTART = DEBUG - -# Turn on lean mode to simplify the setup. Lots of features will be -# unavailable, but you will not need to manage OpenStreetMap data. Please make -# sure to configure the OBS_FACE_CACHE_DIR correctly for lean mode. -LEAN_MODE = False +AUTO_RELOAD = DEBUG # Required to encrypt or sign sessions, cookies, tokens, etc. SECRET = "!!!<<>>!!!" @@ -66,4 +61,13 @@ TILES_FILE = None # default. Python list, or whitespace separated string. ADDITIONAL_CORS_ORIGINS = None +# How many asynchronous requests may be sent to the database to generate tile +# information. Should be less than POSTGRES_POOL_SIZE to leave some connections +# to the other features of the API ;) +TILE_SEMAPHORE_SIZE = 4 + +# How many asynchronous requests may generate exported data simultaneously. +# Keep this small. 
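+# (A sketch of the arithmetic, not an official guideline: with the development
+# default of POSTGRES_POOL_SIZE = 20, a TILE_SEMAPHORE_SIZE of 4 plus an
+# EXPORT_SEMAPHORE_SIZE of 1 still leave 15 pooled connections, plus the
+# configured overflow, for ordinary API requests.)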
+EXPORT_SEMAPHORE_SIZE = 1 + # vim: set ft=python : diff --git a/api/migrations/versions/35e7f1768f9b_create_table_road.py b/api/migrations/versions/35e7f1768f9b_create_table_road.py index 17d2582..8eb3930 100644 --- a/api/migrations/versions/35e7f1768f9b_create_table_road.py +++ b/api/migrations/versions/35e7f1768f9b_create_table_road.py @@ -22,13 +22,16 @@ def upgrade(): op.create_table( "road", sa.Column( - "way_id", sa.BIGINT, autoincrement=True, primary_key=True, index=True + "way_id", sa.BIGINT, primary_key=True, index=True, autoincrement=False ), sa.Column("zone", dbtype("zone_type")), - sa.Column("name", sa.String), - sa.Column("geometry", dbtype("GEOMETRY"), index=True), + sa.Column("name", sa.Text), + sa.Column("geometry", dbtype("geometry(LINESTRING,3857)")), sa.Column("directionality", sa.Integer), - sa.Column("oenway", sa.Boolean), + sa.Column("oneway", sa.Boolean), + ) + op.execute( + "CREATE INDEX road_geometry_idx ON road USING GIST (geometry) WITH (FILLFACTOR=100);" ) diff --git a/api/migrations/versions/587e69ecb466_transform_overtaking_event_geometry_to_.py b/api/migrations/versions/587e69ecb466_transform_overtaking_event_geometry_to_.py new file mode 100644 index 0000000..8bb36f2 --- /dev/null +++ b/api/migrations/versions/587e69ecb466_transform_overtaking_event_geometry_to_.py @@ -0,0 +1,30 @@ +"""transform overtaking_event geometry to 3857 + +Revision ID: 587e69ecb466 +Revises: f4b0f460254d +Create Date: 2023-04-01 14:30:49.927505 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "587e69ecb466" +down_revision = "f4b0f460254d" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute("UPDATE overtaking_event SET geometry = ST_Transform(geometry, 3857);") + op.execute( + "ALTER TABLE overtaking_event ALTER COLUMN geometry TYPE geometry(POINT, 3857);" + ) + + +def downgrade(): + op.execute( + "ALTER TABLE overtaking_event ALTER COLUMN geometry TYPE geometry;" + ) + op.execute("UPDATE overtaking_event SET geometry = ST_Transform(geometry, 4326);") diff --git a/api/migrations/versions/a049e5eb24dd_create_table_region.py b/api/migrations/versions/a049e5eb24dd_create_table_region.py new file mode 100644 index 0000000..aa434fc --- /dev/null +++ b/api/migrations/versions/a049e5eb24dd_create_table_region.py @@ -0,0 +1,35 @@ +"""create table region + +Revision ID: a049e5eb24dd +Revises: a9627f63fbed +Create Date: 2022-04-02 21:28:43.124521 + +""" +from alembic import op +import sqlalchemy as sa + +from migrations.utils import dbtype + + +# revision identifiers, used by Alembic. 
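+# (For orientation: the migrations touched by this change form the chain
+# 99a3d2eb08f9 -> a049e5eb24dd -> f7b21148126a -> b8b0fbae50a4 -> f4b0f460254d
+# -> 587e69ecb466. Alembic orders migrations by these down_revision links,
+# not by their "Create Date".)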
+revision = "a049e5eb24dd" +down_revision = "99a3d2eb08f9" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "region", + sa.Column("id", sa.String(24), primary_key=True, index=True), + sa.Column("name", sa.Text), + sa.Column("geometry", dbtype("GEOMETRY(GEOMETRY,3857)"), index=False), + sa.Column("admin_level", sa.Integer, index=True), + ) + op.execute( + "CREATE INDEX region_geometry_idx ON region USING GIST (geometry) WITH (FILLFACTOR=100);" + ) + + +def downgrade(): + op.drop_table("region") diff --git a/api/migrations/versions/b8b0fbae50a4_add_import_groups.py b/api/migrations/versions/b8b0fbae50a4_add_import_groups.py new file mode 100644 index 0000000..6a36c6b --- /dev/null +++ b/api/migrations/versions/b8b0fbae50a4_add_import_groups.py @@ -0,0 +1,39 @@ +"""add import groups + +Revision ID: b8b0fbae50a4 +Revises: f7b21148126a +Create Date: 2023-03-26 09:41:36.621203 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "b8b0fbae50a4" +down_revision = "f7b21148126a" +branch_labels = None +depends_on = None + + +def upgrade(): + op.add_column( + "road", + sa.Column("import_group", sa.String(), nullable=True), + ) + op.add_column( + "region", + sa.Column("import_group", sa.String(), nullable=True), + ) + + # Set existing to "osm2pgsql" + road = sa.table("road", sa.column("import_group", sa.String)) + op.execute(road.update().values(import_group="osm2pgsql")) + + region = sa.table("region", sa.column("import_group", sa.String)) + op.execute(region.update().values(import_group="osm2pgsql")) + + +def downgrade(): + op.drop_column("road", "import_group") + op.drop_column("region", "import_group") diff --git a/api/migrations/versions/f4b0f460254d_add_osm_id_indexes.py b/api/migrations/versions/f4b0f460254d_add_osm_id_indexes.py new file mode 100644 index 0000000..8a9d972 --- /dev/null +++ b/api/migrations/versions/f4b0f460254d_add_osm_id_indexes.py @@ -0,0 +1,24 @@ +"""add osm id indexes + +Revision ID: f4b0f460254d +Revises: b8b0fbae50a4 +Create Date: 2023-03-30 10:56:22.066768 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision = "f4b0f460254d" +down_revision = "b8b0fbae50a4" +branch_labels = None +depends_on = None + + +def upgrade(): + op.execute("CREATE INDEX IF NOT EXISTS ix_road_way_id ON road (way_id);") + + +def downgrade(): + op.drop_index("ix_road_way_id") diff --git a/api/migrations/versions/f7b21148126a_add_user_device.py b/api/migrations/versions/f7b21148126a_add_user_device.py new file mode 100644 index 0000000..2e65451 --- /dev/null +++ b/api/migrations/versions/f7b21148126a_add_user_device.py @@ -0,0 +1,41 @@ +"""add user_device + +Revision ID: f7b21148126a +Revises: a9627f63fbed +Create Date: 2022-09-15 17:48:06.764342 + +""" +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. 
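+# (Alembic only consults the down_revision assignment below; the "Revises:"
+# line in the docstring above is informational only and here still names
+# a9627f63fbed rather than the actual predecessor.)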
+revision = "f7b21148126a" +down_revision = "a049e5eb24dd" +branch_labels = None +depends_on = None + + +def upgrade(): + op.create_table( + "user_device", + sa.Column("id", sa.Integer, autoincrement=True, primary_key=True), + sa.Column("user_id", sa.Integer, sa.ForeignKey("user.id", ondelete="CASCADE")), + sa.Column("identifier", sa.String, nullable=False), + sa.Column("display_name", sa.String, nullable=True), + sa.Index("user_id_identifier", "user_id", "identifier", unique=True), + ) + op.add_column( + "track", + sa.Column( + "user_device_id", + sa.Integer, + sa.ForeignKey("user_device.id", ondelete="RESTRICT"), + nullable=True, + ), + ) + + +def downgrade(): + op.drop_column("track", "user_device_id") + op.drop_table("user_device") diff --git a/api/obs/api/app.py b/api/obs/api/app.py index d70dd83..9693256 100644 --- a/api/obs/api/app.py +++ b/api/obs/api/app.py @@ -1,3 +1,4 @@ +import asyncio import logging import re @@ -21,16 +22,60 @@ from sqlalchemy import select from sqlalchemy.ext.asyncio import AsyncSession from obs.api.db import User, make_session, connect_db +from obs.api.cors import setup_options, add_cors_headers from obs.api.utils import get_single_arg -from sqlalchemy.util import asyncio log = logging.getLogger(__name__) + +class SanicAccessMessageFilter(logging.Filter): + """ + A filter that modifies the log message of a sanic.access log entry to + include useful information. + """ + + def filter(self, record): + record.msg = f"{record.request} -> {record.status}" + return True + + +def configure_sanic_logging(): + for logger_name in ["sanic.root", "sanic.access", "sanic.error"]: + logger = logging.getLogger(logger_name) + for handler in logger.handlers: + logger.removeHandler(handler) + + logger = logging.getLogger("sanic.access") + for filter_ in logger.filters: + logger.removeFilter(filter_) + logger.addFilter(SanicAccessMessageFilter()) + logging.getLogger("sanic.root").setLevel(logging.WARNING) + + app = Sanic( "openbikesensor-api", env_prefix="OBS_", - log_config={}, ) +configure_sanic_logging() + +app.config.update( + dict( + DEBUG=False, + VERBOSE=False, + AUTO_RELOAD=False, + POSTGRES_POOL_SIZE=20, + POSTGRES_MAX_OVERFLOW=40, + DEDICATED_WORKER=True, + FRONTEND_URL=None, + FRONTEND_HTTPS=True, + TILES_FILE=None, + TILE_SEMAPHORE_SIZE=4, + EXPORT_SEMAPHORE_SIZE=1, + ) +) + +# overwrite from defaults again +app.config.load_environment_vars("OBS_") if isfile("./config.py"): app.update_config("./config.py") @@ -59,6 +104,39 @@ class NoConnectionLostFilter(logging.Filter): logging.getLogger("sanic.error").addFilter(NoConnectionLostFilter) +def setup_cors(app): + frontend_url = app.config.get("FRONTEND_URL") + additional_origins = app.config.get("ADDITIONAL_CORS_ORIGINS") + if not frontend_url and not additional_origins: + # No CORS configured + return + + origins = [] + if frontend_url: + u = urlparse(frontend_url) + origins.append(f"{u.scheme}://{u.netloc}") + + if isinstance(additional_origins, str): + origins += re.split(r"\s+", additional_origins) + elif isinstance(additional_origins, list): + origins += additional_origins + elif additional_origins is not None: + raise ValueError( + "invalid option type for ADDITIONAL_CORS_ORIGINS, must be list or space separated str" + ) + + app.ctx.cors_origins = origins + + # Add OPTIONS handlers to any route that is missing it + app.register_listener(setup_options, "before_server_start") + + # Fill in CORS headers + app.register_middleware(add_cors_headers, "response") + + +setup_cors(app) + + @app.exception(SanicException, 
BaseException) async def _handle_sanic_errors(_request, exception): if isinstance(exception, asyncio.CancelledError): @@ -95,39 +173,6 @@ def configure_paths(c): configure_paths(app.config) -def setup_cors(app): - frontend_url = app.config.get("FRONTEND_URL") - additional_origins = app.config.get("ADDITIONAL_CORS_ORIGINS") - if not frontend_url and not additional_origins: - # No CORS configured - return - - origins = [] - if frontend_url: - u = urlparse(frontend_url) - origins.append(f"{u.scheme}://{u.netloc}") - - if isinstance(additional_origins, str): - origins += re.split(r"\s+", additional_origins) - elif isinstance(additional_origins, list): - origins += additional_origins - elif additional_origins is not None: - raise ValueError( - "invalid option type for ADDITIONAL_CORS_ORIGINS, must be list or space separated str" - ) - - from sanic_cors import CORS - - CORS( - app, - origins=origins, - supports_credentials=True, - expose_headers={"Content-Disposition"}, - ) - - -setup_cors(app) - # TODO: use a different interface, maybe backed by the PostgreSQL, to allow # scaling the API Session(app, interface=InMemorySessionInterface()) @@ -142,6 +187,12 @@ async def app_connect_db(app, loop): ) app.ctx._db_engine = await app.ctx._db_engine_ctx.__aenter__() + if app.config.TILE_SEMAPHORE_SIZE: + app.ctx.tile_semaphore = asyncio.Semaphore(app.config.TILE_SEMAPHORE_SIZE) + + if app.config.EXPORT_SEMAPHORE_SIZE: + app.ctx.export_semaphore = asyncio.Semaphore(app.config.EXPORT_SEMAPHORE_SIZE) + @app.after_server_stop async def app_disconnect_db(app, loop): @@ -294,9 +345,7 @@ from .routes import ( exports, ) -if not app.config.LEAN_MODE: - from .routes import tiles, mapdetails - +from .routes import tiles, mapdetails from .routes import frontend diff --git a/api/obs/api/cors.py b/api/obs/api/cors.py new file mode 100644 index 0000000..3ab27e1 --- /dev/null +++ b/api/obs/api/cors.py @@ -0,0 +1,68 @@ +from collections import defaultdict +from typing import Dict, FrozenSet, Iterable + +from sanic import Sanic, response +from sanic_routing.router import Route + + +def _add_cors_headers(request, response, methods: Iterable[str]) -> None: + allow_methods = list(set(methods)) + + if "OPTIONS" not in allow_methods: + allow_methods.append("OPTIONS") + + origin = request.headers.get("origin") + if origin in request.app.ctx.cors_origins: + headers = { + "Access-Control-Allow-Methods": ",".join(allow_methods), + "Access-Control-Allow-Origin": origin, + "Access-Control-Allow-Credentials": "true", + "Access-Control-Allow-Headers": ( + "origin, content-type, accept, " + "authorization, x-xsrf-token, x-request-id" + ), + "Access-Control-Expose-Headers": "content-disposition", + } + response.headers.extend(headers) + + +def add_cors_headers(request, response): + if request.method != "OPTIONS": + methods = [method for method in request.route.methods] + _add_cors_headers(request, response, methods) + + +def _compile_routes_needing_options(routes: Dict[str, Route]) -> Dict[str, FrozenSet]: + needs_options = defaultdict(list) + # This is 21.12 and later. You will need to change this for older versions. 
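+    # Collect, per URI, the methods of every route that does not already
+    # declare an OPTIONS handler, so that setup_options() below can register
+    # a preflight handler for exactly those URIs.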
+    for route in routes.values():
+        if "OPTIONS" not in route.methods:
+            needs_options[route.uri].extend(route.methods)
+
+    return {uri: frozenset(methods) for uri, methods in dict(needs_options).items()}
+
+
+def _options_wrapper(handler, methods):
+    def wrapped_handler(request, *args, **kwargs):
+        nonlocal methods
+        return handler(request, methods)
+
+    return wrapped_handler
+
+
+async def options_handler(request, methods) -> response.HTTPResponse:
+    resp = response.empty()
+    _add_cors_headers(request, resp, methods)
+    return resp
+
+
+def setup_options(app: Sanic, _):
+    app.router.reset()
+    needs_options = _compile_routes_needing_options(app.router.routes_all)
+    for uri, methods in needs_options.items():
+        app.add_route(
+            _options_wrapper(options_handler, methods),
+            uri,
+            methods=["OPTIONS"],
+        )
+    app.router.finalize()
diff --git a/api/obs/api/db.py b/api/obs/api/db.py
index 64d8e50..74d1f14 100644
--- a/api/obs/api/db.py
+++ b/api/obs/api/db.py
@@ -34,8 +34,9 @@ from sqlalchemy import (
     select,
     text,
     literal,
+    Text,
 )
-from sqlalchemy.dialects.postgresql import HSTORE, UUID
+from sqlalchemy.dialects.postgresql import UUID
 
 log = logging.getLogger(__name__)
 
@@ -107,6 +108,28 @@ class Geometry(UserDefinedType):
         return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)
 
 
+class LineString(UserDefinedType):
+    def get_col_spec(self):
+        return "geometry(LineString, 3857)"
+
+    def bind_expression(self, bindvalue):
+        return func.ST_GeomFromGeoJSON(bindvalue, type_=self)
+
+    def column_expression(self, col):
+        return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)
+
+
+class GeometryGeometry(UserDefinedType):
+    def get_col_spec(self):
+        return "geometry(GEOMETRY, 3857)"
+
+    def bind_expression(self, bindvalue):
+        return func.ST_GeomFromGeoJSON(bindvalue, type_=self)
+
+    def column_expression(self, col):
+        return func.ST_AsGeoJSON(func.ST_Transform(col, 4326), type_=self)
+
+
 class OvertakingEvent(Base):
     __tablename__ = "overtaking_event"
     __table_args__ = (Index("road_segment", "way_id", "direction_reversed"),)
@@ -134,12 +157,23 @@ class OvertakingEvent(Base):
 class Road(Base):
     __tablename__ = "road"
-    way_id = Column(BIGINT, primary_key=True, index=True)
+    way_id = Column(BIGINT, primary_key=True, index=True, autoincrement=False)
     zone = Column(ZoneType)
-    name = Column(String)
-    geometry = Column(Geometry)
+    name = Column(Text)
+    geometry = Column(LineString)
     directionality = Column(Integer)
     oneway = Column(Boolean)
+    import_group = Column(String)
+
+    __table_args__ = (
+        # We keep the index name as osm2pgsql created it, way back when.
+        Index(
+            "road_geometry_idx",
+            "geometry",
+            postgresql_using="gist",
+            postgresql_with={"fillfactor": 100},
+        ),
+    )
 
     def to_dict(self):
         return {
@@ -166,6 +200,12 @@ class RoadUsage(Base):
     def __repr__(self):
         return f"<RoadUsage {self.hex_hash}>"
 
+    def __hash__(self):
+        return int(self.hex_hash, 16)
+
+    def __eq__(self, other):
+        return self.hex_hash == other.hex_hash
+
 
 NOW = text("NOW()")
 
@@ -221,6 +261,12 @@ class Track(Base):
         Integer, ForeignKey("user.id", ondelete="CASCADE"), nullable=False
     )
 
+    user_device_id = Column(
+        Integer,
+        ForeignKey("user_device.id", ondelete="RESTRICT"),
+        nullable=True,
+    )
+
     # Statistics...
maybe we'll drop some of this if we can easily compute them from SQL recorded_at = Column(DateTime) recorded_until = Column(DateTime) @@ -253,6 +299,7 @@ class Track(Base): if for_user_id is not None and for_user_id == self.author_id: result["uploadedByUserAgent"] = self.uploaded_by_user_agent result["originalFileName"] = self.original_file_name + result["userDeviceId"] = self.user_device_id if self.author: result["author"] = self.author.to_dict(for_user_id=for_user_id) @@ -362,7 +409,7 @@ class User(Base): api_key = Column(String) # This user can be matched by the email address from the auth service - # instead of having to match by `sub`. If a matching user logs in, the + # instead of having to match by `sub`. If a matching user logs in, the # `sub` is updated to the new sub and this flag is disabled. This is for # migrating *to* the external authentication scheme. match_by_username_email = Column(Boolean, server_default=false()) @@ -409,6 +456,28 @@ class User(Base): self.username = new_name +class UserDevice(Base): + __tablename__ = "user_device" + id = Column(Integer, autoincrement=True, primary_key=True) + user_id = Column(Integer, ForeignKey("user.id", ondelete="CASCADE")) + identifier = Column(String, nullable=False) + display_name = Column(String, nullable=True) + + __table_args__ = ( + Index("user_id_identifier", "user_id", "identifier", unique=True), + ) + + def to_dict(self, for_user_id=None): + if for_user_id != self.user_id: + return {} + + return { + "id": self.id, + "identifier": self.identifier, + "displayName": self.display_name, + } + + class Comment(Base): __tablename__ = "comment" id = Column(Integer, autoincrement=True, primary_key=True) @@ -432,6 +501,26 @@ class Comment(Base): } +class Region(Base): + __tablename__ = "region" + + id = Column(String(24), primary_key=True, index=True) + name = Column(Text) + geometry = Column(GeometryGeometry) + admin_level = Column(Integer, index=True) + import_group = Column(String) + + __table_args__ = ( + # We keep the index name as osm2pgsql created it, way back when. 
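+        # (FILLFACTOR=100 packs the index pages completely, which is a
+        # reasonable choice on the assumption that regions are bulk-imported
+        # and rarely updated afterwards.)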
+ Index( + "region_geometry_idx", + "geometry", + postgresql_using="gist", + postgresql_with={"fillfactor": 100}, + ), + ) + + Comment.author = relationship("User", back_populates="authored_comments") User.authored_comments = relationship( "Comment", @@ -458,6 +547,14 @@ Track.overtaking_events = relationship( passive_deletes=True, ) +Track.user_device = relationship("UserDevice", back_populates="tracks") +UserDevice.tracks = relationship( + "Track", + order_by=Track.created_at, + back_populates="user_device", + passive_deletes=False, +) + # 0..4 Night, 4..10 Morning, 10..14 Noon, 14..18 Afternoon, 18..22 Evening, 22..00 Night # Two hour intervals diff --git a/api/obs/api/process.py b/api/obs/api/process.py index 6fc2c5e..354bed3 100644 --- a/api/obs/api/process.py +++ b/api/obs/api/process.py @@ -8,7 +8,7 @@ import pytz from os.path import join from datetime import datetime -from sqlalchemy import delete, select +from sqlalchemy import delete, func, select, and_ from sqlalchemy.orm import joinedload from obs.face.importer import ImportMeasurementsCsv @@ -25,9 +25,9 @@ from obs.face.filter import ( RequiredFieldsFilter, ) -from obs.face.osm import DataSource, DatabaseTileSource, OverpassTileSource +from obs.face.osm import DataSource, DatabaseTileSource -from obs.api.db import OvertakingEvent, RoadUsage, Track, make_session +from obs.api.db import OvertakingEvent, RoadUsage, Track, UserDevice, make_session from obs.api.app import app log = logging.getLogger(__name__) @@ -39,12 +39,7 @@ def get_data_source(): mode, the OverpassTileSource is used to fetch data on demand. In normal mode, the roads database is used. """ - if app.config.LEAN_MODE: - tile_source = OverpassTileSource(cache_dir=app.config.OBS_FACE_CACHE_DIR) - else: - tile_source = DatabaseTileSource() - - return DataSource(tile_source) + return DataSource(DatabaseTileSource()) async def process_tracks_loop(delay): @@ -144,10 +139,11 @@ async def process_track(session, track, data_source): os.makedirs(output_dir, exist_ok=True) log.info("Annotating and filtering CSV file") - imported_data, statistics = ImportMeasurementsCsv().read( + imported_data, statistics, track_metadata = ImportMeasurementsCsv().read( original_file_path, user_id="dummy", # TODO: user username or id or nothing? dataset_id=Track.slug, # TODO: use track id or slug or nothing? 
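+        # return_metadata is assumed to make the importer also hand back the
+        # track's metadata header fields (e.g. "DeviceId"), which are used
+        # further below to find or create the matching UserDevice.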
+ return_metadata=True, ) annotator = AnnotateMeasurements( @@ -217,6 +213,36 @@ async def process_track(session, track, data_source): await clear_track_data(session, track) await session.commit() + device_identifier = track_metadata.get("DeviceId") + if device_identifier: + if isinstance(device_identifier, list): + device_identifier = device_identifier[0] + + log.info("Finding or creating device %s", device_identifier) + user_device = ( + await session.execute( + select(UserDevice).where( + and_( + UserDevice.user_id == track.author_id, + UserDevice.identifier == device_identifier, + ) + ) + ) + ).scalar() + + log.debug("user_device is %s", user_device) + + if not user_device: + user_device = UserDevice( + user_id=track.author_id, identifier=device_identifier + ) + log.debug("Create new device for this user") + session.add(user_device) + + track.user_device = user_device + else: + log.info("No DeviceId in track metadata.") + log.info("Import events into database...") await import_overtaking_events(session, track, overtaking_events) @@ -280,11 +306,16 @@ async def import_overtaking_events(session, track, overtaking_events): hex_hash=hex_hash, way_id=m.get("OSM_way_id"), direction_reversed=m.get("OSM_way_orientation", 0) < 0, - geometry=json.dumps( - { - "type": "Point", - "coordinates": [m["longitude"], m["latitude"]], - } + geometry=func.ST_Transform( + func.ST_GeomFromGeoJSON( + json.dumps( + { + "type": "Point", + "coordinates": [m["longitude"], m["latitude"]], + } + ) + ), + 3857, ), latitude=m["latitude"], longitude=m["longitude"], diff --git a/api/obs/api/routes/exports.py b/api/obs/api/routes/exports.py index 90218fd..4fa1ce3 100644 --- a/api/obs/api/routes/exports.py +++ b/api/obs/api/routes/exports.py @@ -3,15 +3,17 @@ from enum import Enum from contextlib import contextmanager import zipfile import io +import re from sqlite3 import connect import shapefile from obs.api.db import OvertakingEvent -from sqlalchemy import select, func +from sqlalchemy import select, func, text from sanic.response import raw from sanic.exceptions import InvalidUsage from obs.api.app import api, json as json_response +from obs.api.utils import use_request_semaphore class ExportFormat(str, Enum): @@ -26,7 +28,7 @@ def parse_bounding_box(input_string): func.ST_Point(left, bottom), func.ST_Point(right, top), ), - 3857, + 4326, ) @@ -38,11 +40,11 @@ PROJECTION_4326 = ( @contextmanager -def shapefile_zip(): +def shapefile_zip(shape_type=shapefile.POINT, basename="events"): zip_buffer = io.BytesIO() shp, shx, dbf = (io.BytesIO() for _ in range(3)) writer = shapefile.Writer( - shp=shp, shx=shx, dbf=dbf, shapeType=shapefile.POINT, encoding="utf8" + shp=shp, shx=shx, dbf=dbf, shapeType=shape_type, encoding="utf8" ) yield writer, zip_buffer @@ -51,67 +53,140 @@ def shapefile_zip(): writer.close() zip_file = zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False) - zip_file.writestr("events.shp", shp.getbuffer()) - zip_file.writestr("events.shx", shx.getbuffer()) - zip_file.writestr("events.dbf", dbf.getbuffer()) - zip_file.writestr("events.prj", PROJECTION_4326) + zip_file.writestr(f"{basename}.shp", shp.getbuffer()) + zip_file.writestr(f"{basename}.shx", shx.getbuffer()) + zip_file.writestr(f"{basename}.dbf", dbf.getbuffer()) + zip_file.writestr(f"{basename}.prj", PROJECTION_4326) zip_file.close() @api.get(r"/export/events") async def export_events(req): - bbox = req.ctx.get_single_arg( - "bbox", default="-180,-90,180,90", convert=parse_bounding_box - ) - fmt = req.ctx.get_single_arg("fmt", 
convert=ExportFormat) + async with use_request_semaphore(req, "export_semaphore", timeout=30): + bbox = req.ctx.get_single_arg( + "bbox", default="-180,-90,180,90", convert=parse_bounding_box + ) + fmt = req.ctx.get_single_arg("fmt", convert=ExportFormat) - events = await req.ctx.db.stream_scalars( - select(OvertakingEvent).where(OvertakingEvent.geometry.bool_op("&&")(bbox)) - ) + events = await req.ctx.db.stream_scalars( + select(OvertakingEvent).where( + OvertakingEvent.geometry.bool_op("&&")(func.ST_Transform(bbox, 3857)) + ) + ) - if fmt == ExportFormat.SHAPEFILE: - with shapefile_zip() as (writer, zip_buffer): - writer.field("distance_overtaker", "N", decimal=4) - writer.field("distance_stationary", "N", decimal=4) - writer.field("way_id", "N", decimal=0) - writer.field("direction", "N", decimal=0) - writer.field("course", "N", decimal=4) - writer.field("speed", "N", decimal=4) + if fmt == ExportFormat.SHAPEFILE: + with shapefile_zip(basename="events") as (writer, zip_buffer): + writer.field("distance_overtaker", "N", decimal=4) + writer.field("distance_stationary", "N", decimal=4) + writer.field("way_id", "N", decimal=0) + writer.field("direction", "N", decimal=0) + writer.field("course", "N", decimal=4) + writer.field("speed", "N", decimal=4) + async for event in events: + writer.point(event.longitude, event.latitude) + writer.record( + distance_overtaker=event.distance_overtaker, + distance_stationary=event.distance_stationary, + direction=-1 if event.direction_reversed else 1, + way_id=event.way_id, + course=event.course, + speed=event.speed, + # "time"=event.time, + ) + + return raw(zip_buffer.getbuffer()) + + if fmt == ExportFormat.GEOJSON: + features = [] async for event in events: - writer.point(event.longitude, event.latitude) - writer.record( - distance_overtaker=event.distance_overtaker, - distance_stationary=event.distance_stationary, - direction=-1 if event.direction_reversed else 1, - way_id=event.way_id, - course=event.course, - speed=event.speed, - # "time"=event.time, + features.append( + { + "type": "Feature", + "geometry": json.loads(event.geometry), + "properties": { + "distance_overtaker": event.distance_overtaker, + "distance_stationary": event.distance_stationary, + "direction": -1 if event.direction_reversed else 1, + "way_id": event.way_id, + "course": event.course, + "speed": event.speed, + "time": event.time, + }, + } ) - return raw(zip_buffer.getbuffer()) + geojson = {"type": "FeatureCollection", "features": features} + return json_response(geojson) - if fmt == ExportFormat.GEOJSON: - features = [] - async for event in events: - features.append( - { - "type": "Feature", - "geometry": json.loads(event.geometry), - "properties": { - "distance_overtaker": event.distance_overtaker, - "distance_stationary": event.distance_stationary, - "direction": -1 if event.direction_reversed else 1, - "way_id": event.way_id, - "course": event.course, - "speed": event.speed, - "time": event.time, - }, - } + raise InvalidUsage("unknown export format") + + +@api.get(r"/export/segments") +async def export_segments(req): + async with use_request_semaphore(req, "export_semaphore", timeout=30): + bbox = req.ctx.get_single_arg( + "bbox", default="-180,-90,180,90" + ) + assert re.match(r"(-?\d+\.?\d+,?){4}", bbox) + fmt = req.ctx.get_single_arg("fmt", convert=ExportFormat) + segments = await req.ctx.db.stream( + text( + f"select ST_AsGeoJSON(ST_Transform(geometry,4326)) AS geometry, way_id, distance_overtaker_mean, 
distance_overtaker_min,distance_overtaker_max,distance_overtaker_median,overtaking_event_count,usage_count,direction,zone,offset_direction,distance_overtaker_array from layer_obs_roads(ST_Transform(ST_MakeEnvelope({bbox},4326),3857),11,NULL,'1900-01-01'::timestamp,'2100-01-01'::timestamp) WHERE usage_count>0" ) + ) - geojson = {"type": "FeatureCollection", "features": features} - return json_response(geojson) + if fmt == ExportFormat.SHAPEFILE: + with shapefile_zip(shape_type=3, basename="segments") as (writer, zip_buffer): + writer.field("distance_overtaker_mean", "N", decimal=4) + writer.field("distance_overtaker_max", "N", decimal=4) + writer.field("distance_overtaker_min", "N", decimal=4) + writer.field("distance_overtaker_median", "N", decimal=4) + writer.field("overtaking_event_count", "N", decimal=4) + writer.field("usage_count", "N", decimal=4) + writer.field("way_id", "N", decimal=0) + writer.field("direction", "N", decimal=0) + writer.field("zone", "C") - raise InvalidUsage("unknown export format") + async for segment in segments: + geom = json.loads(segment.st_asgeojson) + writer.line([geom["coordinates"]]) + writer.record( + distance_overtaker_mean=segment.distance_overtaker_mean, + distance_overtaker_median=segment.distance_overtaker_median, + distance_overtaker_max=segment.distance_overtaker_max, + distance_overtaker_min=segment.distance_overtaker_min, + usage_count=segment.usage_count, + overtaking_event_count=segment.overtaking_event_count, + direction=segment.direction, + way_id=segment.way_id, + zone=segment.zone, + ) + + return raw(zip_buffer.getbuffer()) + + if fmt == ExportFormat.GEOJSON: + features = [] + async for segment in segments: + features.append( + { + "type": "Feature", + "geometry": json.loads(segment.geometry), + "properties": { + "distance_overtaker_mean": segment.distance_overtaker_mean, + "distance_overtaker_max": segment.distance_overtaker_max, + "distance_overtaker_median": segment.distance_overtaker_median, + "overtaking_event_count": segment.overtaking_event_count, + "usage_count": segment.usage_count, + "distance_overtaker_array": segment.distance_overtaker_array, + "direction": segment.direction, + "way_id": segment.way_id, + "zone": segment.zone, + }, + } + ) + + geojson = {"type": "FeatureCollection", "features": features} + return json_response(geojson) + + raise InvalidUsage("unknown export format") diff --git a/api/obs/api/routes/frontend.py b/api/obs/api/routes/frontend.py index fb681a0..6f6e25f 100644 --- a/api/obs/api/routes/frontend.py +++ b/api/obs/api/routes/frontend.py @@ -14,22 +14,18 @@ if app.config.FRONTEND_CONFIG: **req.app.config.FRONTEND_CONFIG, "apiUrl": f"{req.ctx.api_url}/api", "loginUrl": f"{req.ctx.api_url}/login", - "obsMapSource": ( - None - if app.config.LEAN_MODE - else { - "type": "vector", - "tiles": [ - req.ctx.api_url - + req.app.url_for("tiles", zoom="000", x="111", y="222.pbf") - .replace("000", "{z}") - .replace("111", "{x}") - .replace("222", "{y}") - ], - "minzoom": 12, - "maxzoom": 14, - } - ), + "obsMapSource": { + "type": "vector", + "tiles": [ + req.ctx.api_url + + req.app.url_for("tiles", zoom="000", x="111", y="222.pbf") + .replace("000", "{z}") + .replace("111", "{x}") + .replace("222", "{y}") + ], + "minzoom": 0, + "maxzoom": 14, + }, } return response.json(result) diff --git a/api/obs/api/routes/login.py b/api/obs/api/routes/login.py index 1c4e80a..53ef8a1 100644 --- a/api/obs/api/routes/login.py +++ b/api/obs/api/routes/login.py @@ -170,4 +170,4 @@ async def logout(req): auth_req = 
client.construct_EndSessionRequest(state=session["state"])
     logout_url = auth_req.request(client.end_session_endpoint)
 
-    return redirect(logout_url + f"&redirect_uri={req.ctx.api_url}/logout")
+    return redirect(logout_url + f"&post_logout_redirect_uri={req.ctx.api_url}/logout")
diff --git a/api/obs/api/routes/mapdetails.py b/api/obs/api/routes/mapdetails.py
index 5b0eba3..1565fc1 100644
--- a/api/obs/api/routes/mapdetails.py
+++ b/api/obs/api/routes/mapdetails.py
@@ -18,14 +18,16 @@ round_speed = partial(round_to, multiples=0.1)
 
 log = logging.getLogger(__name__)
 
-def get_bearing(a, b):
+
+def get_bearing(b, a):
     # longitude, latitude
     dL = b[0] - a[0]
     X = numpy.cos(b[1]) * numpy.sin(dL)
     Y = numpy.cos(a[1]) * numpy.sin(b[1]) - numpy.sin(a[1]) * numpy.cos(
         b[1]
     ) * numpy.cos(dL)
-    return numpy.arctan2(X, Y)
+    return numpy.arctan2(Y, X) + 0.5 * math.pi
+
 
 # Bins for histogram on overtaker distances. 0, 0.25, ... 2.25, infinity
 DISTANCE_BINS = numpy.arange(0, 2.5, 0.25).tolist() + [float('inf')]
@@ -82,11 +84,11 @@ async def mapdetails_road(req):
     arrays = numpy.array(arrays).T
 
     if len(arrays) == 0:
-        arrays = numpy.array([[], [], [], []], dtype=numpy.float)
+        arrays = numpy.array([[], [], [], []], dtype=float)
 
     data, mask = arrays[:-1], arrays[-1]
     data = data.astype(numpy.float64)
-    mask = mask.astype(numpy.bool)
+    mask = mask.astype(bool)
 
     def partition(arr, cond):
         return arr[:, cond], arr[:, ~cond]
diff --git a/api/obs/api/routes/stats.py b/api/obs/api/routes/stats.py
index 8f5603c..54bc82a 100644
--- a/api/obs/api/routes/stats.py
+++ b/api/obs/api/routes/stats.py
@@ -4,12 +4,12 @@ from typing import Optional
 from operator import and_
 from functools import reduce
 
-from sqlalchemy import select, func
+from sqlalchemy import distinct, select, func, desc
 
 from sanic.response import json
 
 from obs.api.app import api
-from obs.api.db import Track, OvertakingEvent, User
+from obs.api.db import Track, OvertakingEvent, User, Region, UserDevice
 from obs.api.utils import round_to
 
@@ -92,6 +92,14 @@ async def stats(req):
             .where(track_condition)
         )
     ).scalar()
+    device_count = (
+        await req.ctx.db.execute(
+            select(func.count(distinct(UserDevice.id)))
+            .select_from(UserDevice)
+            .join(Track.user_device)
+            .where(track_condition)
+        )
+    ).scalar()
 
     result = {
         "numEvents": event_count,
@@ -100,6 +108,7 @@ async def stats(req):
         "trackDuration": round_to(track_duration or 0, TRACK_DURATION_ROUNDING),
         "publicTrackCount": public_track_count,
         "trackCount": track_count,
+        "deviceCount": device_count,
     }
 
     return json(result)
@@ -167,3 +176,31 @@ async def stats(req):
 #     });
 #   }),
 # );
+
+
+@api.route("/stats/regions")
+async def stats_regions(req):
+    query = (
+        select(
+            [
+                Region.id,
+                Region.name,
+                func.count(OvertakingEvent.id).label("overtaking_event_count"),
+            ]
+        )
+        .select_from(Region)
+        .join(
+            OvertakingEvent,
+            func.ST_Within(OvertakingEvent.geometry, Region.geometry),
+        )
+        .group_by(
+            Region.id,
+            Region.name,
+            Region.geometry,
+        )
+        .having(func.count(OvertakingEvent.id) > 0)
+        .order_by(desc("overtaking_event_count"))
+    )
+
+    regions = list(map(dict, (await req.ctx.db.execute(query)).all()))
+    return json(regions)
diff --git a/api/obs/api/routes/tiles.py b/api/obs/api/routes/tiles.py
index 9b6b652..f0452a5 100644
--- a/api/obs/api/routes/tiles.py
+++ b/api/obs/api/routes/tiles.py
@@ -7,10 +7,10 @@ import dateutil.parser
 from sanic.exceptions import Forbidden, InvalidUsage
 from sanic.response import raw
 
-from sqlalchemy import select, text
-from sqlalchemy.sql.expression import table, column
+from sqlalchemy import text
 
 from obs.api.app import app
+from obs.api.utils import use_request_semaphore
 
 
 def get_tile(filename, zoom, x, y):
@@ -87,24 +87,25 @@ def get_filter_options(
 
 @app.route(r"/tiles/<zoom:int>/<x:int>/<y:str>")
 async def tiles(req, zoom: int, x: int, y: str):
-    if app.config.get("TILES_FILE"):
-        tile = get_tile(req.app.config.TILES_FILE, int(zoom), int(x), int(y))
+    async with use_request_semaphore(req, "tile_semaphore"):
+        if app.config.get("TILES_FILE"):
+            tile = get_tile(req.app.config.TILES_FILE, int(zoom), int(x), int(y))
 
-    else:
-        user_id, start, end = get_filter_options(req)
+        else:
+            user_id, start, end = get_filter_options(req)
 
-        tile = await req.ctx.db.scalar(
-            text(
-                f"select data from getmvt(:zoom, :x, :y, :user_id, :min_time, :max_time) as b(data, key);"
-            ).bindparams(
-                zoom=int(zoom),
-                x=int(x),
-                y=int(y),
-                user_id=user_id,
-                min_time=start,
-                max_time=end,
+            tile = await req.ctx.db.scalar(
+                text(
+                    f"select data from getmvt(:zoom, :x, :y, :user_id, :min_time, :max_time) as b(data, key);"
+                ).bindparams(
+                    zoom=int(zoom),
+                    x=int(x),
+                    y=int(y),
+                    user_id=user_id,
+                    min_time=start,
+                    max_time=end,
+                )
             )
-        )
 
     gzip = "gzip" in req.headers["accept-encoding"]
diff --git a/api/obs/api/routes/tracks.py b/api/obs/api/routes/tracks.py
index 8ae1c6a..9868fb5 100644
--- a/api/obs/api/routes/tracks.py
+++ b/api/obs/api/routes/tracks.py
@@ -1,16 +1,18 @@
 import logging
 import re
+from datetime import date
 from json import load as jsonload
 from os.path import join, exists, isfile
 
-from sqlalchemy import select, func
+from sanic.exceptions import InvalidUsage, NotFound, Forbidden
+from sanic.response import file_stream, empty
+from slugify import slugify
+from sqlalchemy import select, func, and_
 from sqlalchemy.orm import joinedload
 
-from obs.api.db import Track, User, Comment, DuplicateTrackFileError
 from obs.api.app import api, require_auth, read_api_key, json
-
-from sanic.response import file_stream, empty
-from sanic.exceptions import InvalidUsage, NotFound, Forbidden
+from obs.api.db import Track, Comment, DuplicateTrackFileError
+from obs.api.utils import tar_of_tracks
 
 log = logging.getLogger(__name__)
 
@@ -23,8 +25,8 @@ def normalize_user_agent(user_agent):
     return m[0] if m else None
 
 
-async def _return_tracks(req, extend_query, limit, offset):
-    if limit <= 0 or limit > 100:
+async def _return_tracks(req, extend_query, limit, offset, order_by=None):
+    if limit <= 0 or limit > 1000:
         raise InvalidUsage("invalid limit")
 
     if offset < 0:
@@ -39,7 +41,7 @@ async def _return_tracks(req, extend_query, limit, offset):
         extend_query(select(Track).options(joinedload(Track.author)))
         .limit(limit)
         .offset(offset)
-        .order_by(Track.created_at.desc())
+        .order_by(order_by if order_by is not None else Track.created_at)
     )
 
     tracks = (await req.ctx.db.execute(query)).scalars()
@@ -76,16 +78,101 @@ async def get_tracks(req):
     return await _return_tracks(req, extend_query, limit, offset)
 
 
+def parse_boolean(s):
+    if s is None:
+        return None
+
+    s = s.lower()
+    if s in ("true", "1", "yes", "y", "t"):
+        return True
+    if s in ("false", "0", "no", "n", "f"):
+        return False
+
+    raise ValueError("invalid value for boolean")
+
+
 @api.get("/tracks/feed")
 @require_auth
 async def get_feed(req):
     limit = req.ctx.get_single_arg("limit", default=20, convert=int)
     offset = req.ctx.get_single_arg("offset", default=0, convert=int)
+    user_device_id = req.ctx.get_single_arg("user_device_id", default=None, convert=int)
+
+    order_by_columns = {
+        "recordedAt": Track.recorded_at,
+        "title": Track.title,
+        "visibility": Track.public,
+        "length": Track.length,
+        "duration": Track.duration,
+        "user_device_id": Track.user_device_id,
+    }
+    order_by = req.ctx.get_single_arg(
+        "order_by", default=None, convert=order_by_columns.get
+    )
+
+    reversed_ = req.ctx.get_single_arg("reversed", convert=parse_boolean, default=False)
+    if reversed_ and order_by is not None:
+        order_by = order_by.desc()
+
+    public = req.ctx.get_single_arg("public", convert=parse_boolean, default=None)
 
     def extend_query(q):
-        return q.where(Track.author_id == req.ctx.user.id)
+        q = q.where(Track.author_id == req.ctx.user.id)
 
-    return await _return_tracks(req, extend_query, limit, offset)
+        if user_device_id is not None:
+            q = q.where(Track.user_device_id == user_device_id)
+
+        if public is not None:
+            q = q.where(Track.public == public)
+
+        return q
+
+    return await _return_tracks(req, extend_query, limit, offset, order_by)
+
+
+@api.post("/tracks/bulk")
+@require_auth
+async def tracks_bulk_action(req):
+    body = req.json
+    action = body["action"]
+    track_slugs = body["tracks"]
+
+    if action not in ("delete", "makePublic", "makePrivate", "reprocess", "download"):
+        raise InvalidUsage("invalid action")
+
+    query = select(Track).where(
+        and_(Track.author_id == req.ctx.user.id, Track.slug.in_(track_slugs))
+    )
+
+    files = set()
+
+    for track in (await req.ctx.db.execute(query)).scalars():
+        if action == "delete":
+            await req.ctx.db.delete(track)
+        elif action == "makePublic":
+            if not track.public:
+                track.queue_processing()
+            track.public = True
+        elif action == "makePrivate":
+            if track.public:
+                track.queue_processing()
+            track.public = False
+        elif action == "reprocess":
+            track.queue_processing()
+        elif action == "download":
+            files.add(track.get_original_file_path(req.app.config))
+
+    await req.ctx.db.commit()
+
+    if action == "download":
+        username_slug = slugify(req.ctx.user.username, separator="-")
+        date_str = date.today().isoformat()
+        file_basename = f"tracks_{username_slug}_{date_str}"
+
+        await tar_of_tracks(req, files, file_basename)
+        return
+
+    return empty()
 
 
 @api.post("/tracks")
diff --git a/api/obs/api/routes/users.py b/api/obs/api/routes/users.py
index 60b0c19..ceb0efc 100644
--- a/api/obs/api/routes/users.py
+++ b/api/obs/api/routes/users.py
@@ -1,9 +1,11 @@
 import logging
 
 from sanic.response import json
-from sanic.exceptions import InvalidUsage
+from sanic.exceptions import InvalidUsage, Forbidden, NotFound
+from sqlalchemy import and_, select
 
 from obs.api.app import api, require_auth
+from obs.api.db import UserDevice
 
 log = logging.getLogger(__name__)
 
@@ -28,6 +30,48 @@ async def get_user(req):
     return json(user_to_json(req.ctx.user) if req.ctx.user else None)
 
 
+@api.get("/user/devices")
+async def get_user_devices(req):
+    if not req.ctx.user:
+        raise Forbidden()
+
+    query = (
+        select(UserDevice)
+        .where(UserDevice.user_id == req.ctx.user.id)
+        .order_by(UserDevice.id)
+    )
+
+    devices = (await req.ctx.db.execute(query)).scalars()
+
+    return json([device.to_dict(req.ctx.user.id) for device in devices])
+
+
+@api.put("/user/devices/<device_id:int>")
+async def put_user_device(req, device_id):
+    if not req.ctx.user:
+        raise Forbidden()
+
+    body = req.json
+
+    query = (
+        select(UserDevice)
+        .where(and_(UserDevice.user_id == req.ctx.user.id, UserDevice.id == device_id))
+        .limit(1)
+    )
+
+    device = (await req.ctx.db.execute(query)).scalar()
+
+    if device is None:
+        raise NotFound()
+
+    new_name = body.get("displayName", "").strip()
+    if new_name and device.display_name != new_name:
+        device.display_name = new_name
+        await req.ctx.db.commit()
+
+    return json(device.to_dict(req.ctx.user.id))
+
+
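+# Example usage (a sketch; the URL prefix depends on how the api blueprint is
+# mounted): a client renames a device by sending
+#   PUT /user/devices/1   with JSON body {"displayName": "Handlebar OBS"}
+# The name is only updated when it is non-empty and actually changed, and
+# UserDevice.to_dict() returns an empty object for anyone but the owner.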
@api.put("/user") @require_auth async def put_user(req): diff --git a/api/obs/api/utils.py b/api/obs/api/utils.py index b9a50e3..7197d43 100644 --- a/api/obs/api/utils.py +++ b/api/obs/api/utils.py @@ -1,6 +1,15 @@ +import asyncio +from contextlib import asynccontextmanager from datetime import datetime +import logging +from os.path import commonpath, join, relpath +import queue +import tarfile + import dateutil.parser -from sanic.exceptions import InvalidUsage +from sanic.exceptions import InvalidUsage, ServiceUnavailable + +log = logging.getLogger(__name__) RAISE = object() @@ -30,3 +39,124 @@ def round_to(value: float, multiples: float) -> float: if value is None: return None return round(value / multiples) * multiples + + +def chunk_list(lst, n): + for s in range(0, len(lst), n): + yield lst[s : s + n] + + +class chunk: + def __init__(self, iterable, n): + self.iterable = iterable + self.n = n + + def __iter__(self): + if isinstance(self.iterable, list): + yield from chunk_list(self.iterable, self.n) + return + + it = iter(self.iterable) + while True: + current = [] + try: + for _ in range(self.n): + current.append(next(it)) + yield current + except StopIteration: + if current: + yield current + break + + async def __aiter__(self): + if hasattr(self.iterable, "__iter__"): + for item in self: + yield item + return + + it = self.iterable.__aiter__() + while True: + current = [] + try: + for _ in range(self.n): + current.append(await it.__anext__()) + yield current + except StopAsyncIteration: + if len(current): + yield current + break + + +async def tar_of_tracks(req, files, file_basename="tracks"): + response = await req.respond( + content_type="application/x-gtar", + headers={ + "content-disposition": f'attachment; filename="{file_basename}.tar.bz2"' + }, + ) + + helper = StreamerHelper(response) + + tar = tarfile.open(name=None, fileobj=helper, mode="w|bz2", bufsize=256 * 512) + + root = commonpath(list(files)) + for fname in files: + log.info("Write file to tar: %s", fname) + with open(fname, "rb") as fobj: + tarinfo = tar.gettarinfo(fname) + tarinfo.name = join(file_basename, relpath(fname, root)) + tar.addfile(tarinfo, fobj) + await helper.send_all() + tar.close() + await helper.send_all() + + await response.eof() + + +class StreamerHelper: + def __init__(self, response): + self.response = response + self.towrite = queue.Queue() + + def write(self, data): + self.towrite.put(data) + + async def send_all(self): + while True: + try: + tosend = self.towrite.get(block=False) + await self.response.send(tosend) + except queue.Empty: + break + + +@asynccontextmanager +async def use_request_semaphore(req, semaphore_name, timeout=10): + """ + If configured, acquire a semaphore for the map tile request and release it + after the context has finished. + + If the semaphore cannot be acquired within the timeout, issue a 503 Service + Unavailable error response that describes that the database is overloaded, + so users know what the problem is. + + Operates as a noop when the tile semaphore is not enabled. + """ + semaphore = getattr(req.app.ctx, semaphore_name, None) + + if semaphore is None: + yield + return + + try: + await asyncio.wait_for(semaphore.acquire(), timeout) + + try: + yield + finally: + semaphore.release() + + except asyncio.TimeoutError: + raise ServiceUnavailable( + "Too many requests, database overloaded. Please retry later." 
+        )
diff --git a/api/obs/bin/openbikesensor_api.py b/api/obs/bin/openbikesensor_api.py
index 19938aa..c43ae2a 100755
--- a/api/obs/bin/openbikesensor_api.py
+++ b/api/obs/bin/openbikesensor_api.py
@@ -58,7 +58,7 @@ def main():
         port=app.config.PORT,
         debug=debug,
         auto_reload=app.config.get("AUTO_RELOAD", debug),
-        # access_log=False,
+        access_log=True,
     )
diff --git a/api/requirements.txt b/api/requirements.txt
index 837e553..aaf9bf6 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -1,14 +1,22 @@
 coloredlogs~=15.0.1
-sanic~=22.6.0
-oic~=1.3.0
+sanic==22.6.2
+oic~=1.5.0
 sanic-session~=0.8.0
-sanic-cors~=2.0.1
 python-slugify~=6.1.2
-motor~=3.0.0
+motor~=3.1.1
 pyyaml<6
 -e git+https://github.com/openmaptiles/openmaptiles-tools#egg=openmaptiles-tools
-sqlparse~=0.4.2
-sqlalchemy[asyncio]~=1.4.39
-asyncpg~=0.24.0
+sqlparse~=0.4.3
+sqlalchemy[asyncio]~=1.4.46
+asyncpg~=0.27.0
 pyshp~=2.3.1
-alembic~=1.7.7
+alembic~=1.9.4
+stream-zip~=0.0.50
+msgpack~=1.0.5
+osmium~=3.6.0
+psycopg~=3.1.8
+shapely~=2.0.1
+pyproj~=3.4.1
+aiohttp~=3.8.1
+# sanic requires websockets and chokes on >=11 in 22.6.2
+websockets<11
diff --git a/api/scripts b/api/scripts
index 8e9395f..f513117 160000
--- a/api/scripts
+++ b/api/scripts
@@ -1 +1 @@
-Subproject commit 8e9395fd3cd0f1e83b4413546bc2d3cb0c726738
+Subproject commit f513117e275be20008afa1e1fd2499698313a81d
diff --git a/api/setup.py b/api/setup.py
index e76b57f..a5395ba 100644
--- a/api/setup.py
+++ b/api/setup.py
@@ -11,19 +11,19 @@ setup(
     package_data={},
     install_requires=[
         "coloredlogs~=15.0.1",
-        "sanic>=21.9.3,<22.7.0",
+        "sanic==22.6.2",
         "oic>=1.3.0, <2",
         "sanic-session~=0.8.0",
-        "sanic-cors~=2.0.1",
         "python-slugify>=5.0.2,<6.2.0",
-        "motor>=2.5.1,<3.1.0",
+        "motor>=2.5.1,<3.1.2",
         "pyyaml<6",
-        "sqlparse~=0.4.2",
+        "sqlparse~=0.4.3",
         "openmaptiles-tools",  # install from git
         "pyshp>=2.2,<2.4",
-        "sqlalchemy[asyncio]~=1.4.25",
-        "asyncpg~=0.24.0",
-        "alembic~=1.7.7",
+        "sqlalchemy[asyncio]~=1.4.46",
+        "asyncpg~=0.27.0",
+        "alembic~=1.9.4",
+        "stream-zip~=0.0.50",
     ],
     entry_points={
         "console_scripts": [
diff --git a/api/tools/import_osm.py b/api/tools/import_osm.py
new file mode 100755
index 0000000..68808ce
--- /dev/null
+++ b/api/tools/import_osm.py
@@ -0,0 +1,108 @@
+#!/usr/bin/env python3
+
+from dataclasses import dataclass
+import asyncio
+from os.path import basename, splitext
+import sys
+import logging
+
+import msgpack
+import psycopg
+
+from obs.api.app import app
+from obs.api.utils import chunk
+
+log = logging.getLogger(__name__)
+
+
+ROAD_BUFFER = 1000
+AREA_BUFFER = 100
+
+
+@dataclass
+class Road:
+    way_id: int
+    name: str
+    zone: str
+    directionality: int
+    oneway: int
+    geometry: bytes
+
+
+def read_file(filename):
+    """
+    Reads a file iteratively, yielding records (here, `Road` entries) as they
+    appear. Those may be mixed.
+ """ + + with open(filename, "rb") as f: + unpacker = msgpack.Unpacker(f) + try: + while True: + type_id, *data = unpacker.unpack() + + if type_id == b"\x01": + yield Road(*data) + + except msgpack.OutOfData: + pass + + +async def import_osm(connection, filename, import_group=None): + if import_group is None: + import_group = splitext(basename(filename))[0] + + # Pass 1: Find IDs only + road_ids = [] + for item in read_file(filename): + road_ids.append(item.way_id) + + async with connection.cursor() as cursor: + log.info("Pass 1: Delete previously imported data") + + log.debug("Delete import group %s", import_group) + await cursor.execute( + "DELETE FROM road WHERE import_group = %s", (import_group,) + ) + + log.debug("Delete roads by way_id") + for ids in chunk(road_ids, 10000): + await cursor.execute("DELETE FROM road WHERE way_id = ANY(%s)", (ids,)) + + # Pass 2: Import + log.info("Pass 2: Import roads") + amount = 0 + for items in chunk(read_file(filename), 10000): + amount += 10000 + log.info(f"...{amount}/{len(road_ids)} ({100*amount/len(road_ids)}%)") + async with cursor.copy( + "COPY road (way_id, name, zone, directionality, oneway, geometry, import_group) FROM STDIN" + ) as copy: + for item in items: + await copy.write_row( + ( + item.way_id, + item.name, + item.zone, + item.directionality, + item.oneway, + bytes.hex(item.geometry), + import_group, + ) + ) + + +async def main(): + logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s") + + url = app.config.POSTGRES_URL + url = url.replace("+asyncpg", "") + + async with await psycopg.AsyncConnection.connect(url) as connection: + for filename in sys.argv[1:]: + log.debug("Loading file: %s", filename) + await import_osm(connection, filename) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/api/tools/import_regions.py b/api/tools/import_regions.py new file mode 100755 index 0000000..8649b92 --- /dev/null +++ b/api/tools/import_regions.py @@ -0,0 +1,93 @@ +#!/usr/bin/env python3 + +""" +This script downloads and/or imports regions for statistical analysis into the +PostGIS database. The regions are sourced from: + +* EU countries are covered by + [NUTS](https://ec.europa.eu/eurostat/web/gisco/geodata/reference-data/administrative-units-statistical-units/nuts). 
+""" + +import tempfile +from dataclasses import dataclass +import json +import asyncio +from os.path import basename, splitext +import sys +import logging +from typing import Optional + +import aiohttp +import psycopg + +from obs.api.app import app +from obs.api.utils import chunk + +log = logging.getLogger(__name__) + +NUTS_URL = "https://gisco-services.ec.europa.eu/distribution/v2/nuts/geojson/NUTS_RG_01M_2021_3857.geojson" + +from pyproj import Transformer + +project = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True).transform +from shapely.ops import transform +from shapely.geometry import shape +import shapely.wkb as wkb + + +async def import_nuts( + connection, filename=None, level: int = 3, import_group: Optional[str] = None +): + if import_group is None: + import_group = f"nuts{level}" + + if filename: + log.info("Load NUTS from file") + with open(filename) as f: + data = json.load(f) + else: + log.info("Download NUTS regions from europa.eu") + async with aiohttp.ClientSession() as session: + async with session.get(NUTS_URL) as resp: + data = await resp.json(content_type=None) + + async with connection.cursor() as cursor: + log.info( + "Delete previously imported regions with import group %s", import_group + ) + await cursor.execute( + "DELETE FROM region WHERE import_group = %s", (import_group,) + ) + + log.info("Import regions") + async with cursor.copy( + "COPY region (id, name, geometry, import_group) FROM STDIN" + ) as copy: + for feature in data["features"]: + if feature["properties"]["LEVL_CODE"] == level: + geometry = shape(feature["geometry"]) + # geometry = transform(project, geometry) + geometry = wkb.dumps(geometry) + geometry = bytes.hex(geometry) + await copy.write_row( + ( + feature["properties"]["NUTS_ID"], + feature["properties"]["NUTS_NAME"], + geometry, + import_group, + ) + ) + + +async def main(): + logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s") + + url = app.config.POSTGRES_URL + url = url.replace("+asyncpg", "") + + async with await psycopg.AsyncConnection.connect(url) as connection: + await import_nuts(connection, sys.argv[1] if len(sys.argv) > 1 else None) + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/api/tools/reimport_tracks.py b/api/tools/reimport_tracks.py new file mode 100755 index 0000000..4f201db --- /dev/null +++ b/api/tools/reimport_tracks.py @@ -0,0 +1,30 @@ +#!/usr/bin/env python3 +import logging +import asyncio + +from sqlalchemy import text + +from obs.api.app import app +from obs.api.db import connect_db, make_session + +log = logging.getLogger(__name__) + +async def main(): + logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s") + await reimport_tracks() + + +async def reimport_tracks(): + + async with connect_db( + app.config.POSTGRES_URL, + app.config.POSTGRES_POOL_SIZE, + app.config.POSTGRES_MAX_OVERFLOW, + ): + async with make_session() as session: + await session.execute(text("UPDATE track SET processing_status = 'queued';")) + await session.commit() + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/api/tools/transform_osm.py b/api/tools/transform_osm.py new file mode 100755 index 0000000..cfccf40 --- /dev/null +++ b/api/tools/transform_osm.py @@ -0,0 +1,181 @@ +#!/usr/bin/env python3 + +import sys +import re +import msgpack + +import osmium +import shapely.wkb as wkb +from shapely.ops import transform + +HIGHWAY_TYPES = { + "trunk", + "primary", + "secondary", + "tertiary", + "unclassified", + "residential", + "trunk_link", + 
"primary_link", + "secondary_link", + "tertiary_link", + "living_street", + "service", + "track", + "road", +} +ZONE_TYPES = { + "urban", + "rural", + "motorway", +} +URBAN_TYPES = { + "residential", + "living_street", + "road", +} +MOTORWAY_TYPES = { + "motorway", + "motorway_link", +} + +ADMIN_LEVEL_MIN = 2 +ADMIN_LEVEL_MAX = 8 +MINSPEED_RURAL = 60 + +ONEWAY_YES = {"yes", "true", "1"} +ONEWAY_REVERSE = {"reverse", "-1"} + + +def parse_number(tag): + if not tag: + return None + + match = re.search(r"[0-9]+", tag) + if not match: + return None + + digits = match.group(0) + try: + return int(digits) + except ValueError: + return None + + +def determine_zone(tags): + highway = tags.get("highway") + zone = tags.get("zone:traffic") + + if zone is not None: + if "rural" in zone: + return "rural" + + if "motorway" in zone: + return "motorway" + + return "urban" + + # From here on we are guessing based on other tags + + if highway in URBAN_TYPES: + return "urban" + + if highway in MOTORWAY_TYPES: + return "motorway" + + maxspeed_source = tags.get("source:maxspeed") + if maxspeed_source and "rural" in maxspeed_source: + return "rural" + if maxspeed_source and "urban" in maxspeed_source: + return "urban" + + for key in ["maxspeed", "maxspeed:forward", "maxspeed:backward"]: + maxspeed = parse_number(tags.get(key)) + if maxspeed is not None and maxspeed > MINSPEED_RURAL: + return "rural" + + # default to urban if we have no idea + return "urban" + + +def determine_direction(tags, zone): + if ( + tags.get("oneway") in ONEWAY_YES + or tags.get("junction") == "roundabout" + or zone == "motorway" + ): + return 1, True + + if tags.get("oneway") in ONEWAY_REVERSE: + return -1, True + + return 0, False + + +class StreamPacker: + def __init__(self, stream, *args, **kwargs): + self.stream = stream + self.packer = msgpack.Packer(*args, autoreset=False, **kwargs) + + def _write_out(self): + if hasattr(self.packer, "getbuffer"): + chunk = self.packer.getbuffer() + else: + chunk = self.packer.bytes() + + self.stream.write(chunk) + self.packer.reset() + + def pack(self, *args, **kwargs): + self.packer.pack(*args, **kwargs) + self._write_out() + + def pack_array_header(self, *args, **kwargs): + self.packer.pack_array_header(*args, **kwargs) + self._write_out() + + def pack_map_header(self, *args, **kwargs): + self.packer.pack_map_header(*args, **kwargs) + self._write_out() + + def pack_map_pairs(self, *args, **kwargs): + self.packer.pack_map_pairs(*args, **kwargs) + self._write_out() + + +# A global factory that creates WKB from a osmium geometry +wkbfab = osmium.geom.WKBFactory() + +from pyproj import Transformer + +project = Transformer.from_crs("EPSG:4326", "EPSG:3857", always_xy=True).transform + + +class OSMHandler(osmium.SimpleHandler): + def __init__(self, packer): + self.packer = packer + super().__init__() + + def way(self, way): + tags = way.tags + + highway = tags.get("highway") + if not highway or highway not in HIGHWAY_TYPES: + return + + zone = determine_zone(tags) + directionality, oneway = determine_direction(tags, zone) + name = tags.get("name") + + geometry = wkb.loads(wkbfab.create_linestring(way), hex=True) + geometry = transform(project, geometry) + geometry = wkb.dumps(geometry) + self.packer.pack( + [b"\x01", way.id, name, zone, directionality, oneway, geometry] + ) + + +with open(sys.argv[2], "wb") as fout: + packer = StreamPacker(fout) + osmhandler = OSMHandler(packer) + osmhandler.apply_file(sys.argv[1], locations=True) diff --git a/api/tools/upgrade.py b/api/tools/upgrade.py index 
86ff1d7..8b1c83d 100755
--- a/api/tools/upgrade.py
+++ b/api/tools/upgrade.py
@@ -1,14 +1,15 @@
 #!/usr/bin/env python3

-import logging
 import asyncio
-from alembic.config import Config
-from alembic import command
-from os.path import join, dirname
+import logging

 log = logging.getLogger(__name__)

 from prepare_sql_tiles import prepare_sql_tiles, _run

+from import_regions import main as import_nuts
+
+from reimport_tracks import main as reimport_tracks
+

 async def _migrate():
     await _run("alembic upgrade head")
@@ -20,7 +21,11 @@ async def main():
     await _migrate()
     log.info("Preparing SQL tiles...")
     await prepare_sql_tiles()
-    log.info("Upgraded")
+    log.info("Importing nuts regions...")
+    await import_nuts()
+    log.info("Nuts regions imported, scheduling reimport of tracks")
+    await reimport_tracks()
+

 if __name__ == "__main__":
diff --git a/deployment/examples/.env b/deployment/.env
similarity index 100%
rename from deployment/examples/.env
rename to deployment/.env
diff --git a/deployment/examples/config.py b/deployment/config/config.py
similarity index 74%
rename from deployment/examples/config.py
rename to deployment/config/config.py
index 5a65d36..646b7ed 100644
--- a/deployment/examples/config.py
+++ b/deployment/config/config.py
@@ -1,35 +1,30 @@
 # Bind address of the server
-#HOST = "127.0.0.1"
-#PORT = 3000
+# HOST = "127.0.0.1"
+# PORT = 3000

 # Extended log output, but slower
 DEBUG = False
 VERBOSE = DEBUG
-AUTO_RESTART = DEBUG
-
-# Turn on lean mode to simplify the setup. Lots of features will be
-# unavailable, but you will not need to manage OpenStreetMap data. Please make
-# sure to configure the OBS_FACE_CACHE_DIR correctly for lean mode.
-LEAN_MODE = False
+AUTO_RELOAD = DEBUG

 # Required to encrypt or sign sessions, cookies, tokens, etc.
-#SECRET = "!!!<<>>!!!"
+# SECRET = "!!!<<>>!!!"

 # Connection to the database
-#POSTGRES_URL = "postgresql+asyncpg://user:pass@host/dbname"
-#POSTGRES_POOL_SIZE = 20
-#POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE
+# POSTGRES_URL = "postgresql+asyncpg://user:pass@host/dbname"
+# POSTGRES_POOL_SIZE = 20
+# POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE

 # URL to the keycloak realm, as reachable by the API service. This is not
 # necessarily its publicly reachable URL, keycloak advertises that itself.
-#KEYCLOAK_URL = "http://localhost:1234/auth/realms/obs/"
+# KEYCLOAK_URL = "http://localhost:1234/auth/realms/obs/"

 # Auth client credentials
-#KEYCLOAK_CLIENT_ID = "portal"
-#KEYCLOAK_CLIENT_SECRET = "00000000-0000-0000-0000-000000000000"
+# KEYCLOAK_CLIENT_ID = "portal"
+# KEYCLOAK_CLIENT_SECRET = "00000000-0000-0000-0000-000000000000"

 # Whether the API should run the worker loop, or a dedicated worker is used
-#DEDICATED_WORKER = True
+# DEDICATED_WORKER = True

 # The root of the frontend. Needed for redirecting after login, and for CORS.
 # Set to None if frontend is served by the API.
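+
+# For reference, a filled-in production configuration might look like the
+# following sketch. Every value here is a placeholder, not a real hostname,
+# secret, or credential:
+#
+#   SECRET = "some-long-random-string"
+#   POSTGRES_URL = "postgresql+asyncpg://obs:obs@postgres/obs"
+#   POSTGRES_POOL_SIZE = 20
+#   POSTGRES_MAX_OVERFLOW = 2 * POSTGRES_POOL_SIZE
+#   KEYCLOAK_URL = "https://login.example.com/auth/realms/obs/"
+#   KEYCLOAK_CLIENT_ID = "portal"
+#   KEYCLOAK_CLIENT_SECRET = "00000000-0000-0000-0000-000000000000"
+#   DEDICATED_WORKER = True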
diff --git a/deployment/examples/traefik.toml b/deployment/config/traefik.toml similarity index 100% rename from deployment/examples/traefik.toml rename to deployment/config/traefik.toml diff --git a/deployment/examples/docker-compose.yaml b/deployment/docker-compose.yaml similarity index 98% rename from deployment/examples/docker-compose.yaml rename to deployment/docker-compose.yaml index 1f888d4..3d1487f 100644 --- a/deployment/examples/docker-compose.yaml +++ b/deployment/docker-compose.yaml @@ -14,7 +14,7 @@ services: ############################################################ postgres: - image: "openmaptiles/postgis:6.0" + image: "openmaptiles/postgis:7.0" environment: - POSTGRES_DB=${OBS_POSTGRES_DB} - POSTGRES_USER=${OBS_POSTGRES_USER} @@ -136,7 +136,7 @@ services: - "traefik.docker.network=gateway" postgres-keycloak: - image: postgres:13.3 + image: postgres:15 restart: always networks: - backend diff --git a/docker-compose.yaml b/docker-compose.yaml index ac68236..3bba9a4 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -8,7 +8,7 @@ version: '3' services: postgres: - image: "openmaptiles/postgis:6.0" + image: "openmaptiles/postgis:7.0" environment: POSTGRES_USER: obs POSTGRES_PASSWORD: obs @@ -20,6 +20,7 @@ services: api: image: openbikesensor-api + tty: true build: context: ./api/ dockerfile: Dockerfile @@ -35,6 +36,8 @@ services: - ./tile-generator/data/:/tiles - ./api/migrations:/opt/obs/api/migrations - ./api/alembic.ini:/opt/obs/api/alembic.ini + - ./local/pbf:/pbf + - ./local/obsdata:/obsdata depends_on: - postgres - keycloak @@ -46,6 +49,7 @@ services: worker: image: openbikesensor-api + tty: true build: context: ./api/ dockerfile: Dockerfile diff --git a/docs/lean-mode.md b/docs/lean-mode.md deleted file mode 100644 index 7bf3751..0000000 --- a/docs/lean-mode.md +++ /dev/null @@ -1,57 +0,0 @@ -# Lean mode - -The application can be configured in "lean mode" through the `LEAN_MODE` -setting in `config.py`. A lean installation is easier to set up, as a few steps -can be skipped. However, the performance of the application will degrade in -lean mode, and lots of advanced features will not be available. - -Lean mode is meant as an entrypoint to get started with collecting data, -without the hassle of importing and maintaining OpenStreetMap data. - -## Disabled features in lean mode - -* No map tiles are generated. -* The frontend will not show an overview map, only per-track maps. -* The `roads` database table is not used, neither for processing tracks, nor - for generating map tiles. -* The API will not generate auxiliary information for display on the - (nonexistent) map, such as per-road statistics. - -## Switch to/from lean mode - -To enable lean mode, set the following in your `config.py` (or in -`config.overrides.py`, especially in development setups): - -```python -LEAN_MODE = True -``` - -To disable lean mode, set it to `False` instead. - -For lean mode, it is important that the config variable `OBS_FACE_CACHE_DIR` is -properly set, or that you are happy with its default value of using -`$DATA_DIR/obs-face-cache`. - -When turning off lean mode, make sure to fill your `roads` table properly, as -otherwise the track processing will not work. When turning on lean mode, you -may truncate the `roads` table to save space, but you don't need to, it simply -becomes unused. - -## Benefits - -* When using lean mode, you can skip the import of OpenStreetMap data during - setup, and you also do not need to keep it updated. 
-* People can already start uploading data and the data is also processed,
-  giving you as a maintainer more time to set up the full application, if you
-  want to.
-
-## Drawbacks
-
-* Lean mode is less performant when processing tracks.
-* Lean mode track processing depends on the Overpass API data source, which may
-  be slow, unavailable, or rate limiting the requests, so processing may fail.
-  We use caching to prevent some issues, but as we depend on a third party
-  service here that is accessed for free and that generates a lot of server
-  load, we really can't ask for much. If you frequently run into issues, the
-  best bet is to manage OSM data yourself and turn off lean mode.
-* Of course some features are missing.
diff --git a/docs/osm-import.md b/docs/osm-import.md
new file mode 100644
index 0000000..939f7e1
--- /dev/null
+++ b/docs/osm-import.md
@@ -0,0 +1,103 @@
+# Importing OpenStreetMap data
+
+The application requires a lot of data from OpenStreetMap to work.
+
+The required information is stored in the PostgreSQL database and used when
+processing tracks, as well as for vector tile generation. The process applies
+to both development and production setups. For development, you should choose a
+small area for testing, such as your local county or city, to keep the amount
+of data small. For production use you have to import the whole region you are
+serving.
+
+## General pipeline overview
+
+1. Download OpenStreetMap data as one or more `.osm.pbf` files.
+2. Transform this data to generate geometry data for all roads and regions, so
+   we don't need to look up nodes separately. This step requires a lot of CPU
+   and memory, so it can be done "offline" on a high-power machine.
+3. Import the transformed data into the PostgreSQL/PostGIS database.
+
+## Community hosted transformed data
+
+Since the first two steps are the same for everybody, the community will soon
+provide a service where relatively up-to-date transformed data can be
+downloaded for direct import. Stay tuned.
+
+## Download data
+
+[GeoFabrik](https://download.geofabrik.de) kindly hosts extracts of the
+OpenStreetMap planet by region. Download all regions you're interested in from
+there in `.osm.pbf` format, with the tool of your choice, e.g.:
+
+```bash
+wget -P local/pbf/ https://download.geofabrik.de/europe/germany/baden-wuerttemberg-latest.osm.pbf
+```
+
+## Transform data
+
+To transform downloaded data, you can either use the docker image from a
+development or production environment, or locally install the API into your
+python environment. Then run the `api/tools/transform_osm.py` script on the data.
+
+```bash
+api/tools/transform_osm.py baden-wuerttemberg-latest.osm.pbf baden-wuerttemberg-latest.msgpack
+```
+
+In dockerized setups, make sure to mount your data somewhere in the container
+and also mount a directory where the result can be written. The development
+setup takes care of this, so you can use:
+
+```bash
+docker-compose run --rm api tools/transform_osm.py \
+  /pbf/baden-wuerttemberg-latest.osm.pbf /obsdata/baden-wuerttemberg-latest.msgpack
+```
+
+Repeat this command for every file you want to transform.
+
+## Import transformed data
+
+The command for importing looks like this:
+
+```bash
+api/tools/import_osm.py baden-wuerttemberg-latest.msgpack
+```
+
+This tool reads your application config from `config.py`, so set that up first
+as if you were setting up your application.
+
+In dockerized setups, make sure to mount your data somewhere in the container.
+Again, the development setup takes care of this, so you can use:
+
+```bash
+docker-compose run --rm api tools/import_osm.py \
+  /obsdata/baden-wuerttemberg-latest.msgpack
+```
+
+The import process should take a few seconds to minutes, depending on the area
+size. You can run the process multiple times, with the same or different area
+files, to import or update the data. You can update only one region and leave
+the others as they are, or add more filenames to the command line to
+bulk-import data.
+
+## How this works
+
+* The transformation is done with a python script that uses
+  [pyosmium](https://osmcode.org/pyosmium/) to read the `.osm.pbf` file. This
+  script then filters the data for only the required objects (such as road
+  segments and administrative areas), and extracts the interesting information
+  from those objects.
+* The node geolocations are looked up to generate a geometry for each object.
+  This requires a lot of memory to run efficiently.
+* The geometry is projected to [Web Mercator](https://epsg.io/3857) in this
+  step to avoid continuous transformation when tiles are generated later. Most
+  operations will work fine in this projection. Projection is done with the
+  [pyproj](https://pypi.org/project/pyproj/) library.
+* The output is written to a binary file in a very simple format using
+  [msgpack](https://github.com/msgpack/msgpack-python), which is way more
+  efficient than (Geo-)JSON, for example. This format is streamable, so the
+  generated file is never fully written or read into memory (see the reading
+  sketch below).
+* The import script reads the msgpack file and sends it to the database using
+  [psycopg](https://www.psycopg.org/). This is done because it supports
+  PostgreSQL's `COPY FROM` statement, which enables much faster writes to the
+  database than a traditional `INSERT VALUES`. The file is streamed directly
+  to the database, so it is never read into memory.
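+
+For illustration, the stream written by `transform_osm.py` can be read back
+with a few lines of Python. This is a minimal sketch with a hypothetical
+`read_records` helper that mirrors the `read_file` function in
+`api/tools/import_osm.py`; the `b"\x01"` type tag and the road field order
+are the ones used by these tools:
+
+```python
+import msgpack
+
+def read_records(filename):
+    """Yield road records from a transformed .msgpack file, one at a time."""
+    with open(filename, "rb") as f:
+        unpacker = msgpack.Unpacker(f)
+        try:
+            while True:
+                type_id, *data = unpacker.unpack()
+                if type_id == b"\x01":  # record tag used for roads
+                    # data = [way_id, name, zone, directionality, oneway, geometry]
+                    yield data
+        except msgpack.OutOfData:
+            pass  # clean end of stream
+```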
diff --git a/deployment/README.md b/docs/production-deployment.md
similarity index 92%
rename from deployment/README.md
rename to docs/production-deployment.md
index aa1aef4..710944d 100644
--- a/deployment/README.md
+++ b/docs/production-deployment.md
@@ -55,12 +55,7 @@ git clone --recursive https://github.com/openbikesensor/portal source/
 ```bash
 mkdir -p /opt/openbikesensor/config
 cd /opt/openbikesensor/
-
-cp source/deployment/examples/docker-compose.yaml docker-compose.yaml
-cp source/deployment/examples/.env .env
-
-cp source/deployment/examples/traefik.toml config/traefik.toml
-cp source/deployment/examples/config.py config/config.py
+cp -r source/deployment/config source/deployment/docker-compose.yaml source/deployment/.env .
 ```

 ### Create a Docker network
@@ -224,18 +219,6 @@ docker-compose build portal

 *Hint*: This may take up to 10 minutes. In the future, we will provide a
 prebuilt image.

-#### Download OpenStreetMap maps
-
-Download the area(s) you would like to import from
-[GeoFabrik](https://download.geofabrik.de) into `data/pbf`, for example:
-
-```bash
-cd /opt/openbikesensor/
-wget https://download.geofabrik.de/europe/germany/schleswig-holstein-latest.osm.pbf -P data/pbf
-```
-
-*Hint*: Start with a small region/city, since the import can take some hours for huge areas.
-
 #### Prepare database

 Run the following scripts to prepare the database:
@@ -248,13 +231,7 @@
 For more details, see [README.md](../README.md) under "Prepare database".

 #### Import OpenStreetMap data

-Run the following script, to import the OSM data:
-
-```
-docker-compose run --rm portal tools/osm2pgsql.sh
-```
-
-For more details, see [README.md](../README.md) under "Import OpenStreetMap data".
+Follow [these instructions](./osm-import.md).

 #### Configure portal
@@ -320,7 +297,7 @@ You should see something like:

 When you click on *My Tracks*, you should see it on a map.

-#### Configre the map position
+#### Configure the map position

 Open the tab *Map* and zoom to the desired position. The URL contains the
 corresponding GPS position, for example:
@@ -341,10 +318,6 @@
 docker-compose restart portal

 The tab *Map* should be the selected map section now.
 When you uploaded some tracks, your map should show a color overlay on the streets.

-#### Verify osm2pgsql
-
-If you zoom in the tab *Map* at the imported region/city, you should see dark grey lines on the streets.
-
 ## Miscellaneous

 ### Logs
diff --git a/frontend/config.example.json b/frontend/config.example.json
index 6566c6e..2918934 100644
--- a/frontend/config.example.json
+++ b/frontend/config.example.json
@@ -12,7 +12,7 @@
   "obsMapSource": {
     "type": "vector",
     "tiles": ["https://portal.example.com/tiles/{z}/{x}/{y}.pbf"],
-    "minzoom": 12,
+    "minzoom": 0,
     "maxzoom": 14
   }
 }
diff --git a/frontend/src/App.module.less b/frontend/src/App.module.less
index 5c1d07f..98c5a3b 100644
--- a/frontend/src/App.module.less
+++ b/frontend/src/App.module.less
@@ -120,6 +120,15 @@
   }
 }

+@media @mobile {
+  .menu.menu {
+    > :global(.ui.container) {
+      height: @menuHeightMobile;
+      align-items: stretch;
+    }
+  }
+}
+
 .banner {
   padding: 8px;
   z-index: 100;
diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx
index 9c66e8e..5ba7c29 100644
--- a/frontend/src/App.tsx
+++ b/frontend/src/App.tsx
@@ -1,19 +1,27 @@
-import React from 'react'
-import classnames from 'classnames'
-import {connect} from 'react-redux'
-import {List, Grid, Container, Menu, Header, Dropdown} from 'semantic-ui-react'
-import {BrowserRouter as Router, Switch, Route, Link} from 'react-router-dom'
-import {useObservable} from 'rxjs-hooks'
-import {from} from 'rxjs'
-import {pluck} from 'rxjs/operators'
-import {Helmet} from "react-helmet";
-import {useTranslation} from 'react-i18next'
+import React from "react";
+import classnames from "classnames";
+import { connect } from "react-redux";
+import {
+  List,
+  Grid,
+  Container,
+  Menu,
+  Header,
+  Dropdown,
+} from "semantic-ui-react";
+import { BrowserRouter as Router, Switch, Route, Link } from "react-router-dom";
+import { useObservable } from "rxjs-hooks";
+import { from } from "rxjs";
+import { pluck } from "rxjs/operators";
+import { Helmet } from "react-helmet";
+import { useTranslation } from "react-i18next";

-import {useConfig} from 'config'
-import styles from './App.module.less'
-import {AVAILABLE_LOCALES, setLocale} from 'i18n'
+import { useConfig } from "config";
+import styles from "./App.module.less";
+import { AVAILABLE_LOCALES, setLocale } from "i18n";
 import {
+  AcknowledgementsPage,
   ExportPage,
   HomePage,
   LoginRedirectPage,
@@ -25,50 +33,61 @@ import {
   TrackPage,
   TracksPage,
   UploadPage,
-} from 'pages'
-import {Avatar, LoginButton} from 'components'
-import api from 'api'
+  MyTracksPage,
+} from "pages";
+import { Avatar, LoginButton } from "components";
+import api from "api";

 // This component removes the "navigate" prop before rendering a Menu.Item,
 // which is a workaround for an annoying warning that is somehow caused by the
 // <Link /> and <Menu.Item link /> combination.
-function MenuItemForLink({navigate, ...props}) {
+function MenuItemForLink({ navigate, ...props }) {
   return (
     <Menu.Item
       {...props}
       onClick={(e) => {
-        e.preventDefault()
-        navigate()
+        e.preventDefault();
+        navigate();
       }}
     />
-  )
+  );
 }

-function DropdownItemForLink({navigate, ...props}) {
+function DropdownItemForLink({ navigate, ...props }) {
   return (
     <Dropdown.Item
       {...props}
       onClick={(e) => {
-        e.preventDefault()
-        navigate()
+        e.preventDefault();
+        navigate();
      }}
    />
-  )
+  );
 }

-function Banner({text, style = 'warning'}: {text: string; style: 'warning' | 'info'}) {
-  return <div className={classnames(styles.banner, styles[style])}>{text}</div>
+function Banner({
+  text,
+  style = "warning",
+}: {
+  text: string;
+  style: "warning" | "info";
+}) {
+  return <div className={classnames(styles.banner, styles[style])}>{text}</div>
; } -const App = connect((state) => ({login: state.login}))(function App({login}) { - const {t} = useTranslation() - const config = useConfig() - const apiVersion = useObservable(() => from(api.get('/info')).pipe(pluck('version'))) +const App = connect((state) => ({ login: state.login }))(function App({ + login, +}) { + const { t } = useTranslation(); + const config = useConfig(); + const apiVersion = useObservable(() => + from(api.get("/info")).pipe(pluck("version")) + ); - const hasMap = Boolean(config?.obsMapSource) + const hasMap = Boolean(config?.obsMapSource); React.useEffect(() => { - api.loadUser() - }, []) + api.loadUser(); + }, []); return config ? ( @@ -77,38 +96,61 @@ const App = connect((state) => ({login: state.login}))(function App({login}) { OpenBikeSensor Portal {config?.banner && } - + - + OpenBikeSensor {hasMap && ( - - {t('App.menu.map')} - + + {t("App.menu.map")} + )} - {t('App.menu.tracks')} + {t("App.menu.tracks")} - {t('App.menu.export')} + {t("App.menu.export")} {login ? ( <> - {t('App.menu.myTracks')} + {t("App.menu.myTracks")} - }> + } + > - - + + - + @@ -125,14 +167,16 @@ const App = connect((state) => ({login: state.login}))(function App({login}) { - {hasMap && - - } + {hasMap && ( + + + + )} - + @@ -143,6 +187,9 @@ const App = connect((state) => ({login: state.login}))(function App({login}) { + + + @@ -169,12 +216,14 @@ const App = connect((state) => ({login: state.login}))(function App({login}) { -
- {t('App.footer.aboutTheProject')} -
+
{t("App.footer.aboutTheProject")}
- + openbikesensor.org @@ -182,68 +231,96 @@ const App = connect((state) => ({login: state.login}))(function App({login}) {
-
- {t('App.footer.getInvolved')} -
+
{t("App.footer.getInvolved")}
- - {t('App.footer.getHelpInForum')} + + {t("App.footer.getHelpInForum")} - - {t('App.footer.reportAnIssue')} + + {t("App.footer.reportAnIssue")} - - {t('App.footer.development')} + + {t("App.footer.development")}
-
- {t('App.footer.thisInstallation')} -
+
{t("App.footer.thisInstallation")}
- - {t('App.footer.privacyPolicy')} + + {t("App.footer.privacyPolicy")} - - {t('App.footer.imprint')} + + {t("App.footer.imprint")} - { config?.termsUrl && - - - {t('App.footer.terms')} - - - } + {config?.termsUrl && ( + + + {t("App.footer.terms")} + + + )} - {apiVersion ? t('App.footer.version', {apiVersion}) : t('App.footer.versionLoading')} + {apiVersion + ? t("App.footer.version", { apiVersion }) + : t("App.footer.versionLoading")}
-
{t('App.footer.changeLanguage')}
+
{t("App.footer.changeLanguage")}
- {AVAILABLE_LOCALES.map(locale => setLocale(locale)}>{t(`locales.${locale}`)})} + {AVAILABLE_LOCALES.map((locale) => ( + + setLocale(locale)}> + {t(`locales.${locale}`)} + + + ))}
@@ -251,7 +328,7 @@ const App = connect((state) => ({login: state.login}))(function App({login}) {
- ) : null -}) + ) : null; +}); -export default App +export default App; diff --git a/frontend/src/components/ColorMapLegend.tsx b/frontend/src/components/ColorMapLegend.tsx index f0c4d48..ca09860 100644 --- a/frontend/src/components/ColorMapLegend.tsx +++ b/frontend/src/components/ColorMapLegend.tsx @@ -59,7 +59,7 @@ export function DiscreteColorMapLegend({map}: {map: ColorMap}) { ) } -export default function ColorMapLegend({map, twoTicks = false}: {map: ColorMap, twoTicks?: boolean}) { +export default function ColorMapLegend({map, twoTicks = false, digits=2}: {map: ColorMap, twoTicks?: boolean, digits?: number}) { const min = map[0][0] const max = map[map.length - 1][0] const normalizeValue = (v) => (v - min) / (max - min) @@ -81,7 +81,7 @@ export default function ColorMapLegend({map, twoTicks = false}: {map: ColorMap, {tickValues.map(([value]) => ( - {value.toFixed(2)} + {value.toFixed(digits)} ))} diff --git a/frontend/src/components/Map/index.tsx b/frontend/src/components/Map/index.tsx index 149fb92..657b748 100644 --- a/frontend/src/components/Map/index.tsx +++ b/frontend/src/components/Map/index.tsx @@ -1,75 +1,70 @@ -import React, { useState, useCallback, useMemo, useEffect } from "react"; -import classnames from "classnames"; -import { connect } from "react-redux"; -import _ from "lodash"; -import ReactMapGl, { - WebMercatorViewport, - ScaleControl, - NavigationControl, - AttributionControl, -} from "react-map-gl"; -import turfBbox from "@turf/bbox"; -import { useHistory, useLocation } from "react-router-dom"; +import React, {useState, useCallback, useMemo, useEffect} from 'react' +import classnames from 'classnames' +import {connect} from 'react-redux' +import _ from 'lodash' +import ReactMapGl, {WebMercatorViewport, ScaleControl, NavigationControl, AttributionControl} from 'react-map-gl' +import turfBbox from '@turf/bbox' +import {useHistory, useLocation} from 'react-router-dom' -import { useConfig } from "config"; +import {useConfig} from 'config' -import { useCallbackRef } from "../../utils"; -import { baseMapStyles } from "../../mapstyles"; +import {useCallbackRef} from '../../utils' +import {baseMapStyles} from '../../mapstyles' -import styles from "./styles.module.less"; +import styles from './styles.module.less' interface Viewport { - longitude: number; - latitude: number; - zoom: number; + longitude: number + latitude: number + zoom: number } -const EMPTY_VIEWPORT: Viewport = { longitude: 0, latitude: 0, zoom: 0 }; +const EMPTY_VIEWPORT: Viewport = {longitude: 0, latitude: 0, zoom: 0} export const withBaseMapStyle = connect((state) => ({ - baseMapStyle: state.mapConfig?.baseMap?.style ?? "positron", -})); + baseMapStyle: state.mapConfig?.baseMap?.style ?? 
'positron', +})) function parseHash(v: string): Viewport | null { - if (!v) return null; - const m = v.match(/^#([0-9\.]+)\/([0-9\.\-]+)\/([0-9\.\-]+)$/); - if (!m) return null; + if (!v) return null + const m = v.match(/^#([0-9\.]+)\/([0-9\.\-]+)\/([0-9\.\-]+)$/) + if (!m) return null return { zoom: Number.parseFloat(m[1]), latitude: Number.parseFloat(m[2]), longitude: Number.parseFloat(m[3]), - }; + } } function buildHash(v: Viewport): string { - return `${v.zoom.toFixed(2)}/${v.latitude}/${v.longitude}`; + return `${v.zoom.toFixed(2)}/${v.latitude}/${v.longitude}` } const setViewportToHash = _.debounce((history, viewport) => { history.replace({ hash: buildHash(viewport), - }); -}, 200); + }) +}, 200) function useViewportFromUrl(): [Viewport | null, (v: Viewport) => void] { - const history = useHistory(); - const location = useLocation(); + const history = useHistory() + const location = useLocation() - const [cachedValue, setCachedValue] = useState(parseHash(location.hash)); + const [cachedValue, setCachedValue] = useState(parseHash(location.hash)) // when the location hash changes, set the new value to the cache useEffect(() => { - setCachedValue(parseHash(location.hash)); - }, [location.hash]); + setCachedValue(parseHash(location.hash)) + }, [location.hash]) const setter = useCallback( (v) => { - setCachedValue(v); - setViewportToHash(history, v); + setCachedValue(v) + setViewportToHash(history, v) }, [history] - ); + ) - return [cachedValue || EMPTY_VIEWPORT, setter]; + return [cachedValue || EMPTY_VIEWPORT, setter] } function Map({ @@ -78,57 +73,54 @@ function Map({ boundsFromJson, baseMapStyle, hasToolbar, + onViewportChange, ...props }: { - viewportFromUrl?: boolean; - children: React.ReactNode; - boundsFromJson: GeoJSON.Geometry; - baseMapStyle: string; - hasToolbar?: boolean; + viewportFromUrl?: boolean + children: React.ReactNode + boundsFromJson: GeoJSON.Geometry + baseMapStyle: string + hasToolbar?: boolean + onViewportChange: (viewport: Viewport) => void }) { - const [viewportState, setViewportState] = useState(EMPTY_VIEWPORT); - const [viewportUrl, setViewportUrl] = useViewportFromUrl(); + const [viewportState, setViewportState] = useState(EMPTY_VIEWPORT) + const [viewportUrl, setViewportUrl] = useViewportFromUrl() - const [viewport, setViewport] = viewportFromUrl - ? [viewportUrl, setViewportUrl] - : [viewportState, setViewportState]; + const [viewport, setViewport_] = viewportFromUrl ? [viewportUrl, setViewportUrl] : [viewportState, setViewportState] + const setViewport = useCallback( + (viewport: Viewport) => { + setViewport_(viewport) + onViewportChange?.(viewport) + }, + [setViewport_, onViewportChange] + ) - const config = useConfig(); + const config = useConfig() useEffect(() => { - if ( - config?.mapHome && - viewport?.latitude === 0 && - viewport?.longitude === 0 && - !boundsFromJson - ) { - setViewport(config.mapHome); + if (config?.mapHome && viewport?.latitude === 0 && viewport?.longitude === 0 && !boundsFromJson) { + setViewport(config.mapHome) } - }, [config, boundsFromJson]); + }, [config, boundsFromJson]) const mapSourceHosts = useMemo( - () => - _.uniq( - config?.obsMapSource?.tiles?.map( - (tileUrl: string) => new URL(tileUrl).host - ) ?? [] - ), + () => _.uniq(config?.obsMapSource?.tiles?.map((tileUrl: string) => new URL(tileUrl).host) ?? 
[]), [config?.obsMapSource] - ); + ) const transformRequest = useCallbackRef((url, resourceType) => { - if (resourceType === "Tile" && mapSourceHosts.includes(new URL(url).host)) { + if (resourceType === 'Tile' && mapSourceHosts.includes(new URL(url).host)) { return { url, - credentials: "include", - }; + credentials: 'include', + } } - }); + }) useEffect(() => { if (boundsFromJson) { - const bbox = turfBbox(boundsFromJson); + const bbox = turfBbox(boundsFromJson) if (bbox.every((v) => Math.abs(v) !== Infinity)) { - const [minX, minY, maxX, maxY] = bbox; + const [minX, minY, maxX, maxY] = bbox const vp = new WebMercatorViewport({ width: 1000, height: 800, @@ -141,11 +133,11 @@ function Map({ padding: 20, offset: [0, -100], } - ); - setViewport(_.pick(vp, ["zoom", "latitude", "longitude"])); + ) + setViewport(_.pick(vp, ['zoom', 'latitude', 'longitude'])) } } - }, [boundsFromJson]); + }, [boundsFromJson]) return ( - - - + + + {children} - ); + ) } -export default withBaseMapStyle(Map); +export default withBaseMapStyle(Map) diff --git a/frontend/src/components/RegionStats/index.tsx b/frontend/src/components/RegionStats/index.tsx new file mode 100644 index 0000000..6aafa1e --- /dev/null +++ b/frontend/src/components/RegionStats/index.tsx @@ -0,0 +1,73 @@ +import React, {useState, useCallback} from 'react' +import {pickBy} from 'lodash' +import {Loader, Statistic, Pagination, Segment, Header, Menu, Table, Icon} from 'semantic-ui-react' +import {useObservable} from 'rxjs-hooks' +import {of, from, concat, combineLatest} from 'rxjs' +import {map, switchMap, distinctUntilChanged} from 'rxjs/operators' +import {Duration, DateTime} from 'luxon' + +import api from 'api' +import {useTranslation} from 'react-i18next' + +function formatDuration(seconds) { + return ( + Duration.fromMillis((seconds ?? 0) * 1000) + .as('hours') + .toFixed(1) + ' h' + ) +} + +export default function Stats() { + const {t} = useTranslation() + const [page, setPage] = useState(1) + const PER_PAGE = 10 + const stats = useObservable( + () => of(null).pipe(switchMap(() => concat(of(null), from(api.get('/stats/regions'))))), + null + ) + + const pageCount = stats ? Math.ceil(stats.length / PER_PAGE) : 1 + + return ( + <> +
{t('RegionStats.title')}
+ +
+ + + + + + {t('RegionStats.regionName')} + {t('RegionStats.eventCount')} + + + + + {stats?.slice((page - 1) * PER_PAGE, page * PER_PAGE)?.map((area) => ( + + {area.name} + {area.overtaking_event_count} + + ))} + + + {pageCount > 1 && ( + + + + setPage(data.activePage as number)} + /> + + + + )} +
+
+ + ) +} diff --git a/frontend/src/components/Stats/index.tsx b/frontend/src/components/Stats/index.tsx index 2c7abd3..b558930 100644 --- a/frontend/src/components/Stats/index.tsx +++ b/frontend/src/components/Stats/index.tsx @@ -1,118 +1,152 @@ -import React, {useState, useCallback} from 'react' -import {pickBy} from 'lodash' -import {Loader, Statistic, Segment, Header, Menu} from 'semantic-ui-react' -import {useObservable} from 'rxjs-hooks' -import {of, from, concat, combineLatest} from 'rxjs' -import {map, switchMap, distinctUntilChanged} from 'rxjs/operators' -import {Duration, DateTime} from 'luxon' -import {useTranslation} from 'react-i18next' +import React, { useState, useCallback } from "react"; +import { pickBy } from "lodash"; +import { Loader, Statistic, Segment, Header, Menu } from "semantic-ui-react"; +import { useObservable } from "rxjs-hooks"; +import { of, from, concat, combineLatest } from "rxjs"; +import { map, switchMap, distinctUntilChanged } from "rxjs/operators"; +import { Duration, DateTime } from "luxon"; +import { useTranslation } from "react-i18next"; -import api from 'api' +import api from "api"; function formatDuration(seconds) { return ( Duration.fromMillis((seconds ?? 0) * 1000) - .as('hours') - .toFixed(1) + ' h' - ) + .as("hours") + .toFixed(1) + " h" + ); } -export default function Stats({user = null}: {user?: null | string}) { - const {t} = useTranslation() - const [timeframe, setTimeframe] = useState('all_time') - const onClick = useCallback((_e, {name}) => setTimeframe(name), [setTimeframe]) +export default function Stats({ user = null }: { user?: null | string }) { + const { t } = useTranslation(); + const [timeframe, setTimeframe] = useState("all_time"); + const onClick = useCallback( + (_e, { name }) => setTimeframe(name), + [setTimeframe] + ); const stats = useObservable( (_$, inputs$) => { const timeframe$ = inputs$.pipe( map((inputs) => inputs[0]), distinctUntilChanged() - ) + ); const user$ = inputs$.pipe( map((inputs) => inputs[1]), distinctUntilChanged() - ) + ); return combineLatest(timeframe$, user$).pipe( map(([timeframe_, user_]) => { - const now = DateTime.now() + const now = DateTime.now(); - let start, end + let start, end; switch (timeframe_) { - case 'this_month': - start = now.startOf('month') - end = now.endOf('month') - break + case "this_month": + start = now.startOf("month"); + end = now.endOf("month"); + break; - case 'this_year': - start = now.startOf('year') - end = now.endOf('year') - break + case "this_year": + start = now.startOf("year"); + end = now.endOf("year"); + break; } return pickBy({ start: start?.toISODate(), end: end?.toISODate(), user: user_, - }) + }); }), - switchMap((query) => concat(of(null), from(api.get('/stats', {query})))) - ) + switchMap((query) => + concat(of(null), from(api.get("/stats", { query }))) + ) + ); }, null, [timeframe, user] - ) + ); - const placeholder = t('Stats.placeholder') + const placeholder = t("Stats.placeholder"); return ( <> -
{user ? t('Stats.titleUser') : t('Stats.title')}
-
- {stats ? `${Number(stats?.trackLength / 1000).toFixed(1)} km` : placeholder} - {t('Stats.totalTrackLength')} + + {stats + ? `${Number(stats?.trackLength / 1000).toFixed(1)} km` + : placeholder} + + {t("Stats.totalTrackLength")} - {stats ? formatDuration(stats?.trackDuration) : placeholder} - {t('Stats.timeRecorded')} + + {stats ? formatDuration(stats?.trackDuration) : placeholder} + + {t("Stats.timeRecorded")} - {stats?.numEvents ?? placeholder} - {t('Stats.eventsConfirmed')} + + {stats?.numEvents ?? placeholder} + + {t("Stats.eventsConfirmed")} - {user ? ( - - {stats?.trackCount ?? placeholder} - {t('Stats.tracksRecorded')} - - ) : ( - - {stats?.userCount ?? placeholder} - {t('Stats.membersJoined')} - + + + {stats?.trackCount ?? placeholder} + + {t("Stats.tracksRecorded")} + + {!user && ( + <> + + + {stats?.userCount ?? placeholder} + + {t("Stats.membersJoined")} + + + + {stats?.deviceCount ?? placeholder} + + {t("Stats.deviceCount")} + + )} - - {t('Stats.thisMonth')} + + {t("Stats.thisMonth")} - - {t('Stats.thisYear')} + + {t("Stats.thisYear")} - - {t('Stats.allTime')} + + {t("Stats.allTime")}
- ) + ); } diff --git a/frontend/src/components/index.js b/frontend/src/components/index.js index e5e4c3f..8ea61ec 100644 --- a/frontend/src/components/index.js +++ b/frontend/src/components/index.js @@ -1,4 +1,5 @@ export {default as Avatar} from './Avatar' +export {default as Chart} from './Chart' export {default as ColorMapLegend, DiscreteColorMapLegend} from './ColorMapLegend' export {default as FileDrop} from './FileDrop' export {default as FileUploadField} from './FileUploadField' @@ -6,7 +7,7 @@ export {default as FormattedDate} from './FormattedDate' export {default as LoginButton} from './LoginButton' export {default as Map} from './Map' export {default as Page} from './Page' +export {default as RegionStats} from './RegionStats' export {default as Stats} from './Stats' export {default as StripMarkdown} from './StripMarkdown' -export {default as Chart} from './Chart' export {default as Visibility} from './Visibility' diff --git a/frontend/src/mapstyles/index.js b/frontend/src/mapstyles/index.js index 30067d0..e484359 100644 --- a/frontend/src/mapstyles/index.js +++ b/frontend/src/mapstyles/index.js @@ -1,135 +1,209 @@ -import _ from 'lodash' -import produce from 'immer' +import _ from "lodash"; +import produce from "immer"; -import bright from './bright.json' -import positron from './positron.json' +import bright from "./bright.json"; +import positron from "./positron.json"; -import viridisBase from 'colormap/res/res/viridis' +import viridisBase from "colormap/res/res/viridis"; -export {bright, positron} -export const baseMapStyles = {bright, positron} +export { bright, positron }; +export const baseMapStyles = { bright, positron }; function simplifyColormap(colormap, maxCount = 16) { - const result = [] - const step = Math.ceil(colormap.length / maxCount) + const result = []; + const step = Math.ceil(colormap.length / maxCount); for (let i = 0; i < colormap.length; i += step) { - result.push(colormap[i]) + result.push(colormap[i]); } - return result + return result; } function rgbArrayToColor(arr) { - return ['rgb', ...arr.map((v) => Math.round(v * 255))] + return ["rgb", ...arr.map((v) => Math.round(v * 255))]; } function rgbArrayToHtml(arr) { - return "#" + arr.map((v) => Math.round(v * 255).toString(16)).map(v => (v.length == 1 ? '0' : '') + v).join('') + return ( + "#" + + arr + .map((v) => Math.round(v * 255).toString(16)) + .map((v) => (v.length == 1 ? 
"0" : "") + v) + .join("") + ); } export function colormapToScale(colormap, value, min, max) { return [ - 'interpolate-hcl', - ['linear'], + "interpolate-hcl", + ["linear"], value, - ...colormap.flatMap((v, i, a) => [(i / (a.length - 1)) * (max - min) + min, v]), - ] + ...colormap.flatMap((v, i, a) => [ + (i / (a.length - 1)) * (max - min) + min, + v, + ]), + ]; } -export const viridis = simplifyColormap(viridisBase.map(rgbArrayToColor), 20) -export const viridisSimpleHtml = simplifyColormap(viridisBase.map(rgbArrayToHtml), 10) -export const grayscale = ['#FFFFFF', '#000000'] -export const reds = [ - 'rgba( 255, 0, 0, 0)', - 'rgba( 255, 0, 0, 255)', -] +export const viridis = simplifyColormap(viridisBase.map(rgbArrayToColor), 20); +export const viridisSimpleHtml = simplifyColormap( + viridisBase.map(rgbArrayToHtml), + 10 +); +export const grayscale = ["#FFFFFF", "#000000"]; +export const reds = ["rgba( 255, 0, 0, 0)", "rgba( 255, 0, 0, 255)"]; -export function colorByCount(attribute = 'event_count', maxCount, colormap = viridis) { - return colormapToScale(colormap, ['case', isValidAttribute(attribute), ['get', attribute], 0], 0, maxCount) +export function colorByCount( + attribute = "event_count", + maxCount, + colormap = viridis +) { + return colormapToScale( + colormap, + ["case", isValidAttribute(attribute), ["get", attribute], 0], + 0, + maxCount + ); } -var steps = {'rural': [1.6,1.8,2.0,2.2], - 'urban': [1.1,1.3,1.5,1.7]} +var steps = { rural: [1.6, 1.8, 2.0, 2.2], urban: [1.1, 1.3, 1.5, 1.7] }; export function isValidAttribute(attribute) { - if (attribute.endsWith('zone')) { - return ['in', ['get', attribute], ['literal', ['rural', 'urban']]] + if (attribute.endsWith("zone")) { + return ["in", ["get", attribute], ["literal", ["rural", "urban"]]]; } - return ['to-boolean', ['get', attribute]] + return ["to-boolean", ["get", attribute]]; } export function borderByZone() { - return ["match", ['get', 'zone'], - "rural", "cyan", - "urban", "blue", - "purple" - ] + return ["match", ["get", "zone"], "rural", "cyan", "urban", "blue", "purple"]; } -export function colorByDistance(attribute = 'distance_overtaker_mean', fallback = '#ABC', zone='urban') { - +export function colorByDistance( + attribute = "distance_overtaker_mean", + fallback = "#ABC", + zone = "urban" +) { return [ - 'case', - ['!', isValidAttribute(attribute)], + "case", + ["!", isValidAttribute(attribute)], fallback, - ["match", ['get', 'zone'], "rural", [ - 'step', - ['get', attribute], - 'rgba(150, 0, 0, 1)', - steps['rural'][0], - 'rgba(255, 0, 0, 1)', - steps['rural'][1], - 'rgba(255, 220, 0, 1)', - steps['rural'][2], - 'rgba(67, 200, 0, 1)', - steps['rural'][3], - 'rgba(67, 150, 0, 1)', - ], "urban", - [ - 'step', - ['get', attribute], - 'rgba(150, 0, 0, 1)', - steps['urban'][0], - 'rgba(255, 0, 0, 1)', - steps['urban'][1], - 'rgba(255, 220, 0, 1)', - steps['urban'][2], - 'rgba(67, 200, 0, 1)', - steps['urban'][3], - 'rgba(67, 150, 0, 1)', + "match", + ["get", "zone"], + "rural", + [ + "step", + ["get", attribute], + "rgba(150, 0, 0, 1)", + steps["rural"][0], + "rgba(255, 0, 0, 1)", + steps["rural"][1], + "rgba(255, 220, 0, 1)", + steps["rural"][2], + "rgba(67, 200, 0, 1)", + steps["rural"][3], + "rgba(67, 150, 0, 1)", + ], + "urban", + [ + "step", + ["get", attribute], + "rgba(150, 0, 0, 1)", + steps["urban"][0], + "rgba(255, 0, 0, 1)", + steps["urban"][1], + "rgba(255, 220, 0, 1)", + steps["urban"][2], + "rgba(67, 200, 0, 1)", + steps["urban"][3], + "rgba(67, 150, 0, 1)", + ], + [ + "step", + ["get", attribute], + 
"rgba(150, 0, 0, 1)", + steps["urban"][0], + "rgba(255, 0, 0, 1)", + steps["urban"][1], + "rgba(255, 220, 0, 1)", + steps["urban"][2], + "rgba(67, 200, 0, 1)", + steps["urban"][3], + "rgba(67, 150, 0, 1)", + ], ], - [ - 'step', - ['get', attribute], - 'rgba(150, 0, 0, 1)', - steps['urban'][0], - 'rgba(255, 0, 0, 1)', - steps['urban'][1], - 'rgba(255, 220, 0, 1)', - steps['urban'][2], - 'rgba(67, 200, 0, 1)', - steps['urban'][3], - 'rgba(67, 150, 0, 1)', - ] - ] - ] + ]; } export const trackLayer = { - type: 'line', + type: "line", paint: { - 'line-width': ['interpolate', ['linear'], ['zoom'], 14, 2, 17, 5], - 'line-color': '#F06292', - 'line-opacity': 0.6, + "line-width": ["interpolate", ["linear"], ["zoom"], 14, 2, 17, 5], + "line-color": "#F06292", + "line-opacity": 0.6, }, -} +}; -export const trackLayerRaw = produce(trackLayer, draft => { +export const getRegionLayers = ( + adminLevel = 6, + baseColor = "#00897B", + maxValue = 5000 +) => [ + { + id: "region", + type: "fill", + source: "obs", + "source-layer": "obs_regions", + minzoom: 0, + maxzoom: 10, + // filter: [">", "overtaking_event_count", 0], + paint: { + "fill-color": baseColor, + "fill-antialias": true, + "fill-opacity": [ + "interpolate", + ["linear"], + ["log10", ["max",["get", "overtaking_event_count"],1]], + 0, + 0, + Math.log10(maxValue), + 0.9, + ], + }, + }, + { + id: "region-border", + type: "line", + source: "obs", + "source-layer": "obs_regions", + minzoom: 0, + maxzoom: 10, + // filter: [">", "overtaking_event_count", 0], + paint: { + "line-width": [ + "interpolate", + ["linear"], + ["log10", ["max",["get", "overtaking_event_count"],1]], + 0, + 0.2, + Math.log10(maxValue), + 1.5, + ], + "line-color": baseColor, + }, + layout: { + "line-join": "round", + "line-cap": "round", + }, + }, +]; + +export const trackLayerRaw = produce(trackLayer, (draft) => { // draft.paint['line-color'] = '#81D4FA' - draft.paint['line-width'][4] = 1 - draft.paint['line-width'][6] = 2 - draft.paint['line-dasharray'] = [3, 3] - delete draft.paint['line-opacity'] -}) + draft.paint["line-width"][4] = 1; + draft.paint["line-width"][6] = 2; + draft.paint["line-dasharray"] = [3, 3]; + delete draft.paint["line-opacity"]; +}); -export const basemap = positron +export const basemap = positron; diff --git a/frontend/src/pages/AcknowledgementsPage.tsx b/frontend/src/pages/AcknowledgementsPage.tsx new file mode 100644 index 0000000..cc9cb19 --- /dev/null +++ b/frontend/src/pages/AcknowledgementsPage.tsx @@ -0,0 +1,18 @@ +import React from "react"; +import { Header } from "semantic-ui-react"; +import { useTranslation } from "react-i18next"; +import Markdown from "react-markdown"; + +import { Page } from "components"; + +export default function AcknowledgementsPage() { + const { t } = useTranslation(); + const title = t("AcknowledgementsPage.title"); + + return ( + +
{title}
+ {t("AcknowledgementsPage.information")} +
+ ); +} diff --git a/frontend/src/pages/ExportPage/index.tsx b/frontend/src/pages/ExportPage/index.tsx index e8329d1..65cdac5 100644 --- a/frontend/src/pages/ExportPage/index.tsx +++ b/frontend/src/pages/ExportPage/index.tsx @@ -104,7 +104,7 @@ const BoundingBoxSelector = React.forwardRef( } ); -const MODES = ["events"]; +const MODES = ["events", "segments"]; const FORMATS = ["geojson", "shapefile"]; export default function ExportPage() { @@ -112,7 +112,6 @@ export default function ExportPage() { const [bbox, setBbox] = useState("8.294678,49.651182,9.059601,50.108249"); const [fmt, setFmt] = useState("geojson"); const config = useConfig(); - const exportUrl = `${config?.apiUrl}/export/events?bbox=${bbox}&fmt=${fmt}`; const { t } = useTranslation(); return ( @@ -163,7 +162,7 @@ export default function ExportPage() { + + + + + {t('MapPage.regionInfo.eventCount')} + {region.properties.overtaking_event_count ?? 0} + + + + ) + + return content && mapInfoPortal + ? createPortal(
{content}
, mapInfoPortal) + : null +} diff --git a/frontend/src/pages/MapPage/RoadInfo.tsx b/frontend/src/pages/MapPage/RoadInfo.tsx index a4c988a..80116cc 100644 --- a/frontend/src/pages/MapPage/RoadInfo.tsx +++ b/frontend/src/pages/MapPage/RoadInfo.tsx @@ -1,74 +1,57 @@ -import React, { useState, useCallback } from "react"; -import _ from "lodash"; -import { - Segment, - Menu, - Header, - Label, - Icon, - Table, - Message, - Button, -} from "semantic-ui-react"; -import { Layer, Source } from "react-map-gl"; -import { of, from, concat } from "rxjs"; -import { useObservable } from "rxjs-hooks"; -import { switchMap, distinctUntilChanged } from "rxjs/operators"; -import { Chart } from "components"; -import { pairwise } from "utils"; -import { useTranslation } from "react-i18next"; +import React, {useState, useCallback} from 'react' +import {createPortal} from 'react-dom' +import _ from 'lodash' +import {Segment, Menu, Header, Label, Icon, Table, Message, Button} from 'semantic-ui-react' +import {Layer, Source} from 'react-map-gl' +import {of, from, concat} from 'rxjs' +import {useObservable} from 'rxjs-hooks' +import {switchMap, distinctUntilChanged} from 'rxjs/operators' +import {Chart} from 'components' +import {pairwise} from 'utils' +import {useTranslation} from 'react-i18next' -import type { Location } from "types"; -import api from "api"; -import { colorByDistance, borderByZone } from "mapstyles"; +import type {Location} from 'types' +import api from 'api' +import {colorByDistance, borderByZone} from 'mapstyles' -import styles from "./styles.module.less"; +import styles from './styles.module.less' function selectFromColorMap(colormap, value) { - let last = null; + let last = null for (let i = 0; i < colormap.length; i += 2) { if (colormap[i + 1] > value) { - return colormap[i]; + return colormap[i] } } - return colormap[colormap.length - 1]; + return colormap[colormap.length - 1] } const UNITS = { - distanceOvertaker: "m", - distanceStationary: "m", - speed: "km/h", -}; -const ZONE_COLORS = { urban: "blue", rural: "cyan", motorway: "purple" }; -const CARDINAL_DIRECTIONS = [ - "north", - "northEast", - "east", - "southEast", - "south", - "southWest", - "west", - "northWest", -]; + distanceOvertaker: 'm', + distanceStationary: 'm', + speed: 'km/h', +} +const ZONE_COLORS = {urban: 'blue', rural: 'cyan', motorway: 'purple'} +const CARDINAL_DIRECTIONS = ['north', 'northEast', 'east', 'southEast', 'south', 'southWest', 'west', 'northWest'] const getCardinalDirection = (t, bearing) => { if (bearing == null) { - return t("MapPage.roadInfo.cardinalDirections.unknown"); + return t('MapPage.roadInfo.cardinalDirections.unknown') } else { - const n = CARDINAL_DIRECTIONS.length; - const i = Math.floor(((bearing / 360.0) * n + 0.5) % n); - const name = CARDINAL_DIRECTIONS[i]; - return t(`MapPage.roadInfo.cardinalDirections.${name}`); + const n = CARDINAL_DIRECTIONS.length + const i = Math.floor(((bearing / 360.0) * n + 0.5) % n) + const name = CARDINAL_DIRECTIONS[i] + return t(`MapPage.roadInfo.cardinalDirections.${name}`) } -}; +} -function RoadStatsTable({ data }) { - const { t } = useTranslation(); +function RoadStatsTable({data}) { + const {t} = useTranslation() return ( - {["distanceOvertaker", "distanceStationary", "speed"].map((prop) => ( + {['distanceOvertaker', 'distanceStationary', 'speed'].map((prop) => ( {t(`MapPage.roadInfo.${prop}`)} @@ -76,58 +59,52 @@ function RoadStatsTable({ data }) { - {["count", "min", "median", "max", "mean"].map((stat) => ( + {['count', 'min', 'median', 'max', 
'mean'].map((stat) => (
          <Table.Row key={stat}>
            <Table.Cell>{t(`MapPage.roadInfo.${stat}`)}</Table.Cell>
-            {["distanceOvertaker", "distanceStationary", "speed"].map(
-              (prop) => (
-                <Table.Cell key={prop} textAlign="right">
-                  {(
-                    data[prop]?.statistics?.[stat] *
-                    (prop === `speed` && stat != "count" ? 3.6 : 1)
-                  ).toFixed(stat === "count" ? 0 : 2)}
-                  {stat !== "count" && ` ${UNITS[prop]}`}
-                </Table.Cell>
-              )
-            )}
+            {['distanceOvertaker', 'distanceStationary', 'speed'].map((prop) => (
+              <Table.Cell key={prop} textAlign="right">
+                {(data[prop]?.statistics?.[stat] * (prop === `speed` && stat != 'count' ? 3.6 : 1)).toFixed(
+                  stat === 'count' ? 0 : 2
+                )}
+                {stat !== 'count' && ` ${UNITS[prop]}`}
+              </Table.Cell>
+            ))}
          </Table.Row>
        ))}
      </Table.Body>
    </Table>
- ); + ) } -function HistogramChart({ bins, counts, zone }) { - const diff = bins[1] - bins[0]; - const colortype = zone === "rural" ? 3 : 5; +function HistogramChart({bins, counts, zone}) { + const diff = bins[1] - bins[0] + const colortype = zone === 'rural' ? 3 : 5 const data = _.zip( bins.slice(0, bins.length - 1).map((v) => v + diff / 2), counts ).map((value) => ({ value, itemStyle: { - color: selectFromColorMap( - colorByDistance()[3][colortype].slice(2), - value[0] - ), + color: selectFromColorMap(colorByDistance()[3][colortype].slice(2), value[0]), }, - })); + })) return ( `${Math.round(v * 100)} cm` }, + type: 'value', + axisLabel: {formatter: (v) => `${Math.round(v * 100)} cm`}, min: 0, max: 2.5, }, yAxis: {}, series: [ { - type: "bar", + type: 'bar', data, barMaxWidth: 20, @@ -135,142 +112,120 @@ function HistogramChart({ bins, counts, zone }) { ], }} /> - ); + ) +} + +interface ArrayStats { + statistics: { + count: number + mean: number + min: number + max: number + median: number + } + histogram: { + bins: number[] + counts: number[] + } + values: number[] +} + +export interface RoadDirectionInfo { + bearing: number + distanceOvertaker: ArrayStats + distanceStationary: ArrayStats + speed: ArrayStats +} + +export interface RoadInfoType { + road: { + way_id: number + zone: 'urban' | 'rural' | null + name: string + directionality: -1 | 0 | 1 + oneway: boolean + geometry: Object + } + forwards: RoadDirectionInfo + backwards: RoadDirectionInfo } export default function RoadInfo({ - clickLocation, + roadInfo: info, hasFilters, onClose, + mapInfoPortal, }: { - clickLocation: Location | null; - hasFilters: boolean; - onClose: () => void; + roadInfo: RoadInfoType + hasFilters: boolean + onClose: () => void + mapInfoPortal: HTMLElement }) { - const { t } = useTranslation(); - const [direction, setDirection] = useState("forwards"); + const {t} = useTranslation() + const [direction, setDirection] = useState('forwards') const onClickDirection = useCallback( - (e, { name }) => { - e.preventDefault(); - e.stopPropagation(); - setDirection(name); + (e, {name}) => { + e.preventDefault() + e.stopPropagation() + setDirection(name) }, [setDirection] - ); + ) - const info = useObservable( - (_$, inputs$) => - inputs$.pipe( - distinctUntilChanged(_.isEqual), - switchMap(([location]) => - location - ? concat( - of(null), - from( - api.get("/mapdetails/road", { - query: { - ...location, - radius: 100, - }, - }) - ) - ) - : of(null) - ) - ), - null, - [clickLocation] - ); + // TODO: change based on left-hand/right-hand traffic + const offsetDirection = info.road.oneway ? 0 : direction === 'forwards' ? 1 : -1 - if (!clickLocation) { - return null; - } + const content = ( + <> +
+      <div className={styles.closeHeader}>
+        <Header as="h3">{info?.road.name || t('MapPage.roadInfo.unnamedWay')}</Header>
+        <Button primary icon onClick={onClose}>
+          <Icon name="close" />
+        </Button>
+      </div>
- const loading = info == null; + {hasFilters && ( + + + {t('MapPage.roadInfo.hintFiltersNotApplied')} + + )} - const offsetDirection = info?.road?.oneway - ? 0 - : direction === "forwards" - ? 1 - : -1; // TODO: change based on left-hand/right-hand traffic + {info?.road.zone && ( + + )} - const content = - !loading && !info.road ? ( - "No road found." - ) : ( - <> -
- {loading - ? "..." - : info?.road.name || t("MapPage.roadInfo.unnamedWay")} + {info?.road.oneway && ( + + )} -
+ {info?.road.oneway ? null : ( + + {t('MapPage.roadInfo.direction')} + + {getCardinalDirection(t, info?.forwards?.bearing)} + + + {getCardinalDirection(t, info?.backwards?.bearing)} + + + )} - {hasFilters && ( - - - - {t("MapPage.roadInfo.hintFiltersNotApplied")} - - - )} + {info?.[direction] && } - {info?.road.zone && ( - - )} - - {info?.road.oneway && ( - - )} - - {info?.road.oneway ? null : ( - - {t("MapPage.roadInfo.direction")} - - {getCardinalDirection(t, info?.forwards?.bearing)} - - - {getCardinalDirection(t, info?.backwards?.bearing)} - - - )} - - {info?.[direction] && } - - {info?.[direction]?.distanceOvertaker?.histogram && ( - <> -
- {t("MapPage.roadInfo.overtakerDistanceDistribution")} -
-            <HistogramChart
-              {...info[direction]?.distanceOvertaker?.histogram}
-              zone={info.road.zone}
-            />
-          </>
-        )}
-      </>
-    );
+        {info?.[direction]?.distanceOvertaker?.histogram && (
+          <>
+            <Header as="h5">{t('MapPage.roadInfo.overtakerDistanceDistribution')}</Header>
+ + + )} + + ) return ( <> @@ -280,22 +235,14 @@ export default function RoadInfo({ id="route" type="line" paint={{ - "line-width": [ - "interpolate", - ["linear"], - ["zoom"], - 14, - 6, - 17, - 12, - ], - "line-color": "#18FFFF", - "line-opacity": 0.5, + 'line-width': ['interpolate', ['linear'], ['zoom'], 14, 6, 17, 12], + 'line-color': '#18FFFF', + 'line-opacity': 0.5, ...{ - "line-offset": [ - "interpolate", - ["exponential", 1.5], - ["zoom"], + 'line-offset': [ + 'interpolate', + ['exponential', 1.5], + ['zoom'], 12, offsetDirection, 19, @@ -307,11 +254,7 @@ export default function RoadInfo({ )} - {content && ( -
-        <div className={styles.mapInfoBox}>
-          {content}
-        </div>
-      )}
+      {content &&
+        mapInfoPortal &&
+        createPortal(
+          <div className={styles.mapInfoBox}>{content}</div>
, mapInfoPortal)} - ); + ) } diff --git a/frontend/src/pages/MapPage/index.tsx b/frontend/src/pages/MapPage/index.tsx index 921130c..e215077 100644 --- a/frontend/src/pages/MapPage/index.tsx +++ b/frontend/src/pages/MapPage/index.tsx @@ -1,4 +1,4 @@ -import React, { useState, useCallback, useMemo } from "react"; +import React, { useState, useCallback, useMemo, useRef } from "react"; import _ from "lodash"; import { connect } from "react-redux"; import { Button } from "semantic-ui-react"; @@ -6,19 +6,21 @@ import { Layer, Source } from "react-map-gl"; import produce from "immer"; import classNames from "classnames"; +import api from "api"; import type { Location } from "types"; import { Page, Map } from "components"; import { useConfig } from "config"; import { colorByDistance, colorByCount, + getRegionLayers, borderByZone, - reds, isValidAttribute, } from "mapstyles"; import { useMapConfig } from "reducers/mapConfig"; -import RoadInfo from "./RoadInfo"; +import RoadInfo, { RoadInfoType } from "./RoadInfo"; +import RegionInfo from "./RegionInfo"; import LayerSidebar from "./LayerSidebar"; import styles from "./styles.module.less"; @@ -27,6 +29,7 @@ const untaggedRoadsLayer = { type: "line", source: "obs", "source-layer": "obs_roads", + minzoom: 12, filter: ["!", ["to-boolean", ["get", "distance_overtaker_mean"]]], layout: { "line-cap": "round", @@ -35,7 +38,7 @@ const untaggedRoadsLayer = { paint: { "line-width": ["interpolate", ["exponential", 1.5], ["zoom"], 12, 2, 17, 2], "line-color": "#ABC", - "line-opacity": ["interpolate", ["linear"], ["zoom"], 14, 0, 15, 1], + // "line-opacity": ["interpolate", ["linear"], ["zoom"], 14, 0, 15, 1], "line-offset": [ "interpolate", ["exponential", 1.5], @@ -46,10 +49,9 @@ const untaggedRoadsLayer = { ["*", ["get", "offset_direction"], 8], ], }, - minzoom: 12, }; -const getUntaggedRoadsLayer = (colorAttribute, maxCount) => +const getUntaggedRoadsLayer = (colorAttribute) => produce(untaggedRoadsLayer, (draft) => { draft.filter = ["!", isValidAttribute(colorAttribute)]; }); @@ -58,6 +60,7 @@ const getRoadsLayer = (colorAttribute, maxCount) => produce(untaggedRoadsLayer, (draft) => { draft.id = "obs_roads_normal"; draft.filter = isValidAttribute(colorAttribute); + draft.minzoom = 10; draft.paint["line-width"][6] = 6; // scale bigger on zoom draft.paint["line-color"] = colorAttribute.startsWith("distance_") ? colorByDistance(colorAttribute) @@ -66,8 +69,8 @@ const getRoadsLayer = (colorAttribute, maxCount) => : colorAttribute.endsWith("zone") ? 
borderByZone() : "#DDD"; - draft.paint["line-opacity"][3] = 12; - draft.paint["line-opacity"][5] = 13; + // draft.paint["line-opacity"][3] = 12; + // draft.paint["line-opacity"][5] = 13; }); const getEventsLayer = () => ({ @@ -77,9 +80,10 @@ const getEventsLayer = () => ({ "source-layer": "obs_events", paint: { "circle-radius": ["interpolate", ["linear"], ["zoom"], 14, 3, 17, 8], + "circle-opacity": ["interpolate",["linear"],["zoom"],8,0.1,9,0.3,10,0.5,11,1], "circle-color": colorByDistance("distance_overtaker"), }, - minzoom: 11, + minzoom: 8, }); const getEventsTextLayer = () => ({ @@ -110,14 +114,39 @@ const getEventsTextLayer = () => ({ }, }); +interface RegionInfo { + properties: { + admin_level: number; + name: string; + overtaking_event_count: number; + }; +} + +type Details = + | { type: "road"; road: RoadInfoType } + | { type: "region"; region: RegionInfo }; + function MapPage({ login }) { const { obsMapSource, banner } = useConfig() || {}; - const [clickLocation, setClickLocation] = useState(null); + const [details, setDetails] = useState(null); + + const onCloseDetails = useCallback(() => setDetails(null), [setDetails]); const mapConfig = useMapConfig(); + const viewportRef = useRef(); + const mapInfoPortal = useRef(); + + const onViewportChange = useCallback( + (viewport) => { + viewportRef.current = viewport; + }, + [viewportRef] + ); + const onClick = useCallback( - (e) => { + async (e) => { + // check if we clicked inside the mapInfoBox, if so, early exit let node = e.target; while (node) { if ( @@ -130,13 +159,28 @@ function MapPage({ login }) { node = node.parentNode; } - setClickLocation({ longitude: e.lngLat[0], latitude: e.lngLat[1] }); + const { zoom } = viewportRef.current; + + if (zoom < 10) { + const clickedRegion = e.features?.find( + (f) => f.source === "obs" && f.sourceLayer === "obs_regions" + ); + setDetails( + clickedRegion ? { type: "region", region: clickedRegion } : null + ); + } else { + const road = await api.get("/mapdetails/road", { + query: { + longitude: e.lngLat[0], + latitude: e.lngLat[1], + radius: 100, + }, + }); + setDetails(road?.road ? { type: "road", road } : null); + } }, - [setClickLocation] + [setDetails] ); - const onCloseRoadInfo = useCallback(() => { - setClickLocation(null); - }, [setClickLocation]); const [layerSidebar, setLayerSidebar] = useState(true); @@ -162,8 +206,14 @@ function MapPage({ login }) { layers.push(roadsLayer); } + const regionLayers = useMemo(() => getRegionLayers(), []); + if (mapConfig.obsRegions.show) { + layers.push(...regionLayers); + } + const eventsLayer = useMemo(() => getEventsLayer(), []); const eventsTextLayer = useMemo(() => getEventsTextLayer(), []); + if (mapConfig.obsEvents.show) { layers.push(eventsLayer); layers.push(eventsTextLayer); @@ -221,6 +271,7 @@ function MapPage({ login }) { styles.mapContainer, banner ? styles.hasBanner : null )} + ref={mapInfoPortal} > {layerSidebar && (
@@ -228,7 +279,12 @@ function MapPage({ login }) {
)}
-          <RoadInfo {...{ clickLocation, hasFilters, onClose: onCloseRoadInfo }} />
+          {details?.type === "road" && details.road && (
+            <RoadInfo
+              roadInfo={details.road}
+              hasFilters={hasFilters}
+              onClose={onCloseDetails}
+              mapInfoPortal={mapInfoPortal.current}
+            />
+          )}
+          {details?.type === "region" && details.region && (
+            <RegionInfo
+              region={details.region}
+              onClose={onCloseDetails}
+              mapInfoPortal={mapInfoPortal.current}
+            />
+          )}
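
The RoadInfo and MapPage changes above share one technique: instead of absolutely positioning the info box over the map, the page now owns a plain sidebar element, exposes it through a ref (`mapInfoPortal`), and the detail panels render into it with `createPortal`. Below is a minimal, self-contained sketch of that pattern — not the portal's actual code: the `InfoPanel` and `PageSketch` names are invented, and it holds the target element in state rather than in a ref as the page above does, so the sketch re-renders on its own once the target mounts.

```tsx
import React, { useState } from "react";
import { createPortal } from "react-dom";

function InfoPanel({ portal, children }: { portal: HTMLElement | null; children: React.ReactNode }) {
  // Render nothing until the portal target exists; on the very first render
  // the sidebar <div> has not mounted yet, so `portal` is still null.
  return portal ? createPortal(children, portal) : null;
}

export default function PageSketch() {
  // Holding the element in state (instead of a ref) forces a re-render once
  // the ref callback fires, so the panel appears without extra plumbing.
  const [portal, setPortal] = useState<HTMLElement | null>(null);
  return (
    <div style={{ display: "flex" }}>
      <div className="map">{/* the map canvas renders here */}</div>
      <div className="mapInfoBox" ref={setPortal} />
      <InfoPanel portal={portal}>road or region details</InfoPanel>
    </div>
  );
}
```

The `content && mapInfoPortal &&` guard in RoadInfo above serves the same purpose as the null check here: nothing is portalled until the target element actually exists.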
diff --git a/frontend/src/pages/MapPage/styles.module.less b/frontend/src/pages/MapPage/styles.module.less index 9af8b42..8519d84 100644 --- a/frontend/src/pages/MapPage/styles.module.less +++ b/frontend/src/pages/MapPage/styles.module.less @@ -21,15 +21,22 @@ .map { flex: 1 1 0; + overflow: hidden; } .mapInfoBox { - position: absolute; - right: 16px; - top: 32px; - max-height: 100%; width: 36rem; overflow: auto; + border-left: 1px solid @borderColor; + background: white; + padding: 16px; +} + +.copyright { + color: #888; + font-size: 0.8em; + line-height: 1.4; + margin-block-start: 1em; } .mapToolbar { @@ -37,3 +44,35 @@ left: 16px; top: 16px; } + +.closeHeader { + display: flex; + align-items: baseline; + justify-content: space-between; +} + + +@media @mobile { + .mapContainer { + height: auto; + min-height: calc(100vh - @menuHeightMobile); + &.hasBanner { + height: calc(100vh - @menuHeightMobile - 50px); + } + flex-direction: column; + } + + .map { + height: 60vh; + } + + .mapSidebar { + width: auto; + height: auto; + } + + .mapInfoBox { + width: auto; + height: auto; + } +} diff --git a/frontend/src/pages/MyTracksPage.tsx b/frontend/src/pages/MyTracksPage.tsx new file mode 100644 index 0000000..38f0d4c --- /dev/null +++ b/frontend/src/pages/MyTracksPage.tsx @@ -0,0 +1,429 @@ +import React, { useCallback, useMemo, useState } from "react"; +import { connect } from "react-redux"; +import { + Accordion, + Button, + Checkbox, + Confirm, + Header, + Icon, + Item, + List, + Loader, + Dropdown, + SemanticCOLORS, + SemanticICONS, + Table, +} from "semantic-ui-react"; +import { useObservable } from "rxjs-hooks"; +import { Link } from "react-router-dom"; +import { of, from, concat, BehaviorSubject, combineLatest } from "rxjs"; +import { map, switchMap, distinctUntilChanged } from "rxjs/operators"; +import _ from "lodash"; +import { useTranslation } from "react-i18next"; + +import type { ProcessingStatus, Track, UserDevice } from "types"; +import { Page, FormattedDate, Visibility } from "components"; +import api from "api"; +import { useCallbackRef, formatDistance, formatDuration } from "utils"; + +import download from "downloadjs"; + +const COLOR_BY_STATUS: Record = { + error: "red", + complete: "green", + created: "grey", + queued: "orange", + processing: "orange", +}; + +const ICON_BY_STATUS: Record = { + error: "warning sign", + complete: "check circle outline", + created: "bolt", + queued: "bolt", + processing: "bolt", +}; + +function ProcessingStatusLabel({ status }: { status: ProcessingStatus }) { + const { t } = useTranslation(); + return ( + + + + ); +} + +function SortableHeader({ + children, + setOrderBy, + orderBy, + reversed, + setReversed, + name, + ...props +}) { + const toggleSort = (e) => { + e.preventDefault(); + e.stopPropagation(); + + if (orderBy === name) { + if (!reversed) { + setReversed(true); + } else { + setReversed(false); + setOrderBy(null); + } + } else { + setReversed(false); + setOrderBy(name); + } + }; + + let icon = + orderBy === name ? (reversed ? "sort descending" : "sort ascending") : null; + + return ( + +
+    <Table.HeaderCell {...props}>
+      <div onClick={toggleSort}>
+        {children}
+        {icon && <Icon name={icon} />}
+      </div>
+    </Table.HeaderCell>
+ ); +} + +type Filters = { + userDeviceId?: null | number; + visibility?: null | boolean; +}; + +function TrackFilters({ + filters, + setFilters, + deviceNames, +}: { + filters: Filters; + setFilters: (f: Filters) => void; + deviceNames: null | Record; +}) { + return ( + + + Device + ({ + value: Number(deviceId), + key: deviceId, + text: deviceName, + }) + ), + ]} + value={filters?.userDeviceId ?? 0} + onChange={(_e, { value }) => + setFilters({ ...filters, userDeviceId: (value as number) || null }) + } + /> + + + + Visibility + + setFilters({ + ...filters, + visibility: value === "none" ? null : (value as boolean), + }) + } + /> + + + ); +} + +function TracksTable({ title }) { + const [orderBy, setOrderBy] = useState("recordedAt"); + const [reversed, setReversed] = useState(false); + const [showFilters, setShowFilters] = useState(false); + const [filters, setFilters] = useState({}); + const [selectedTracks, setSelectedTracks] = useState>( + {} + ); + + const toggleTrackSelection = useCallbackRef( + (slug: string, selected?: boolean) => { + const newSelected = selected ?? !selectedTracks[slug]; + setSelectedTracks( + _.pickBy({ ...selectedTracks, [slug]: newSelected }, _.identity) + ); + } + ); + + const query = _.pickBy( + { + limit: 1000, + offset: 0, + order_by: orderBy, + reversed: reversed ? "true" : "false", + user_device_id: filters?.userDeviceId, + public: filters?.visibility, + }, + (x) => x != null + ); + + const forceUpdate$ = useMemo(() => new BehaviorSubject(null), []); + const tracks: Track[] | null = useObservable( + (_$, inputs$) => + combineLatest([ + inputs$.pipe( + map(([query]) => query), + distinctUntilChanged(_.isEqual) + ), + forceUpdate$, + ]).pipe( + switchMap(([query]) => + concat( + of(null), + from(api.get("/tracks/feed", { query }).then((r) => r.tracks)) + ) + ) + ), + null, + [query] + ); + + const deviceNames: null | Record = useObservable(() => + from(api.get("/user/devices")).pipe( + map((response: UserDevice[]) => + Object.fromEntries( + response.map((device) => [ + device.id, + device.displayName || device.identifier, + ]) + ) + ) + ) + ); + + const { t } = useTranslation(); + + const p = { orderBy, setOrderBy, reversed, setReversed }; + + const selectedCount = Object.keys(selectedTracks).length; + const noneSelected = selectedCount === 0; + const allSelected = selectedCount === tracks?.length; + const selectAll = () => { + setSelectedTracks( + Object.fromEntries(tracks?.map((t) => [t.slug, true]) ?? []) + ); + }; + const selectNone = () => { + setSelectedTracks({}); + }; + + const bulkAction = async (action: string) => { + const response = await api.post("/tracks/bulk", { + body: { + action, + tracks: Object.keys(selectedTracks), + }, + returnResponse: true, + }); + if (action === "download") { + const contentType = + response.headers.get("content-type") ?? "application/x-gtar"; + + const filename = + response.headers + .get("content-disposition") + ?.match(/filename="([^"]+)"/)?.[1] ?? "tracks.tar.bz2"; + download(await response.blob(), filename, contentType); + } + + setShowBulkDelete(false); + setSelectedTracks({}); + forceUpdate$.next(null); + }; + const [showBulkDelete, setShowBulkDelete] = useState(false); + + return ( + <> +
+ + + + Selection of {selectedCount} tracks + + bulkAction("makePrivate")}> + Make private + + bulkAction("makePublic")}> + Make public + + bulkAction("reprocess")}> + Reprocess + + bulkAction("download")}> + Download + + setShowBulkDelete(true)}> + Delete + + + + +
+ +
{title}
+
+ + + + setShowFilters(!showFilters)} + > + + Filters + + + + + + + setShowBulkDelete(false)} + onConfirm={() => bulkAction("delete")} + content={`Are you sure you want to delete ${selectedCount} tracks?`} + confirmButton={t("general.delete")} + cancelButton={t("general.cancel")} + /> + + + + + + (noneSelected ? selectAll() : selectNone())} + /> + + + + Title + + + Recorded at + + + Visibility + + + Length + + + Duration + + + Device + + + + + + {tracks?.map((track: Track) => ( + + + toggleTrackSelection(track.slug)} + checked={selectedTracks[track.slug] ?? false} + /> + + + {track.processingStatus == null ? null : ( + + )} + + {track.title || t("general.unnamedTrack")} + + + + + + + + + {track.public == null ? null : ( + + )} + + + + {formatDistance(track.length)} + + + + {formatDuration(track.duration)} + + + + {track.userDeviceId + ? deviceNames?.[track.userDeviceId] ?? "..." + : null} + + + ))} + +
+
+ + ); +} + +function UploadButton({ navigate, ...props }) { + const { t } = useTranslation(); + const onClick = useCallback( + (e) => { + e.preventDefault(); + navigate(); + }, + [navigate] + ); + return ( + + ); +} + +const MyTracksPage = connect((state) => ({ login: (state as any).login }))( + function MyTracksPage({ login }) { + const { t } = useTranslation(); + + const title = t("TracksPage.titleUser"); + + return ( + + + + ); + } +); + +export default MyTracksPage; diff --git a/frontend/src/pages/SettingsPage.tsx b/frontend/src/pages/SettingsPage.tsx deleted file mode 100644 index 11be6e5..0000000 --- a/frontend/src/pages/SettingsPage.tsx +++ /dev/null @@ -1,227 +0,0 @@ -import React from "react"; -import { connect } from "react-redux"; -import { - Message, - Icon, - Grid, - Form, - Button, - TextArea, - Ref, - Input, - Header, - Divider, - Popup, -} from "semantic-ui-react"; -import { useForm } from "react-hook-form"; -import Markdown from "react-markdown"; -import { useTranslation } from "react-i18next"; - -import { setLogin } from "reducers/login"; -import { Page, Stats } from "components"; -import api from "api"; -import { findInput } from "utils"; -import { useConfig } from "config"; - -const SettingsPage = connect((state) => ({ login: state.login }), { setLogin })( - function SettingsPage({ login, setLogin }) { - const { t } = useTranslation(); - const { register, handleSubmit } = useForm(); - const [loading, setLoading] = React.useState(false); - const [errors, setErrors] = React.useState(null); - - const onSave = React.useCallback( - async (changes) => { - setLoading(true); - setErrors(null); - try { - const response = await api.put("/user", { body: changes }); - setLogin(response); - } catch (err) { - setErrors(err.errors); - } finally { - setLoading(false); - } - }, - [setLoading, setLogin, setErrors] - ); - - const onGenerateNewKey = React.useCallback(async () => { - setLoading(true); - setErrors(null); - try { - const response = await api.put("/user", { - body: { updateApiKey: true }, - }); - setLogin(response); - } catch (err) { - setErrors(err.errors); - } finally { - setLoading(false); - } - }, [setLoading, setLogin, setErrors]); - - return ( - - - - -
{t("SettingsPage.profile.title")}
- -
- - - - - - {t("SettingsPage.profile.username.hint")} - - - - {t("SettingsPage.profile.publicNotice")} - - - - - - - - - {t("SettingsPage.profile.displayName.fallbackNotice")} - - - - - - -
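
For reference, here is a hedged sketch of the request cycle behind the new bulk actions in MyTracksPage (the 0.8.0 feature listed in the changelog). The action names, request body, and the content-disposition/content-type fallbacks are taken from the component above; the `/api/tracks/bulk` URL and the use of plain `fetch` instead of the portal's `api` wrapper are assumptions made so the example is self-contained.

```ts
import download from "downloadjs";

type BulkAction = "makePrivate" | "makePublic" | "reprocess" | "download" | "delete";

async function bulkTrackAction(action: BulkAction, slugs: string[]): Promise<void> {
  // POST the action and the selected track slugs in one request.
  const response = await fetch("/api/tracks/bulk", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ action, tracks: slugs }),
  });
  if (!response.ok) {
    throw new Error(`Bulk action "${action}" failed with status ${response.status}`);
  }

  if (action === "download") {
    // The download action streams an archive back; recover the filename and
    // MIME type from the headers, with the same fallbacks the component uses.
    const contentType = response.headers.get("content-type") ?? "application/x-gtar";
    const filename =
      response.headers.get("content-disposition")?.match(/filename="([^"]+)"/)?.[1] ??
      "tracks.tar.bz2";
    download(await response.blob(), filename, contentType);
  }
}
```

After a mutating action, the component above re-fetches the track list by pushing to a BehaviorSubject (`forceUpdate$.next(null)`), a simple way to re-trigger an rxjs-backed `useObservable` without changing its inputs.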