Live tiles, API executes openmaptiles-tools

This commit is contained in:
Paul Bienkowski 2021-11-21 17:05:46 +01:00
parent bde1b77b48
commit 7add8caaa1
18 changed files with 368 additions and 155 deletions

View file

@ -28,11 +28,14 @@ WORKDIR /opt/obs/api
ADD api/requirements.txt /opt/obs/api/
RUN pip install -r requirements.txt
ADD tile-generator /opt/obs/tile-generator
ADD api/scripts /opt/obs/scripts
RUN pip install -e /opt/obs/scripts
ADD api/setup.py /opt/obs/api/
ADD api/obs /opt/obs/api/obs/
ADD api/tools /opt/obs/api/tools/
RUN pip install -e /opt/obs/api/
COPY --from=frontend-builder /opt/obs/frontend/build /opt/obs/frontend/build

View file

@ -5,7 +5,9 @@ WORKDIR /opt/obs/api
ADD scripts /opt/obs/scripts
RUN pip install -e /opt/obs/scripts
ADD requirements.txt setup.py /opt/obs/api/
ADD requirements.txt /opt/obs/api/
RUN pip install -r requirements.txt
ADD setup.py /opt/obs/api/
ADD obs /opt/obs/api/obs/
RUN pip install -e .

View file

@ -11,7 +11,7 @@ DEDICATED_WORKER = True
FRONTEND_URL = "http://localhost:3001/"
FRONTEND_DIR = None
FRONTEND_CONFIG = None
TILES_FILE = "/tiles/tiles.mbtiles"
TILES_FILE = None # "/tiles/tiles.mbtiles"
DATA_DIR = "/data"
ADDITIONAL_CORS_ORIGINS = [
"http://localhost:8880/", # for maputnik on 8880

View file

@ -166,11 +166,16 @@ if INDEX_HTML and exists(INDEX_HTML):
if req.app.config.get("TILES_FILE"):
result["obsMapSource"] = {
"type": "vector",
"tiles": [req.app.url_for("tiles", zoom="{zoom}", x="{x}", y="{y}")],
"tiles": [
req.app.url_for("tiles", zoom="000", x="111", y="222.pbf")
.replace("000", "{z}")
.replace("111", "{x}")
.replace("222", "{y}")
],
"minzoom": 12,
"maxzoom": 14,
}
return json_response()
return json_response(result)
@app.get("/<path:path>")
def get_frontend_static(req, path):

View file

@ -69,7 +69,7 @@ async def connect_db(url):
engine = create_async_engine(url, echo=False)
sessionmaker = SessionMaker(engine, class_=AsyncSession, expire_on_commit=False)
yield
yield engine
# for AsyncEngine created in function scope, close and
# clean-up pooled connections

View file

@ -25,42 +25,50 @@ from obs.face.filter import (
RequiredFieldsFilter,
)
from obs.face.osm import DataSource, DatabaseTileSource
from obs.api.db import OvertakingEvent, Track, make_session
from obs.api.app import app
log = logging.getLogger(__name__)
async def process_tracks_loop(data_source, delay):
async def process_tracks_loop(delay):
while True:
async with make_session() as session:
track = (
await session.execute(
select(Track)
.where(Track.processing_status == "queued")
.order_by(Track.processing_queued_at)
.options(joinedload(Track.author))
)
).scalar()
try:
async with make_session() as session:
track = (
await session.execute(
select(Track)
.where(Track.processing_status == "queued")
.order_by(Track.processing_queued_at)
.options(joinedload(Track.author))
)
).scalar()
if track is None:
await asyncio.sleep(delay)
continue
if track is None:
await asyncio.sleep(delay)
continue
tile_source = DatabaseTileSource()
data_source = DataSource(tile_source)
try:
await process_track(session, track, data_source)
except:
log.exception("Failed to process track %s. Will continue.", track.slug)
await asyncio.sleep(1)
continue
except:
log.exception("Failed to process track. Will continue.")
await asyncio.sleep(1)
continue
async def process_tracks(data_source, tracks):
async def process_tracks(tracks):
"""
Processes the tracks and writes event data to the database.
:param tracks: A list of track IDs or slugs identifying the tracks to process.
"""
tile_source = DatabaseTileSource()
data_source = DataSource(tile_source)
async with make_session() as session:
for track_id_or_slug in tracks:
track = (
@ -202,7 +210,9 @@ async def import_overtaking_events(session, track, overtaking_events):
event_models = []
for m in overtaking_events:
hex_hash = hashlib.sha256(
struct.pack("QQ", track.id, int(m["time"].timestamp()))
struct.pack(
"ddQ", m["latitude"], m["longitude"], int(m["time"].timestamp())
)
).hexdigest()
event_models.append(

View file

@ -77,7 +77,10 @@ async def login_redirect(req):
# {'sub': '3798e2da-b208-4a1a-98c0-08fecfea1345', 'email_verified': True, 'preferred_username': 'test', 'email': 'test@example.com'}
sub = userinfo["sub"]
preferred_username = userinfo["preferred_username"]
email = userinfo["email"]
email = userinfo.get("email")
if email is None:
raise ValueError("user has no email set, please configure keycloak to require emails")
user = (await req.ctx.db.execute(select(User).where(User.sub == sub))).scalar()

View file

@ -1,7 +1,10 @@
import gzip
from gzip import decompress
from sqlite3 import connect
from sanic.response import raw
from sqlalchemy import select, text
from sqlalchemy.sql.expression import table, column
from obs.api.app import app
@ -30,28 +33,43 @@ def get_tile(filename, zoom, x, y):
# regenerate approx. once each day
TILE_CACHE_MAX_AGE = 3600 * 24
if app.config.get("TILES_FILE"):
@app.route(r"/tiles/<zoom:int>/<x:int>/<y:(\d+)\.pbf>")
async def tiles(req, zoom: int, x: int, y: str):
@app.route(r"/tiles/<zoom:int>/<x:int>/<y:(\d+)\.pbf>")
async def tiles(req, zoom: int, x: int, y: str):
if app.config.get("TILES_FILE"):
tile = get_tile(req.app.config.TILES_FILE, int(zoom), int(x), int(y))
gzip = "gzip" in req.headers["accept-encoding"]
else:
data = column("data")
key = column("key")
mvts = table("mvts", data, key)
headers = {}
headers["Vary"] = "Accept-Encoding"
tile = await req.ctx.db.scalar(
text(f"select data from getmvt(:zoom, :x, :y) as b(data, key);").bindparams(
zoom=int(zoom),
x=int(x),
y=int(y),
)
)
print("TILE", tile)
if req.app.config.DEBUG:
headers["Cache-Control"] = "no-cache"
else:
headers["Cache-Control"] = f"public, max-age={TILE_CACHE_MAX_AGE}"
gzip = "gzip" in req.headers["accept-encoding"]
# The tiles in the mbtiles file are gzip-compressed already, so we
# serve them actually as-is, and only decompress them if the browser
# doesn't accept gzip
if gzip:
headers["Content-Encoding"] = "gzip"
else:
tile = gzip.decompress(tile)
headers = {}
headers["Vary"] = "Accept-Encoding"
return raw(tile, content_type="application/x-protobuf", headers=headers)
if req.app.config.DEBUG:
headers["Cache-Control"] = "no-cache"
else:
headers["Cache-Control"] = f"public, max-age={TILE_CACHE_MAX_AGE}"
# The tiles in the mbtiles file are gzip-compressed already, so we
# serve them actually as-is, and only decompress them if the browser
# doesn't accept gzip
if gzip:
headers["Content-Encoding"] = "gzip"
if not gzip:
tile = decompress(tile)
return raw(tile, content_type="application/x-protobuf", headers=headers)

View file

@ -5,3 +5,6 @@ sanicargs~=2.1.0
sanic-cors~=1.0.1
python-slugify~=5.0.2
motor~=2.5.1
pyyaml<6
-e git+https://github.com/openmaptiles/openmaptiles-tools#egg=openmaptiles-tools
sqlparse~=0.4.2

View file

@ -1,8 +1,5 @@
from setuptools import setup, find_packages
with open("requirements.txt") as f:
requires = list(f.readlines())
setup(
name="openbikesensor-api",
version="0.0.1",
@ -12,7 +9,17 @@ setup(
url="https://github.com/openbikesensor/portal",
packages=find_packages(),
package_data={},
install_requires=requires,
install_requires=[
"sanic~=21.9.1",
"oic>=1.3.0, <2",
"sanic-session~=0.8.0",
"sanicargs~=2.1.0",
"sanic-cors~=1.0.1",
"python-slugify~=5.0.2",
"motor~=2.5.1",
"sqlparse~=0.4.2",
"openmaptiles-tools", # install from git
],
entry_points={
"console_scripts": [
"openbikesensor-api=obs.bin.openbikesensor_api:main",

179
api/tools/build_tiles.py Executable file
View file

@ -0,0 +1,179 @@
#!/usr/bin/env python3
import argparse
import logging
import asyncio
import tempfile
import re
import os
import glob
from os.path import normpath, abspath, join
from sqlalchemy import text
import sqlparse
from obs.api.app import app
from obs.api.db import connect_db, make_session
# Module-level logger for this tool.
log = logging.getLogger(__name__)

# Path to the tile-generator checkout, expected as a sibling of the API root
# directory (configured via app.config.API_ROOT_DIR).
TILE_GENERATOR = normpath(
    abspath(join(app.config.API_ROOT_DIR, "..", "tile-generator"))
)
# Tileset definition file consumed by the openmaptiles tools below.
TILESET_FILE = join(TILE_GENERATOR, "openmaptiles.yaml")
def parse_pg_url(url=None):
    """Parse a PostgreSQL connection URL into its components.

    :param url: connection URL of the form
        ``postgresql+asyncpg://user:password@host[:port]/database``.
        Defaults to ``app.config.POSTGRES_URL``, resolved at call time
        (not at import time, as the previous default argument did).
    :return: tuple ``(user, password, host, port, database)`` of strings;
        ``port`` falls back to ``"5432"`` when the URL does not carry one.
    """
    if url is None:
        url = app.config.POSTGRES_URL

    # The host part must not be greedy, otherwise it swallows the optional
    # ":port" suffix and the port group never matches. Restrict it to
    # characters that cannot start the port or database sections.
    m = re.match(
        r"^postgresql\+asyncpg://(?P<user>.*):(?P<password>.*)@(?P<host>[^/:]+)(:(?P<port>\d+))?/(?P<database>[^/]+)$",
        url,
    )

    return (
        m["user"] or "",
        m["password"] or "",
        m["host"],
        m["port"] or "5432",
        m["database"],
    )
async def main():
    """Entry point: optionally prepare SQL tile functions, then generate tiles.

    With ``--prepare``, generates the tm2source configuration and the tileset
    SQL in a temporary build directory and imports the resulting functions
    (including ``getmvt``) into the database. Tile generation itself runs
    afterwards in either case.
    """
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")

    parser = argparse.ArgumentParser(
        # The previous description was copy-pasted from the track-processing
        # tool and did not describe this script.
        description="generates vector tiles from the database, "
        "optionally preparing the required SQL functions first"
    )

    parser.add_argument(
        "--prepare",
        action="store_true",
        help="prepare and import SQL functions for tile generation",
    )

    args = parser.parse_args()

    if args.prepare:
        # The generated SQL only needs to live long enough to be imported;
        # the temporary build directory is discarded afterwards.
        with tempfile.TemporaryDirectory() as build_dir:
            await generate_data_yml(build_dir)
            sql_snippets = await generate_sql(build_dir)
            await import_sql(sql_snippets)

    await generate_tiles()
async def _run(cmd):
if isinstance(cmd, list):
cmd = " ".join(cmd)
proc = await asyncio.create_subprocess_shell(
cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
)
stdout, stderr = await proc.communicate()
if proc.returncode != 0:
log.error(stderr.decode("utf-8"))
raise RuntimeError("external program failed: %s" % str(cmd))
return stdout.decode("utf-8")
async def generate_data_yml(build_dir):
    """Write the tm2source configuration for the tileset.

    Invokes the openmaptiles ``generate-tm2source`` tool with the database
    connection parameters and stores its output at
    ``<build_dir>/openmaptiles.tm2source/data.yml``.

    :param build_dir: directory in which to create the tm2source folder
    """
    connection_flags = ["--user", "--password", "--host", "--port", "--database"]

    command = ["python", "$(which generate-tm2source)", TILESET_FILE]
    for flag, value in zip(connection_flags, parse_pg_url()):
        command.append(flag)
        command.append(value)

    output = await _run(command)

    target_dir = join(build_dir, "openmaptiles.tm2source")
    os.makedirs(target_dir, exist_ok=True)

    with open(join(target_dir, "data.yml"), "wt") as f:
        f.write(output)
async def generate_sql(build_dir):
    """Generate and collect all SQL snippets needed for tile generation.

    Runs the openmaptiles ``generate-sql`` tool into ``<build_dir>/sql`` and
    returns the snippets in execution order: extension setup, the helper SQL
    shipped with openmaptiles-tools, ``run_first``, the parallel snippets,
    ``run_last``, and finally the generated ``getmvt`` tile function.

    :param build_dir: scratch directory to write the generated SQL into
    :return: list of SQL snippet strings (each may contain many statements)
    """
    sql_dir = join(build_dir, "sql")
    await _run(f"python $(which generate-sql) {TILESET_FILE!r} --dir {sql_dir!r}")

    # Order matters: the tool's bundled helper SQL comes first, then the
    # generated layer SQL in the order the generator prescribes.
    sql_snippet_files = [
        *sorted(
            glob.glob(
                join(
                    app.config.API_ROOT_DIR, "src", "openmaptiles-tools", "sql", "*.sql"
                )
            )
        ),
        join(sql_dir, "run_first.sql"),
        *sorted(glob.glob(join(sql_dir, "parallel", "*.sql"))),
        join(sql_dir, "run_last.sql"),
    ]

    # NOTE(review): the two adjacent string literals concatenate into ONE
    # list element. Harmless here because import_sql() splits statements
    # again with sqlparse, but confirm a comma was not intended.
    sql_snippets = [
        "CREATE EXTENSION IF NOT EXISTS hstore;"
        "CREATE EXTENSION IF NOT EXISTS postgis;"
    ]

    for filename in sql_snippet_files:
        with open(filename, "rt") as f:
            sql_snippets.append(f.read())

    # generate-sqltomvt emits the getmvt() SQL function used by the tile
    # endpoint; --key/--gzip match how the API serves the tiles.
    getmvt_sql = await _run(
        f"python $(which generate-sqltomvt) {TILESET_FILE!r} --key --gzip --postgis-ver 3.0.1 --function --fname=getmvt"
    )
    sql_snippets.append(getmvt_sql)

    return sql_snippets
async def import_sql(sql_snippets):
    """Execute the given SQL snippets against the configured database.

    Each snippet may contain multiple statements; they are split with
    sqlparse and executed one at a time, each in its own session so every
    statement is committed independently.

    :param sql_snippets: list of SQL strings to run, in order
    """
    # Flatten every snippet into a single list of individual statements.
    statements = sum(map(sqlparse.split, sql_snippets), [])
    async with connect_db(app.config.POSTGRES_URL):
        for i, statement in enumerate(statements):
            # Cleaned copy is only used for logging; the raw statement is
            # what actually gets executed below.
            clean_statement = sqlparse.format(
                statement,
                truncate_strings=20,
                strip_comments=True,
                keyword_case="upper",
            )

            # Skip statements that are empty once comments are stripped.
            if not clean_statement:
                continue

            log.debug(
                "Running SQL statement %d of %d (%s...)",
                i + 1,
                len(statements),
                clean_statement[:40],
            )

            # A fresh session per statement: one failure does not roll back
            # previously imported statements.
            async with make_session() as session:
                await session.execute(text(statement))
                await session.commit()
async def generate_tiles():
    """Placeholder: actual tile generation is not implemented yet.

    The commented Makefile rules below are kept as a reference for how
    openmaptiles generates an mbtiles file via docker-compose; a future
    implementation should replicate this behavior.
    """
    pass
    # .PHONY: generate-tiles-pg
    # generate-tiles-pg: all start-db
    # 	@echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists) using PostGIS ST_MVT()..."
    # 	@rm -rf "$(MBTILES_LOCAL_FILE)"
    # 	# For some reason Ctrl+C doesn't work here without the -T. Must be pressed twice to stop.
    # 	$(DOCKER_COMPOSE) run -T $(DC_OPTS) openmaptiles-tools generate-tiles
    # 	@echo "Updating generated tile metadata ..."
    # 	$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
    # 	  mbtiles-tools meta-generate "$(MBTILES_LOCAL_FILE)" $(TILESET_FILE) --auto-minmax --show-ranges
if __name__ == "__main__":
    # Script entry point: run the async main() under a fresh event loop.
    asyncio.run(main())

View file

@ -3,9 +3,7 @@ import argparse
import logging
import asyncio
from obs.face.osm import DataSource, DatabaseTileSource, OverpassTileSource
from obs.api.db import make_session, connect_db, make_session
from obs.api.db import connect_db
from obs.api.app import app
from obs.api.process import process_tracks, process_tracks_loop
@ -38,15 +36,10 @@ async def main():
args = parser.parse_args()
async with connect_db(app.config.POSTGRES_URL):
log.info("Loading OpenStreetMap data")
tile_source = DatabaseTileSource()
# tile_source = OverpassTileSource(app.config.OBS_FACE_CACHE_DIR)
data_source = DataSource(tile_source)
if args.tracks:
await process_tracks(data_source, args.tracks)
await process_tracks(args.tracks)
else:
await process_tracks_loop(data_source, args.loop_delay)
await process_tracks_loop(args.loop_delay)
if __name__ == "__main__":

View file

@ -55,40 +55,36 @@ vim docker-compose.yaml
Change the domain where it occurs, such as in `Host()` rules.
### Configure frontend
### Create a keycloak instance
Follow the official guides to create your own keycloak server:
https://www.keycloak.org/documentation
Documenting the details of this is out of scope for our project. Please make sure to configure:
* an admin account for yourself
* a realm for the portal
* a client in that realm with "Access Type" set to "confidential" and a
redirect URL of this pattern: `https://portal.example.com/login/redirect`
### Configure portal
```bash
cp source/frontend/config.example.json config/frontend.json
vim frontend/src/config.json
cp source/api/config.py.example config/config.py
```
* Change all URLs to your domain
* Create a UUID by using `uuidgen` and set the `clientId`
* Change the coordinates of the map center to your liking
### Configure API
```bash
cp source/api/config.json.example config/api.json
vim config/api.json
```
* Change all URLs to your domain
* Generate and set a random `cookieSecret` (for example with `uuidgen`)
* Generate and set a random `jwtSecret` (for example with `uuidgen`)
* Configure you SMTP mail server
* Set the `clientId` for the `oAuth2Client` of the portal (from step 3)
Then edit `config/config.py` to your heart's content (and matching the
configuration of the keycloak). Do not forget to generate a secure secret
string.
### Build container and run them
```bash
docker-compose up -d
docker-compose build portal
docker-compose up -d portal
```
The services are being built the first time this is run. It can take some
minutes.
## Miscellaneous
### Logs
@ -106,10 +102,6 @@ docker-compose build
docker-compose up -d
```
#### Common issues
- Errors about TLS issues on user creation point to something amiss in the mail server configuration.
- Errors about unknown client point to ClientID mismatch between ``api.json`` and ``frontend.json``
### Updates
Before updating make sure that you have properly backed-up your instance so you

View file

@ -17,74 +17,20 @@ services:
networks:
- backend
redis:
image: redis
volumes:
- ./data/redis:/data
command: redis-server --appendonly yes
restart: on-failure
networks:
- backend
api:
image: openbikesensor-api
portal:
image: openbikesensor-portal
build:
context: ./source/api
context: ./source
volumes:
- ./data/api-data:/data
- ./config/api.json:/opt/obs/api/config.json
environment:
- MONGODB_URL=mongo://mongo/obs
- ./config/config.py:/opt/obs/api/config.py
- ./data/tiles/:/tiles
restart: on-failure
labels:
- traefik.http.middlewares.obsapi-prefix.stripprefix.prefixes=/api
- traefik.http.middlewares.obsapi-wellknown.replacepathregex.regex=^/\.well-known/oauth-authorization-server/api$$
- traefik.http.middlewares.obsapi-wellknown.replacepathregex.replacement=/.well-known/oauth-authorization-server
- traefik.http.routers.obsapi.rule=Host(`portal.example.com`) && (PathPrefix(`/api/`) || Path(`/.well-known/oauth-authorization-server/api`))
- traefik.http.routers.obsapi.entrypoints=websecure
- traefik.http.routers.obsapi.tls=true
- traefik.http.routers.obsapi.tls.certresolver=leresolver
- traefik.http.routers.obsapi.middlewares=obsapi-prefix@docker,obsapi-wellknown@docker
- traefik.docker.network=gateway
networks:
- gateway
- backend
worker:
image: openbikesensor-api
build:
context: ./source/api
volumes:
- ./data/api-data:/data
- ./config/api.json:/opt/obs/api/config.json
links:
- mongo
- redis
restart: on-failure
command:
- npm
- run
- start:worker
networks:
- backend
# Not required for traefik, but to reach overpass-api.de
- gateway
frontend:
image: obs-frontend
build:
context: ./source/frontend
dockerfile: Dockerfile-prod
volumes:
- ./config/frontend.json:/usr/local/apache2/htdocs/config.json
links:
- api
restart: on-failure
labels:
- traefik.http.routers.obsfrontend.rule=Host(`portal.example.com`)
- traefik.http.routers.obsfrontend.entrypoints=websecure
- traefik.http.routers.obsfrontend.tls=true
- traefik.http.routers.obsfrontend.tls.certresolver=leresolver
- traefik.http.routers.portal.rule=Host(`portal.example.com`)
- traefik.http.routers.portal.entrypoints=websecure
- traefik.http.routers.portal.tls=true
- traefik.http.routers.portal.tls.certresolver=leresolver
- traefik.docker.network=gateway
networks:
- gateway
@ -125,3 +71,54 @@ services:
# - "traefik.http.routers.traefik.tls.certresolver=leresolver"
# - "traefik.http.routers.traefik.middlewares=basic-auth"
# - "traefik.http.middlewares.basic-auth.basicauth.usersfile=/usersfile"
openmaptiles-tools:
image: openmaptiles/openmaptiles-tools:6.0
environment:
# Must match the version of this file (first line)
# download-osm will use it when generating a composer file
MAKE_DC_VERSION: "3"
# Allow DIFF_MODE, MIN_ZOOM, and MAX_ZOOM to be overwritten from shell
DIFF_MODE: ${DIFF_MODE}
MIN_ZOOM: ${MIN_ZOOM}
MAX_ZOOM: ${MAX_ZOOM}
#Provide BBOX from *.bbox file if exists, else from .env
BBOX: ${BBOX}
# Imposm configuration file describes how to load updates when enabled
IMPOSM_CONFIG_FILE: ${IMPOSM_CONFIG_FILE}
# Control import-sql processes
MAX_PARALLEL_PSQL: ${MAX_PARALLEL_PSQL}
PGDATABASE: obs
PGUSER: obs
PGPASSWORD: obs
PGHOST: postgres
PGPORT: 5432
volumes:
- ./source/tile-generator/:/tileset
- ./data/tiles:/import
- ./data/tiles:/export
- ./data/tiles-build/sql:/sql
- ./data/tiles-build:/mapping
- ./data/tiles-cache:/cache
generate-vectortiles:
image: openmaptiles/generate-vectortiles:6.0
volumes:
- ./data/tiles:/export
- ./data/tiles-build/openmaptiles.tm2source:/tm2source
environment:
MBTILES_NAME: ${MBTILES_FILE}
BBOX: ${BBOX}
MIN_ZOOM: ${MIN_ZOOM}
MAX_ZOOM: ${MAX_ZOOM}
# Control tilelive-copy threads
COPY_CONCURRENCY: ${COPY_CONCURRENCY}
#
PGDATABASE: obs
PGUSER: obs
PGPASSWORD: obs
PGHOST: postgres
PGPORT: 5432

View file

@ -38,6 +38,7 @@ services:
- ./api/tools:/opt/obs/api/tools
- ./api/config.dev.py:/opt/obs/api/config.py
- ./frontend/build:/opt/obs/frontend/build
- ./tile-generator:/opt/obs/tile-generator
- ./local/api-data:/data
- ./tile-generator/data/:/tiles
links:

View file

@ -12,5 +12,5 @@
}
.fullScreen {
margin: none;
margin: 0;
}

View file

@ -26,7 +26,7 @@ function addRoadsStyle(style, mapSource) {
["exponential", 1.5],
["zoom"],
12,
1,
2,
17,
[
"case",

View file

@ -40,7 +40,7 @@ function TrackList({privateTracks}: {privateTracks: boolean}) {
const data: {
tracks: Track[]
tracksCount: number
trackCount: number
} | null = useObservable(
(_$, inputs$) =>
inputs$.pipe(
@ -56,9 +56,9 @@ function TrackList({privateTracks}: {privateTracks: boolean}) {
[page, privateTracks]
)
const {tracks, tracksCount} = data || {tracks: [], tracksCount: 0}
const {tracks, trackCount} = data || {tracks: [], trackCount: 0}
const loading = !data
const totalPages = Math.ceil(tracksCount / pageSize)
const totalPages = Math.ceil(trackCount / pageSize)
return (
<div>