Live tiles, API executes openmaptiles-tools

commit 7add8caaa1 (parent bde1b77b48)
@@ -28,11 +28,14 @@ WORKDIR /opt/obs/api
 ADD api/requirements.txt /opt/obs/api/
 RUN pip install -r requirements.txt
 
+ADD tile-generator /opt/obs/tile-generator
+
 ADD api/scripts /opt/obs/scripts
 RUN pip install -e /opt/obs/scripts
 
 ADD api/setup.py /opt/obs/api/
 ADD api/obs /opt/obs/api/obs/
+ADD api/tools /opt/obs/api/tools/
 RUN pip install -e /opt/obs/api/
 
 COPY --from=frontend-builder /opt/obs/frontend/build /opt/obs/frontend/build
@@ -5,7 +5,9 @@ WORKDIR /opt/obs/api
 ADD scripts /opt/obs/scripts
 RUN pip install -e /opt/obs/scripts
 
-ADD requirements.txt setup.py /opt/obs/api/
+ADD requirements.txt /opt/obs/api/
+RUN pip install -r requirements.txt
+ADD setup.py /opt/obs/api/
 ADD obs /opt/obs/api/obs/
 RUN pip install -e .
 
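Note: splitting the former combined `ADD requirements.txt setup.py` into separate `ADD`/`RUN` steps is presumably about Docker layer caching: the slow `pip install -r requirements.txt` layer only gets rebuilt when requirements.txt itself changes, not on every edit to setup.py or the package sources copied afterwards.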
@@ -11,7 +11,7 @@ DEDICATED_WORKER = True
 FRONTEND_URL = "http://localhost:3001/"
 FRONTEND_DIR = None
 FRONTEND_CONFIG = None
-TILES_FILE = "/tiles/tiles.mbtiles"
+TILES_FILE = None  # "/tiles/tiles.mbtiles"
 DATA_DIR = "/data"
 ADDITIONAL_CORS_ORIGINS = [
     "http://localhost:8880/",  # for maputnik on 8880
@@ -166,11 +166,16 @@ if INDEX_HTML and exists(INDEX_HTML):
     if req.app.config.get("TILES_FILE"):
         result["obsMapSource"] = {
             "type": "vector",
-            "tiles": [req.app.url_for("tiles", zoom="{zoom}", x="{x}", y="{y}")],
+            "tiles": [
+                req.app.url_for("tiles", zoom="000", x="111", y="222.pbf")
+                .replace("000", "{z}")
+                .replace("111", "{x}")
+                .replace("222", "{y}")
+            ],
             "minzoom": 12,
             "maxzoom": 14,
         }
-    return json_response()
+    return json_response(result)
 
 @app.get("/<path:path>")
 def get_frontend_static(req, path):
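A note on the sentinel `.replace()` chain above: the tiles route (see the handler further down) declares `zoom`/`x` as `int` and `y` as `(\d+)\.pbf`, so `url_for` only accepts values matching those patterns; a literal `{z}` placeholder would not. Building the URL with digit sentinels and swapping the placeholders in afterwards sidesteps that. A minimal sketch, assuming `app` is the Sanic app from `obs.api.app` with that route registered:

```python
# Digit sentinels satisfy the route's parameter patterns; the replace()
# calls then turn the concrete URL back into a tile URL template.
url = (
    app.url_for("tiles", zoom="000", x="111", y="222.pbf")
    .replace("000", "{z}")
    .replace("111", "{x}")
    .replace("222", "{y}")
)
print(url)  # expected: something like "/tiles/{z}/{x}/{y}.pbf"
```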
@@ -69,7 +69,7 @@ async def connect_db(url):
     engine = create_async_engine(url, echo=False)
     sessionmaker = SessionMaker(engine, class_=AsyncSession, expire_on_commit=False)
 
-    yield
+    yield engine
 
     # for AsyncEngine created in function scope, close and
     # clean-up pooled connections
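`connect_db` now yields the engine instead of nothing, so the `async with` block has a useful value. A usage sketch, assuming `connect_db` remains an async context manager as it is used elsewhere in this commit:

```python
import asyncio

from obs.api.app import app
from obs.api.db import connect_db

async def main():
    # the context manager now hands back the AsyncEngine it created
    async with connect_db(app.config.POSTGRES_URL) as engine:
        print("connected to", engine.url)

asyncio.run(main())
```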
@@ -25,42 +25,50 @@ from obs.face.filter import (
     RequiredFieldsFilter,
 )
 
+from obs.face.osm import DataSource, DatabaseTileSource
+
 from obs.api.db import OvertakingEvent, Track, make_session
 from obs.api.app import app
 
 log = logging.getLogger(__name__)
 
 
-async def process_tracks_loop(data_source, delay):
+async def process_tracks_loop(delay):
     while True:
-        async with make_session() as session:
-            track = (
-                await session.execute(
-                    select(Track)
-                    .where(Track.processing_status == "queued")
-                    .order_by(Track.processing_queued_at)
-                    .options(joinedload(Track.author))
-                )
-            ).scalar()
+        try:
+            async with make_session() as session:
+                track = (
+                    await session.execute(
+                        select(Track)
+                        .where(Track.processing_status == "queued")
+                        .order_by(Track.processing_queued_at)
+                        .options(joinedload(Track.author))
+                    )
+                ).scalar()
 
-            if track is None:
-                await asyncio.sleep(delay)
-                continue
+                if track is None:
+                    await asyncio.sleep(delay)
+                    continue
 
-            try:
+                tile_source = DatabaseTileSource()
+                data_source = DataSource(tile_source)
+
                 await process_track(session, track, data_source)
-            except:
-                log.exception("Failed to process track %s. Will continue.", track.slug)
-                await asyncio.sleep(1)
-                continue
+        except:
+            log.exception("Failed to process track. Will continue.")
+            await asyncio.sleep(1)
+            continue
 
 
-async def process_tracks(data_source, tracks):
+async def process_tracks(tracks):
     """
     Processes the tracks and writes event data to the database.
 
     :param tracks: A list of strings which
     """
+    tile_source = DatabaseTileSource()
+    data_source = DataSource(tile_source)
+
     async with make_session() as session:
         for track_id_or_slug in tracks:
             track = (
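With the `data_source` parameter gone, callers only pass tracks or a delay; the OSM data source is now built inside process.py from the database-backed tile source. A caller sketch under that assumption:

```python
import asyncio

from obs.api.app import app
from obs.api.db import connect_db
from obs.api.process import process_tracks

async def reprocess(slugs):
    async with connect_db(app.config.POSTGRES_URL):
        # no DataSource to construct here anymore; process_tracks
        # builds its own DatabaseTileSource/DataSource pair
        await process_tracks(slugs)

asyncio.run(reprocess(["example-track-slug"]))  # hypothetical slug
```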
@@ -202,7 +210,9 @@ async def import_overtaking_events(session, track, overtaking_events):
     event_models = []
     for m in overtaking_events:
         hex_hash = hashlib.sha256(
-            struct.pack("QQ", track.id, int(m["time"].timestamp()))
+            struct.pack(
+                "ddQ", m["latitude"], m["longitude"], int(m["time"].timestamp())
+            )
         ).hexdigest()
 
         event_models.append(
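The hash input changes from (track id, timestamp) to (latitude, longitude, timestamp): the `"ddQ"` format packs two float64s and one unsigned 64-bit integer, so an overtaking event keeps the same identity even if it is re-imported under a different track row. A standalone sketch with made-up values:

```python
import hashlib
import struct
from datetime import datetime, timezone

latitude, longitude = 48.7758, 9.1829  # made-up event position
time = datetime(2021, 10, 1, 12, 0, tzinfo=timezone.utc)

# "ddQ" = double, double, uint64: position plus unix timestamp
hex_hash = hashlib.sha256(
    struct.pack("ddQ", latitude, longitude, int(time.timestamp()))
).hexdigest()
print(hex_hash)
```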
@@ -77,7 +77,10 @@ async def login_redirect(req):
     # {'sub': '3798e2da-b208-4a1a-98c0-08fecfea1345', 'email_verified': True, 'preferred_username': 'test', 'email': 'test@example.com'}
     sub = userinfo["sub"]
     preferred_username = userinfo["preferred_username"]
-    email = userinfo["email"]
+    email = userinfo.get("email")
+
+    if email is None:
+        raise ValueError("user has no email set, please configure keycloak to require emails")
 
     user = (await req.ctx.db.execute(select(User).where(User.sub == sub))).scalar()
 
@@ -1,7 +1,10 @@
-import gzip
+from gzip import decompress
 from sqlite3 import connect
 from sanic.response import raw
 
+from sqlalchemy import select, text
+from sqlalchemy.sql.expression import table, column
+
 from obs.api.app import app
 
 
@@ -30,28 +33,43 @@ def get_tile(filename, zoom, x, y):
 # regenerate approx. once each day
 TILE_CACHE_MAX_AGE = 3600 * 24
 
-if app.config.get("TILES_FILE"):
 
-    @app.route(r"/tiles/<zoom:int>/<x:int>/<y:(\d+)\.pbf>")
-    async def tiles(req, zoom: int, x: int, y: str):
+@app.route(r"/tiles/<zoom:int>/<x:int>/<y:(\d+)\.pbf>")
+async def tiles(req, zoom: int, x: int, y: str):
+    if app.config.get("TILES_FILE"):
         tile = get_tile(req.app.config.TILES_FILE, int(zoom), int(x), int(y))
 
-        gzip = "gzip" in req.headers["accept-encoding"]
-
-        headers = {}
-        headers["Vary"] = "Accept-Encoding"
-
-        if req.app.config.DEBUG:
-            headers["Cache-Control"] = "no-cache"
-        else:
-            headers["Cache-Control"] = f"public, max-age={TILE_CACHE_MAX_AGE}"
-
-        # The tiles in the mbtiles file are gzip-compressed already, so we
-        # serve them actually as-is, and only decompress them if the browser
-        # doesn't accept gzip
-        if gzip:
-            headers["Content-Encoding"] = "gzip"
-        else:
-            tile = gzip.decompress(tile)
-
-        return raw(tile, content_type="application/x-protobuf", headers=headers)
+    else:
+        data = column("data")
+        key = column("key")
+        mvts = table("mvts", data, key)
+
+        tile = await req.ctx.db.scalar(
+            text(f"select data from getmvt(:zoom, :x, :y) as b(data, key);").bindparams(
+                zoom=int(zoom),
+                x=int(x),
+                y=int(y),
+            )
+        )
+        print("TILE", tile)
+
+    gzip = "gzip" in req.headers["accept-encoding"]
+
+    headers = {}
+    headers["Vary"] = "Accept-Encoding"
+
+    if req.app.config.DEBUG:
+        headers["Cache-Control"] = "no-cache"
+    else:
+        headers["Cache-Control"] = f"public, max-age={TILE_CACHE_MAX_AGE}"
+
+    # The tiles in the mbtiles file are gzip-compressed already, so we
+    # serve them actually as-is, and only decompress them if the browser
+    # doesn't accept gzip
+    if gzip:
+        headers["Content-Encoding"] = "gzip"
+
+    if not gzip:
+        tile = decompress(tile)
+
+    return raw(tile, content_type="application/x-protobuf", headers=headers)
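Two things worth noting here. First, the import change at the top of this file fixes a shadowing bug: the handler binds a local `gzip = "gzip" in req.headers[...]`, so the old fallback `gzip.decompress(tile)` would have called `.decompress` on a bool; importing `decompress` directly avoids the clash. Second, the new `else:` branch serves tiles live from PostGIS via the `getmvt()` function that openmaptiles-tools generates (imported by `build_tiles.py --prepare`, below); the `mvts` table construct is defined but unused in this branch, and the `print("TILE", tile)` looks like leftover debugging. A rough standalone sketch of the live-tile query, assuming `getmvt()` has already been imported into the database:

```python
import asyncio

from sqlalchemy import text

from obs.api.app import app
from obs.api.db import connect_db, make_session

async def fetch_tile(zoom: int, x: int, y: int) -> bytes:
    async with make_session() as session:
        # getmvt() returns the (gzipped) Mapbox vector tile blob and a cache key
        return await session.scalar(
            text("select data from getmvt(:zoom, :x, :y) as b(data, key);")
            .bindparams(zoom=zoom, x=x, y=y)
        )

async def main():
    async with connect_db(app.config.POSTGRES_URL):
        tile = await fetch_tile(12, 2138, 1420)  # hypothetical tile coordinates
        print(len(tile) if tile else "empty tile")

asyncio.run(main())
```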
@@ -5,3 +5,6 @@ sanicargs~=2.1.0
 sanic-cors~=1.0.1
 python-slugify~=5.0.2
 motor~=2.5.1
+pyyaml<6
+-e git+https://github.com/openmaptiles/openmaptiles-tools#egg=openmaptiles-tools
+sqlparse~=0.4.2
api/setup.py (15 changed lines)
@@ -1,8 +1,5 @@
 from setuptools import setup, find_packages
 
-with open("requirements.txt") as f:
-    requires = list(f.readlines())
-
 setup(
     name="openbikesensor-api",
     version="0.0.1",
@@ -12,7 +9,17 @@ setup(
     url="https://github.com/openbikesensor/portal",
     packages=find_packages(),
     package_data={},
-    install_requires=requires,
+    install_requires=[
+        "sanic~=21.9.1",
+        "oic>=1.3.0, <2",
+        "sanic-session~=0.8.0",
+        "sanicargs~=2.1.0",
+        "sanic-cors~=1.0.1",
+        "python-slugify~=5.0.2",
+        "motor~=2.5.1",
+        "sqlparse~=0.4.2",
+        "openmaptiles-tools",  # install from git
+    ],
     entry_points={
         "console_scripts": [
             "openbikesensor-api=obs.bin.openbikesensor_api:main",
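Note: the switch to an explicit `install_requires` list is presumably forced by the new editable VCS line in requirements.txt (`-e git+...`), which is not a valid requirement specifier when read verbatim into `install_requires`; the git dependency is therefore listed by bare name (`"openmaptiles-tools"`) and actually resolved from requirements.txt at image build time.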
api/tools/build_tiles.py (new executable file, 179 lines)
@@ -0,0 +1,179 @@
+#!/usr/bin/env python3
+import argparse
+import logging
+import asyncio
+import tempfile
+import re
+import os
+import glob
+from os.path import normpath, abspath, join
+
+from sqlalchemy import text
+import sqlparse
+
+from obs.api.app import app
+from obs.api.db import connect_db, make_session
+
+log = logging.getLogger(__name__)
+
+
+TILE_GENERATOR = normpath(
+    abspath(join(app.config.API_ROOT_DIR, "..", "tile-generator"))
+)
+TILESET_FILE = join(TILE_GENERATOR, "openmaptiles.yaml")
+
+
+def parse_pg_url(url=app.config.POSTGRES_URL):
+    m = re.match(
+        r"^postgresql\+asyncpg://(?P<user>.*):(?P<password>.*)@(?P<host>.*)(:(?P<port>\d+))?/(?P<database>[^/]+)$",
+        url,
+    )
+
+    return (
+        m["user"] or "",
+        m["password"] or "",
+        m["host"],
+        m["port"] or "5432",
+        m["database"],
+    )
+
+
+async def main():
+    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
+
+    parser = argparse.ArgumentParser(
+        description="processes a single track for use in the portal, "
+        "using the obs.face algorithms"
+    )
+
+    parser.add_argument(
+        "--prepare",
+        action="store_true",
+        help="prepare and import SQL functions for tile generation",
+    )
+
+    args = parser.parse_args()
+
+    if args.prepare:
+        with tempfile.TemporaryDirectory() as build_dir:
+            await generate_data_yml(build_dir)
+            sql_snippets = await generate_sql(build_dir)
+            await import_sql(sql_snippets)
+
+    await generate_tiles()
+
+
+async def _run(cmd):
+    if isinstance(cmd, list):
+        cmd = " ".join(cmd)
+    proc = await asyncio.create_subprocess_shell(
+        cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+    )
+
+    stdout, stderr = await proc.communicate()
+
+    if proc.returncode != 0:
+        log.error(stderr.decode("utf-8"))
+        raise RuntimeError("external program failed: %s" % str(cmd))
+
+    return stdout.decode("utf-8")
+
+
+async def generate_data_yml(build_dir):
+    stdout = await _run(
+        [
+            "python",
+            "$(which generate-tm2source)",
+            TILESET_FILE,
+            *sum(
+                zip(
+                    ["--user", "--password", "--host", "--port", "--database"],
+                    parse_pg_url(),
+                ),
+                (),
+            ),
+        ]
+    )
+
+    tm2source = join(build_dir, "openmaptiles.tm2source")
+    os.makedirs(tm2source, exist_ok=True)
+
+    with open(join(tm2source, "data.yml"), "wt") as f:
+        f.write(stdout)
+
+
+async def generate_sql(build_dir):
+    sql_dir = join(build_dir, "sql")
+
+    await _run(f"python $(which generate-sql) {TILESET_FILE!r} --dir {sql_dir!r}")
+
+    sql_snippet_files = [
+        *sorted(
+            glob.glob(
+                join(
+                    app.config.API_ROOT_DIR, "src", "openmaptiles-tools", "sql", "*.sql"
+                )
+            )
+        ),
+        join(sql_dir, "run_first.sql"),
+        *sorted(glob.glob(join(sql_dir, "parallel", "*.sql"))),
+        join(sql_dir, "run_last.sql"),
+    ]
+
+    sql_snippets = [
+        "CREATE EXTENSION IF NOT EXISTS hstore;"
+        "CREATE EXTENSION IF NOT EXISTS postgis;"
+    ]
+    for filename in sql_snippet_files:
+        with open(filename, "rt") as f:
+            sql_snippets.append(f.read())
+
+    getmvt_sql = await _run(
+        f"python $(which generate-sqltomvt) {TILESET_FILE!r} --key --gzip --postgis-ver 3.0.1 --function --fname=getmvt"
+    )
+    sql_snippets.append(getmvt_sql)
+
+    return sql_snippets
+
+
+async def import_sql(sql_snippets):
+    statements = sum(map(sqlparse.split, sql_snippets), [])
+    async with connect_db(app.config.POSTGRES_URL):
+        for i, statement in enumerate(statements):
+            clean_statement = sqlparse.format(
+                statement,
+                truncate_strings=20,
+                strip_comments=True,
+                keyword_case="upper",
+            )
+
+            if not clean_statement:
+                continue
+
+            log.debug(
+                "Running SQL statement %d of %d (%s...)",
+                i + 1,
+                len(statements),
+                clean_statement[:40],
+            )
+
+            async with make_session() as session:
+                await session.execute(text(statement))
+                await session.commit()
+
+
+async def generate_tiles():
+    pass
+    # .PHONY: generate-tiles-pg
+    # generate-tiles-pg: all start-db
+    #   @echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists) using PostGIS ST_MVT()..."
+    #   @rm -rf "$(MBTILES_LOCAL_FILE)"
+    #   # For some reason Ctrl+C doesn't work here without the -T. Must be pressed twice to stop.
+    #   $(DOCKER_COMPOSE) run -T $(DC_OPTS) openmaptiles-tools generate-tiles
+    #   @echo "Updating generated tile metadata ..."
+    #   $(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
+    #     mbtiles-tools meta-generate "$(MBTILES_LOCAL_FILE)" $(TILESET_FILE) --auto-minmax --show-ranges
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
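Run with `python api/tools/build_tiles.py --prepare` to render the tileset definition, generate the SQL, and import it, ending with the `getmvt()` function the tile endpoint queries. One caveat if you reuse `parse_pg_url`: the greedy `host` group swallows an explicit `:port`, so only port-less URLs split cleanly. A standalone check of the regex:

```python
# Standalone copy of the parse_pg_url() regex for a quick sanity check.
import re

PG_URL_RE = (
    r"^postgresql\+asyncpg://(?P<user>.*):(?P<password>.*)"
    r"@(?P<host>.*)(:(?P<port>\d+))?/(?P<database>[^/]+)$"
)

m = re.match(PG_URL_RE, "postgresql+asyncpg://obs:obs@postgres/obs")
print(m["user"], m["host"], m["port"] or "5432", m["database"])
# obs postgres 5432 obs

m = re.match(PG_URL_RE, "postgresql+asyncpg://obs:obs@postgres:5432/obs")
print(m["host"])  # "postgres:5432" -- the port stays inside host (greedy .*)
```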
@@ -3,9 +3,7 @@ import argparse
 import logging
 import asyncio
 
-from obs.face.osm import DataSource, DatabaseTileSource, OverpassTileSource
-
-from obs.api.db import make_session, connect_db, make_session
+from obs.api.db import connect_db
 from obs.api.app import app
 from obs.api.process import process_tracks, process_tracks_loop
 
@@ -38,15 +36,10 @@ async def main():
     args = parser.parse_args()
 
     async with connect_db(app.config.POSTGRES_URL):
-        log.info("Loading OpenStreetMap data")
-        tile_source = DatabaseTileSource()
-        # tile_source = OverpassTileSource(app.config.OBS_FACE_CACHE_DIR)
-        data_source = DataSource(tile_source)
-
         if args.tracks:
-            await process_tracks(data_source, args.tracks)
+            await process_tracks(args.tracks)
         else:
-            await process_tracks_loop(data_source, args.loop_delay)
+            await process_tracks_loop(args.loop_delay)
 
 
 if __name__ == "__main__":
@@ -55,40 +55,36 @@ vim docker-compose.yaml
 
 Change the domain where it occurs, such as in `Host()` rules.
 
-### Configure frontend
+### Create a keycloak instance
+
+Follow the official guides to create your own keycloak server:
+
+https://www.keycloak.org/documentation
+
+Documenting the details of this is out of scope for our project. Please make sure to configure:
+
+* an admin account for yourself
+* a realm for the portal
+* a client in that realm with "Access Type" set to "confidential" and a
+  redirect URL of this pattern: `https://portal.example.com/login/redirect`
+
+### Configure portal
 
 ```bash
-cp source/frontend/config.example.json config/frontend.json
-vim frontend/src/config.json
+cp source/api/config.py.example config/config.py
 ```
 
-* Change all URLs to your domain
-* Create a UUID by using `uuidgen` and set the `clientId`
-* Change the coordinates of the map center to your liking
-
-### Configure API
-
-```bash
-cp source/api/config.json.example config/api.json
-vim config/api.json
-```
-
-* Change all URLs to your domain
-* Generate and set a random `cookieSecret` (for example with `uuidgen`)
-* Generate and set a random `jwtSecret` (for example with `uuidgen`)
-* Configure you SMTP mail server
-* Set the `clientId` for the `oAuth2Client` of the portal (from step 3)
+Then edit `config/config.py` to your heart's content (and matching the
+configuration of the keycloak). Do not forget to generate a secure secret
+string.
 
 ### Build container and run them
 
 ```bash
-docker-compose up -d
+docker-compose build portal
+docker-compose up -d portal
 ```
 
-The services are being built the first time this is run. It can take some
-minutes.
-
 ## Miscellaneous
 
 ### Logs
@@ -106,10 +102,6 @@ docker-compose build
 docker-compose up -d
 ```
 
-#### Common issues
-
-- Errors about TLS issues on User cration point to something amiss in the mail server configuration.
-- Errors about unknown client point to ClientID mismatch between ``api.json`` and ``frontend.json``
 ### Updates
 
 Before updating make sure that you have properly backed-up your instance so you
@@ -17,74 +17,20 @@ services:
     networks:
       - backend
 
-  redis:
-    image: redis
-    volumes:
-      - ./data/redis:/data
-    command: redis-server --appendonly yes
-    restart: on-failure
-    networks:
-      - backend
-
-  api:
-    image: openbikesensor-api
+  portal:
+    image: openbikesensor-portal
     build:
-      context: ./source/api
+      context: ./source
     volumes:
       - ./data/api-data:/data
-      - ./config/api.json:/opt/obs/api/config.json
-    environment:
-      - MONGODB_URL=mongo://mongo/obs
+      - ./config/config.py:/opt/obs/api/config.py
+      - ./data/tiles/:/tiles
     restart: on-failure
     labels:
-      - traefik.http.middlewares.obsapi-prefix.stripprefix.prefixes=/api
-      - traefik.http.middlewares.obsapi-wellknown.replacepathregex.regex=^/\.well-known/oauth-authorization-server/api$$
-      - traefik.http.middlewares.obsapi-wellknown.replacepathregex.replacement=/.well-known/oauth-authorization-server
-      - traefik.http.routers.obsapi.rule=Host(`portal.example.com`) && (PathPrefix(`/api/`) || Path(`/.well-known/oauth-authorization-server/api`))
-      - traefik.http.routers.obsapi.entrypoints=websecure
-      - traefik.http.routers.obsapi.tls=true
-      - traefik.http.routers.obsapi.tls.certresolver=leresolver
-      - traefik.http.routers.obsapi.middlewares=obsapi-prefix@docker,obsapi-wellknown@docker
-      - traefik.docker.network=gateway
-    networks:
-      - gateway
-      - backend
-
-  worker:
-    image: openbikesensor-api
-    build:
-      context: ./source/api
-    volumes:
-      - ./data/api-data:/data
-      - ./config/api.json:/opt/obs/api/config.json
-    links:
-      - mongo
-      - redis
-    restart: on-failure
-    command:
-      - npm
-      - run
-      - start:worker
-    networks:
-      - backend
-      # Not requred for traefik, but to reach overpass-api.de
-      - gateway
-
-  frontend:
-    image: obs-frontend
-    build:
-      context: ./source/frontend
-      dockerfile: Dockerfile-prod
-    volumes:
-      - ./config/frontend.json:/usr/local/apache2/htdocs/config.json
-    links:
-      - api
-    restart: on-failure
-    labels:
-      - traefik.http.routers.obsfrontend.rule=Host(`portal.example.com`)
-      - traefik.http.routers.obsfrontend.entrypoints=websecure
-      - traefik.http.routers.obsfrontend.tls=true
-      - traefik.http.routers.obsfrontend.tls.certresolver=leresolver
+      - traefik.http.routers.portal.rule=Host(`portal.example.com`)
+      - traefik.http.routers.portal.entrypoints=websecure
+      - traefik.http.routers.portal.tls=true
+      - traefik.http.routers.portal.tls.certresolver=leresolver
       - traefik.docker.network=gateway
     networks:
       - gateway
@@ -125,3 +71,54 @@ services:
 # - "traefik.http.routers.traefik.tls.certresolver=leresolver"
 # - "traefik.http.routers.traefik.middlewares=basic-auth"
 # - "traefik.http.middlewares.basic-auth.basicauth.usersfile=/usersfile"
+
+  openmaptiles-tools:
+    image: openmaptiles/openmaptiles-tools:6.0
+    environment:
+      # Must match the version of this file (first line)
+      # download-osm will use it when generating a composer file
+      MAKE_DC_VERSION: "3"
+      # Allow DIFF_MODE, MIN_ZOOM, and MAX_ZOOM to be overwritten from shell
+      DIFF_MODE: ${DIFF_MODE}
+      MIN_ZOOM: ${MIN_ZOOM}
+      MAX_ZOOM: ${MAX_ZOOM}
+      # Provide BBOX from *.bbox file if exists, else from .env
+      BBOX: ${BBOX}
+      # Imposm configuration file describes how to load updates when enabled
+      IMPOSM_CONFIG_FILE: ${IMPOSM_CONFIG_FILE}
+      # Control import-sql processes
+      MAX_PARALLEL_PSQL: ${MAX_PARALLEL_PSQL}
+
+      PGDATABASE: obs
+      PGUSER: obs
+      PGPASSWORD: obs
+      PGHOST: postgres
+      PGPORT: 5432
+    volumes:
+      - ./source/tile-generator/:/tileset
+      - ./data/tiles:/import
+      - ./data/tiles:/export
+      - ./data/tiles-build/sql:/sql
+      - ./data/tiles-build:/mapping
+      - ./data/tiles-cache:/cache
+
+
+  generate-vectortiles:
+    image: openmaptiles/generate-vectortiles:6.0
+    volumes:
+      - ./data/tiles:/export
+      - ./data/tiles-build/openmaptiles.tm2source:/tm2source
+    environment:
+      MBTILES_NAME: ${MBTILES_FILE}
+      BBOX: ${BBOX}
+      MIN_ZOOM: ${MIN_ZOOM}
+      MAX_ZOOM: ${MAX_ZOOM}
+      # Control tilelive-copy threads
+      COPY_CONCURRENCY: ${COPY_CONCURRENCY}
+
+      PGDATABASE: obs
+      PGUSER: obs
+      PGPASSWORD: obs
+      PGHOST: postgres
+      PGPORT: 5432
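Note: these two services mirror the openmaptiles project's own tooling containers. The `${BBOX}`, `${MIN_ZOOM}`, `${MAX_ZOOM}` and similar placeholders are resolved by docker-compose from the deployment's `.env` file (or the shell), per the usual compose convention, and both containers are pointed at a `postgres` host matching the database service name used by the API.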
@@ -38,6 +38,7 @@ services:
       - ./api/tools:/opt/obs/api/tools
       - ./api/config.dev.py:/opt/obs/api/config.py
       - ./frontend/build:/opt/obs/frontend/build
+      - ./tile-generator:/opt/obs/tile-generator
       - ./local/api-data:/data
       - ./tile-generator/data/:/tiles
     links:
@@ -12,5 +12,5 @@
 }
 
 .fullScreen {
-  margin: none;
+  margin: 0;
 }
@@ -26,7 +26,7 @@ function addRoadsStyle(style, mapSource) {
       ["exponential", 1.5],
       ["zoom"],
       12,
-      1,
+      2,
       17,
       [
         "case",
@@ -40,7 +40,7 @@ function TrackList({privateTracks}: {privateTracks: boolean}) {
 
   const data: {
     tracks: Track[]
-    tracksCount: number
+    trackCount: number
   } | null = useObservable(
     (_$, inputs$) =>
       inputs$.pipe(
@@ -56,9 +56,9 @@ function TrackList({privateTracks}: {privateTracks: boolean}) {
     [page, privateTracks]
   )
 
-  const {tracks, tracksCount} = data || {tracks: [], tracksCount: 0}
+  const {tracks, trackCount} = data || {tracks: [], trackCount: 0}
   const loading = !data
-  const totalPages = Math.ceil(tracksCount / pageSize)
+  const totalPages = Math.ceil(trackCount / pageSize)
 
   return (
     <div>