From 7add8caaa1fa4feaf722c6db989bbb1dd97e016e Mon Sep 17 00:00:00 2001
From: Paul Bienkowski
Date: Sun, 21 Nov 2021 17:05:46 +0100
Subject: [PATCH] Live tiles, API executes openmaptiles-tools

---
 Dockerfile                                     |   3 +
 api/Dockerfile                                 |   4 +-
 api/config.dev.py                              |   2 +-
 api/obs/api/app.py                             |   9 +-
 api/obs/api/db.py                              |   2 +-
 api/obs/api/process.py                         |  50 +++--
 api/obs/api/routes/login.py                    |   5 +-
 api/obs/api/routes/tiles.py                    |  56 ++++--
 api/requirements.txt                           |   3 +
 api/setup.py                                   |  15 +-
 api/tools/build_tiles.py                       | 179 ++++++++++++++++++
 api/tools/process_track.py                     |  13 +-
 deployment/README.md                           |  48 ++---
 deployment/examples/docker-compose.yaml        | 123 ++++++------
 docker-compose.yaml                            |   1 +
 frontend/src/components/Page/Page.module.scss  |   2 +-
 frontend/src/mapstyles/index.js                |   2 +-
 frontend/src/pages/TracksPage.tsx              |   6 +-
 18 files changed, 368 insertions(+), 155 deletions(-)
 create mode 100755 api/tools/build_tiles.py

diff --git a/Dockerfile b/Dockerfile
index 070fc8a..c5c0a58 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -28,11 +28,14 @@ WORKDIR /opt/obs/api
 ADD api/requirements.txt /opt/obs/api/
 RUN pip install -r requirements.txt
 
+ADD tile-generator /opt/obs/tile-generator
+
 ADD api/scripts /opt/obs/scripts
 RUN pip install -e /opt/obs/scripts
 
 ADD api/setup.py /opt/obs/api/
 ADD api/obs /opt/obs/api/obs/
+ADD api/tools /opt/obs/api/tools/
 RUN pip install -e /opt/obs/api/
 
 COPY --from=frontend-builder /opt/obs/frontend/build /opt/obs/frontend/build
diff --git a/api/Dockerfile b/api/Dockerfile
index a488df1..18c4ecb 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -5,7 +5,9 @@ WORKDIR /opt/obs/api
 ADD scripts /opt/obs/scripts
 RUN pip install -e /opt/obs/scripts
 
-ADD requirements.txt setup.py /opt/obs/api/
+ADD requirements.txt /opt/obs/api/
+RUN pip install -r requirements.txt
+ADD setup.py /opt/obs/api/
 ADD obs /opt/obs/api/obs/
 RUN pip install -e .
 
diff --git a/api/config.dev.py b/api/config.dev.py
index 6cdf31f..c33f561 100644
--- a/api/config.dev.py
+++ b/api/config.dev.py
@@ -11,7 +11,7 @@ DEDICATED_WORKER = True
 FRONTEND_URL = "http://localhost:3001/"
 FRONTEND_DIR = None
 FRONTEND_CONFIG = None
-TILES_FILE = "/tiles/tiles.mbtiles"
+TILES_FILE = None  # "/tiles/tiles.mbtiles"
 DATA_DIR = "/data"
 ADDITIONAL_CORS_ORIGINS = [
     "http://localhost:8880/",  # for maputnik on 8880
diff --git a/api/obs/api/app.py b/api/obs/api/app.py
index 098c55f..babd7e7 100644
--- a/api/obs/api/app.py
+++ b/api/obs/api/app.py
@@ -166,11 +166,16 @@ if INDEX_HTML and exists(INDEX_HTML):
         if req.app.config.get("TILES_FILE"):
             result["obsMapSource"] = {
                 "type": "vector",
-                "tiles": [req.app.url_for("tiles", zoom="{zoom}", x="{x}", y="{y}")],
+                "tiles": [
+                    req.app.url_for("tiles", zoom="000", x="111", y="222.pbf")
+                    .replace("000", "{z}")
+                    .replace("111", "{x}")
+                    .replace("222", "{y}")
+                ],
                 "minzoom": 12,
                 "maxzoom": 14,
             }
-        return json_response()
+        return json_response(result)
 
     @app.get("/<path:path>")
     def get_frontend_static(req, path):
diff --git a/api/obs/api/db.py b/api/obs/api/db.py
index c728bd1..0e025ca 100644
--- a/api/obs/api/db.py
+++ b/api/obs/api/db.py
@@ -69,7 +69,7 @@ async def connect_db(url):
     engine = create_async_engine(url, echo=False)
     sessionmaker = SessionMaker(engine, class_=AsyncSession, expire_on_commit=False)
 
-    yield
+    yield engine
 
     # for AsyncEngine created in function scope, close and
     # clean-up pooled connections
diff --git a/api/obs/api/process.py b/api/obs/api/process.py
index 338d7a2..d845396 100644
--- a/api/obs/api/process.py
+++ b/api/obs/api/process.py
@@ -25,42 +25,50 @@ from obs.face.filter import (
     RequiredFieldsFilter,
 )
 
+from obs.face.osm import DataSource, DatabaseTileSource
+
 from obs.api.db import OvertakingEvent, Track, make_session
 from obs.api.app import app
 
 log = logging.getLogger(__name__)
 
 
-async def process_tracks_loop(data_source, delay):
+async def process_tracks_loop(delay):
     while True:
-        async with make_session() as session:
-            track = (
-                await session.execute(
-                    select(Track)
-                    .where(Track.processing_status == "queued")
-                    .order_by(Track.processing_queued_at)
-                    .options(joinedload(Track.author))
-                )
-            ).scalar()
+        try:
+            async with make_session() as session:
+                track = (
+                    await session.execute(
+                        select(Track)
+                        .where(Track.processing_status == "queued")
+                        .order_by(Track.processing_queued_at)
+                        .options(joinedload(Track.author))
+                    )
+                ).scalar()
 
-            if track is None:
-                await asyncio.sleep(delay)
-                continue
+                if track is None:
+                    await asyncio.sleep(delay)
+                    continue
+
+                tile_source = DatabaseTileSource()
+                data_source = DataSource(tile_source)
 
-            try:
                 await process_track(session, track, data_source)
-            except:
-                log.exception("Failed to process track %s. Will continue.", track.slug)
-                await asyncio.sleep(1)
-                continue
+        except:
+            log.exception("Failed to process track. Will continue.")
+            await asyncio.sleep(1)
+            continue
 
 
-async def process_tracks(data_source, tracks):
+async def process_tracks(tracks):
     """
     Processes the tracks and writes event data to the database.
 
     :param tracks: A list of strings which
     """
+    tile_source = DatabaseTileSource()
+    data_source = DataSource(tile_source)
+
     async with make_session() as session:
         for track_id_or_slug in tracks:
             track = (
@@ -202,7 +210,9 @@ async def import_overtaking_events(session, track, overtaking_events):
     event_models = []
     for m in overtaking_events:
         hex_hash = hashlib.sha256(
-            struct.pack("QQ", track.id, int(m["time"].timestamp()))
+            struct.pack(
+                "ddQ", m["latitude"], m["longitude"], int(m["time"].timestamp())
+            )
         ).hexdigest()
 
         event_models.append(
diff --git a/api/obs/api/routes/login.py b/api/obs/api/routes/login.py
index 1132e37..ff5ec05 100644
--- a/api/obs/api/routes/login.py
+++ b/api/obs/api/routes/login.py
@@ -77,7 +77,10 @@ async def login_redirect(req):
     # {'sub': '3798e2da-b208-4a1a-98c0-08fecfea1345', 'email_verified': True, 'preferred_username': 'test', 'email': 'test@example.com'}
     sub = userinfo["sub"]
     preferred_username = userinfo["preferred_username"]
-    email = userinfo["email"]
+    email = userinfo.get("email")
+
+    if email is None:
+        raise ValueError("user has no email set, please configure keycloak to require emails")
 
     user = (await req.ctx.db.execute(select(User).where(User.sub == sub))).scalar()
 
diff --git a/api/obs/api/routes/tiles.py b/api/obs/api/routes/tiles.py
index a00b118..1fa19a0 100644
--- a/api/obs/api/routes/tiles.py
+++ b/api/obs/api/routes/tiles.py
@@ -1,7 +1,10 @@
-import gzip
+from gzip import decompress
 from sqlite3 import connect
 
 from sanic.response import raw
+from sqlalchemy import select, text
+from sqlalchemy.sql.expression import table, column
+
 from obs.api.app import app
 
 
@@ -30,28 +33,43 @@ def get_tile(filename, zoom, x, y):
 # regenerate approx. once each day
 TILE_CACHE_MAX_AGE = 3600 * 24
 
-if app.config.get("TILES_FILE"):
-    @app.route(r"/tiles/<zoom>/<x>/<y>")
-    async def tiles(req, zoom: int, x: int, y: str):
+@app.route(r"/tiles/<zoom>/<x>/<y>")
+async def tiles(req, zoom: int, x: int, y: str):
+    if app.config.get("TILES_FILE"):
         tile = get_tile(req.app.config.TILES_FILE, int(zoom), int(x), int(y))
 
-        gzip = "gzip" in req.headers["accept-encoding"]
+    else:
+        data = column("data")
+        key = column("key")
+        mvts = table("mvts", data, key)
 
-        headers = {}
-        headers["Vary"] = "Accept-Encoding"
+        tile = await req.ctx.db.scalar(
+            text(f"select data from getmvt(:zoom, :x, :y) as b(data, key);").bindparams(
+                zoom=int(zoom),
+                x=int(x),
+                y=int(y),
+            )
+        )
+        print("TILE", tile)
 
-        if req.app.config.DEBUG:
-            headers["Cache-Control"] = "no-cache"
-        else:
-            headers["Cache-Control"] = f"public, max-age={TILE_CACHE_MAX_AGE}"
+    gzip = "gzip" in req.headers["accept-encoding"]
 
-        # The tiles in the mbtiles file are gzip-compressed already, so we
-        # serve them actually as-is, and only decompress them if the browser
-        # doesn't accept gzip
-        if gzip:
-            headers["Content-Encoding"] = "gzip"
-        else:
-            tile = gzip.decompress(tile)
+    headers = {}
+    headers["Vary"] = "Accept-Encoding"
 
-        return raw(tile, content_type="application/x-protobuf", headers=headers)
+    if req.app.config.DEBUG:
+        headers["Cache-Control"] = "no-cache"
+    else:
+        headers["Cache-Control"] = f"public, max-age={TILE_CACHE_MAX_AGE}"
+
+    # The tiles in the mbtiles file are gzip-compressed already, so we
+    # serve them actually as-is, and only decompress them if the browser
+    # doesn't accept gzip
+    if gzip:
+        headers["Content-Encoding"] = "gzip"
+
+    if not gzip:
+        tile = decompress(tile)
+
+    return raw(tile, content_type="application/x-protobuf", headers=headers)
 
diff --git a/api/requirements.txt b/api/requirements.txt
index da6ae5c..c9565b4 100644
--- a/api/requirements.txt
+++ b/api/requirements.txt
@@ -5,3 +5,6 @@ sanicargs~=2.1.0
 sanic-cors~=1.0.1
 python-slugify~=5.0.2
 motor~=2.5.1
+pyyaml<6
+-e git+https://github.com/openmaptiles/openmaptiles-tools#egg=openmaptiles-tools
+sqlparse~=0.4.2
diff --git a/api/setup.py b/api/setup.py
index 8654539..ff46f83 100644
--- a/api/setup.py
+++ b/api/setup.py
@@ -1,8 +1,5 @@
 from setuptools import setup, find_packages
 
-with open("requirements.txt") as f:
-    requires = list(f.readlines())
-
 setup(
     name="openbikesensor-api",
     version="0.0.1",
@@ -12,7 +9,17 @@ setup(
     url="https://github.com/openbikesensor/portal",
     packages=find_packages(),
     package_data={},
-    install_requires=requires,
+    install_requires=[
+        "sanic~=21.9.1",
+        "oic>=1.3.0, <2",
+        "sanic-session~=0.8.0",
+        "sanicargs~=2.1.0",
+        "sanic-cors~=1.0.1",
+        "python-slugify~=5.0.2",
+        "motor~=2.5.1",
+        "sqlparse~=0.4.2",
+        "openmaptiles-tools",  # install from git
+    ],
     entry_points={
         "console_scripts": [
             "openbikesensor-api=obs.bin.openbikesensor_api:main",
diff --git a/api/tools/build_tiles.py b/api/tools/build_tiles.py
new file mode 100755
index 0000000..23668cc
--- /dev/null
+++ b/api/tools/build_tiles.py
@@ -0,0 +1,179 @@
+#!/usr/bin/env python3
+import argparse
+import logging
+import asyncio
+import tempfile
+import re
+import os
+import glob
+from os.path import normpath, abspath, join
+
+from sqlalchemy import text
+import sqlparse
+
+from obs.api.app import app
+from obs.api.db import connect_db, make_session
+
+log = logging.getLogger(__name__)
+
+
+TILE_GENERATOR = normpath(
+    abspath(join(app.config.API_ROOT_DIR, "..", "tile-generator"))
+)
+TILESET_FILE = join(TILE_GENERATOR, "openmaptiles.yaml")
+
+
+def parse_pg_url(url=app.config.POSTGRES_URL):
+    m = re.match(
+        r"^postgresql\+asyncpg://(?P<user>.*):(?P<password>.*)@(?P<host>.*)(:(?P<port>\d+))?/(?P<database>[^/]+)$",
+        url,
+    )
+
+    return (
+        m["user"] or "",
+        m["password"] or "",
+        m["host"],
+        m["port"] or "5432",
+        m["database"],
+    )
+
+
+async def main():
+    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
+
+    parser = argparse.ArgumentParser(
+        description="processes a single track for use in the portal, "
+        "using the obs.face algorithms"
+    )
+
+    parser.add_argument(
+        "--prepare",
+        action="store_true",
+        help="prepare and import SQL functions for tile generation",
+    )
+
+    args = parser.parse_args()
+
+    if args.prepare:
+        with tempfile.TemporaryDirectory() as build_dir:
+            await generate_data_yml(build_dir)
+            sql_snippets = await generate_sql(build_dir)
+            await import_sql(sql_snippets)
+
+    await generate_tiles()
+
+
+async def _run(cmd):
+    if isinstance(cmd, list):
+        cmd = " ".join(cmd)
+    proc = await asyncio.create_subprocess_shell(
+        cmd, stdout=asyncio.subprocess.PIPE, stderr=asyncio.subprocess.PIPE
+    )
+
+    stdout, stderr = await proc.communicate()
+
+    if proc.returncode != 0:
+        log.error(stderr.decode("utf-8"))
+        raise RuntimeError("external program failed: %s" % str(cmd))
+
+    return stdout.decode("utf-8")
+
+
+async def generate_data_yml(build_dir):
+    stdout = await _run(
+        [
+            "python",
+            "$(which generate-tm2source)",
+            TILESET_FILE,
+            *sum(
+                zip(
+                    ["--user", "--password", "--host", "--port", "--database"],
+                    parse_pg_url(),
+                ),
+                (),
+            ),
+        ]
+    )
+
+    tm2source = join(build_dir, "openmaptiles.tm2source")
+    os.makedirs(tm2source, exist_ok=True)
+
+    with open(join(tm2source, "data.yml"), "wt") as f:
+        f.write(stdout)
+
+
+async def generate_sql(build_dir):
+    sql_dir = join(build_dir, "sql")
+
+    await _run(f"python $(which generate-sql) {TILESET_FILE!r} --dir {sql_dir!r}")
{sql_dir!r}") + + sql_snippet_files = [ + *sorted( + glob.glob( + join( + app.config.API_ROOT_DIR, "src", "openmaptiles-tools", "sql", "*.sql" + ) + ) + ), + join(sql_dir, "run_first.sql"), + *sorted(glob.glob(join(sql_dir, "parallel", "*.sql"))), + join(sql_dir, "run_last.sql"), + ] + + sql_snippets = [ + "CREATE EXTENSION IF NOT EXISTS hstore;" + "CREATE EXTENSION IF NOT EXISTS postgis;" + ] + for filename in sql_snippet_files: + with open(filename, "rt") as f: + sql_snippets.append(f.read()) + + getmvt_sql = await _run( + f"python $(which generate-sqltomvt) {TILESET_FILE!r} --key --gzip --postgis-ver 3.0.1 --function --fname=getmvt" + ) + sql_snippets.append(getmvt_sql) + + return sql_snippets + + +async def import_sql(sql_snippets): + statements = sum(map(sqlparse.split, sql_snippets), []) + async with connect_db(app.config.POSTGRES_URL): + for i, statement in enumerate(statements): + clean_statement = sqlparse.format( + statement, + truncate_strings=20, + strip_comments=True, + keyword_case="upper", + ) + + if not clean_statement: + continue + + log.debug( + "Running SQL statement %d of %d (%s...)", + i + 1, + len(statements), + clean_statement[:40], + ) + + async with make_session() as session: + await session.execute(text(statement)) + await session.commit() + + +async def generate_tiles(): + pass + # .PHONY: generate-tiles-pg + # generate-tiles-pg: all start-db + # @echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists) using PostGIS ST_MVT()..." + # @rm -rf "$(MBTILES_LOCAL_FILE)" + # # For some reason Ctrl+C doesn't work here without the -T. Must be pressed twice to stop. + # $(DOCKER_COMPOSE) run -T $(DC_OPTS) openmaptiles-tools generate-tiles + # @echo "Updating generated tile metadata ..." + # $(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \ + # mbtiles-tools meta-generate "$(MBTILES_LOCAL_FILE)" $(TILESET_FILE) --auto-minmax --show-ranges + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/api/tools/process_track.py b/api/tools/process_track.py index 76d12ad..9302612 100755 --- a/api/tools/process_track.py +++ b/api/tools/process_track.py @@ -3,9 +3,7 @@ import argparse import logging import asyncio -from obs.face.osm import DataSource, DatabaseTileSource, OverpassTileSource - -from obs.api.db import make_session, connect_db, make_session +from obs.api.db import connect_db from obs.api.app import app from obs.api.process import process_tracks, process_tracks_loop @@ -38,15 +36,10 @@ async def main(): args = parser.parse_args() async with connect_db(app.config.POSTGRES_URL): - log.info("Loading OpenStreetMap data") - tile_source = DatabaseTileSource() - # tile_source = OverpassTileSource(app.config.OBS_FACE_CACHE_DIR) - data_source = DataSource(tile_source) - if args.tracks: - await process_tracks(data_source, args.tracks) + await process_tracks(args.tracks) else: - await process_tracks_loop(data_source, args.loop_delay) + await process_tracks_loop(args.loop_delay) if __name__ == "__main__": diff --git a/deployment/README.md b/deployment/README.md index c8d60c8..666d10e 100644 --- a/deployment/README.md +++ b/deployment/README.md @@ -55,40 +55,36 @@ vim docker-compose.yaml Change the domain where it occurs, such as in `Host()` rules. -### Configure frontend +### Create a keycloak instance + +Follow the official guides to create your own keycloak server: + +https://www.keycloak.org/documentation + +Documenting the details of this is out of scope for our project. 
+
+* an admin account for yourself
+* a realm for the portal
+* a client in that realm with "Access Type" set to "confidential" and a
+  redirect URL of this pattern: `https://portal.example.com/login/redirect`
+
+### Configure portal
 
 ```bash
-cp source/frontend/config.example.json config/frontend.json
-vim frontend/src/config.json
+cp source/api/config.py.example config/config.py
 ```
 
-* Change all URLs to your domain
-* Create a UUID by using `uuidgen` and set the `clientId`
-* Change the coordinates of the map center to your liking
-
-### Configure API
-
-```bash
-cp source/api/config.json.example config/api.json
-vim config/api.json
-```
-
-* Change all URLs to your domain
-* Generate and set a random `cookieSecret` (for example with `uuidgen`)
-* Generate and set a random `jwtSecret` (for example with `uuidgen`)
-* Configure you SMTP mail server
-* Set the `clientId` for the `oAuth2Client` of the portal (from step 3)
+Then edit `config/config.py` to your heart's content (and matching the
+configuration of the keycloak). Do not forget to generate a secure secret
+string.
 
 ### Build container and run them
 
 ```bash
-docker-compose up -d
+docker-compose build portal
+docker-compose up -d portal
 ```
 
-The services are being built the first time this is run. It can take some
-minutes.
-
-
 ## Miscellaneous
 
 ### Logs
@@ -106,10 +102,6 @@ docker-compose build
 docker-compose up -d
 ```
 
-#### Common issues
-- Errors about TLS issues on User cration point to something amiss in the mail server configuration.
-- Errors about unknown client point to ClientID mismatch between ``api.json`` and ``frontend.json``
-
 ### Updates
 
 Before updating make sure that you have properly backed-up your instance so you
diff --git a/deployment/examples/docker-compose.yaml b/deployment/examples/docker-compose.yaml
index fc28fbb..d2417d2 100644
--- a/deployment/examples/docker-compose.yaml
+++ b/deployment/examples/docker-compose.yaml
@@ -17,74 +17,20 @@ services:
     networks:
       - backend
 
-  redis:
-    image: redis
-    volumes:
-      - ./data/redis:/data
-    command: redis-server --appendonly yes
-    restart: on-failure
-    networks:
-      - backend
-
-  api:
-    image: openbikesensor-api
+  portal:
+    image: openbikesensor-portal
     build:
-      context: ./source/api
+      context: ./source
     volumes:
       - ./data/api-data:/data
-      - ./config/api.json:/opt/obs/api/config.json
-    environment:
-      - MONGODB_URL=mongo://mongo/obs
+      - ./config/config.py:/opt/obs/api/config.py
+      - ./data/tiles/:/tiles
     restart: on-failure
     labels:
-      - traefik.http.middlewares.obsapi-prefix.stripprefix.prefixes=/api
-      - traefik.http.middlewares.obsapi-wellknown.replacepathregex.regex=^/\.well-known/oauth-authorization-server/api$$
-      - traefik.http.middlewares.obsapi-wellknown.replacepathregex.replacement=/.well-known/oauth-authorization-server
-      - traefik.http.routers.obsapi.rule=Host(`portal.example.com`) && (PathPrefix(`/api/`) || Path(`/.well-known/oauth-authorization-server/api`))
-      - traefik.http.routers.obsapi.entrypoints=websecure
-      - traefik.http.routers.obsapi.tls=true
-      - traefik.http.routers.obsapi.tls.certresolver=leresolver
-      - traefik.http.routers.obsapi.middlewares=obsapi-prefix@docker,obsapi-wellknown@docker
-      - traefik.docker.network=gateway
-    networks:
-      - gateway
-      - backend
-
-  worker:
-    image: openbikesensor-api
-    build:
-      context: ./source/api
-    volumes:
-      - ./data/api-data:/data
-      - ./config/api.json:/opt/obs/api/config.json
-    links:
-      - mongo
-      - redis
-    restart: on-failure
-    command:
-      - npm
-      - run
-      - start:worker
-    networks:
-      - backend
-      # Not requred for traefik, but to reach overpass-api.de
-      - gateway
-
-  frontend:
-    image: obs-frontend
-    build:
-      context: ./source/frontend
-      dockerfile: Dockerfile-prod
-    volumes:
-      - ./config/frontend.json:/usr/local/apache2/htdocs/config.json
-    links:
-      - api
-    restart: on-failure
-    labels:
-      - traefik.http.routers.obsfrontend.rule=Host(`portal.example.com`)
-      - traefik.http.routers.obsfrontend.entrypoints=websecure
-      - traefik.http.routers.obsfrontend.tls=true
-      - traefik.http.routers.obsfrontend.tls.certresolver=leresolver
+      - traefik.http.routers.portal.rule=Host(`portal.example.com`)
+      - traefik.http.routers.portal.entrypoints=websecure
+      - traefik.http.routers.portal.tls=true
+      - traefik.http.routers.portal.tls.certresolver=leresolver
       - traefik.docker.network=gateway
     networks:
       - gateway
@@ -125,3 +71,54 @@ services:
 #      - "traefik.http.routers.traefik.tls.certresolver=leresolver"
 #      - "traefik.http.routers.traefik.middlewares=basic-auth"
 #      - "traefik.http.middlewares.basic-auth.basicauth.usersfile=/usersfile"
+
+  openmaptiles-tools:
+    image: openmaptiles/openmaptiles-tools:6.0
+    environment:
+      # Must match the version of this file (first line)
+      # download-osm will use it when generating a composer file
+      MAKE_DC_VERSION: "3"
+      # Allow DIFF_MODE, MIN_ZOOM, and MAX_ZOOM to be overwritten from shell
+      DIFF_MODE: ${DIFF_MODE}
+      MIN_ZOOM: ${MIN_ZOOM}
+      MAX_ZOOM: ${MAX_ZOOM}
+      #Provide BBOX from *.bbox file if exists, else from .env
+      BBOX: ${BBOX}
+      # Imposm configuration file describes how to load updates when enabled
+      IMPOSM_CONFIG_FILE: ${IMPOSM_CONFIG_FILE}
+      # Control import-sql processes
+      MAX_PARALLEL_PSQL: ${MAX_PARALLEL_PSQL}
+
+      PGDATABASE: obs
+      PGUSER: obs
+      PGPASSWORD: obs
+      PGHOST: postgres
+      PGPORT: 5432
+    volumes:
+      - ./source/tile-generator/:/tileset
+      - ./data/tiles:/import
+      - ./data/tiles:/export
+      - ./data/tiles-build/sql:/sql
+      - ./data/tiles-build:/mapping
+      - ./data/tiles-cache:/cache
+
+
+  generate-vectortiles:
+    image: openmaptiles/generate-vectortiles:6.0
+    volumes:
+      - ./data/tiles:/export
+      - ./data/tiles-build/openmaptiles.tm2source:/tm2source
+    environment:
+      MBTILES_NAME: ${MBTILES_FILE}
+      BBOX: ${BBOX}
+      MIN_ZOOM: ${MIN_ZOOM}
+      MAX_ZOOM: ${MAX_ZOOM}
+      # Control tilelive-copy threads
+      COPY_CONCURRENCY: ${COPY_CONCURRENCY}
+      #
+      PGDATABASE: obs
+      PGUSER: obs
+      PGPASSWORD: obs
+      PGHOST: postgres
+      PGPORT: 5432
+
diff --git a/docker-compose.yaml b/docker-compose.yaml
index 34725b6..ccf9f8d 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -38,6 +38,7 @@ services:
       - ./api/tools:/opt/obs/api/tools
       - ./api/config.dev.py:/opt/obs/api/config.py
       - ./frontend/build:/opt/obs/frontend/build
+      - ./tile-generator:/opt/obs/tile-generator
       - ./local/api-data:/data
       - ./tile-generator/data/:/tiles
     links:
diff --git a/frontend/src/components/Page/Page.module.scss b/frontend/src/components/Page/Page.module.scss
index b678c6e..ceb337d 100644
--- a/frontend/src/components/Page/Page.module.scss
+++ b/frontend/src/components/Page/Page.module.scss
@@ -12,5 +12,5 @@
 }
 
 .fullScreen {
-  margin: none;
+  margin: 0;
 }
diff --git a/frontend/src/mapstyles/index.js b/frontend/src/mapstyles/index.js
index 0f17cd9..648851d 100644
--- a/frontend/src/mapstyles/index.js
+++ b/frontend/src/mapstyles/index.js
@@ -26,7 +26,7 @@ function addRoadsStyle(style, mapSource) {
         ["exponential", 1.5],
         ["zoom"],
         12,
-        1,
+        2,
         17,
         [
           "case",
diff --git a/frontend/src/pages/TracksPage.tsx b/frontend/src/pages/TracksPage.tsx
index e2fbf78..c161a93 100644
--- a/frontend/src/pages/TracksPage.tsx
+++ b/frontend/src/pages/TracksPage.tsx
@@ -40,7 +40,7 @@ function TrackList({privateTracks}: {privateTracks: boolean}) {
 
   const data: {
     tracks: Track[]
-    tracksCount: number
+    trackCount: number
   } | null = useObservable(
     (_$, inputs$) =>
       inputs$.pipe(
@@ -56,9 +56,9 @@ function TrackList({privateTracks}: {privateTracks: boolean}) {
     [page, privateTracks]
   )
 
-  const {tracks, tracksCount} = data || {tracks: [], tracksCount: 0}
+  const {tracks, trackCount} = data || {tracks: [], trackCount: 0}
   const loading = !data
 
-  const totalPages = Math.ceil(tracksCount / pageSize)
+  const totalPages = Math.ceil(trackCount / pageSize)
 
   return (