Start working on PostGIS supported tile generation

This commit is contained in:
Paul Bienkowski 2021-10-10 12:32:28 +02:00
parent a866eb3ab7
commit ec60fc3873
38 changed files with 3988 additions and 229 deletions

View file

@ -66,7 +66,7 @@ Compose](https://docs.docker.com/compose/install/) onto your machine, and
cloning the repository, all you need to do is:
```bash
docker-compose up -d --build
docker-compose up -d --build frontend
```
If this does not work, please open an issue and describe the problem you're
@ -97,7 +97,7 @@ somebody to help you ;)
You are advised not to use the dockerized mongodb service and instead do a
proper MongoDB setup on a server that is backed up and secured.
You can run the API in docker, but it is preferred to run it as a restricted
You can run the application in docker, but it is preferred to run it as a restricted
user in its own directory somewhere where it cannot escape ;)
The frontend should be built using `npm run build` and then served from a
@ -168,3 +168,10 @@ to the logged email content that *would* have been sent, check your docker log:
```bash
docker-compose logs -f api
```
## Tileserver generation
The above instructions do not cover serving vector tiles generated from the
collected data. That is set up separately. Please follow the instructions
in [tile-generator](./tile-generator/README.md).

View file

@ -12,7 +12,10 @@ RUN echo update-notifier=false >> ~/.npmrc
RUN npm ci
ADD scripts /opt/obs/api/scripts/
RUN cd scripts && pip install -e .
ADD tools /opt/obs/api/tools/
ADD requirements.txt /opt/obs/api/
RUN pip install -r requirements.txt
RUN pip install -e ./scripts
ADD views /opt/obs/api/views/
ADD src /opt/obs/api/src/

View file

@ -6,7 +6,10 @@
"mail": false,
"mongodb": {
"url": "mongodb://mongo/obsTest",
"debug": true
"debug": false
},
"postgres": {
"url": "postgresql+asyncpg://obs:obs@postgres/obs"
},
"redisUrl": "redis://redis",
"oAuth2Clients": [

View file

@ -17,6 +17,9 @@
"url": "mongodb://mongo/obs",
"debug": false
},
"postgres": {
"url": "postgresql+asyncpg://user:pass@host/dbname"
},
"redisUrl": "redis://redis",
"oAuth2Clients": [
{

16
api/postgres-schema.sql Normal file
View file

@ -0,0 +1,16 @@
DO $$ BEGIN
CREATE TYPE zone_type AS ENUM ('urban', 'rural', 'motorway');
EXCEPTION WHEN duplicate_object THEN NULL;
END $$;
CREATE TABLE IF NOT EXISTS road_annotations (
way_id integer,
reverse boolean,
name text,
zone zone_type,
distance_overtaker_mean float,
distance_overtaker_median float,
distance_overtaker_minimum float,
distance_overtaker_n integer,
distance_overtaker_n_below_limit integer,
distance_overtaker_n_above_limit integer,
distance_overtaker_limit float,
distance_overtaker_measurements integer ARRAY
);
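This file only creates the (still empty) aggregate table; nothing in this commit fills it yet. As a rough sketch of how it could be populated, here is a hypothetical aggregation from the `overtaking_event` table defined in `api/tools/db.py` — the query, credentials, and use of asyncpg are assumptions, not part of this diff:

```python
# Hypothetical sketch: fill road_annotations from per-event data.
# Table and column names come from this commit; everything else is assumed.
import asyncio

import asyncpg

FILL_ANNOTATIONS = """
INSERT INTO road_annotations (
    way_id, reverse,
    distance_overtaker_mean, distance_overtaker_median,
    distance_overtaker_minimum, distance_overtaker_n
)
SELECT
    way_id,
    direction_reversed,
    avg(distance_overtaker),
    percentile_cont(0.5) WITHIN GROUP (ORDER BY distance_overtaker),
    min(distance_overtaker),
    count(*)
FROM overtaking_event
GROUP BY way_id, direction_reversed;
"""


async def main():
    # credentials match the docker-compose defaults (obs/obs/obs)
    conn = await asyncpg.connect("postgresql://obs:obs@localhost/obs")
    try:
        await conn.execute(FILL_ANNOTATIONS)
    finally:
        await conn.close()


if __name__ == "__main__":
    asyncio.run(main())
```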

3
api/requirements.txt Normal file
View file

@ -0,0 +1,3 @@
./scripts
sqlalchemy[asyncio]
asyncpg

97
api/roads_import.lua Normal file
View file

@ -0,0 +1,97 @@
--
-- To use this file, see
-- https://mygisnotes.wordpress.com/2015/10/09/openstreepmap-import-data-into-a-postgis-database-and-incrementally-update-it/
-- for general instructions:
-- 1. Download PBF
-- 2. Convert and filter to your needs
-- 3. Run the import like this:
--
-- osm2pgsql --create --hstore --style api/roads_import.lua -O flex \
-- --proj 32629 -H localhost -d obs -U obs -W \
-- YOUR_FILE.o5m
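-- (EPSG:32629 is WGS 84 / UTM zone 29N; adjust --proj to match your region.)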
local function contains(table, val)
for i=1,#table do
if table[i] == val then
return true
end
end
return false
end
local HIGHWAY_TYPES = {
"trunk",
"primary",
"secondary",
"tertiary",
"unclassified",
"residential",
"trunk_link",
"primary_link",
"secondary_link",
"tertiary_link",
"living_street",
"service",
"track",
"road",
}
local ZONE_TYPES = {
"urban",
"rural",
"motorway",
}
local URBAN_TYPES = {
"residential",
"living_street",
"road",
}
local MOTORWAY_TYPES = {
"motorway",
"motorway_link",
}
local roads = osm2pgsql.define_way_table('road', {
{ column = 'zone', type = 'text', sql_type="zone_type" },
{ column = 'name', type = 'text' },
{ column = 'geometry', type = 'linestring' },
{ column = 'tags', type = 'hstore' },
})
function osm2pgsql.process_way(object)
if object.tags.highway and contains(HIGHWAY_TYPES, object.tags.highway) then
local tags = object.tags
local zone = nil
if tags["zone:traffic"] then
zone = tags["zone:traffic"]
if zone == "DE:urban" then
zone = "urban"
elseif zone == "DE:rural" then
zone = "rural"
elseif zone == "DE:motorway" then
zone = "motorway"
elseif string.match(zone, "rural") then
zone = "rural"
elseif string.match(zone, "urban") then
zone = "urban"
elseif string.match(zone, "motorway") then
zone = "motorway"
elseif contains(URBAN_TYPES, tags.highway) then
zone = "urban"
elseif contains(MOTORWAY_TYPES, tags.highway) then
zone = "motorway"
else
-- we can't figure it out
zone = nil
end
end
roads:add_row({
geometry = { create = 'line' },
name = tags.name,
zone = zone,
tags = tags,
})
end
end

@ -1 +1 @@
Subproject commit 6beab2ebfede7e41a1184b5ae1d0be0c83f8f95c
Subproject commit 118cc1d9f9dbd1dd8816a61c0698deaf404cf0ff

View file

@ -30,6 +30,10 @@ const configSchema = Joi.object({
debug: Joi.boolean().default(process.env.NODE_ENV !== 'production'),
}).required(),
postgres: Joi.object({
url: Joi.string().required(),
}).required(),
redisUrl: Joi.string().required(),
oAuth2Clients: Joi.array()

View file

@ -1,188 +0,0 @@
import argparse
import logging
import os
import tempfile
import json
from obs.face.importer import ImportMeasurementsCsv
from obs.face.annotate import AnnotateMeasurements
from obs.face.filter import (
AnonymizationMode,
ChainFilter,
ConfirmedFilter,
DistanceMeasuredFilter,
PrivacyFilter,
PrivacyZone,
PrivacyZonesFilter,
RequiredFieldsFilter,
)
from obs.face.osm import DataSource as OSMDataSource
log = logging.getLogger(__name__)
def main():
logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
parser = argparse.ArgumentParser(
description="processes a single track for use in the portal, "
"using the obs.face algorithms"
)
parser.add_argument(
"-i", "--input", required=True, action="store", help="path to input CSV file"
)
parser.add_argument(
"-o", "--output", required=True, action="store", help="path to output directory"
)
parser.add_argument(
"--path-cache",
action="store",
default=None,
dest="cache_dir",
help="path where the visualization data will be stored",
)
parser.add_argument(
"--settings",
type=argparse.FileType("rt", encoding="utf-8"),
default=None,
help="path where the visualization data will be stored",
)
args = parser.parse_args()
if args.cache_dir is None:
with tempfile.TemporaryDirectory() as cache_dir:
args.cache_dir = cache_dir
process(args)
else:
process(args)
def process(args):
log.info("Loading OpenStreetMap data")
osm = OSMDataSource(cache_dir=args.cache_dir)
filename_input = os.path.abspath(args.input)
dataset_id = os.path.splitext(os.path.basename(args.input))[0]
os.makedirs(args.output, exist_ok=True)
log.info("Loading settings")
settings = json.load(args.settings)
log.info("Annotating and filtering CSV file")
measurements, statistics = ImportMeasurementsCsv().read(
filename_input,
user_id="dummy",
dataset_id=dataset_id,
)
measurements = AnnotateMeasurements(osm, cache_dir=args.cache_dir).annotate(
measurements
)
filters_from_settings = []
for filter_description in settings.get("filters", []):
filter_type = filter_description.get("type")
if filter_type == "PrivacyZonesFilter":
privacy_zones = [
PrivacyZone(
latitude=zone.get("latitude"),
longitude=zone.get("longitude"),
radius=zone.get("radius"),
)
for zone in filter_description.get("config", {}).get("privacyZones", [])
]
filters_from_settings.append(PrivacyZonesFilter(privacy_zones))
else:
log.warning("Ignoring unknown filter type %r in settings file", filter_type)
input_filter = ChainFilter(
RequiredFieldsFilter(),
PrivacyFilter(
user_id_mode=AnonymizationMode.REMOVE,
measurement_id_mode=AnonymizationMode.REMOVE,
),
*filters_from_settings,
)
events_filter = DistanceMeasuredFilter()
confirmed_filter = ChainFilter(
ConfirmedFilter(),
)
track_measurements = input_filter.filter(measurements, log=log)
event_measurements = events_filter.filter(track_measurements , log=log)
confirmed_measurements = confirmed_filter.filter(track_measurements, log=log)
# write out
confirmed_measurements_json = {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [m["latitude"], m["longitude"]],
},
"properties": {
"distanceOvertaker": m["distance_overtaker"],
"distanceStationary": m["distance_stationary"],
"confirmed": True,
},
}
for m in confirmed_measurements
],
}
all_measurements_json = {
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [m["latitude"], m["longitude"]],
},
"properties": {
"distanceOvertaker": m["distance_overtaker"],
"distanceStationary": m["distance_stationary"],
"confirmed": m in confirmed_measurements,
},
}
for m in event_measurements
],
}
track_json = {
"type": "Feature",
"geometry": {
"type": "LineString",
"coordinates": [
[m["latitude"], m["longitude"]] for m in track_measurements
],
},
}
statistics_json = {
"recordedAt": statistics["t_min"].isoformat(),
"recordedUntil": statistics["t_max"].isoformat(),
"duration": statistics["t"],
"length": statistics["d"],
"segments": statistics["n_segments"],
"numEvents": statistics["n_confirmed"],
"numMeasurements": statistics["n_measurements"],
"numValid": statistics["n_valid"],
}
for output_filename, data in [
("all_measurements.json", all_measurements_json),
("confirmed_measurements.json", confirmed_measurements_json),
("track.json", track_json),
("statistics.json", statistics_json),
]:
with open(os.path.join(args.output, output_filename), "w") as fp:
json.dump(data, fp, indent=4)
if __name__ == "__main__":
main()

View file

@ -364,8 +364,8 @@ router.get(
auth.optional,
wrapRoute(async (req, res) => {
const FILE_BY_KEY = {
allMeasurements: 'all_measurements.json',
confirmedMeasurements: 'confirmed_measurements.json',
measurements: 'measurements.json',
overtakingEvents: 'overtakingEvents.json',
track: 'track.json',
};

View file

@ -6,6 +6,7 @@ const queue = require('./queue');
require('./db');
const { Track } = require('./models');
const { API_ROOT_DIR, PROCESSING_DIR, OBS_FACE_CACHE_DIR, PROCESSING_OUTPUT_DIR } = require('./paths');
const config = require('./config');
queue.process('processTrack', async (job) => {
const track = await Track.findById(job.data.trackId);
@ -47,6 +48,7 @@ queue.process('processTrack', async (job) => {
const settingsFilePath = path.join(inputDirectory, 'track-settings.json');
console.log(`[${track.slug}] Create settings at ${settingsFilePath}`);
const settings = {
trackId: String(track._id),
settingsGeneratedAt: new Date().getTime(),
filters: [
// TODO: Add actual privacy zones from user database
@ -69,11 +71,10 @@ queue.process('processTrack', async (job) => {
// TODO: Generate track transformation settings (privacy zones etc)
// const settingsFilePath = path.join(inputDirectory, 'track-settings.json');
//
const child = spawn(
'python',
'python3',
[
path.join(API_ROOT_DIR, 'src', 'process_track.py'),
path.join(API_ROOT_DIR, 'tools', 'process_track.py'),
'--input',
inputFilePath,
'--output',
@ -87,6 +88,9 @@ queue.process('processTrack', async (job) => {
],
{
cwd: PROCESSING_DIR,
env: {
...process.env,
POSTGRES_URL: config.postgres.url,
},
},
);

112
api/tools/db.py Normal file
View file

@ -0,0 +1,112 @@
import json
from contextvars import ContextVar
from contextlib import asynccontextmanager
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.orm import sessionmaker
from sqlalchemy.types import UserDefinedType
from sqlalchemy import (
Column,
String,
Integer,
Boolean,
select,
DateTime,
Float,
Index,
Enum as SqlEnum,
func,
)
Base = declarative_base()
engine = ContextVar("engine")
async_session = ContextVar("async_session")
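# engine and session factory live in ContextVars so that connect_db() below
# can install them for the current context and reset them again on exit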
@asynccontextmanager
async def make_session():
async with async_session.get()() as session:
yield session
async def init_models():
async with engine.get().begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.run_sync(Base.metadata.create_all)
@asynccontextmanager
async def connect_db(url):
engine_ = create_async_engine(url, echo=False)
t1 = engine.set(engine_)
async_session_ = sessionmaker(engine_, class_=AsyncSession, expire_on_commit=False)
t2 = async_session.set(async_session_)
yield
# for AsyncEngine created in function scope, close and
# clean-up pooled connections
await engine_.dispose()
engine.reset(t1)
async_session.reset(t2)
ZoneType = SqlEnum("rural", "urban", "motorway", name="zone_type")
class Geometry(UserDefinedType):
def get_col_spec(self):
return "GEOMETRY"
def bind_expression(self, bindvalue):
return func.ST_GeomFromGeoJSON(json.dumps(bindvalue), type_=self)
def column_expression(self, col):
return func.ST_AsGeoJSON(col, type_=self)
class OvertakingEvent(Base):
__tablename__ = "overtaking_event"
__table_args__ = (Index("road_segment", "way_id", "direction_reversed"),)
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
track_id = Column(String, index=True)
hex_hash = Column(String, unique=True, index=True)
way_id = Column(Integer, index=True)
# whether we were traveling along the way in reverse direction
direction_reversed = Column(Boolean)
geometry = Column(Geometry)
latitude = Column(Float)
longitude = Column(Float)
time = Column(DateTime)
distance_overtaker = Column(Float)
distance_stationary = Column(Float)
course = Column(Float)
speed = Column(Float)
def __repr__(self):
return f"<OvertakingEvent {self.id}>"
class Road(Base):
__tablename__ = "road"
way_id = Column(Integer, primary_key=True, index=True)
zone = Column(ZoneType)
name = Column(String)
geometry = Column(Geometry)
class RoadSegment(Base):
__tablename__ = "bike_lane"
way_id = Column(Integer, primary_key=True, index=True)
direction_reversed = Column(Boolean)
geometry = Column(Geometry)
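Together, `connect_db()` installs an engine and session factory for the current context and `make_session()` yields sessions from that factory. A minimal usage sketch, assuming a local database reachable under the placeholder URL below:

```python
# Minimal usage sketch for the helpers defined above; the URL is a placeholder.
# Careful: init_models() drops and recreates all tables.
import asyncio

from sqlalchemy import select

from db import OvertakingEvent, connect_db, init_models, make_session


async def main():
    async with connect_db("postgresql+asyncpg://obs:obs@localhost/obs"):
        await init_models()  # drop + recreate the schema
        async with make_session() as session:
            result = await session.execute(select(OvertakingEvent).limit(5))
            for event in result.scalars():
                print(event)


if __name__ == "__main__":
    asyncio.run(main())
```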

231
api/tools/process_track.py Executable file
View file

@ -0,0 +1,231 @@
#!/usr/bin/env python3
import argparse
import logging
import os
import tempfile
import json
import shutil
import asyncio
import hashlib
import struct
import pytz
from obs.face.importer import ImportMeasurementsCsv
from obs.face.geojson import ExportMeasurements
from obs.face.annotate import AnnotateMeasurements
from obs.face.filter import (
AnonymizationMode,
ChainFilter,
ConfirmedFilter,
DistanceMeasuredFilter,
PrivacyFilter,
PrivacyZone,
PrivacyZonesFilter,
RequiredFieldsFilter,
)
from obs.face.osm import DataSource, OverpassTileSource
from sqlalchemy import delete, func, select
from db import make_session, connect_db, OvertakingEvent
log = logging.getLogger(__name__)
async def main():
logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
parser = argparse.ArgumentParser(
description="processes a single track for use in the portal, "
"using the obs.face algorithms"
)
parser.add_argument(
"-i", "--input", required=True, action="store", help="path to input CSV file"
)
parser.add_argument(
"-o", "--output", required=True, action="store", help="path to output directory"
)
parser.add_argument(
"--path-cache",
action="store",
default=None,
dest="cache_dir",
help="path where the visualization data will be stored",
)
parser.add_argument(
"--settings",
dest="settings_file",
required=True,
default=None,
help="path to track settings file",
)
# https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING
postgres_url_default = os.environ.get("POSTGRES_URL")
parser.add_argument(
"--postgres-url",
required=False,
action="store",
help="connection string for postgres database, if set, the track result is imported there",
default=postgres_url_default,
)
args = parser.parse_args()
if args.cache_dir is None:
with tempfile.TemporaryDirectory() as cache_dir:
args.cache_dir = cache_dir
await process(args)
else:
await process(args)
async def process(args):
log.info("Loading OpenStreetMap data")
tile_source = OverpassTileSource(cache_dir=args.cache_dir)
data_source = DataSource(tile_source)
filename_input = os.path.abspath(args.input)
dataset_id = os.path.splitext(os.path.basename(args.input))[0]
os.makedirs(args.output, exist_ok=True)
log.info("Loading settings")
settings_path = os.path.abspath(args.settings_file)
with open(settings_path, "rt") as f:
settings = json.load(f)
settings_output_path = os.path.abspath(
os.path.join(args.output, "track-settings.json")
)
if settings_path != settings_output_path:
log.info("Copy settings to output directory")
shutil.copyfile(settings_path, settings_output_path)
log.info("Annotating and filtering CSV file")
imported_data, statistics = ImportMeasurementsCsv().read(
filename_input,
user_id="dummy",
dataset_id=dataset_id,
)
input_data = AnnotateMeasurements(data_source, cache_dir=args.cache_dir).annotate(
imported_data
)
filters_from_settings = []
for filter_description in settings.get("filters", []):
filter_type = filter_description.get("type")
if filter_type == "PrivacyZonesFilter":
privacy_zones = [
PrivacyZone(
latitude=zone.get("latitude"),
longitude=zone.get("longitude"),
radius=zone.get("radius"),
)
for zone in filter_description.get("config", {}).get("privacyZones", [])
]
filters_from_settings.append(PrivacyZonesFilter(privacy_zones))
else:
log.warning("Ignoring unknown filter type %r in settings file", filter_type)
track_filter = ChainFilter(
RequiredFieldsFilter(),
PrivacyFilter(
user_id_mode=AnonymizationMode.REMOVE,
measurement_id_mode=AnonymizationMode.REMOVE,
),
*filters_from_settings,
)
measurements_filter = DistanceMeasuredFilter()
overtaking_events_filter = ConfirmedFilter()
track_points = track_filter.filter(input_data, log=log)
measurements = measurements_filter.filter(track_points, log=log)
overtaking_events = overtaking_events_filter.filter(measurements, log=log)
exporter = ExportMeasurements("measurements.dummy")
exporter.add_measurements(measurements)
measurements_json = exporter.get_data()
del exporter
exporter = ExportMeasurements("overtaking_events.dummy")
exporter.add_measurements(overtaking_events)
overtaking_events_json = exporter.get_data()
del exporter
track_json = {
"type": "Feature",
"geometry": {
"type": "LineString",
"coordinates": [[m["latitude"], m["longitude"]] for m in track_points],
},
}
statistics_json = {
"recordedAt": statistics["t_min"].isoformat(),
"recordedUntil": statistics["t_max"].isoformat(),
"duration": statistics["t"],
"length": statistics["d"],
"segments": statistics["n_segments"],
"numEvents": statistics["n_confirmed"],
"numMeasurements": statistics["n_measurements"],
"numValid": statistics["n_valid"],
}
for output_filename, data in [
("measurements.json", measurements_json),
("overtakingEvents.json", overtaking_events_json),
("track.json", track_json),
("statistics.json", statistics_json),
]:
with open(os.path.join(args.output, output_filename), "w") as fp:
json.dump(data, fp, indent=4)
if args.postgres_url:
log.info("Importing to database.")
async with connect_db(args.postgres_url):
async with make_session() as session:
await clear_track_data(session, settings["trackId"])
await import_overtaking_events(
session, settings["trackId"], overtaking_events
)
await session.commit()
async def clear_track_data(session, track_id):
await session.execute(
delete(OvertakingEvent).where(OvertakingEvent.track_id == track_id)
)
async def import_overtaking_events(session, track_id, overtaking_events):
event_models = []
for m in overtaking_events:
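# hash (track id, event timestamp) into a stable id; the unique index on
# hex_hash guards against importing the same event twice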
sha = hashlib.sha256()
sha.update(track_id.encode("utf-8"))
sha.update(struct.pack("Q", int(m["time"].timestamp())))
hex_hash = sha.hexdigest()
event_models.append(
OvertakingEvent(
track_id=track_id,
hex_hash=hex_hash,
way_id=m["OSM_way_id"],
direction_reversed=m["OSM_way_orientation"] < 0,
geometry={"type": "Point", "coordinates": [m["longitude"], m["latitude"]]},
latitude=m["latitude"],
longitude=m["longitude"],
time=m["time"].astimezone(pytz.utc).replace(tzinfo=None),
distance_overtaker=m["distance_overtaker"],
distance_stationary=m["distance_stationary"],
course=m["course"],
speed=m["speed"],
)
)
session.add_all(event_models)
if __name__ == "__main__":
asyncio.run(main())

37
api/tools/reset_database.py Executable file
View file

@ -0,0 +1,37 @@
#!/usr/bin/env python3
import argparse
import logging
import os
import asyncio
from db import init_models, connect_db
log = logging.getLogger(__name__)
async def main():
logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
parser = argparse.ArgumentParser(
description="clears the postgresql database and initializes the schema"
)
# https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING
postgres_url_default = os.environ.get("POSTGRES_URL")
parser.add_argument(
"--postgres-url",
required=postgres_url_default is None,
action="store",
help="connection string for postgres database, if set, the track result is imported there",
default=postgres_url_default,
)
args = parser.parse_args()
async with connect_db(args.postgres_url):
await init_models()
log.info("Database initialized.")
if __name__ == "__main__":
asyncio.run(main())

View file

@ -23,12 +23,24 @@ services:
command: redis-server --appendonly yes
restart: on-failure
postgres:
image: "openmaptiles/postgis:6.0"
environment:
POSTGRES_USER: obs
POSTGRES_PASSWORD: obs
POSTGRES_DB: obs
ports:
- '5432:5432'
volumes:
- ./local/postgres/data:/var/lib/postgresql/data
api:
image: obs-api
build:
context: ./api
volumes:
- ./api/src:/opt/obs/api/src
- ./api/tools:/opt/obs/api/tools
- ./api/scripts/obs:/opt/obs/api/scripts/obs
- ./api/views:/opt/obs/api/views
- ./local/api-data:/data
@ -39,6 +51,7 @@ services:
- PORT=3000
- MONGODB_URL=mongodb://mongo/obsTest
- DATA_DIR=/data
- POSTGRES_URL=postgresql+asyncpg://obs:obs@postgres/obs
links:
- mongo
- redis
@ -56,6 +69,7 @@ services:
context: ./api
volumes:
- ./api/src:/opt/obs/api/src
- ./api/tools:/opt/obs/api/tools
- ./api/scripts/obs:/opt/obs/api/scripts/obs
- ./api/views:/opt/obs/api/views
- ./local/api-data:/data
@ -66,10 +80,7 @@ services:
- mongo
- redis
restart: on-failure
command:
- npm
- run
- dev:worker
command: 'bash -c "pip install geopy && npm run dev:worker"'
frontend:
@ -94,3 +105,67 @@ services:
command:
- npm
- start
openmaptiles-tools:
image: "openmaptiles/openmaptiles-tools:${TOOLS_VERSION}"
env_file: tile-generator/.env
environment:
# Must match the version of this file (first line)
# download-osm will use it when generating a composer file
MAKE_DC_VERSION: "3"
# Allow DIFF_MODE, MIN_ZOOM, and MAX_ZOOM to be overwritten from shell
DIFF_MODE: ${DIFF_MODE}
MIN_ZOOM: ${MIN_ZOOM}
MAX_ZOOM: ${MAX_ZOOM}
# Provide BBOX from *.bbox file if it exists, else from .env
BBOX: ${BBOX}
# Imposm configuration file describes how to load updates when enabled
IMPOSM_CONFIG_FILE: ${IMPOSM_CONFIG_FILE}
# Control import-sql processes
MAX_PARALLEL_PSQL: ${MAX_PARALLEL_PSQL}
PGDATABASE: obs
PGUSER: obs
PGPASSWORD: obs
PGHOST: postgres
PGPORT: 5432
volumes:
- ./tile-generator/:/tileset
- ./tile-generator/data:/import
- ./tile-generator/data:/export
- ./tile-generator/build/sql:/sql
- ./tile-generator/build:/mapping
- ./tile-generator/cache:/cache
generate-vectortiles:
image: "openmaptiles/generate-vectortiles:${TOOLS_VERSION}"
volumes:
- ./tile-generator/data:/export
- ./tile-generator/build/openmaptiles.tm2source:/tm2source
env_file: tile-generator/.env
environment:
FILTER_MAPNIK_OUTPUT: ${FILTER_MAPNIK_OUTPUT}
MBTILES_NAME: ${MBTILES_FILE}
BBOX: ${BBOX}
MIN_ZOOM: ${MIN_ZOOM}
MAX_ZOOM: ${MAX_ZOOM}
# Control tilelive-copy threads
COPY_CONCURRENCY: ${COPY_CONCURRENCY}
#
PGDATABASE: obs
PGUSER: obs
PGPASSWORD: obs
PGHOST: postgres
PGPORT: 5432
tileserver:
image: klokantech/tileserver-gl
ports:
- 3002:80
volumes:
- ./tile-generator/tileserver-gl-config.json:/config/tileserver.json
- ./tile-generator/data/:/data
command:
- --config
- /config/tileserver.json
# - /data/tiles.mbtiles
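The `tileserver` service exposes the TileJSON endpoint that the frontend style in `src/mapstyles` points at (`http://localhost:3002/data/v3.json`). A quick reachability check, assuming the default port mapping above:

```python
# Reachability check for the tileserver service; assumes the default
# port mapping (3002:80) from this compose file.
import json
import urllib.request

TILEJSON_URL = "http://localhost:3002/data/v3.json"

with urllib.request.urlopen(TILEJSON_URL) as resp:
    tilejson = json.load(resp)

# a TileJSON document lists tile URL templates and the zoom range
print(tilejson.get("tiles"), tilejson.get("minzoom"), tilejson.get("maxzoom"))
```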

View file

@ -15,7 +15,7 @@
},
"mapHome": {
"zoom": 15,
"longitude": 9.1797,
"latitude": 48.7784
"longitude": 7.8302,
"latitude": 47.9755
}
}

View file

@ -1,3 +1,18 @@
module.exports = {
plugins: [{ plugin: require('@semantic-ui-react/craco-less') }],
plugins: [
{plugin: require('@semantic-ui-react/craco-less')},
{
plugin: {
overrideWebpackConfig: ({webpackConfig, cracoConfig, pluginOptions, context: {env, paths}}) => {
webpackConfig.resolve.alias = {
...webpackConfig.resolve.alias,
'mapbox-gl': 'maplibre-gl',
}
return webpackConfig
},
options: {},
},
},
],
}

View file

@ -1,6 +1,6 @@
{
"name": "react-frontend",
"version": "0.1.0",
"name": "openbikesensor-portal-frontend",
"version": "0.2.0-pre",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
@ -1637,15 +1637,36 @@
"chalk": "^4.0.0"
}
},
"@mapbox/geojson-rewind": {
"version": "0.5.1",
"resolved": "https://registry.npmjs.org/@mapbox/geojson-rewind/-/geojson-rewind-0.5.1.tgz",
"integrity": "sha512-eL7fMmfTBKjrb+VFHXCGv9Ot0zc3C0U+CwXo1IrP+EPwDczLoXv34Tgq3y+2mPSFNVUXgU42ILWJTC7145KPTA==",
"requires": {
"get-stream": "^6.0.1",
"minimist": "^1.2.5"
},
"dependencies": {
"get-stream": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
"integrity": "sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg=="
}
}
},
"@mapbox/geojson-types": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/@mapbox/geojson-types/-/geojson-types-1.0.2.tgz",
"integrity": "sha512-e9EBqHHv3EORHrSfbR9DqecPNn+AmuAoQxV6aL8Xu30bJMJR1o8PZLZzpk1Wq7/NfCbuhmakHTPYRhoqLsXRnw=="
},
"@mapbox/jsonlint-lines-primitives": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@mapbox/jsonlint-lines-primitives/-/jsonlint-lines-primitives-2.0.2.tgz",
"integrity": "sha1-zlblOfg1UrWNENZy6k1vya3HsjQ="
},
"@mapbox/mapbox-gl-style-spec": {
"version": "13.19.0",
"resolved": "https://registry.npmjs.org/@mapbox/mapbox-gl-style-spec/-/mapbox-gl-style-spec-13.19.0.tgz",
"integrity": "sha512-qA9P4WHU4a1iLKM/W2EIxCxcwlxa6isPF6P+jSPaIs4VlZKYO1DMVWNiY03SXu6a+K3dB3GEhRLvEh1f/8VG2w==",
"version": "13.22.0",
"resolved": "https://registry.npmjs.org/@mapbox/mapbox-gl-style-spec/-/mapbox-gl-style-spec-13.22.0.tgz",
"integrity": "sha512-35skPiyM1reMRHA+X+DgbT3WG8hXMpqy1Ncs66ZvtVWUvvA9CtERSx5kq+o5S1ZrvDISuyBzrVzyty7PkuStkQ==",
"requires": {
"@mapbox/jsonlint-lines-primitives": "~2.0.2",
"@mapbox/point-geometry": "^0.1.0",
@ -1657,16 +1678,55 @@
"sort-object": "^0.3.2"
}
},
"@mapbox/mapbox-gl-supported": {
"version": "1.5.0",
"resolved": "https://registry.npmjs.org/@mapbox/mapbox-gl-supported/-/mapbox-gl-supported-1.5.0.tgz",
"integrity": "sha512-/PT1P6DNf7vjEEiPkVIRJkvibbqWtqnyGaBz3nfRdcxclNSnSdaLU5tfAgcD7I8Yt5i+L19s406YLl1koLnLbg=="
},
"@mapbox/point-geometry": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/@mapbox/point-geometry/-/point-geometry-0.1.0.tgz",
"integrity": "sha1-ioP5M1x4YO/6Lu7KJUMyqgru2PI="
},
"@mapbox/tiny-sdf": {
"version": "1.2.5",
"resolved": "https://registry.npmjs.org/@mapbox/tiny-sdf/-/tiny-sdf-1.2.5.tgz",
"integrity": "sha512-cD8A/zJlm6fdJOk6DqPUV8mcpyJkRz2x2R+/fYcWDYG3oWbG7/L7Yl/WqQ1VZCjnL9OTIMAn6c+BC5Eru4sQEw=="
},
"@mapbox/unitbezier": {
"version": "0.0.0",
"resolved": "https://registry.npmjs.org/@mapbox/unitbezier/-/unitbezier-0.0.0.tgz",
"integrity": "sha1-FWUb1VOme4WB+zmIEMmK2Go0Uk4="
},
"@mapbox/vector-tile": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/@mapbox/vector-tile/-/vector-tile-1.3.1.tgz",
"integrity": "sha512-MCEddb8u44/xfQ3oD+Srl/tNcQoqTw3goGk2oLsrFxOTc3dUp+kAnby3PvAeeBYSMSjSPD1nd1AJA6W49WnoUw==",
"requires": {
"@mapbox/point-geometry": "~0.1.0"
}
},
"@mapbox/whoots-js": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/@mapbox/whoots-js/-/whoots-js-3.1.0.tgz",
"integrity": "sha512-Es6WcD0nO5l+2BOQS4uLfNPYQaNDfbot3X1XUoloz+x0mPDS3eeORZJl06HXjwBG1fOGwCRnzK88LMdxKRrd6Q=="
},
"@math.gl/web-mercator": {
"version": "3.5.6",
"resolved": "https://registry.npmjs.org/@math.gl/web-mercator/-/web-mercator-3.5.6.tgz",
"integrity": "sha512-siWHLJGp9o8fDEM1t0Rby+JXftl6il0z3927liWGzkHqFftXPHY858ShPy45ThDU8q5lyCftg8aVgrv4nfD+Zw==",
"requires": {
"@babel/runtime": "^7.12.0",
"gl-matrix": "~3.3.0"
},
"dependencies": {
"gl-matrix": {
"version": "3.3.0",
"resolved": "https://registry.npmjs.org/gl-matrix/-/gl-matrix-3.3.0.tgz",
"integrity": "sha512-COb7LDz+SXaHtl/h4LeaFcNdJdAQSDeVqjiIihSXNrkWObZLhDI4hIkZC11Aeqp7bcE72clzB0BnDXr2SmslRA=="
}
}
},
"@nodelib/fs.scandir": {
"version": "2.1.4",
"resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.4.tgz",
@ -2057,8 +2117,7 @@
"@types/geojson": {
"version": "7946.0.7",
"resolved": "https://registry.npmjs.org/@types/geojson/-/geojson-7946.0.7.tgz",
"integrity": "sha512-wE2v81i4C4Ol09RtsWFAqg3BUitWbHSpSlIo+bNdsCJijO9sjme+zm+73ZMCa/qMC8UEERxzGbvmr1cffo2SiQ==",
"dev": true
"integrity": "sha512-wE2v81i4C4Ol09RtsWFAqg3BUitWbHSpSlIo+bNdsCJijO9sjme+zm+73ZMCa/qMC8UEERxzGbvmr1cffo2SiQ=="
},
"@types/glob": {
"version": "7.1.3",
@ -2143,6 +2202,14 @@
"integrity": "sha512-DvmZHoHTFJ8zhVYwCLWbQ7uAbYQEk52Ev2/ZiQ7Y7gQGeV9pjBqjnQpECMHfKS1rCYAhMI7LHVxwyZLZinJgdw==",
"dev": true
},
"@types/mapbox-gl": {
"version": "2.4.2",
"resolved": "https://registry.npmjs.org/@types/mapbox-gl/-/mapbox-gl-2.4.2.tgz",
"integrity": "sha512-mKgjmhUN780YGy9ZEyJK0Sr9gMtERmTQimGsIa5WrBHPlBXdmjYfqtz8nSMI7hOnQFphcuSMyqQswaQESFLHsA==",
"requires": {
"@types/geojson": "*"
}
},
"@types/mdast": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.3.tgz",
@ -5284,6 +5351,11 @@
}
}
},
"earcut": {
"version": "2.2.3",
"resolved": "https://registry.npmjs.org/earcut/-/earcut-2.2.3.tgz",
"integrity": "sha512-iRDI1QeCQIhMCZk48DRDMVgQSSBDmbzzNhnxIo+pwx3swkfjMh6vh0nWLq1NdvGHLKH6wIrAM3vQWeTj6qeoug=="
},
"ecc-jsbn": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
@ -6906,6 +6978,11 @@
"resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz",
"integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg=="
},
"geojson-vt": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/geojson-vt/-/geojson-vt-3.2.1.tgz",
"integrity": "sha512-EvGQQi/zPrDA6zr6BnJD/YhwAkBP8nnJ9emh3EnHQKVMfg/MRVtPbMYdgVy/IaEmn4UfagD2a6fafPDL5hbtwg=="
},
"get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
@ -6957,6 +7034,11 @@
"assert-plus": "^1.0.0"
}
},
"gl-matrix": {
"version": "3.4.3",
"resolved": "https://registry.npmjs.org/gl-matrix/-/gl-matrix-3.4.3.tgz",
"integrity": "sha512-wcCp8vu8FT22BnvKVPjXa/ICBWRq/zjFfdofZy1WSpQZpphblv12/bOQLBC1rMM7SGOFS9ltVmKOHil5+Ml7gA=="
},
"glob": {
"version": "7.1.6",
"resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
@ -7029,6 +7111,11 @@
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz",
"integrity": "sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw=="
},
"grid-index": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/grid-index/-/grid-index-1.1.0.tgz",
"integrity": "sha512-HZRwumpOGUrHyxO5bqKZL0B0GlUpwtCAzZ42sgxUPniu33R1LSFH5yrIcBCHjkctCAh3mtWKcKd9J4vDDdeVHA=="
},
"growly": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/growly/-/growly-1.3.0.tgz",
@ -7044,6 +7131,11 @@
"pify": "^4.0.1"
}
},
"hammerjs": {
"version": "2.0.8",
"resolved": "https://registry.npmjs.org/hammerjs/-/hammerjs-2.0.8.tgz",
"integrity": "sha1-BO93hiz/K7edMPdpIJWTAiK/YPE="
},
"handle-thing": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/handle-thing/-/handle-thing-2.0.1.tgz",
@ -9114,6 +9206,11 @@
"object.assign": "^4.1.2"
}
},
"kdbush": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/kdbush/-/kdbush-3.0.0.tgz",
"integrity": "sha512-hRkd6/XW4HTsA9vjVpY9tuXJYLSlelnkTmVFu4M9/7MIYQtFcHpbugAU7UbOfjOiVSVYl2fqgBuJ32JUmRo5Ew=="
},
"keyboard-key": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/keyboard-key/-/keyboard-key-1.1.0.tgz",
@ -9410,11 +9507,81 @@
"object-visit": "^1.0.0"
}
},
"mapbox-gl": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/mapbox-gl/-/mapbox-gl-2.5.1.tgz",
"integrity": "sha512-mUYOe8z+00fWEtBDAtiNeVDH2wsoEQlOK0UskbIPKnG1XRTDYzzofh8f/1BHe1Q3OLxce2TQ+Ou3uo1yAlePaA==",
"requires": {
"@mapbox/geojson-rewind": "^0.5.0",
"@mapbox/geojson-types": "^1.0.2",
"@mapbox/jsonlint-lines-primitives": "^2.0.2",
"@mapbox/mapbox-gl-supported": "^2.0.0",
"@mapbox/point-geometry": "^0.1.0",
"@mapbox/tiny-sdf": "^2.0.2",
"@mapbox/unitbezier": "^0.0.0",
"@mapbox/vector-tile": "^1.3.1",
"@mapbox/whoots-js": "^3.1.0",
"csscolorparser": "~1.0.3",
"earcut": "^2.2.2",
"geojson-vt": "^3.2.1",
"gl-matrix": "^3.3.0",
"grid-index": "^1.1.0",
"minimist": "^1.2.5",
"murmurhash-js": "^1.0.0",
"pbf": "^3.2.1",
"potpack": "^1.0.1",
"quickselect": "^2.0.0",
"rw": "^1.3.3",
"supercluster": "^7.1.3",
"tinyqueue": "^2.0.3",
"vt-pbf": "^3.1.1"
},
"dependencies": {
"@mapbox/mapbox-gl-supported": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/@mapbox/mapbox-gl-supported/-/mapbox-gl-supported-2.0.0.tgz",
"integrity": "sha512-zu4udqYiBrKMQKwpKJ4hhPON7tz0QR/JZ3iGpHnNWFmH3Sv/ysxlICATUtGCFpsyJf2v1WpFhlzaZ3GhhKmPMA=="
},
"@mapbox/tiny-sdf": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/@mapbox/tiny-sdf/-/tiny-sdf-2.0.2.tgz",
"integrity": "sha512-XBQG3wvIaya9t2OHcWLFYv8cdg48roqOj8XhKzKSvAIg5D1scC+a+tlq0wGjPZkL+k6dL8TyOBR7RKDGh3kefQ=="
}
}
},
"mapbox-to-css-font": {
"version": "2.4.0",
"resolved": "https://registry.npmjs.org/mapbox-to-css-font/-/mapbox-to-css-font-2.4.0.tgz",
"integrity": "sha512-v674D0WtpxCXlA6E+sBlG1QJWdUkz/s9qAD91bJSXBGuBL5lL4tJXpoJEftecphCh2SVQCjWMS2vhylc3AIQTg=="
},
"maplibre-gl": {
"version": "2.0.0-pre.1",
"resolved": "https://registry.npmjs.org/maplibre-gl/-/maplibre-gl-2.0.0-pre.1.tgz",
"integrity": "sha512-F1K/BjrRziHl70reObbPJ725NoVqZN5OzoRDqVfk5iVE2cGQ8ZJfH0b3tF3G93lkMIgftpp6NRxVKc0/S6Cj4Q==",
"requires": {
"@mapbox/geojson-rewind": "^0.5.0",
"@mapbox/jsonlint-lines-primitives": "^2.0.2",
"@mapbox/mapbox-gl-supported": "^1.5.0",
"@mapbox/tiny-sdf": "^1.1.1",
"@mapbox/unitbezier": "^0.0.0",
"@mapbox/vector-tile": "^1.3.1",
"@mapbox/whoots-js": "^3.1.0",
"csscolorparser": "~1.0.3",
"earcut": "^2.2.2",
"geojson-vt": "^3.2.1",
"gl-matrix": "^3.2.1",
"grid-index": "^1.1.0",
"minimist": "^1.2.5",
"murmurhash-js": "^1.0.0",
"pbf": "^3.2.1",
"potpack": "^1.0.1",
"quickselect": "^2.0.0",
"rw": "^1.3.3",
"supercluster": "^7.1.0",
"tinyqueue": "^2.0.3",
"vt-pbf": "^3.1.1"
}
},
"md5.js": {
"version": "1.3.5",
"resolved": "https://registry.npmjs.org/md5.js/-/md5.js-1.3.5.tgz",
@ -9858,6 +10025,15 @@
}
}
},
"mjolnir.js": {
"version": "2.6.0",
"resolved": "https://registry.npmjs.org/mjolnir.js/-/mjolnir.js-2.6.0.tgz",
"integrity": "sha512-rGA7+BJKvXI0ypxQD/+rQE/sW26kmc8UIZWhmQrjhwCf/zvhbcBlsu2vPB6w0Kv/rVnVFEONTSQqC0vFEpQvIA==",
"requires": {
"@babel/runtime": "^7.0.0",
"hammerjs": "^2.0.8"
}
},
"mkdirp": {
"version": "0.5.5",
"resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.5.tgz",
@ -9898,6 +10074,11 @@
"resolved": "https://registry.npmjs.org/multicast-dns-service-types/-/multicast-dns-service-types-1.1.0.tgz",
"integrity": "sha1-iZ8R2WhuXgXLkbNdXw5jt3PPyQE="
},
"murmurhash-js": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/murmurhash-js/-/murmurhash-js-1.0.0.tgz",
"integrity": "sha1-sGJ44h/Gw3+lMTcysEEry2rhX1E="
},
"nan": {
"version": "2.14.2",
"resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz",
@ -10440,11 +10621,11 @@
}
},
"ol-mapbox-style": {
"version": "6.3.1",
"resolved": "https://registry.npmjs.org/ol-mapbox-style/-/ol-mapbox-style-6.3.1.tgz",
"integrity": "sha512-hZsvPVkk1Y+qmifxRX/gCaZJ5Mo04vWj6lbFhXpHDloQquHD3kTY0q8o3xbg4FehucuG7HyQteKWeFJRh3FMww==",
"version": "6.5.1",
"resolved": "https://registry.npmjs.org/ol-mapbox-style/-/ol-mapbox-style-6.5.1.tgz",
"integrity": "sha512-diGjCUlYjCA855vJjQjPzxXLn/skm0iQLD2/yDsXaKdNxFd35hNfRm5Li+Vxh/FxraCodxRvd8IplhrhvXoqbQ==",
"requires": {
"@mapbox/mapbox-gl-style-spec": "^13.14.0",
"@mapbox/mapbox-gl-style-spec": "^13.20.1",
"mapbox-to-css-font": "^2.4.0",
"webfont-matcher": "^1.1.0"
}
@ -11972,6 +12153,11 @@
"uniq": "^1.0.1"
}
},
"potpack": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/potpack/-/potpack-1.0.1.tgz",
"integrity": "sha512-15vItUAbViaYrmaB/Pbw7z6qX2xENbFSTA7Ii4tgbPtasxm5v6ryKhKtL91tpWovDJzTiZqdwzhcFBCwiMVdVw=="
},
"prelude-ls": {
"version": "1.2.1",
"resolved": "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz",
@ -12437,6 +12623,21 @@
"resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz",
"integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ=="
},
"react-map-gl": {
"version": "6.1.17",
"resolved": "https://registry.npmjs.org/react-map-gl/-/react-map-gl-6.1.17.tgz",
"integrity": "sha512-SKuMtvs7aQpHMJehf/GzUQnEhPWRIypTX7X2wVXGME2RBKdY0PnC1YTiy8W3aA4uwcolPvCCYB+ki3xal51ZXQ==",
"requires": {
"@babel/runtime": "^7.0.0",
"@types/geojson": "^7946.0.7",
"@types/mapbox-gl": "^2.0.3",
"mapbox-gl": "^2.3.0",
"mjolnir.js": "^2.5.0",
"prop-types": "^15.7.2",
"resize-observer-polyfill": "^1.5.1",
"viewport-mercator-project": "^7.0.4"
}
},
"react-markdown": {
"version": "5.0.3",
"resolved": "https://registry.npmjs.org/react-markdown/-/react-markdown-5.0.3.tgz",
@ -12958,6 +13159,11 @@
"resolved": "https://registry.npmjs.org/requires-port/-/requires-port-1.0.0.tgz",
"integrity": "sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8="
},
"resize-observer-polyfill": {
"version": "1.5.1",
"resolved": "https://registry.npmjs.org/resize-observer-polyfill/-/resize-observer-polyfill-1.5.1.tgz",
"integrity": "sha512-LwZrotdHOo12nQuZlHEmtuXdqGoOD0OhaxopaNFxWzInpEgaLWoVuAMbTzixuosCx2nEG58ngzW3vxdWoxIgdg=="
},
"resolve": {
"version": "1.18.1",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.18.1.tgz",
@ -14620,6 +14826,14 @@
}
}
},
"supercluster": {
"version": "7.1.4",
"resolved": "https://registry.npmjs.org/supercluster/-/supercluster-7.1.4.tgz",
"integrity": "sha512-GhKkRM1jMR6WUwGPw05fs66pOFWhf59lXq+Q3J3SxPvhNcmgOtLRV6aVQPMRsmXdpaeFJGivt+t7QXUPL3ff4g==",
"requires": {
"kdbush": "^3.0.0"
}
},
"supports-color": {
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
@ -14963,6 +15177,11 @@
"resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz",
"integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA=="
},
"tinyqueue": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/tinyqueue/-/tinyqueue-2.0.3.tgz",
"integrity": "sha512-ppJZNDuKGgxzkHihX8v9v9G5f+18gzaTfrukGrq6ueg0lmH4nqVnA2IPG0AEH3jKEk2GRJCUhDoqpoiw3PHLBA=="
},
"tmpl": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz",
@ -15577,11 +15796,29 @@
"unist-util-stringify-position": "^2.0.0"
}
},
"viewport-mercator-project": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/viewport-mercator-project/-/viewport-mercator-project-7.0.4.tgz",
"integrity": "sha512-0jzpL6pIMocCKWg1C3mqi/N4UPgZC3FzwghEm1H+XsUo8hNZAyJc3QR7YqC816ibOR8aWT5pCsV+gCu8/BMJgg==",
"requires": {
"@math.gl/web-mercator": "^3.5.5"
}
},
"vm-browserify": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/vm-browserify/-/vm-browserify-1.1.2.tgz",
"integrity": "sha512-2ham8XPWTONajOR0ohOKOHXkm3+gaBmGut3SRuu75xLd/RRaY6vqgh8NBYYk7+RW3u5AtzPQZG8F10LHkl0lAQ=="
},
"vt-pbf": {
"version": "3.1.3",
"resolved": "https://registry.npmjs.org/vt-pbf/-/vt-pbf-3.1.3.tgz",
"integrity": "sha512-2LzDFzt0mZKZ9IpVF2r69G9bXaP2Q2sArJCmcCgvfTdCCZzSyz4aCLoQyUilu37Ll56tCblIZrXFIjNUpGIlmA==",
"requires": {
"@mapbox/point-geometry": "0.1.0",
"@mapbox/vector-tile": "^1.3.1",
"pbf": "^3.2.1"
}
},
"w3c-hr-time": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz",

View file

@ -13,13 +13,16 @@
"classnames": "^2.3.1",
"downloadjs": "^1.4.7",
"luxon": "^1.27.0",
"maplibre-gl": "^2.0.0-pre.1",
"node-sass": "^4.14.1",
"ol": "^6.5.0",
"ol-mapbox-style": "^6.5.1",
"pkce": "^1.0.0-beta2",
"proj4": "^2.7.2",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"react-hook-form": "^6.15.7",
"react-map-gl": "^6.1.17",
"react-markdown": "^5.0.3",
"react-redux": "^7.2.4",
"react-router-dom": "^5.2.0",

View file

@ -22,7 +22,7 @@ proj4.defs(
)
register(proj4)
const MapContext = React.createContext()
export const MapContext = React.createContext()
const MapLayerContext = React.createContext()
export function Map({children, ...props}) {
@ -162,6 +162,7 @@ Map.GroupLayer = GroupLayer
Map.TileLayer = TileLayer
Map.VectorLayer = VectorLayer
Map.View = View
Map.Layer = Layer
Map.BaseLayer = BaseLayer
export default Map

View file

@ -2,6 +2,11 @@ import React from 'react'
interface Config {
apiUrl: string
mapHome: {
latitude: number
longitude: number
zoom: number
}
}
async function loadConfig(): Promise<Config> {

File diff suppressed because it is too large

View file

@ -0,0 +1,61 @@
import _ from 'lodash'
import bright from './bright.json'
function getRoadsStyle(sourceUrl = "http://localhost:3002/data/v3.json") {
return {
"version": 8,
"name": "OBS Roads",
"sources": {
"obs": {"type": "vector", "url": sourceUrl}
},
"layers": [
{
"id": "obs",
"type": "line",
"source": "obs",
"source-layer": "obs_roads",
"layout": {"line-cap": "round", "line-join": "round"},
"paint": {
"line-width": {"stops": [[14, 2], [17, 8]]},
"line-color": [
"interpolate-hcl",
["linear"],
["get", "distance_overtaker_mean"],
1,
"rgba(255, 0, 0, 1)",
1.3,
"rgba(255, 200, 0, 1)",
1.5,
"rgba(67, 200, 0, 1)",
1.7,
"rgba(67, 150, 0, 1)"
],
"line-opacity": 1,
"line-offset": {"stops": [[14, 1], [17, 7]]}
}
}
],
"id": "obs-roads"
}
}
function mergeStyles(baseStyle, ...extensions) {
const style = _.cloneDeep(baseStyle)
for (const extension of extensions) {
for (const key of Object.keys(extension)) {
if (['sources', 'layers', 'id', 'name', 'version'].includes(key)) {
continue
}
throw new Error(`cannot use style ${extension.id ?? extension.name} as extension style, it defines ${key}`)
}
style.sources = {...style.sources, ...extension.sources}
style.layers = [...style.layers, ...extension.layers]
}
return style
}
export const basemap = bright
export const obsRoads = (sourceUrl) => mergeStyles(basemap, getRoadsStyle(sourceUrl))

View file

@ -1,6 +1,7 @@
.welcomeMap {
height: 60rem;
max-height: 70vh;
position: relative;
@media screen and (max-width: 767px) {
margin: -35px -32px 0 -32px;

View file

@ -4,21 +4,37 @@ import {Message, Grid, Loader, Header, Item} from 'semantic-ui-react'
import {useObservable} from 'rxjs-hooks'
import {of, from} from 'rxjs'
import {map, switchMap} from 'rxjs/operators'
import {fromLonLat} from 'ol/proj'
import api from 'api'
import {Stats, Map, Page, RoadsLayer} from 'components'
import {Stats, Page} from 'components'
import {useConfig} from 'config'
import {TrackListItem} from './TracksPage'
import styles from './HomePage.module.scss'
import 'ol/ol.css';
import {obsRoads} from '../mapstyles'
import ReactMapGl from 'react-map-gl'
function WelcomeMap() {
const config = useConfig()
const mapStyle = React.useMemo(() => obsRoads(), [])
const [viewport, setViewport] = React.useState({
longitude: 0,
latitude: 0,
zoom: 0,
});
React.useEffect(() => {
if (config?.mapHome) {
setViewport(config.mapHome)
}
}, [config])
return (
<Map className={styles.welcomeMap}>
<RoadsLayer />
<Map.BaseLayer />
<Map.View />
</Map>
<div className={styles.welcomeMap}>
<ReactMapGl mapStyle={mapStyle} width="100%" height="100%" onViewportChange={setViewport} {...viewport} />
</div>
)
}

View file

@ -155,7 +155,7 @@ export default function TrackMap({trackData, show, ...props}: {trackData: TrackD
const trackPointsD2: Feature<Point>[] = []
const trackPointsUntaggedD1: Feature<Point>[] = []
const trackPointsUntaggedD2: Feature<Point>[] = []
const filteredPoints: TrackPoint[] = trackData?.allMeasurements?.features.filter(isValidTrackPoint) ?? []
const filteredPoints: TrackPoint[] = trackData?.measurements?.features.filter(isValidTrackPoint) ?? []
for (const feature of filteredPoints) {
const {
@ -198,7 +198,7 @@ export default function TrackMap({trackData, show, ...props}: {trackData: TrackD
const viewExtent = points.length ? trackVectorSource.getExtent() : null
return {trackVectorSource, trackPointsD1, trackPointsD2, trackPointsUntaggedD1, trackPointsUntaggedD2, viewExtent}
}, [trackData?.allMeasurements?.features])
}, [trackData?.measurements?.features])
return (
<Map {...props}>

View file

@ -8,8 +8,8 @@ export type UserProfile = {
export type TrackData = {
track: Feature<LineString>,
allMeasurements: FeatureCollection,
confirmedMeasurements: FeatureCollection,
measurements: FeatureCollection,
overtakingEvents: FeatureCollection,
}
export type TrackStatistics = {

22
tile-generator/.gitignore vendored Normal file
View file

@ -0,0 +1,22 @@
# Mapnik XML
data.xml
*.thumb.png
*.swp
*.pbf
# quickstart
quickstart.log
# input / output data
data/*
# generated source files
build/*
# Import cache
cache/*
# any IDE files
.idea/
.vscode/

60
tile-generator/LICENSE.md Normal file
View file

@ -0,0 +1,60 @@
Copyright (c) 2016, KlokanTech.com & OpenMapTiles contributors.
All rights reserved.
The vector tile schema has been developed by Klokan Technologies GmbH and
was initially modelled after the cartography of the CARTO's Positron basemap
with permission from CartoDB Inc.
The vector tile schema has been refined and improved in cooperation with
the Wikimedia Foundation and is heavily influenced by years of
Paul Norman's experience of creating maps from OpenStreetMap data.
# Code license: BSD 3-Clause License
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Design license: CC-BY 4.0
The cartography and visual design features of the map tile schema (also known as
the "look and feel" of the map) are licensed under the Creative Commons
Attribution 4.0 license.
To view a copy of the license, visit http://creativecommons.org/licenses/by/4.0/.
Products or services using maps derived from OpenMapTiles schema need to visibly
credit "OpenMapTiles.org" or reference "OpenMapTiles" with a link to
http://openmaptiles.org/.
For a browsable electronic map based on OpenMapTiles and OpenStreetMap data, the
credit should appear in the corner of the map. For example:
[© OpenMapTiles](http://openmaptiles.org/) [© OpenStreetMap contributors](http://www.openstreetmap.org/copyright)
For printed and static maps a similar attribution should be made in a textual
description near the image, in the same fashion as if you cite a photograph.
Exceptions to OpenMapTiles attribution requirement can be in a written form granted
by Klokan Technologies GmbH (info@klokantech.com).
The project contributors grant Klokan Technologies GmbH the license to give such
exceptions on a commercial basis.

591
tile-generator/Makefile Normal file
View file

@ -0,0 +1,591 @@
#
# First section - common variable initialization
#
# Ensure that errors don't hide inside pipes
SHELL = /bin/bash
.SHELLFLAGS = -o pipefail -c
# Make all .env variables available for make targets
include .env
# Layers definition and meta data
TILESET_FILE ?= openmaptiles.yaml
# Options to run with docker and docker-compose - ensure the container is destroyed on exit
# Containers run as the current user rather than root (so that created files are not root-owned)
DC_OPTS ?= --rm --user=$(shell id -u):$(shell id -g)
# If set to a non-empty value, will use postgis-preloaded instead of postgis docker image
USE_PRELOADED_IMAGE ?=
# Local port to use with postserve
PPORT ?= 8090
export PPORT
# Local port to use with tileserver
TPORT ?= 8080
export TPORT
# Allow a custom docker-compose project name
ifeq ($(strip $(DC_PROJECT)),)
DC_PROJECT := $(notdir $(shell pwd))
DOCKER_COMPOSE := docker-compose
else
DOCKER_COMPOSE := docker-compose --project-name $(DC_PROJECT)
endif
# Make some operations quieter (e.g. inside the test script)
ifeq ($(strip $(QUIET)),)
QUIET_FLAG :=
else
QUIET_FLAG := --quiet
endif
# Use `xargs --no-run-if-empty` flag, if supported
XARGS := xargs $(shell xargs --no-run-if-empty </dev/null 2>/dev/null && echo --no-run-if-empty)
# If running in the test mode, compare files rather than copy them
TEST_MODE?=no
ifeq ($(TEST_MODE),yes)
# create images in ./build/devdoc and compare them to ./layers
GRAPH_PARAMS=./build/devdoc ./layers
else
# update graphs in the ./layers dir
GRAPH_PARAMS=./layers
endif
# Set OpenMapTiles host
OMT_HOST := http://$(firstword $(subst :, ,$(subst tcp://,,$(DOCKER_HOST))) localhost)
export OMT_HOST
# This defines an easy $(newline) value to act as a "\n". Make sure to keep exactly two empty lines after newline.
define newline


endef
# use the old postgres connection values if they are existing
PGHOST := $(or $(POSTGRES_HOST),$(PGHOST))
PGPORT := $(or $(POSTGRES_PORT),$(PGPORT))
PGDATABASE := $(or $(POSTGRES_DB),$(PGDATABASE))
PGUSER := $(or $(POSTGRES_USER),$(PGUSER))
PGPASSWORD := $(or $(POSTGRES_PASSWORD),$(PGPASSWORD))
#
# Determine area to work on
# If $(area) parameter is not set, and only one *.osm.pbf file is found in ./data, use it as $(area).
# Otherwise, all make targets requiring an area will show an error.
# Note: If no *.osm.pbf files are found, once the users call "make download area=..."
# they will not need to use an "area=" parameter again because there will be just a single file.
#
# historically we have been using $(area) rather than $(AREA), so make both work
area ?= $(AREA)
# Ensure the $(area) param is set, or try to automatically determine it based on available data files
ifeq ($(strip $(area)),)
# An $(area) parameter is not set. If only one *.osm.pbf file is found in ./data, use it as $(area).
data_files := $(shell find data -name '*.osm.pbf' 2>/dev/null)
ifneq ($(word 2,$(data_files)),)
define assert_area_is_given
@echo ""
@echo "ERROR: The 'area' parameter or environment variable have not been set, and there several 'area' options:"
@$(patsubst data/%.osm.pbf,echo " '%'";,$(data_files))
@echo ""
@echo "To specify an area use:"
@echo " make $@ area=<area-id>"
@echo ""
@exit 1
endef
else
ifeq ($(word 1,$(data_files)),)
define assert_area_is_given
@echo ""
@echo "ERROR: The 'area' parameter (or env var) has not been set, and there are no data/*.osm.pbf files"
@echo ""
@echo "To specify an area use"
@echo " make $@ area=<area-id>"
@echo ""
@echo "To download an area, use make download area=<area-id>"
@echo "To list downloadable areas, use make list-geofabrik and/or make list-bbbike"
@echo ""
@exit 1
endef
else
# Keep just the name of the data file, without the .osm.pbf extension
area := $(patsubst data/%.osm.pbf,%,$(data_files))
# Rename area-latest.osm.pbf to area.osm.pbf
# TODO: This if statement could be removed in a few months once everyone is using the file without the `-latest`?
ifneq ($(area),$(area:-latest=))
$(shell mv "data/$(area).osm.pbf" "data/$(area:-latest=).osm.pbf")
area := $(area:-latest=)
$(warning ATTENTION: File data/$(area)-latest.osm.pbf was renamed to $(area).osm.pbf.)
AREA_INFO := Detected area=$(area) based on finding a 'data/$(area)-latest.osm.pbf' file - renamed to '$(area).osm.pbf'. Use 'area' parameter or environment variable to override.
else
AREA_INFO := Detected area=$(area) based on finding a 'data/$(area).osm.pbf' file. Use 'area' parameter or environment variable to override.
endif
endif
endif
endif
ifneq ($(strip $(AREA_INFO)),)
define assert_area_is_given
@echo "$(AREA_INFO)"
endef
endif
# If set, this file will be downloaded in download-osm and imported in the import-osm targets
PBF_FILE ?= data/$(area).osm.pbf
# For download-osm, allow URL parameter to download file from a given URL. Area param must still be provided.
ifneq ($(strip $(url)),)
DOWNLOAD_AREA := $(url)
else
DOWNLOAD_AREA := $(area)
endif
# The file is placed into the $EXPORT_DIR=/export (mapped to ./data)
export MBTILES_FILE ?= $(area).mbtiles
MBTILES_LOCAL_FILE = data/$(MBTILES_FILE)
ifeq ($(strip $(DIFF_MODE)),true)
# import-osm implementation requires IMPOSM_CONFIG_FILE to be set to a valid file
# For static (no-updates) import, we don't need to override the default value
# For the update mode, set location of the dynamically-generated area-based config file
export IMPOSM_CONFIG_FILE = data/$(area).repl.json
endif
# Load area-specific bbox file that gets generated by the download-osm --bbox
AREA_BBOX_FILE ?= data/$(area).bbox
ifneq (,$(wildcard $(AREA_BBOX_FILE)))
cat := $(if $(filter $(OS),Windows_NT),type,cat)
BBOX := $(shell $(cat) ${AREA_BBOX_FILE})
export BBOX
endif
define HELP_MESSAGE
==============================================================================
OpenMapTiles https://github.com/openmaptiles/openmaptiles
Hints for testing areas
make list-geofabrik # list actual geofabrik OSM extracts for download -> <<your-area>>
./quickstart.sh <<your-area>> # example: ./quickstart.sh madagascar
Hints for designers:
make start-maputnik # start Maputnik Editor + dynamic tile server [ see $(OMT_HOST):8088 ]
make start-postserve # start dynamic tile server [ see $(OMT_HOST):$(PPORT) ]
make stop-postserve # stop dynamic tile server
make start-tileserver # start maptiler/tileserver-gl [ see $(OMT_HOST):$(TPORT) ]
Hints for developers:
make # build source code
make bash # start openmaptiles-tools /bin/bash terminal
make generate-bbox-file # compute bounding box of a data file and store it in a file
make generate-devdoc # generate devdoc including graphs for all layers [./layers/...]
make generate-qa # statistics for a given layer's field
make generate-tiles-pg # generate vector tiles based on .env settings using PostGIS ST_MVT()
make generate-tiles # generate vector tiles based on .env settings using Mapnik (obsolete)
cat .env # list PG database and MIN_ZOOM and MAX_ZOOM information
cat quickstart.log # transcript of the last ./quickstart.sh run
make help # help about available commands
Hints for downloading & importing data:
make list-geofabrik # list actual geofabrik OSM extracts for download
make list-bbbike # list actual BBBike OSM extracts for download
make download area=albania # download OSM data from any source and create config file
make download-geofabrik area=albania # download OSM data from geofabrik.de and create config file
make download-osmfr area=asia/qatar # download OSM data from openstreetmap.fr and create config file
make download-bbbike area=Amsterdam # download OSM data from bbbike.org and create config file
make import-data # Import data from OpenStreetMapData, Natural Earth and OSM Lake Labels.
make import-osm # Import OSM data with the mapping rules from build/mapping.yaml
make import-wikidata # Import labels from Wikidata
make import-sql # Import layers (run this after modifying layer SQL)
Hints for database management:
make psql # start PostgreSQL console
make psql-list-tables # list all PostgreSQL tables
make list-views # list PostgreSQL public schema views
make list-tables # list PostgreSQL public schema tables
make vacuum-db # PostgreSQL: VACUUM ANALYZE
make analyze-db # PostgreSQL: ANALYZE
make destroy-db # remove docker containers and PostgreSQL data volume
make start-db # start PostgreSQL, creating it if it doesn't exist
make start-db-preloaded # start PostgreSQL, creating data-prepopulated one if it doesn't exist
make stop-db # stop PostgreSQL database without destroying the data
Hints for Docker management:
make clean-unnecessary-docker # clean unnecessary docker image(s) and container(s)
make refresh-docker-images # refresh openmaptiles docker images from Docker HUB
make remove-docker-images # remove openmaptiles docker images
make list-docker-images # show a list of available docker images
==============================================================================
endef
export HELP_MESSAGE
#
# TARGETS
#
.PHONY: all
all: init-dirs build/openmaptiles.tm2source/data.yml build/mapping.yaml build-sql
.PHONY: help
help:
@echo "$$HELP_MESSAGE" | less
define win_fs_error
( \
echo "" ;\
echo "ERROR: Windows native filesystem" ;\
echo "" ;\
echo "Please avoid running OpenMapTiles in a Windows filesystem." ;\
echo "See https://github.com/openmaptiles/openmaptiles/issues/1095#issuecomment-817095465" ;\
echo "" ;\
exit 1 ;\
)
endef
.PHONY: init-dirs
init-dirs:
@mkdir -p build/sql/parallel
@mkdir -p build/openmaptiles.tm2source
@mkdir -p data
@mkdir -p cache
@ ! ($(DOCKER_COMPOSE) 2>/dev/null run $(DC_OPTS) openmaptiles-tools df --output=fstype /tileset| grep -q 9p) < /dev/null || ($(win_fs_error))
build/openmaptiles.tm2source/data.yml: init-dirs
ifeq (,$(wildcard build/openmaptiles.tm2source/data.yml))
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
'generate-tm2source $(TILESET_FILE) --host="$(PGHOST)" --port=$(PGPORT) --database="$(PGDATABASE)" --user="$(PGUSER)" --password="$(PGPASSWORD)" > $@'
endif
build/mapping.yaml: init-dirs
ifeq (,$(wildcard build/mapping.yaml))
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
'generate-imposm3 $(TILESET_FILE) > $@'
endif
.PHONY: build-sql
build-sql: init-dirs
ifeq (,$(wildcard build/sql/run_last.sql))
@mkdir -p build/sql/parallel
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash -c \
'generate-sql $(TILESET_FILE) --dir ./build/sql \
&& generate-sqltomvt $(TILESET_FILE) \
--key --gzip --postgis-ver 3.0.1 \
--function --fname=getmvt >> ./build/sql/run_last.sql'
endif
.PHONY: clean
clean:
rm -rf build
.PHONY: destroy-db
# TODO: Use https://stackoverflow.com/a/27852388/177275
destroy-db: DC_PROJECT := $(shell echo $(DC_PROJECT) | tr A-Z a-z)
destroy-db:
$(DOCKER_COMPOSE) down -v --remove-orphans
$(DOCKER_COMPOSE) rm -fv
docker volume ls -q -f "name=^$(DC_PROJECT)_" | $(XARGS) docker volume rm
rm -rf cache
mkdir cache
.PHONY: start-db-nowait
start-db-nowait: init-dirs
@echo "Starting postgres docker compose target using $${POSTGIS_IMAGE:-default} image (no recreate if exists)" && \
$(DOCKER_COMPOSE) up --no-recreate -d postgres
.PHONY: start-db
start-db: start-db-nowait
@echo "Wait for PostgreSQL to start..."
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools pgwait
# Wrap start-db target but use the preloaded image
.PHONY: start-db-preloaded
start-db-preloaded: export POSTGIS_IMAGE=openmaptiles/postgis-preloaded
start-db-preloaded: export COMPOSE_HTTP_TIMEOUT=180
start-db-preloaded: start-db
.PHONY: stop-db
stop-db:
@echo "Stopping PostgreSQL..."
$(DOCKER_COMPOSE) stop postgres
.PHONY: list-geofabrik
list-geofabrik: init-dirs
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm list geofabrik
.PHONY: list-bbbike
list-bbbike: init-dirs
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm list bbbike
#
# download, download-geofabrik, download-osmfr, and download-bbbike are handled here
# The --imposm-cfg will fail for some of the sources, but we ignore that error -- only needed for diff mode
#
OSM_SERVERS := geofabrik osmfr bbbike
ALL_DOWNLOADS := $(addprefix download-,$(OSM_SERVERS)) download
OSM_SERVER=$(patsubst download,,$(patsubst download-%,%,$@))
.PHONY: $(ALL_DOWNLOADS)
$(ALL_DOWNLOADS): init-dirs
@$(assert_area_is_given)
ifneq ($(strip $(url)),)
$(if $(OSM_SERVER),$(error url parameter can only be used with non-specific download target:$(newline) make download area=$(area) url="$(url)"$(newline)))
endif
ifeq (,$(wildcard $(PBF_FILE)))
ifeq ($(strip $(DIFF_MODE)),true)
@echo "Downloading $(DOWNLOAD_AREA) with replication support into $(PBF_FILE) and $(IMPOSM_CONFIG_FILE) from $(if $(OSM_SERVER),$(OSM_SERVER),any source)"
@$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm $(OSM_SERVER) "$(DOWNLOAD_AREA)" \
--imposm-cfg "$(IMPOSM_CONFIG_FILE)" \
--bbox "$(AREA_BBOX_FILE)" \
--output "$(PBF_FILE)"
else
@echo "Downloading $(DOWNLOAD_AREA) into $(PBF_FILE) from $(if $(OSM_SERVER),$(OSM_SERVER),any source)"
@$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm $(OSM_SERVER) "$(DOWNLOAD_AREA)" \
--bbox "$(AREA_BBOX_FILE)" \
--output "$(PBF_FILE)"
endif
@echo ""
else
ifeq ($(strip $(DIFF_MODE)),true)
ifeq (,$(wildcard $(IMPOSM_CONFIG_FILE)))
$(error \
$(newline) Data file $(PBF_FILE) already exists, but $(IMPOSM_CONFIG_FILE) does not. \
$(newline) You probably downloaded the data file before setting DIFF_MODE=true. \
$(newline) You can delete the data file $(PBF_FILE) and re-run make download \
$(newline) to re-download and generate config, or manually create $(IMPOSM_CONFIG_FILE) \
$(newline) See example https://github.com/openmaptiles/openmaptiles-tools/blob/v5.2/bin/config/repl_config.json \
$(newline))
else
@echo "Data files $(PBF_FILE) and replication config $(IMPOSM_CONFIG_FILE) already exists, skipping the download."
endif
else
@echo "Data files $(PBF_FILE) already exists, skipping the download."
endif
endif
.PHONY: generate-bbox-file
generate-bbox-file:
@$(assert_area_is_given)
ifeq (,$(wildcard $(AREA_BBOX_FILE)))
@$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools download-osm bbox "$(PBF_FILE)" "$(AREA_BBOX_FILE)"
else
@echo "Configuration file $(AREA_BBOX_FILE) already exists, no need to regenerate. BBOX=$(BBOX)"
endif
.PHONY: psql
psql: start-db-nowait
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c 'pgwait && psql.sh'
# Special cache handling for Docker Toolbox on Windows
ifeq ($(MSYSTEM),MINGW64)
DC_CONFIG_CACHE := -f docker-compose.yml -f docker-compose-$(MSYSTEM).yml
DC_OPTS_CACHE := $(strip $(filter-out --user=%,$(DC_OPTS)))
else
DC_OPTS_CACHE := $(DC_OPTS)
endif
.PHONY: import-osm
import-osm: all start-db-nowait
@$(assert_area_is_given)
$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-osm $(PBF_FILE)'
.PHONY: update-osm
update-osm: all start-db-nowait
@$(assert_area_is_given)
$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-update'
.PHONY: import-diff
import-diff: all start-db-nowait
@$(assert_area_is_given)
$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools sh -c 'pgwait && import-diff'
.PHONY: import-data
import-data: start-db
$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) import-data
.PHONY: import-sql
import-sql: all start-db-nowait
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c 'pgwait && import-sql' | \
awk -v s=": WARNING:" '1{print; fflush()} $$0~s{print "\n*** WARNING detected, aborting"; exit(1)}' | \
awk '1{print; fflush()} $$0~".*ERROR" {txt=$$0} END{ if(txt){print "\n*** ERROR detected, aborting:"; print txt; exit(1)} }'
.PHONY: generate-tiles
generate-tiles: all start-db
@echo "WARNING: This Mapnik-based method of tile generation is obsolete. Use generate-tiles-pg instead."
@echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists)..."
@rm -rf "$(MBTILES_LOCAL_FILE)"
$(DOCKER_COMPOSE) run $(DC_OPTS) generate-vectortiles
@echo "Updating generated tile metadata ..."
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
mbtiles-tools meta-generate "$(MBTILES_LOCAL_FILE)" $(TILESET_FILE) --auto-minmax --show-ranges
.PHONY: generate-tiles-pg
generate-tiles-pg: all start-db
@echo "Generating tiles into $(MBTILES_LOCAL_FILE) (will delete if already exists) using PostGIS ST_MVT()..."
@rm -rf "$(MBTILES_LOCAL_FILE)"
# For some reason Ctrl+C doesn't work here without the -T. Must be pressed twice to stop.
$(DOCKER_COMPOSE) run -T $(DC_OPTS) openmaptiles-tools generate-tiles
@echo "Updating generated tile metadata ..."
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
mbtiles-tools meta-generate "$(MBTILES_LOCAL_FILE)" $(TILESET_FILE) --auto-minmax --show-ranges
.PHONY: start-tileserver
start-tileserver: init-dirs
@echo " "
@echo "***********************************************************"
@echo "* "
@echo "* Download/refresh maptiler/tileserver-gl docker image"
@echo "* see documentation: https://github.com/maptiler/tileserver-gl"
@echo "* "
@echo "***********************************************************"
@echo " "
docker pull maptiler/tileserver-gl
@echo " "
@echo "***********************************************************"
@echo "* "
@echo "* Start maptiler/tileserver-gl "
@echo "* ----------------------------> check $(OMT_HOST):$(TPORT) "
@echo "* "
@echo "***********************************************************"
@echo " "
docker run $(DC_OPTS) -it --name tileserver-gl -v $$(pwd)/data:/data -p $(TPORT):$(TPORT) maptiler/tileserver-gl --port $(TPORT)
.PHONY: start-postserve
start-postserve: start-db
@echo " "
@echo "***********************************************************"
@echo "* "
@echo "* Bring up postserve at $(OMT_HOST):$(PPORT)"
@echo "* --> can view it locally (use make start-maputnik)"
@echo "* --> or can use https://maputnik.github.io/editor"
@echo "* "
@echo "* set data source / TileJSON URL to $(OMT_HOST):$(PPORT)"
@echo "* "
@echo "***********************************************************"
@echo " "
$(DOCKER_COMPOSE) up -d postserve
.PHONY: stop-postserve
stop-postserve:
$(DOCKER_COMPOSE) stop postserve
.PHONY: start-maputnik
start-maputnik: stop-maputnik start-postserve
@echo " "
@echo "***********************************************************"
@echo "* "
@echo "* Start maputnik/editor "
@echo "* ---> go to $(OMT_HOST):8088 "
@echo "* ---> set data source / TileJSON URL to $(OMT_HOST):$(PPORT)"
@echo "* "
@echo "***********************************************************"
@echo " "
docker run $(DC_OPTS) --name maputnik_editor -d -p 8088:8888 maputnik/editor
.PHONY: stop-maputnik
stop-maputnik:
-docker rm -f maputnik_editor
# STAT_FUNCTION=frequency|toplength|variance
.PHONY: generate-qa
generate-qa: all start-db-nowait
@echo " "
@echo "e.g. make generate-qa STAT_FUNCTION=frequency LAYER=transportation ATTRIBUTE=class"
@echo " "
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools \
layer-stats $(STAT_FUNCTION) $(TILESET_FILE) $(LAYER) $(ATTRIBUTE) -m 0 -n 14 -v
# generate all etl and mapping graphs
.PHONY: generate-devdoc
generate-devdoc: init-dirs
mkdir -p ./build/devdoc && \
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools sh -c \
'generate-etlgraph $(TILESET_FILE) $(GRAPH_PARAMS) && \
generate-mapping-graph $(TILESET_FILE) $(GRAPH_PARAMS)'
.PHONY: bash
bash: init-dirs
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools bash
.PHONY: import-wikidata
import-wikidata: init-dirs
$(DOCKER_COMPOSE) $(DC_CONFIG_CACHE) run $(DC_OPTS_CACHE) openmaptiles-tools import-wikidata --cache /cache/wikidata-cache.json $(TILESET_FILE)
.PHONY: reset-db-stats
reset-db-stats: init-dirs
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c 'SELECT pg_stat_statements_reset();'
.PHONY: list-views
list-views: init-dirs
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -A -F"," -P pager=off -P footer=off \
-c "select viewname from pg_views where schemaname='public' order by viewname;"
.PHONY: list-tables
list-tables: init-dirs
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -A -F"," -P pager=off -P footer=off \
-c "select tablename from pg_tables where schemaname='public' order by tablename;"
.PHONY: psql-list-tables
psql-list-tables: init-dirs
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c "\d+"
.PHONY: vacuum-db
vacuum-db: init-dirs
@echo "Start - postgresql: VACUUM ANALYZE VERBOSE;"
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c 'VACUUM ANALYZE VERBOSE;'
.PHONY: analyze-db
analyze-db: init-dirs
@echo "Start - postgresql: ANALYZE VERBOSE;"
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools psql.sh -v ON_ERROR_STOP=1 -P pager=off -c 'ANALYZE VERBOSE;'
.PHONY: list-docker-images
list-docker-images:
docker images | grep openmaptiles
.PHONY: refresh-docker-images
refresh-docker-images: init-dirs
ifneq ($(strip $(NO_REFRESH)),)
@echo "Skipping docker image refresh"
else
@echo ""
@echo "Refreshing docker images... Use NO_REFRESH=1 to skip."
ifneq ($(strip $(USE_PRELOADED_IMAGE)),)
POSTGIS_IMAGE=openmaptiles/postgis-preloaded \
docker-compose pull --ignore-pull-failures $(QUIET_FLAG) openmaptiles-tools generate-vectortiles postgres
else
docker-compose pull --ignore-pull-failures $(QUIET_FLAG) openmaptiles-tools generate-vectortiles postgres import-data
endif
endif
.PHONY: remove-docker-images
remove-docker-images:
@echo "Deleting all openmaptiles related docker image(s)..."
@$(DOCKER_COMPOSE) down
@docker images "openmaptiles/*" -q | $(XARGS) docker rmi -f
@docker images "maputnik/editor" -q | $(XARGS) docker rmi -f
@docker images "maptiler/tileserver-gl" -q | $(XARGS) docker rmi -f
.PHONY: clean-unnecessary-docker
clean-unnecessary-docker:
@echo "Deleting unnecessary container(s)..."
@docker ps -a -q --filter "status=exited" | $(XARGS) docker rm
@echo "Deleting unnecessary image(s)..."
@docker images | awk -F" " '/<none>/{print $$3}' | $(XARGS) docker rmi
.PHONY: test-perf-null
test-perf-null: init-dirs
$(DOCKER_COMPOSE) run $(DC_OPTS) openmaptiles-tools test-perf $(TILESET_FILE) --test null --no-color
.PHONY: build-test-pbf
build-test-pbf: init-dirs
docker-compose run $(DC_OPTS) openmaptiles-tools /tileset/.github/workflows/build-test-data.sh
.PHONY: debug
debug: ## Use this target when developing Makefile itself to verify loaded environment variables
@$(assert_area_is_given)
@echo file_exists = $(wildcard $(AREA_BBOX_FILE))
@echo AREA_BBOX_FILE = $(AREA_BBOX_FILE) , $$AREA_ENV_FILE
@echo BBOX = $(BBOX) , $$BBOX
@echo MIN_ZOOM = $(MIN_ZOOM) , $$MIN_ZOOM
@echo MAX_ZOOM = $(MAX_ZOOM) , $$MAX_ZOOM

84
tile-generator/README.md Normal file
View file

@@ -0,0 +1,84 @@
# Tile Generation
To display the collected data, we generate vector tiles that can be rendered by
different map renderers, such as
[maplibre-gl-js](https://github.com/MapLibre/maplibre-gl-js) or
[QGIS](https://www.qgis.org/en/site/).
The whole process requires a dockerized setup. You can try to install and run
the tools without Docker, but that is likely to be complicated, and we're not
documenting it here.
## Data sources
There are two main sources of data. Both feed into separate tables of the same
PostgreSQL database, so that they can be joined for processing.
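As an illustration of how these tables relate, the following sketch computes a
per-road statistic by joining collected events to imported roads via the OSM
way ID (column names as used by the layer queries later in this commit):

```sql
-- Sketch: per-road event count and mean overtaker distance,
-- joining collected events to imported OSM roads by way ID.
SELECT
  road.way_id,
  count(*) AS event_count,
  avg(overtaking_event.distance_overtaker) AS distance_overtaker_mean
FROM overtaking_event
JOIN road ON road.way_id = overtaking_event.way_id
GROUP BY road.way_id;
```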
### Application data
The **API** imports tracks separately and stores the imported data into the
`overtaking_event` table. This is already part of the application and does not
need configuration, apart from specifying the correct `postgres.url` in the API
config.
### Importing OpenStreetMap data
This is the road information imported from OpenStreetMap itself. Download the
area(s) you would like to import from
[Geofabrik](https://download.geofabrik.de). Then import the files like this:
```bash
osm2pgsql --create --hstore --style api/roads_import.lua -O flex \
-H localhost -d obs -U obs -W \
path/to/downloaded/myarea-latest.osm.pbf
```
You might need to adjust the host, database, and username (`-H`, `-d`, `-U`) to
your setup, and provide the correct password when prompted. The import should
take a few seconds to a few minutes, depending on the size of the area. You can
run the process multiple times, with the same or different area files, to
import or update the data. You can also truncate the `road` table before
importing if you want to remove outdated road information, as shown below.
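For example, to clear out previously imported roads before a fresh import (a
sketch, assuming the same connection parameters as the import command above):

```bash
psql -h localhost -d obs -U obs -W -c 'TRUNCATE TABLE road;'
```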
## Configure
Edit the file `tile-generator/.env` and adjust the following variables (an example is shown below the list):
* `PGDATABASE, PGUSER, ...` if you have different PostgreSQL credentials
* `BBOX`, a bounding box for the area you want to generate (keep it small). Use
[this tool](https://boundingbox.klokantech.com/) to draw an area on a map.
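A sketch of what the relevant part of `tile-generator/.env` might look like
(all values are examples only; `BBOX` is given as `minlon,minlat,maxlon,maxlat`):

```bash
# Example values -- adjust to your PostgreSQL setup.
PGHOST=postgres
PGPORT=5432
PGDATABASE=obs
PGUSER=obs
PGPASSWORD=obs
# Small example area (minlon,minlat,maxlon,maxlat)
BBOX=7.796887,47.966839,7.879628,48.021061
```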
## Generate SQL functions
The [OpenMapTiles](https://openmaptiles.org/) project is used to generate the
vector tiles. For this, a lot of logic is generated and imported into the
PostgreSQL database in the form of user functions. To generate and import these, run:
```bash
cd tile-generator/
make clean
make
make import-sql
```
## Generate `.mbtiles` file
This file contains all the vector tiles for the selected area and zoom levels,
and different layers of information (according to the layer descriptions in
`tile-generator/layers/` and `tile-generator/openmaptiles.yaml`). It is
generated like this:
```bash
make generate-tiles-pg
```
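The resulting file is written to the `data/` directory. The tileserver
configuration further down expects to find it at `data/tiles.mbtiles` (a
sketch, assuming default `.env` settings):

```bash
ls -lh data/tiles.mbtiles
```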
## Publish vector tiles
The tool [tileserver-gl](http://tileserver.org/) is used to publish the vector
tiles over HTTP. The tileserver runs inside Docker, so for a development setup,
all you need to do is start it:
```bash
docker-compose up -d tileserver
```
It is now available at [http://localhost:3002/](http://localhost:3002/).
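To verify that tiles are being served, you can fetch the TileJSON metadata (a
sketch, assuming tileserver-gl's default URL layout and the data source name
`v3` from the configuration below):

```bash
curl http://localhost:3002/data/v3.json
```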

16
tile-generator/layers/obs_events/layer.sql Normal file
View file

@@ -0,0 +1,16 @@
CREATE OR REPLACE FUNCTION layer_obs_events(bbox geometry, zoom_level int)
RETURNS TABLE(event_id bigint, geometry geometry, distance_overtaker float, distance_stationary float, direction int, course float, speed float, way_id bigint) AS $$
SELECT
id::bigint as event_id,
ST_Transform(overtaking_event.geometry, 3857) as geometry,
distance_overtaker,
distance_stationary,
(case when direction_reversed then -1 else 1 end)::int as direction,
course,
speed,
way_id::bigint as way_id
FROM overtaking_event
WHERE ST_Transform(overtaking_event.geometry, 3857) && bbox;
$$ LANGUAGE SQL IMMUTABLE;

29
tile-generator/layers/obs_events/obs_events.yaml Normal file
View file

@@ -0,0 +1,29 @@
layer:
id: "obs_events"
description: |
Single overtaking events with attached measurements
buffer_size: 4
fields:
distance_overtaker: |
Overtaker distance (left side in right side traffic), in meters.
distance_stationary: |
Distance to stationary traffic (right side in right side traffic), in meters.
direction: |
-1 for driving backwards, 1 for forwards
way_id: |
The ID of the OSM way (road segment) that was determined for this event.
course: |
Direction of travel, as reported by GPS, in degree from North.
speed: |
Speed of travel, as reported by GPS, in meters per second (?).
defaults:
srs: EPSG:3785
datasource:
srid: 3857
geometry_field: geometry
key_field: event_id
key_field_as_attribute: no
query: (SELECT event_id, geometry, distance_overtaker, distance_stationary, direction, course, speed, way_id FROM layer_obs_events(!bbox!, z(!scale_denominator!))) AS t
schema:
- ./layer.sql

15
tile-generator/layers/obs_roads/layer.sql Normal file
View file

@@ -0,0 +1,15 @@
CREATE OR REPLACE FUNCTION layer_obs_roads(bbox geometry, zoom_level int)
RETURNS TABLE(way_id bigint, geometry geometry, distance_overtaker_mean float, direction int) AS $$
SELECT
road.way_id::bigint as way_id,
road.geometry as geometry,
avg(distance_overtaker) as distance_overtaker_mean,
r.dir as direction
FROM road
JOIN overtaking_event on road.way_id = overtaking_event.way_id
JOIN (VALUES (1, FALSE), (-1, TRUE)) AS r(dir, rev) ON overtaking_event.direction_reversed = r.rev
WHERE road.geometry && bbox
GROUP BY road.way_id, road.geometry, direction;
$$ LANGUAGE SQL IMMUTABLE;

23
tile-generator/layers/obs_roads/obs_roads.yaml Normal file
View file

@@ -0,0 +1,23 @@
layer:
id: "obs_roads"
description: |
Road segment statistics for OBS events
buffer_size: 4
fields:
distance_overtaker_mean: |
Overtaker mean distance in meters.
direction: |
Contains -1 for events recorded while traveling the way backwards, 1 for
forwards. Each road is emitted twice if it has data for both directions,
even if it is oneway.
defaults:
srs: EPSG:3785
datasource:
srid: 3857
geometry_field: geometry
key_field: way_id
key_field_as_attribute: no
query: (SELECT way_id, geometry, distance_overtaker_mean, direction FROM layer_obs_roads(!bbox!, z(!scale_denominator!))) AS t
schema:
- ./layer.sql

21
tile-generator/openmaptiles.yaml Normal file
View file

@@ -0,0 +1,21 @@
tileset:
layers:
- layers/obs_events/obs_events.yaml
- layers/obs_roads/obs_roads.yaml
name: OpenMapTiles
version: 3.12.1
id: openmaptiles
description: "A tileset for OpenBikeSensor data. https://openbikesensor.org"
attribution: '<a href="https://www.openbikesensor.org/" target="_blank">&copy; OpenBikeSensor</a> <a href="https://www.openmaptiles.org/" target="_blank">&copy; OpenMapTiles</a>'
center: [0, 0, 1]
bounds: [7.796887,47.966839,7.879628,48.021061]
maxzoom: 14
minzoom: 0
pixel_scale: 256
languages:
- de
- en
defaults:
srs: +proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0.0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs +over
datasource:
srid: 900913

View file

@@ -0,0 +1,39 @@
{
"options": {
"paths": {
"root": "/usr/src/app/node_modules/tileserver-gl-styles",
"fonts": "fonts",
"styles": "styles",
"mbtiles": "/data"
}
},
"styles": {
"klokantech-basic": {
"style": "klokantech-basic/style.json",
"tilejson": {
"bounds": [
7.487897,
47.80556,
8.212994,
48.224458
]
}
},
"osm-bright": {
"style": "osm-bright/style.json",
"tilejson": {
"bounds": [
7.487897,
47.80556,
8.212994,
48.224458
]
}
}
},
"data": {
"v3": {
"mbtiles": "tiles.mbtiles"
}
}
}