diff --git a/Dockerfile b/Dockerfile
index 7d0e822..f4db357 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -21,7 +21,7 @@ RUN npm run build
 # Build the API and add the built frontend to it
 #############################################
 
-FROM python:3.9.7-bullseye
+FROM python:3.11.3-bullseye
 
 RUN apt-get update &&\
     apt-get install -y \
diff --git a/api/Dockerfile b/api/Dockerfile
index 18c4ecb..5b31147 100644
--- a/api/Dockerfile
+++ b/api/Dockerfile
@@ -1,4 +1,4 @@
-FROM python:3.9.7-bullseye
+FROM python:3.11.3-bullseye
 
 WORKDIR /opt/obs/api
 
diff --git a/api/obs/api/routes/exports.py b/api/obs/api/routes/exports.py
index 13083c3..4fa1ce3 100644
--- a/api/obs/api/routes/exports.py
+++ b/api/obs/api/routes/exports.py
@@ -3,11 +3,12 @@ from enum import Enum
 from contextlib import contextmanager
 import zipfile
 import io
+import re
 from sqlite3 import connect
 
 import shapefile
 from obs.api.db import OvertakingEvent
-from sqlalchemy import select, func
+from sqlalchemy import select, func, text
 from sanic.response import raw
 from sanic.exceptions import InvalidUsage
 
@@ -39,11 +40,11 @@ PROJECTION_4326 = (
 
 
 @contextmanager
-def shapefile_zip():
+def shapefile_zip(shape_type=shapefile.POINT, basename="events"):
     zip_buffer = io.BytesIO()
     shp, shx, dbf = (io.BytesIO() for _ in range(3))
     writer = shapefile.Writer(
-        shp=shp, shx=shx, dbf=dbf, shapeType=shapefile.POINT, encoding="utf8"
+        shp=shp, shx=shx, dbf=dbf, shapeType=shape_type, encoding="utf8"
     )
 
     yield writer, zip_buffer
@@ -52,10 +53,10 @@ def shapefile_zip():
     writer.close()
 
     zip_file = zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False)
-    zip_file.writestr("events.shp", shp.getbuffer())
-    zip_file.writestr("events.shx", shx.getbuffer())
-    zip_file.writestr("events.dbf", dbf.getbuffer())
-    zip_file.writestr("events.prj", PROJECTION_4326)
+    zip_file.writestr(f"{basename}.shp", shp.getbuffer())
+    zip_file.writestr(f"{basename}.shx", shx.getbuffer())
+    zip_file.writestr(f"{basename}.dbf", dbf.getbuffer())
+    zip_file.writestr(f"{basename}.prj", PROJECTION_4326)
     zip_file.close()
 
 
@@ -74,7 +75,7 @@ async def export_events(req):
         )
 
         if fmt == ExportFormat.SHAPEFILE:
-            with shapefile_zip() as (writer, zip_buffer):
+            with shapefile_zip(basename="events") as (writer, zip_buffer):
                 writer.field("distance_overtaker", "N", decimal=4)
                 writer.field("distance_stationary", "N", decimal=4)
                 writer.field("way_id", "N", decimal=0)
@@ -119,3 +120,73 @@ async def export_events(req):
             return json_response(geojson)
 
     raise InvalidUsage("unknown export format")
+
+
+@api.get(r"/export/segments")
+async def export_segments(req):
+    async with use_request_semaphore(req, "export_semaphore", timeout=30):
+        bbox = req.ctx.get_single_arg(
+            "bbox", default="-180,-90,180,90"
+        )
+        assert re.match(r"(-?\d+\.?\d+,?){4}", bbox)
+        fmt = req.ctx.get_single_arg("fmt", convert=ExportFormat)
+        segments = await req.ctx.db.stream(
+            text(
+                f"select ST_AsGeoJSON(ST_Transform(geometry,4326)) AS geometry, way_id, distance_overtaker_mean, distance_overtaker_min,distance_overtaker_max,distance_overtaker_median,overtaking_event_count,usage_count,direction,zone,offset_direction,distance_overtaker_array from layer_obs_roads(ST_Transform(ST_MakeEnvelope({bbox},4326),3857),11,NULL,'1900-01-01'::timestamp,'2100-01-01'::timestamp) WHERE usage_count>0"
+            )
+        )
+
+        if fmt == ExportFormat.SHAPEFILE:
+            with shapefile_zip(shape_type=3, basename="segments") as (writer, zip_buffer):
+                writer.field("distance_overtaker_mean", "N", decimal=4)
+                writer.field("distance_overtaker_max", "N", decimal=4)
writer.field("distance_overtaker_min", "N", decimal=4) + writer.field("distance_overtaker_median", "N", decimal=4) + writer.field("overtaking_event_count", "N", decimal=4) + writer.field("usage_count", "N", decimal=4) + writer.field("way_id", "N", decimal=0) + writer.field("direction", "N", decimal=0) + writer.field("zone", "C") + + async for segment in segments: + geom = json.loads(segment.st_asgeojson) + writer.line([geom["coordinates"]]) + writer.record( + distance_overtaker_mean=segment.distance_overtaker_mean, + distance_overtaker_median=segment.distance_overtaker_median, + distance_overtaker_max=segment.distance_overtaker_max, + distance_overtaker_min=segment.distance_overtaker_min, + usage_count=segment.usage_count, + overtaking_event_count=segment.overtaking_event_count, + direction=segment.direction, + way_id=segment.way_id, + zone=segment.zone, + ) + + return raw(zip_buffer.getbuffer()) + + if fmt == ExportFormat.GEOJSON: + features = [] + async for segment in segments: + features.append( + { + "type": "Feature", + "geometry": json.loads(segment.geometry), + "properties": { + "distance_overtaker_mean": segment.distance_overtaker_mean, + "distance_overtaker_max": segment.distance_overtaker_max, + "distance_overtaker_median": segment.distance_overtaker_median, + "overtaking_event_count": segment.overtaking_event_count, + "usage_count": segment.usage_count, + "distance_overtaker_array": segment.distance_overtaker_array, + "direction": segment.direction, + "way_id": segment.way_id, + "zone": segment.zone, + }, + } + ) + + geojson = {"type": "FeatureCollection", "features": features} + return json_response(geojson) + + raise InvalidUsage("unknown export format") diff --git a/frontend/src/pages/ExportPage/index.tsx b/frontend/src/pages/ExportPage/index.tsx index e8329d1..65cdac5 100644 --- a/frontend/src/pages/ExportPage/index.tsx +++ b/frontend/src/pages/ExportPage/index.tsx @@ -104,7 +104,7 @@ const BoundingBoxSelector = React.forwardRef( } ); -const MODES = ["events"]; +const MODES = ["events", "segments"]; const FORMATS = ["geojson", "shapefile"]; export default function ExportPage() { @@ -112,7 +112,6 @@ export default function ExportPage() { const [bbox, setBbox] = useState("8.294678,49.651182,9.059601,50.108249"); const [fmt, setFmt] = useState("geojson"); const config = useConfig(); - const exportUrl = `${config?.apiUrl}/export/events?bbox=${bbox}&fmt=${fmt}`; const { t } = useTranslation(); return ( @@ -163,7 +162,7 @@ export default function ExportPage() {