Remove broken duplicate route

This commit is contained in:
Paul Bienkowski 2022-01-19 20:36:03 +01:00
parent 0c256d8923
commit 3da467800d

View file

@ -14,30 +14,6 @@ from obs.api.app import app, json as json_response
from .mapdetails import get_single_arg from .mapdetails import get_single_arg
@app.get(r"/export/events.json")
async def tiles(req):
    """Return overtaking events for one map tile as a GeoJSON FeatureCollection.

    Query parameters (all required, converted to int by ``get_single_arg``):
        x, y, zoom -- tile coordinates; presumably slippy-map tiling — confirm
        against the ``get_events`` implementation.

    Returns a JSON response of the form
    ``{"type": "FeatureCollection", "features": [...]}`` where each feature
    carries the event geometry plus its measured distances, direction,
    way id, course, speed and time.
    """
    x = get_single_arg(req, "x", convert=int)
    y = get_single_arg(req, "y", convert=int)
    zoom = get_single_arg(req, "zoom", convert=int)

    features = []
    async for event in get_events(req.ctx.db, zoom, x, y):
        features.append(
            {
                "type": "Feature",
                # event.geometry is stored as a JSON string; decode it so the
                # response embeds a proper GeoJSON geometry object.
                "geometry": json.loads(event.geometry),
                "properties": {
                    "distance_overtaker": event.distance_overtaker,
                    "distance_stationary": event.distance_stationary,
                    # Encode the reversed flag as a sign: 1 = normal, -1 = reversed.
                    "direction": -1 if event.direction_reversed else 1,
                    "way_id": event.way_id,
                    "course": event.course,
                    "speed": event.speed,
                    "time": event.time,
                },
            }
        )

    geojson = {"type": "FeatureCollection", "features": features}
    return json_response(geojson)
class ExportFormat(str, Enum): class ExportFormat(str, Enum):
SHAPEFILE = "shapefile" SHAPEFILE = "shapefile"
@ -54,12 +30,16 @@ def parse_bounding_box(s):
3857, 3857,
) )
@contextmanager @contextmanager
def shapefile_zip(): def shapefile_zip():
import io, shapefile import io, shapefile
zip_buffer = io.BytesIO() zip_buffer = io.BytesIO()
shp, shx, dbf = (io.BytesIO() for _ in range(3)) shp, shx, dbf = (io.BytesIO() for _ in range(3))
writer = shapefile.Writer(shp=shp, shx=shx, dbf=dbf, shapeType=shapefile.POINT, encoding="utf8") writer = shapefile.Writer(
shp=shp, shx=shx, dbf=dbf, shapeType=shapefile.POINT, encoding="utf8"
)
yield writer, zip_buffer yield writer, zip_buffer
@ -73,21 +53,24 @@ def shapefile_zip():
) )
import zipfile import zipfile
zf = zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False) zf = zipfile.ZipFile(zip_buffer, "a", zipfile.ZIP_DEFLATED, False)
zf.writestr('events.shp', shp.getbuffer()) zf.writestr("events.shp", shp.getbuffer())
zf.writestr('events.shx', shx.getbuffer()) zf.writestr("events.shx", shx.getbuffer())
zf.writestr('events.dbf', dbf.getbuffer()) zf.writestr("events.dbf", dbf.getbuffer())
zf.writestr('events.prj', PRJ) zf.writestr("events.prj", PRJ)
zf.close() zf.close()
@app.get(r"/export/events") @app.get(r"/export/events")
async def export_events(req): async def export_events(req):
bbox = get_single_arg(req, "bbox", default="-180,-90,180,90", convert=parse_bounding_box) bbox = get_single_arg(
req, "bbox", default="-180,-90,180,90", convert=parse_bounding_box
)
fmt = get_single_arg(req, "fmt", convert=ExportFormat) fmt = get_single_arg(req, "fmt", convert=ExportFormat)
events = await req.ctx.db.stream_scalars( events = await req.ctx.db.stream_scalars(
select(OvertakingEvent) select(OvertakingEvent).where(OvertakingEvent.geometry.bool_op("&&")(bbox))
.where(OvertakingEvent.geometry.bool_op("&&")(bbox))
) )
if fmt == ExportFormat.SHAPEFILE: if fmt == ExportFormat.SHAPEFILE:
@ -116,7 +99,8 @@ async def export_events(req):
elif fmt == ExportFormat.GEOJSON: elif fmt == ExportFormat.GEOJSON:
features = [] features = []
async for event in events: async for event in events:
features.append({ features.append(
{
"type": "Feature", "type": "Feature",
"geometry": json.loads(event.geometry), "geometry": json.loads(event.geometry),
"properties": { "properties": {
@ -127,8 +111,9 @@ async def export_events(req):
"course": event.course, "course": event.course,
"speed": event.speed, "speed": event.speed,
"time": event.time, "time": event.time,
},
} }
}) )
geojson = {"type": "FeatureCollection", "features": features} geojson = {"type": "FeatureCollection", "features": features}
return json_response(geojson) return json_response(geojson)