Move API to Python.

This commit is contained in:
Paul Bienkowski 2021-11-04 18:13:24 +01:00
parent 32e86be1d1
commit da33d814ae
96 changed files with 1867 additions and 577 deletions

View file

@@ -1,31 +1,13 @@
FROM node:15.14-buster
# Install python3, pip3, and make them the default for `python` and `pip` commands
RUN apt-get update && apt-get install -y python3 python3-pip
RUN ln -s $(which python3) /usr/local/bin/python
RUN ln -s $(which pip3) /usr/local/bin/pip
FROM python:3.9.7-bullseye
WORKDIR /opt/obs/api
ADD package.json package-lock.json /opt/obs/api/
RUN echo update-notifier=false >> ~/.npmrc
RUN npm ci
ADD scripts /opt/obs/scripts
RUN pip install -e /opt/obs/scripts
ADD scripts /opt/obs/api/scripts/
ADD tools /opt/obs/api/tools/
ADD requirements.txt /opt/obs/api/
RUN pip install -r requirements.txt
RUN pip install -e ./scripts
ADD requirements.txt setup.py obs /opt/obs/api/
RUN pip install -e .
ADD views /opt/obs/api/views/
ADD src /opt/obs/api/src/
EXPOSE 8000
#ADD .migrations.js .
#ADD migrations .
EXPOSE 3000
ENV PORT=3000
ENV DATA_DIR=/data
CMD ["npm", "run", "start"]
CMD ["obs-api"]

View file

@@ -1,34 +0,0 @@
{
"cookieSecret": "CHANGEME!!!!!!!!!!@##@!!$$$$$$$$$$$$$!!",
"jwtSecret": "CHANGEME??????????????////3212321;312kjbkasjd",
"baseUrl": "http://localhost:3000/",
"mainFrontendUrl": "http://localhost:3001/",
"mail": false,
"mongodb": {
"url": "mongodb://mongo/obsTest",
"debug": false
},
"postgres": {
"url": "postgresql+asyncpg://obs:obs@postgres/obs"
},
"redisUrl": "redis://redis",
"oAuth2Clients": [
{
"clientId": "b730f8d2-d93c-4c68-9ff0-dfac8da76ee2",
"validRedirectUris": ["http://localhost:3001/redirect"],
"refreshTokenExpirySeconds": 604800,
"maxScope": "*",
"title": "OBS Portal",
"autoAccept": true
},
{
"clientId": "a2958209-4045-4ec9-8cb3-1156abba7de3",
"validRedirectUris": ["__LOCAL__"],
"maxScope": "track.upload",
"refreshTokenExpirySeconds": 86400000,
"title": "OpenBikeSensor"
}
],
"imprintUrl": "https://example.com/imprint",
"privacyPolicyUrl": "https://example.com/privacy"
}

api/config.dev.py (new file, 39 lines)
View file

@@ -0,0 +1,39 @@
# Bind address of the server
HOST = "0.0.0.0"
PORT = 3000
# Extended log output, but slower
DEBUG = True
# Required to encrypt or sign sessions, cookies, tokens, etc.
SECRET = "CHANGEME!!!!!!!!!!@##@!!$$$$$$$$$$$$$!!"
# Connection to the database
POSTGRES_URL = "postgresql+asyncpg://obs:obs@postgres/obs"
# URL to the keycloak realm, as reachable by the API service. This is not
# necessarily its publicly reachable URL; Keycloak advertises that itself.
KEYCLOAK_URL = "http://keycloak:8080/auth/realms/OBS%20Dev/"
# Auth client credentials
KEYCLOAK_CLIENT_ID = "portal"
KEYCLOAK_CLIENT_SECRET = "76b84224-dc24-4824-bb98-9e1ba15bd58f"
# The root of the frontend. Needed for redirecting after login, and for CORS.
MAIN_FRONTEND_URL = "http://localhost:3001/"
# Mail settings
MAIL_ENABLED = False
# URLs to important documents, hosted elsewhere
IMPRINT_URL = "https://example.com/imprint"
PRIVACY_POLICY_URL = "https://example.com/privacy"
# API_ROOT_DIR = "??" # default: api/ inside repository
DATA_DIR = "/data"
# PROCESSING_DIR = "??" # default: DATA_DIR/processing
# PROCESSING_OUTPUT_DIR = "??" # default: DATA_DIR/processing-output
# TRACKS_DIR = "??" # default: DATA_DIR/tracks
# OBS_FACE_CACHE_DIR = "??" # default: DATA_DIR/obs-face-cache
# vim: set ft=python :

View file

@@ -1,42 +0,0 @@
{
"cookieSecret": "!!!<<<CHANGEME>>>!!!",
"jwtSecret": "!!!<<<CHANGEME>>>!!!",
"baseUrl": "https://portal.example.com/api",
"mainFrontendUrl": "https://portal.example.com/",
"mail": {
"from": "Sender Name <sender@example.com>",
"smtp" : {
"host": "mail.example.com",
"port": 465,
"starttls": false,
"username": "sender@example.com",
"password": "!!!<<<CHANGEME>>>!!!"
}
},
"mongodb": {
"url": "mongodb://mongo/obs",
"debug": false
},
"postgres": {
"url": "postgresql+asyncpg://user:pass@host/dbname"
},
"redisUrl": "redis://redis",
"oAuth2Clients": [
{
"clientId": "!!!<<<CHANGEME>>>!!!",
"validRedirectUris": ["https://portal.example.com/redirect"],
"refreshTokenExpirySeconds": 604800,
"maxScope": "*",
"title": "OBS Portal"
},
{
"clientId": "!!!<<<CHANGEME>>>!!!",
"validRedirectUris": ["__LOCAL__"],
"maxScope": "track.upload",
"refreshTokenExpirySeconds": 86400000,
"title": "OpenBikeSensor"
}
],
"imprintUrl": "https://example.com/imprint",
"privacyPolicyUrl": "https://example.com/privacy"
}

api/config.py.example (new file, 45 lines)
View file

@@ -0,0 +1,45 @@
# Bind address of the server
HOST = "127.0.0.1"
PORT = 3000
# Extended log output, but slower
DEBUG = False
# Required to encrypt or sign sessions, cookies, tokens, etc.
SECRET = "!!!<<<CHANGEME>>>!!!"
# Connection to the database
POSTGRES_URL = "postgresql+asyncpg://user:pass@host/dbname"
# URL to the keycloak realm, as reachable by the API service. This is not
# necessarily its publicly reachable URL; Keycloak advertises that itself.
KEYCLOAK_URL = "http://localhost:1234/auth/realms/obs/"
# Auth client credentials
KEYCLOAK_CLIENT_ID = "portal"
KEYCLOAK_CLIENT_SECRET = "00000000-0000-0000-0000-000000000000"
# The root of the frontend. Needed for redirecting after login, and for CORS.
MAIN_FRONTEND_URL = "https://portal.example.com/"
# Mail settings
MAIL_ENABLED = False
MAIL_FROM = "Sender Name <sender@example.com>"
MAIL_SMTP_HOST = "mail.example.com"
MAIL_SMTP_PORT = 465
MAIL_STARTTLS = False
MAIL_SMTP_USERNAME = "sender@example.com"
# URLs to important documents, hosted elsewhere
IMPRINT_URL = "https://example.com/imprint"
PRIVACY_POLICY_URL = "https://example.com/privacy"
# Path overrides:
# API_ROOT_DIR = "??" # default: api/ inside repository
# DATA_DIR = "??" # default: $API_ROOT_DIR/..
# PROCESSING_DIR = "??" # default: DATA_DIR/processing
# PROCESSING_OUTPUT_DIR = "??" # default: DATA_DIR/processing-output
# TRACKS_DIR = "??" # default: DATA_DIR/tracks
# OBS_FACE_CACHE_DIR = "??" # default: DATA_DIR/obs-face-cache
# vim: set ft=python :
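For illustration, a minimal sketch of how this file is consumed. Sanic imports it as a Python module and, as far as we can tell, picks up only the UPPERCASE names (see `app.update_config("./config.py")` in `obs/api/app.py` below):

from sanic import Sanic

app = Sanic("example")
app.update_config("./config.py")  # loads HOST, PORT, SECRET, ... into app.config
print(app.config.PORT)  # -> 3000 with the defaults above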

api/obs/__init__.py (new file, 1 line)
View file

@@ -0,0 +1 @@
__path__ = __import__("pkgutil").extend_path(__path__, __name__)
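This line declares `obs` as a namespace package, so the API code in this repository and the separately installed obs-face scripts can both provide modules under the shared `obs` prefix. A sketch of the effect (both imports appear in `tools/process_track.py` below):

from obs.api.db import Track  # provided by obs-api (this repository)
from obs.face.importer import ImportMeasurementsCsv  # provided by the scripts package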

api/obs/api/__init__.py (new file, 1 line)
View file

@@ -0,0 +1 @@
__version__ = "0.2.0"

api/obs/api/app.py (new file, 118 lines)
View file

@@ -0,0 +1,118 @@
import logging
import os
from json import JSONEncoder, dumps
from functools import wraps, partial
from urllib.parse import urlparse
from os.path import dirname, join, normpath, abspath
from datetime import datetime, date
from sanic import Sanic
from sanic.response import text, json as json_response
from sanic.exceptions import Unauthorized
from sanic_session import Session, InMemorySessionInterface
from sanic_cors import CORS
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
from sqlalchemy.orm import sessionmaker
from obs.api.db import User, async_session
from sanic_session.base import BaseSessionInterface
from sanic_session.utils import ExpiringDict
log = logging.getLogger(__name__)
app = Sanic("OpenBikeSensor Portal API")
app.update_config("./config.py")
c = app.config
# Configure paths
c.API_ROOT_DIR = c.get("API_ROOT_DIR") or abspath(join(dirname(__file__), "..", ".."))
c.DATA_DIR = c.get("DATA_DIR") or normpath(join(c.API_ROOT_DIR, "../data"))
c.PROCESSING_DIR = c.get("PROCESSING_DIR") or join(c.DATA_DIR, "processing")
c.PROCESSING_OUTPUT_DIR = c.get("PROCESSING_OUTPUT_DIR") or join(
c.DATA_DIR, "processing-output"
)
c.TRACKS_DIR = c.get("TRACKS_DIR") or join(c.DATA_DIR, "tracks")
c.OBS_FACE_CACHE_DIR = c.get("OBS_FACE_CACHE_DIR") or join(c.DATA_DIR, "obs-face-cache")
main_frontend_url = urlparse(c.MAIN_FRONTEND_URL)
CORS(
app,
origins=[f"{main_frontend_url.scheme}://{main_frontend_url.netloc}"],
supports_credentials=True,
)
# TODO: use a different interface, maybe backed by the PostgreSQL, to allow
# scaling the API
Session(app, interface=InMemorySessionInterface())
@app.before_server_start
async def app_connect_db(app, loop):
app.ctx._db_engine = create_async_engine(c.POSTGRES_URL, echo=c.DEBUG)
@app.after_server_stop
async def app_disconnect_db(app, loop):
if app.ctx._db_engine:
await app.ctx._db_engine.dispose()
@app.middleware("request")
async def inject_session(req):
req.ctx.db = sessionmaker(
req.app.ctx._db_engine, class_=AsyncSession, expire_on_commit=False
)()
req.ctx._db_session_ctx_token = async_session.set(req.ctx.db)
@app.middleware("response")
async def close_session(req, response):
if hasattr(req.ctx, "_db_session_ctx_token"):
async_session.reset(req.ctx._db_session_ctx_token)
await req.ctx.db.close()
@app.middleware("request")
async def load_user(req):
user_id = req.ctx.session.get("user_id")
user = None
if user_id:
user = (
await req.ctx.db.execute(select(User).where(User.id == user_id))
).scalar()
req.ctx.user = user
@app.route("/")
def index(req):
return text("Hello, %s!" % (req.ctx.user.username if req.ctx.user else "World"))
def require_auth(fn):
@wraps(fn)
def wrapper(req, *args, **kwargs):
if not req.ctx.user:
raise Unauthorized("Login required")
return fn(req, *args, **kwargs)
return wrapper
class CustomJsonEncoder(JSONEncoder):
def default(self, obj):
if isinstance(obj, (datetime, date)):
return obj.isoformat() + "+0000" # explicit UTC for javascript <3
# Let the base class default method raise the TypeError
return super().default(obj)
def json(*args, **kwargs):
return json_response(*args, **kwargs, dumps=partial(dumps, cls=CustomJsonEncoder))
from . import routes
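For illustration, a hypothetical route module (not part of this commit) would combine the helpers above like this; `CustomJsonEncoder` then serializes the datetime fields with an explicit UTC offset such as "2021-11-04T18:13:24+0000":

from obs.api.app import app, require_auth, json

@app.get("/whoami")
@require_auth
async def whoami(req):
    # req.ctx.user is set by the load_user middleware above
    return json({"username": req.ctx.user.username, "createdAt": req.ctx.user.created_at})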

api/obs/api/config.py (new file, 17 lines)
View file

@@ -0,0 +1,17 @@
# Configure paths
config.API_ROOT_DIR = config.get("API_ROOT_DIR") or abspath(
join(dirname(__file__), "..", "..")
)
config.DATA_DIR = config.get("DATA_DIR") or normpath(
join(config.API_ROOT_DIR, "../data")
)
config.PROCESSING_DIR = config.get("PROCESSING_DIR") or join(
config.DATA_DIR, "processing"
)
config.PROCESSING_OUTPUT_DIR = config.get("PROCESSING_OUTPUT_DIR") or join(
config.DATA_DIR, "processing-output"
)
config.TRACKS_DIR = config.get("TRACKS_DIR") or join(config.DATA_DIR, "tracks")
config.OBS_FACE_CACHE_DIR = config.get("OBS_FACE_CACHE_DIR") or join(
config.DATA_DIR, "obs-face-cache"
)

api/obs/api/db.py (new file, 395 lines)
View file

@@ -0,0 +1,395 @@
import hashlib
from contextvars import ContextVar
from contextlib import asynccontextmanager
from datetime import datetime
import os
from os.path import join, dirname
import re
import math
import aiofiles
import random
import string
from slugify import slugify
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.ext.asyncio import create_async_engine
from sqlalchemy.orm import sessionmaker, relationship
from sqlalchemy.types import UserDefinedType, BIGINT, TEXT
from sqlalchemy import (
Boolean,
Column,
DateTime,
Enum as SqlEnum,
Float,
ForeignKey,
Index,
Integer,
String,
false,
func,
select,
text,
literal,
)
from sqlalchemy.dialects.postgresql import HSTORE, UUID
Base = declarative_base()
engine = ContextVar("engine")
async_session = ContextVar("async_session")
@asynccontextmanager
async def make_session():
async with async_session.get()() as session:
yield session
async def init_models():
async with engine.get().begin() as conn:
await conn.run_sync(Base.metadata.drop_all)
await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "hstore";'))
await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "postgis";'))
await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";'))
await conn.run_sync(Base.metadata.create_all)
def random_string(length):
letters = string.ascii_lowercase + string.digits
return "".join(random.choice(letters) for i in range(length))
@asynccontextmanager
async def connect_db(url):
engine_ = create_async_engine(url, echo=False)
t1 = engine.set(engine_)
async_session_ = sessionmaker(engine_, class_=AsyncSession, expire_on_commit=False)
t2 = async_session.set(async_session_)
yield
# for AsyncEngine created in function scope, close and
# clean-up pooled connections
await engine_.dispose()
engine.reset(t1)
async_session.reset(t2)
ZoneType = SqlEnum("rural", "urban", "motorway", name="zone_type")
ProcessingStatus = SqlEnum(
"created", "queued", "processing", "complete", "error", name="processing_status"
)
class Geometry(UserDefinedType):
def get_col_spec(self):
return "GEOMETRY"
def bind_expression(self, bindvalue):
return func.ST_GeomFromGeoJSON(bindvalue, type_=self)
def column_expression(self, col):
return func.ST_AsGeoJSON(col, type_=self)
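Due to the bind and column expressions above, geometry values cross the ORM boundary as GeoJSON strings while being stored as PostGIS geometries. A usage sketch, assuming the OvertakingEvent model defined below:

# written as GeoJSON; the bind expression wraps it in ST_GeomFromGeoJSON
event = OvertakingEvent(geometry='{"type": "Point", "coordinates": [9.99, 53.55]}')
# read back through ST_AsGeoJSON, i.e. again as a GeoJSON string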
class OvertakingEvent(Base):
__tablename__ = "overtaking_event"
__table_args__ = (Index("road_segment", "way_id", "direction_reversed"),)
id = Column(Integer, autoincrement=True, primary_key=True, index=True)
track_id = Column(Integer, ForeignKey("track.id", ondelete="CASCADE"))
hex_hash = Column(String, unique=True, index=True)
way_id = Column(BIGINT, index=True)
# whether we were traveling along the way in reverse direction
direction_reversed = Column(Boolean)
geometry = Column(Geometry)
latitude = Column(Float)
longitude = Column(Float)
time = Column(DateTime)
distance_overtaker = Column(Float)
distance_stationary = Column(Float)
course = Column(Float)
speed = Column(Float)
def __repr__(self):
return f"<OvertakingEvent {self.id}>"
class Road(Base):
__tablename__ = "road"
way_id = Column(BIGINT, primary_key=True, index=True)
zone = Column(ZoneType)
name = Column(String)
geometry = Column(Geometry)
tags = Column(HSTORE)
directionality = Column(Integer)
NOW = text("NOW()")
class Track(Base):
__tablename__ = "track"
id = Column(Integer, primary_key=True, autoincrement=True)
slug = Column(String, unique=True, nullable=False, index=True)
created_at = Column(DateTime, nullable=False, server_default=NOW)
updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)
title = Column(String)
processing_status = Column(ProcessingStatus, server_default=literal("created"))
processing_queued_at = Column(DateTime)
processed_at = Column(DateTime)
processing_log = Column(TEXT)
# Set to true if the user customized the title. Disables auto-generating
# an updated title when the track is (re-)processed.
customized_title = Column(Boolean, server_default=false(), nullable=False)
# A user-provided description of the track. May contain markdown.
description = Column(TEXT)
# Whether this track is visible (anonymized) in the public track list or not.
public = Column(Boolean, server_default=false())
# Whether this track should be exported to the public track database
# (after anonymization).
# include_in_public_database = Column(Boolean, server_default=false())
# The user agent string, or a part thereof, that was used to upload this
# track. Usually contains only the OBS version, other user agents are
# discarded due to being irrelevant.
uploaded_by_user_agent = Column(String)
# The name of the original file, as provided during upload. Used for
# providing a download with the same name, and for display in the
# frontend.
original_file_name = Column(String)
# A hash of the original file's contents. Nobody can upload the same track twice.
original_file_hash = Column(String, nullable=False)
author_id = Column(
Integer, ForeignKey("user.id", ondelete="CASCADE"), nullable=False
)
# Statistics... maybe we'll drop some of this if we can easily compute them from SQL
recorded_at = Column(DateTime)
recorded_until = Column(DateTime)
duration = Column(Float)
length = Column(Float)
segments = Column(Integer)
num_events = Column(Integer)
num_measurements = Column(Integer)
num_valid = Column(Integer)
def to_dict(self, for_user_id=None):
result = {
"id": self.id,
"slug": self.slug,
"title": self.title,
"description": self.description,
"createdAt": self.created_at,
"updatedAt": self.updated_at,
"public": self.public,
"processingStatus": self.processing_status,
"recordedAt": self.recorded_at,
"recordedUntil": self.recorded_until,
"duration": self.duration,
"length": self.length,
"numEvents": self.num_events,
"numValid": self.num_valid,
"numMeasurements": self.num_measurements,
}
if for_user_id is not None and for_user_id == self.author_id:
result["uploadedByUserAgent"] = self.uploaded_by_user_agent
result["originalFileName"] = self.original_file_name
if self.author:
result["author"] = self.author.to_dict(for_user_id=for_user_id)
return result
def is_visible_to_private(self, user):
return user is not None and user.id == self.author_id
def is_visible_to(self, user):
return self.is_visible_to_private(user) or self.public
def generate_slug(self, new_title_or_filename=None):
input_text = (
new_title_or_filename or self.title or self.original_file_name or "track"
)
self.slug = slugify(input_text, separator="_") + "-" + random_string(6)
async def prevent_duplicates(self, session, file_body):
hex_hash = hashlib.sha512(file_body).hexdigest()
duplicate_count = await session.scalar(
select(func.count())
.select_from(Track)
.where(
Track.original_file_hash == hex_hash,
Track.author_id == self.author_id,
Track.id != self.id,
)
)
if duplicate_count:
raise ValueError("duplicate file")
self.original_file_hash = hex_hash
async def write_to_original_file(self, config, body):
mode = "wb" if isinstance(body, bytes) else "wt"
target = self.get_original_file_path(config)
os.makedirs(dirname(target), exist_ok=True)
async with aiofiles.open(target, mode=mode) as f:
await f.write(body)
def queue_processing(self):
self.processing_status = "queued"
self.processing_queued_at = datetime.utcnow()
def auto_generate_title(self):
if self.customized_title:
return
# Try to figure out when this file was recorded. Either we have it in the
# statistics, e.g. after parsing and processing the track, or we can maybe
# derive it from the filename.
recorded_at = self.recorded_at
if not recorded_at and self.original_file_name:
match = re.match(
r"^[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}\.[0-9]{2}\.[0-9]{2}",
self.original_file_name,
)
if match:
try:
recorded_at = datetime.fromisoformat(match[0])
except ValueError:
pass
if recorded_at:
daytime = _get_daytime(recorded_at)
self.title = f"{daytime} ride on {recorded_at.strftime('%a, %x')}"
return
# Detecting recording date failed, use filename
if self.original_file_name:
words = self.original_file_name
words = re.sub(r"(\.obsdata)?\.csv$", "", words)
words = re.split(r"\W+", words)
words[0] = words[0][0].upper() + words[0][1:]
self.title = " ".join(words)
@property
def file_path(self):
return join(self.author.username, self.slug)
def get_original_file_path(self, config):
return join(config.TRACKS_DIR, self.file_path, "original.csv")
class User(Base):
__tablename__ = "user"
id = Column(Integer, autoincrement=True, primary_key=True)
created_at = Column(DateTime, nullable=False, server_default=NOW)
updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)
sub = Column(String, unique=True, nullable=False)
username = Column(String, unique=True, nullable=False)
email = Column(String, nullable=False)
bio = Column(TEXT)
image = Column(String)
are_tracks_visible_for_all = Column(Boolean, server_default=false(), nullable=False)
api_key = Column(String)
# This user can be matched by the email address from the auth service
# instead of having to match by `sub`. If a matching user logs in, the
# `sub` is updated to the new sub and this flag is disabled. This is for
# migrating *to* the external authentication scheme.
match_by_username_email = Column(Boolean, server_default=false())
def to_dict(self, for_user_id=None):
return {
"username": self.username,
"bio": self.bio,
"image": self.image,
}
class Comment(Base):
__tablename__ = "comment"
id = Column(Integer, autoincrement=True, primary_key=True)
uid = Column(UUID, server_default=func.uuid_generate_v4())
created_at = Column(DateTime, nullable=False, server_default=NOW)
updated_at = Column(DateTime, nullable=False, server_default=NOW, onupdate=NOW)
body = Column(TEXT)
author_id = Column(Integer, ForeignKey("user.id", ondelete="CASCADE"))
track_id = Column(Integer, ForeignKey("track.id", ondelete="CASCADE"))
def to_dict(self, for_user_id=None):
return {
"id": self.uid,
"body": self.body,
"author": self.author.to_dict(for_user_id=for_user_id),
"createdAt": self.created_at,
}
Comment.author = relationship("User", back_populates="authored_comments")
User.authored_comments = relationship(
"Comment", order_by=Comment.created_at, back_populates="author"
)
Track.author = relationship("User", back_populates="authored_tracks")
User.authored_tracks = relationship(
"Track", order_by=Track.created_at, back_populates="author"
)
Comment.track = relationship("Track", back_populates="comments")
Track.comments = relationship(
"Comment", order_by=Comment.created_at, back_populates="track"
)
OvertakingEvent.track = relationship("Track", back_populates="overtaking_events")
Track.overtaking_events = relationship(
"OvertakingEvent", order_by=OvertakingEvent.time, back_populates="track"
)
# 0..4 Night, 4..10 Morning, 10..14 Noon, 14..18 Afternoon, 18..22 Evening, 22..00 Night
# Two hour intervals
_DAYTIMES = [
"Night", # 0h - 2h
"Night", # 2h - 4h
"Morning", # 4h - 6h
"Morning", # 6h - 8h
"Morning", # 8h - 10h
"Noon", # 10h - 12h
"Noon", # 12h - 14h
"Afternoon", # 14h - 16h
"Afternoon", # 16h - 18h
"Evening", # 18h - 20h
"Evening", # 20h - 22h
"Night", # 22h - 24h
]
def _get_daytime(d):
return _DAYTIMES[math.floor((d.hour % 24) / 2)]
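A minimal usage sketch for the helpers above, mirroring the pattern used in `api/tools/import_from_mongodb.py` further down:

import asyncio
from sqlalchemy import select
from obs.api.db import connect_db, make_session, Track

async def list_track_slugs(postgres_url):
    async with connect_db(postgres_url):  # sets the engine/session context vars
        async with make_session() as session:
            for track in (await session.execute(select(Track))).scalars():
                print(track.slug)

asyncio.run(list_track_slugs("postgresql+asyncpg://obs:obs@localhost/obs"))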

View file

@@ -0,0 +1,7 @@
from . import (
login,
stats,
tracks,
info,
users,
)

View file

@@ -0,0 +1,20 @@
import logging
# from sqlalchemy import select
from obs.api.app import app
from sanic.response import json
log = logging.getLogger(__name__)
from obs.api import __version__ as version
@app.route("/info")
async def info(req):
return json(
{
"version": version,
}
)

api/obs/api/routes/login.py (new file, 118 lines)
View file

@@ -0,0 +1,118 @@
import logging
import os
from sqlalchemy import select
from oic import rndstr
from oic.oic import Client
from oic.oic.message import AuthorizationResponse, RegistrationResponse
from oic.utils.authn.client import CLIENT_AUTHN_METHOD
from obs.api.app import app
from obs.api.db import User
from sanic.response import json, redirect
from sanicargs import parse_parameters
log = logging.getLogger(__name__)
client = Client(client_authn_method=CLIENT_AUTHN_METHOD)
@app.before_server_start
async def connect_auth_client(app, loop):
client.allow["issuer_mismatch"] = True
pc = client.provider_config(app.config.KEYCLOAK_URL)
client.store_registration_info(
RegistrationResponse(
client_id=app.config.KEYCLOAK_CLIENT_ID,
client_secret=app.config.KEYCLOAK_CLIENT_SECRET,
)
)
@app.route("/login")
@parse_parameters
async def login(req, next: str = None):
session = req.ctx.session
session["state"] = rndstr()
session["nonce"] = rndstr()
session["next"] = next
args = {
"client_id": client.client_id,
"response_type": "code",
"scope": ["openid"],
"nonce": session["nonce"],
"redirect_uri": req.scheme + "://" + req.host + "/login/redirect",
"state": session["state"],
}
auth_req = client.construct_AuthorizationRequest(request_args=args)
login_url = auth_req.request(client.authorization_endpoint)
return redirect(login_url)
@app.route("/login/redirect")
async def login_redirect(req):
session = req.ctx.session
auth_response = client.parse_response(
AuthorizationResponse, info=dict(req.query_args), sformat="dict"
)
code = auth_response["code"]
state = auth_response["state"]
assert "state" in session
assert state == session["state"]
client.do_access_token_request(
state=state,
request_args={"code": code},
authn_method="client_secret_basic",
)
userinfo = client.do_user_info_request(state=state)
# {'sub': '3798e2da-b208-4a1a-98c0-08fecfea1345', 'email_verified': True, 'preferred_username': 'test', 'email': 'test@example.com'}
sub = userinfo["sub"]
preferred_username = userinfo["preferred_username"]
email = userinfo["email"]
user = (await req.ctx.db.execute(select(User).where(User.sub == sub))).scalar()
if user is None:
user = (
await req.ctx.db.execute(
select(User).where(User.email == email, User.username == preferred_username, User.match_by_username_email)
)
).scalar()
if user:
user.match_by_username_email = False
if user is None:
log.info(
"Registering new user with sub %r (preferred username: %s)",
sub,
preferred_username,
)
user = User(sub=sub, username=preferred_username, email=email)
req.ctx.db.add(user)
else:
log.info("Logged in known user (id: %s).", user.id)
if email != user.email:
log.debug("Updating user (id: %s) email from auth system.", user.id)
user.email = email
if preferred_username != user.username:
log.debug("Updating user (id: %s) username from auth system.", user.id)
user.username = preferred_username
await req.ctx.db.commit()
session["user_id"] = user.id
next_ = session.pop("next", "/") or "/"
return redirect(next_)

api/obs/api/routes/stats.py (new file, 171 lines)
View file

@@ -0,0 +1,171 @@
import logging
from datetime import datetime
from typing import Optional
from operator import and_
from functools import reduce
from sqlalchemy import select, func
from sanic.response import json
from sanicargs import parse_parameters
from obs.api.app import app
from obs.api.db import Track, OvertakingEvent, User
log = logging.getLogger(__name__)
# round to this number of meters for privacy reasons
TRACK_LENGTH_ROUNDING = 1000
# round to this number of seconds for privacy reasons
TRACK_DURATION_ROUNDING = 120
# Everything before this date is probably parsed incorrectly
MINIMUM_RECORDING_DATE = datetime(2010, 1, 1)
def round_to(value: float, multiples: float) -> float:
return round(value / multiples) * multiples
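For example, with the rounding constants above:

round_to(1499.0, TRACK_LENGTH_ROUNDING)  # -> 1000, reported as 1 km
round_to(1500.0, TRACK_LENGTH_ROUNDING)  # -> 2000
round_to(130.0, TRACK_DURATION_ROUNDING)  # -> 120, reported as 2 minutes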
@app.route("/stats")
@parse_parameters
async def stats(req, user: str = None, start: datetime = None, end: datetime = None):
conditions = [
Track.recorded_at != None,
Track.recorded_at > MINIMUM_RECORDING_DATE,
]
if start is not None:
conditions.append(Track.recorded_at >= start)
if end is not None:
conditions.append(Track.recorded_at < end)
# Only the user can look for their own stats, for now
by_user = (
user is not None and req.ctx.user is not None and req.ctx.user.username == user
)
if by_user:
conditions.append(Track.author_id == req.ctx.user.id)
track_condition = reduce(and_, conditions)
public_track_condition = and_(Track.public, track_condition)
query = (
select(
[
func.count().label("publicTrackCount"),
func.sum(Track.duration).label("trackDuration"),
func.sum(Track.length).label("trackLength"),
]
)
.select_from(Track)
.where(public_track_condition)
)
public_track_count, track_duration, track_length = (
await req.ctx.db.execute(query)
).first()
# This is required because SQL returns NULL when the input set to a
# SUM() aggregation is empty.
track_duration = track_duration or 0
track_length = track_length or 0
user_count = (
1
if by_user
else (await req.ctx.db.execute(select(func.count()).select_from(User))).scalar()
)
track_count = (
await req.ctx.db.execute(
select(func.count()).select_from(Track).where(track_condition)
)
).scalar()
event_count = (
await req.ctx.db.execute(
select(func.count())
.select_from(OvertakingEvent)
.join(OvertakingEvent.track)
.where(track_condition)
)
).scalar()
result = {
"numEvents": event_count,
"userCount": user_count,
"trackLength": round_to(track_length or 0, TRACK_LENGTH_ROUNDING),
"trackDuration": round_to(track_duration or 0, TRACK_DURATION_ROUNDING),
"publicTrackCount": public_track_count,
"trackCount": track_count,
}
return json(result)
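A response of this endpoint then has the following shape (values illustrative only):

{
    "numEvents": 1234,
    "userCount": 42,
    "trackLength": 52000,  # meters, rounded to 1 km
    "trackDuration": 3600,  # seconds, rounded to 2 minutes
    "publicTrackCount": 31,
    "trackCount": 83,
}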
# const trackCount = await Track.find(trackFilter).count();
#
# const publicTrackCount = await Track.find({
# ...trackFilter,
# public: true,
# }).count();
#
# const userCount = await User.find({
# ...(userFilter
# ? { _id: userFilter }
# : {
# createdAt: dateFilter,
# }),
# }).count();
#
# const trackStats = await Track.aggregate([
# { $match: trackFilter },
# {
# $addFields: {
# trackLength: {
# $cond: [{ $lt: ['$statistics.length', 500000] }, '$statistics.length', 0],
# },
# numEvents: '$statistics.numEvents',
# trackDuration: {
# $cond: [
# { $and: ['$statistics.recordedUntil', { $gt: ['$statistics.recordedAt', new Date('2010-01-01')] }] },
# { $subtract: ['$statistics.recordedUntil', '$statistics.recordedAt'] },
# 0,
# ],
# },
# },
# },
# { $project: { trackLength: true, numEvents: true, trackDuration: true } },
# {
# $group: {
# _id: 'sum',
# trackLength: { $sum: '$trackLength' },
# numEvents: { $sum: '$numEvents' },
# trackDuration: { $sum: '$trackDuration' },
# },
# },
# ]);
#
# const [trackLength, numEvents, trackDuration] =
# trackStats.length > 0
# ? [trackStats[0].trackLength, trackStats[0].numEvents, trackStats[0].trackDuration]
# : [0, 0, 0];
#
# const trackLengthPrivatized = Math.floor(trackLength / TRACK_LENGTH_ROUNDING) * TRACK_LENGTH_ROUNDING;
# const trackDurationPrivatized =
# Math.round(trackDuration / 1000 / TRACK_DURATION_ROUNDING) * TRACK_DURATION_ROUNDING;
#
# return res.json({
# publicTrackCount,
# trackLength: trackLengthPrivatized,
# trackDuration: trackDurationPrivatized,
# numEvents,
# trackCount,
# userCount,
# });
# }),
# );

View file

@@ -0,0 +1,362 @@
import logging
import re
from json import load as jsonload
from os.path import join, exists, isfile
from sqlalchemy import select, func
from sqlalchemy.orm import joinedload
from obs.api.db import Track, User, Comment
from obs.api.app import app, require_auth, json
from sanic.response import file_stream, empty
from sanic.exceptions import InvalidUsage, NotFound, Forbidden
from sanicargs import parse_parameters
log = logging.getLogger(__name__)
def normalize_user_agent(user_agent):
if not user_agent:
return None
m = re.match(r"\bOBS\/[^\s]+", user_agent)
return m[0] if m else None
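Note that `re.match` anchors at the start of the string, so only user agents that begin with "OBS/" are kept, and everything after the first whitespace is dropped:

normalize_user_agent("OBS/v0.3 ESP32")  # -> "OBS/v0.3"
normalize_user_agent("Mozilla/5.0 (X11; Linux)")  # -> None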
async def _return_tracks(req, extend_query, limit, offset):
if limit <= 0 or limit > 100:
raise InvalidUsage("invalid limit")
if offset < 0:
raise InvalidUsage("offset must be positive")
count_query = extend_query(
select(func.count()).select_from(Track).join(Track.author)
)
track_count = await req.ctx.db.scalar(count_query)
query = (
extend_query(select(Track).options(joinedload(Track.author)))
.limit(limit)
.offset(offset)
.order_by(Track.created_at.desc())
)
tracks = (await req.ctx.db.execute(query)).scalars()
return json(
{
"trackCount": track_count,
"tracks": list(
map(
lambda t: t.to_dict(
for_user_id=req.ctx.user.id if req.ctx.user else None
),
tracks,
)
),
},
)
@app.get("/tracks")
@parse_parameters
async def get_tracks(req, limit: int = 20, offset: int = 0, author: str = None):
def extend_query(q):
q = q.where(Track.public)
if author is not None:
q = q.where(User.username == author)
return q
return await _return_tracks(req, extend_query, limit, offset)
@app.get("/tracks/feed")
@require_auth
@parse_parameters
async def get_feed(req, limit: int = 20, offset: int = 0):
def extend_query(q):
return q.where(Track.author_id == req.ctx.user.id)
return await _return_tracks(req, extend_query, limit, offset)
@app.post("/tracks")
@require_auth
async def post_track(req):
try:
file = req.files["body"][0]
except LookupError as e:
raise InvalidUsage(
'Track upload needs a single file in "body" multipart field'
) from e
try:
body = req.json["track"]
except (LookupError, InvalidUsage):
body = {}
title = body.get("title")
public = body.get("public")
track = Track(
title=title,
customized_title=bool(title),
author=req.ctx.user,
public=public
if public is not None
else req.ctx.user.are_tracks_visible_for_all,
)
track.generate_slug()
await track.prevent_duplicates(req.ctx.db, file.body)
track.uploaded_by_user_agent = normalize_user_agent(req.headers["user-agent"])
track.original_file_name = file.name
await track.write_to_original_file(req.app.config, file.body)
track.queue_processing()
track.auto_generate_title()
req.ctx.db.add(track)
await req.ctx.db.commit()
return await get_track(req, track.slug)
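For illustration, an upload against this endpoint could look like the following client sketch; the `requests` library and the session cookie value are assumptions for the example, not part of this commit:

import requests

with open("ride.csv", "rb") as f:
    resp = requests.post(
        "http://localhost:3000/tracks",
        files={"body": ("ride.csv", f)},
        cookies={"session": "<session cookie obtained via /login>"},  # placeholder
    )
print(resp.json()["track"]["slug"])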
async def _load_track(req, slug, raise_not_found=True):
track = (
await req.ctx.db.execute(
select(Track)
.where(Track.slug == slug)
.options(joinedload(Track.author))
.limit(1)
)
).scalar()
if raise_not_found and track is None:
raise NotFound()
if not track.is_visible_to(req.ctx.user):
raise Forbidden()
return track
@app.get("/tracks/<slug:str>")
async def get_track(req, slug: str):
track = await _load_track(req, slug)
return json(
{"track": track.to_dict(for_user_id=req.ctx.user.id if req.ctx.user else None)},
)
@app.delete("/tracks/<slug:str>")
@require_auth
async def delete_track(req, slug: str):
track = await _load_track(req, slug)
if not track.is_visible_to_private(req.ctx.user):
raise Forbidden()
await req.ctx.db.delete(track)
await req.ctx.db.commit()
return empty()
@app.get("/tracks/<slug:str>/data")
async def get_track_data(req, slug: str):
track = await _load_track(req, slug)
FILE_BY_KEY = {
"measurements": "measurements.json",
"overtakingEvents": "overtakingEvents.json",
"track": "track.json",
}
result = {}
for key, filename in FILE_BY_KEY.items():
file_path = join(
req.app.config.PROCESSING_OUTPUT_DIR, track.file_path, filename
)
if not exists(file_path) or not isfile(file_path):
continue
with open(file_path) as f:
result[key] = jsonload(f)
return json(
result,
)
@app.get("/tracks/<slug:str>/download/original.csv")
async def download_original_file(req, slug: str):
track = await _load_track(req, slug)
if not track.is_visible_to_private(req.ctx.user):
raise Forbidden()
return await file_stream(track.get_original_file_path(app.config))
@app.put("/tracks/<slug:str>")
@require_auth
async def put_track(req, slug: str):
track = await _load_track(req, slug)
if track.author_id != req.ctx.user.id:
raise Forbidden()
try:
body = req.json["track"]
except (LookupError, InvalidUsage):
body = {}
if "title" in body:
track.title = (body["title"] or "").strip() or None
track.customized_title = track.title is not None
if "description" in body:
track.description = (body["description"] or "").strip() or None
process = False
if "public" in body:
public = bool(body["public"])
process = process or (public != track.public) # if changed
track.public = public
if "body" in req.files:
try:
file = req.files["body"][0]
except LookupError as e:
raise InvalidUsage(
'Track upload needs a single file in "body" multipart field'
) from e
await track.prevent_duplicates(req.ctx.db, file.body)
track.uploaded_by_user_agent = normalize_user_agent(req.headers["user-agent"])
track.original_file_name = file.name or (track.slug + ".csv")
await track.write_to_original_file(req.app.config, file.body)
process = True
if process:
track.queue_processing()
track.auto_generate_title()
await req.ctx.db.commit()
track = await _load_track(req, track.slug)
return json(
{"track": track.to_dict(for_user_id=req.ctx.user.id)},
)
@app.get("/tracks/<slug:str>/comments")
@parse_parameters
async def get_track_comments(req, slug: str, limit: int = 20, offset: int = 0):
track = await _load_track(req, slug)
comment_count = await req.ctx.db.scalar(
select(func.count()).select_from(Comment).where(Comment.track_id == track.id)
)
query = (
select(Comment)
.options(joinedload(Comment.author))
.where(Comment.track_id == track.id)
.order_by(Comment.created_at.desc())
.limit(limit)
.offset(offset)
)
comments = (await req.ctx.db.execute(query)).scalars()
return json(
{
"commentCount": comment_count,
"comments": list(
map(
lambda c: c.to_dict(
for_user_id=req.ctx.user.id if req.ctx.user else None
),
comments,
)
),
},
)
@app.post("/tracks/<slug:str>/comments")
@require_auth
async def post_track_comment(req, slug: str):
track = await _load_track(req, slug)
body = req.json.get("comment", {}).get("body")
if not isinstance(body, str):
raise InvalidUsage("no comment given")
# Ensure body is not empty
body = body.strip()
if not body:
raise InvalidUsage("empty comment")
comment = Comment(
body=body,
track_id=track.id,
author_id=req.ctx.user.id,
)
req.ctx.db.add(comment)
await req.ctx.db.commit()
await req.ctx.db.refresh(comment)
comment = (
await req.ctx.db.execute(
select(Comment)
.options(joinedload(Comment.author))
.where(Comment.id == comment.id)
.limit(1)
)
).scalar()
return json({"comment": comment.to_dict(for_user_id=req.ctx.user.id)})
@app.delete("/tracks/<slug:str>/comments/<uid:str>")
@require_auth
async def delete_track_comment(req, slug: str, uid: str):
print("XXXXXXXXXXXXXX")
print("XXXXXXXXXXXXXX")
print("XXXXXXXXXXXXXX")
print("XXXXXXXXXXXXXX")
print("XXXXXXXXXXXXXX")
print("XXXXXXXXXXXXXX")
print("XXXXXXXXXXXXXX")
print("XXXXXXXXXXXXXX")
print("XXXXXXXXXXXXXX")
track = await _load_track(req, slug)
print("trackid", track.id, " uid", uid)
comment = (
await req.ctx.db.execute(
select(Comment)
.options(joinedload(Comment.author))
.where(Comment.track_id == track.id, Comment.uid == uid)
.limit(1)
)
).scalar()
if not comment:
raise NotFound()
if comment.author_id != req.ctx.user.id:
raise Forbidden()
await req.ctx.db.delete(comment)
await req.ctx.db.commit()
return empty()

View file

@@ -0,0 +1,42 @@
import logging
from sanic.response import json
from obs.api.app import app, require_auth
log = logging.getLogger(__name__)
from obs.api import __version__ as version
def user_to_json(user):
return {
"username": user.username,
"email": user.email,
"bio": user.bio,
"image": user.image,
"areTracksVisibleForAll": user.are_tracks_visible_for_all,
# "apiKey": user.api_key,
}
@app.get("/user")
@require_auth
async def get_user(req):
return json(user_to_json(req.ctx.user))
@app.put("/user")
@require_auth
async def put_user(req):
user = req.ctx.user
for key in ["username", "email", "bio", "image"]:
if key in req.json and isinstance(req.json[key], (str, type(None))):
setattr(user, key, req.json[key])
if "areTracksVisibleForAll" in req.json:
user.are_tracks_visible_for_all = bool(req.json["areTracksVisibleForAll"])
await req.ctx.db.commit()
return json(user_to_json(req.ctx.user))

api/obs/bin/__init__.py (new file, 1 line)
View file

@@ -0,0 +1 @@
__path__ = __import__("pkgutil").extend_path(__path__, __name__)

api/obs/bin/obs_api.py (new executable file, 17 lines)
View file

@@ -0,0 +1,17 @@
#!/usr/bin/env python3
import sys
import os
import argparse
import asyncio
from obs.api.app import app
from obs.api.db import connect_db
def main():
app.run(host=app.config.HOST, port=app.config.PORT, debug=app.config.DEBUG)
if __name__ == "__main__":
main()
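`setup.py` (further down) registers this `main` function as the `obs-api` console script, which is exactly what the Dockerfile's `CMD ["obs-api"]` invokes, so the following is equivalent to running the installed command:

from obs.bin.obs_api import main

main()  # serves on app.config.HOST:app.config.PORT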

View file

@@ -1,16 +0,0 @@
CREATE TYPE zone_type IF NOT EXISTS AS ENUM ('urban', 'rural');
CREATE TABLE road_annotations IF NOT EXISTS (
way_id integer,
reverse boolean,
name text,
zone zone_type,
distance_overtaker_mean float,
distance_overtaker_median float,
distance_overtaker_minimum float,
distance_overtaker_n integer,
distance_overtaker_n_below_limit integer,
distance_overtaker_n_above_limit integer,
distance_overtaker_limit float,
distance_overtaker_measurements integer ARRAY,
);

View file

@@ -1,3 +1,7 @@
./scripts
sqlalchemy[asyncio]
asyncpg
sanic~=21.9.1
oic~=1.3.0
sanic-session~=0.8.0
sanicargs~=2.1.0
sanic-cors~=1.0.1
python-slugify~=5.0.2
motor~=2.5.1

@@ -1 +1 @@
Subproject commit e67a2a98f7d8d53c687bc44d72774b5429b049c3
Subproject commit 145b06a80d4607ff2c4a8dcf80e2f5fb0e1c8f1a

api/setup.py (new file, 21 lines)
View file

@@ -0,0 +1,21 @@
from setuptools import setup, find_packages
with open("requirements.txt") as f:
requires = list(f.readlines())
setup(
name="obs-api",
version="0.0.1",
author="OpenBikeSensor Contributors",
license="LGPL-3.0",
description="OpenBikeSensor Portal API",
url="https://github.com/openbikesensor/portal",
packages=find_packages(),
package_data={},
install_requires=requires,
entry_points={
"console_scripts": [
"obs-api=obs.bin.obs_api:main",
]
},
)

View file

@@ -0,0 +1,132 @@
#!/usr/bin/env python3
import argparse
import asyncio
import logging
from datetime import datetime
from uuid import uuid4
from sqlalchemy import select
from motor.motor_asyncio import AsyncIOMotorClient
from obs.api.db import make_session, connect_db, User, Track, async_session, Comment
from obs.api.app import app
log = logging.getLogger(__name__)
async def main():
logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
parser = argparse.ArgumentParser(
description="processes a single track for use in the portal, "
"using the obs.face algorithms"
)
parser.add_argument(
"mongodb_url",
metavar="MONGODB_URL",
help="url to the mongodb, in format mongodb://user:pass@host/dbname",
)
args = parser.parse_args()
async with connect_db(app.config.POSTGRES_URL):
async with make_session() as session:
mongo = AsyncIOMotorClient(args.mongodb_url).get_default_database()
log.debug("Connected to mongodb and postgres.")
user_id_map = await import_users(mongo, session)
await import_tracks(mongo, session, user_id_map)
await session.commit()
async def import_users(mongo, session):
old_id_by_email = {}
async for user in mongo.users.find({}):
old_id_by_email[user["email"]] = user["_id"]
new_user = User(
sub=str(uuid4()),
match_by_username_email=True,
email=user["email"],
username=user["username"],
bio=user.get("bio"),
image=user.get("image"),
are_tracks_visible_for_all=user.get("areTracksVisibleForAll") or False,
api_key=str(user["_id"]),
created_at=user.get("createdAt") or datetime.utcnow(),
updated_at=user.get("updatedAt") or datetime.utcnow(),
)
session.add(new_user)
log.info("Creating user %s", new_user.username)
await session.commit()
id_map = {}
result = await session.scalars(select(User))
for user in result:
id_map[old_id_by_email[user.email]] = user.id
return id_map
def parse_datetime(s):
if isinstance(s, str):
return datetime.fromisoformat(s)
return s
async def import_tracks(mongo, session, user_id_map):
async for track in mongo.tracks.find({}):
stats = track.get("statistics") or {}
new_track = Track(
created_at=parse_datetime(track.get("createdAt")) or datetime.utcnow(),
updated_at=parse_datetime(track.get("updatedAt")) or datetime.utcnow(),
slug=track["slug"],
title=track.get("title"),
processing_status=track.get("processingStatus") or "pending",
processing_log=track.get("processingLog"),
customized_title=bool(track.get("customizedTitle")),
description=track.get("description"),
public=track.get("public"),
uploaded_by_user_agent=track.get("uploadedByUserAgent"),
original_file_name=track.get("originalFileName"),
original_file_hash=track.get("originalFileHash"),
# statistics
recorded_at=parse_datetime(stats.get("recordedAt")),
recorded_until=parse_datetime(stats.get("recordedUntil")),
duration=stats.get("duration"),
length=stats.get("length"),
segments=stats.get("segments"),
num_events=stats.get("num_events"),
num_measurements=stats.get("num_measurements"),
num_valid=stats.get("numValid"),
author_id=user_id_map[track["author"]],
)
session.add(new_track)
log.info("Creating track %s", new_track.slug)
comment_ids = track.get("comments") or []
if comment_ids:
async for comment in mongo.comments.find({"_id": {"$in": comment_ids}}):
new_comment = Comment(
created_at=parse_datetime(comment.get("createdAt"))
or datetime.utcnow(),
updated_at=parse_datetime(comment.get("updatedAt"))
or datetime.utcnow(),
body=comment.get("body"),
author_id=user_id_map[comment["author"]],
)
new_track.comments.append(new_comment)
session.add(new_comment)
await session.commit()
if __name__ == "__main__":
asyncio.run(main())

View file

@@ -2,13 +2,18 @@
import argparse
import logging
import os
import tempfile
import sys
import json
import shutil
import asyncio
import hashlib
import struct
import pytz
from os.path import join, dirname, abspath
from datetime import datetime
from sqlalchemy import delete, select
from sqlalchemy.orm import joinedload
from obs.face.importer import ImportMeasurementsCsv
from obs.face.geojson import ExportMeasurements
@@ -24,9 +29,9 @@ from obs.face.filter import (
RequiredFieldsFilter,
)
from obs.face.osm import DataSource, DatabaseTileSource, OverpassTileSource
from sqlalchemy import delete
from obs.face.db import make_session, connect_db, OvertakingEvent, async_session
from obs.api.db import make_session, connect_db, OvertakingEvent, async_session, Track
from obs.api.app import app
log = logging.getLogger(__name__)
@@ -40,96 +45,110 @@ async def main():
)
parser.add_argument(
"-i", "--input", required=True, action="store", help="path to input CSV file"
)
parser.add_argument(
"-o", "--output", required=True, action="store", help="path to output directory"
)
parser.add_argument(
"--path-cache",
"--loop-delay",
action="store",
default=None,
dest="cache_dir",
help="path where the visualization data will be stored",
)
parser.add_argument(
"--settings",
dest="settings_file",
required=True,
default=None,
help="path to track settings file",
type=int,
default=10,
help="delay between loops, if no track was found in the queue (polling)",
)
# https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING
postgres_url_default = os.environ.get("POSTGRES_URL")
parser.add_argument(
"--postgres-url",
required=not postgres_url_default,
action="store",
help="connection string for postgres database",
default=postgres_url_default,
"tracks",
metavar="ID_OR_SLUG",
nargs="*",
help="ID or slug of tracks to process, if not passed, the queue is processed in a loop",
)
args = parser.parse_args()
async with connect_db(args.postgres_url):
if args.cache_dir is None:
with tempfile.TemporaryDirectory() as cache_dir:
args.cache_dir = cache_dir
await process(args)
else:
await process(args)
async def process(args):
async with connect_db(app.config.POSTGRES_URL):
async with make_session() as session:
log.info("Loading OpenStreetMap data")
tile_source = DatabaseTileSource(async_session.get())
# tile_source = OverpassTileSource(args.cache_dir)
# tile_source = OverpassTileSource(app.config.OBS_FACE_CACHE_DIR)
data_source = DataSource(tile_source)
filename_input = os.path.abspath(args.input)
dataset_id = os.path.splitext(os.path.basename(args.input))[0]
if args.tracks:
await process_tracks(session, data_source, args.tracks)
else:
await process_tracks_loop(session, data_source, args.loop_delay)
os.makedirs(args.output, exist_ok=True)
log.info("Loading settings")
settings_path = os.path.abspath(args.settings_file)
with open(settings_path, "rt") as f:
settings = json.load(f)
settings_output_path = os.path.abspath(
os.path.join(args.output, "track-settings.json")
async def process_tracks_loop(session, data_source, delay):
while True:
track = (
await session.execute(
select(Track)
.where(Track.processing_status == "queued")
.order_by(Track.processing_queued_at)
.options(joinedload(Track.author))
)
if settings_path != settings_output_path:
log.info("Copy settings to output directory")
shutil.copyfile(settings_path, settings_output_path)
).scalar()
if track is None:
await asyncio.sleep(delay)
else:
try:
await process_track(session, track, data_source)
except Exception:
log.exception("Failed to process track %s. Will continue.", track.slug)
async def process_tracks(session, data_source, tracks):
"""
Processes the tracks and writes event data to the database.
:param tracks: A list of track IDs or slugs to process.
"""
for track_id_or_slug in tracks:
track = (
await session.execute(
select(Track)
.where(
Track.id == track_id_or_slug
if isinstance(track_id_or_slug, int)
else Track.slug == track_id_or_slug
)
.options(joinedload(Track.author))
)
).scalar()
if not track:
raise ValueError(f"Track {track_id_or_slug!r} not found.")
await process_track(session, track, data_source)
def to_naive_utc(t):
if t is None:
return None
return t.astimezone(pytz.UTC).replace(tzinfo=None)
async def process_track(session, track, data_source):
try:
track.processing_status = "complete"
track.processed_at = datetime.utcnow()
await session.commit()
original_file_path = track.get_original_file_path(app.config)
output_dir = join(
app.config.PROCESSING_OUTPUT_DIR, track.author.username, track.slug
)
os.makedirs(output_dir, exist_ok=True)
log.info("Annotating and filtering CSV file")
imported_data, statistics = ImportMeasurementsCsv().read(
filename_input,
user_id="dummy",
dataset_id=dataset_id,
original_file_path,
user_id="dummy", # TODO: user username or id or nothing?
dataset_id=Track.slug, # TODO: use track id or slug or nothing?
)
input_data = await AnnotateMeasurements(
data_source, cache_dir=args.cache_dir
).annotate(imported_data)
filters_from_settings = []
for filter_description in settings.get("filters", []):
filter_type = filter_description.get("type")
if filter_type == "PrivacyZonesFilter":
privacy_zones = [
PrivacyZone(
latitude=zone.get("latitude"),
longitude=zone.get("longitude"),
radius=zone.get("radius"),
annotator = AnnotateMeasurements(
data_source, cache_dir=app.config.OBS_FACE_CACHE_DIR
)
for zone in filter_description.get("config", {}).get("privacyZones", [])
]
filters_from_settings.append(PrivacyZonesFilter(privacy_zones))
else:
log.warning("Ignoring unknown filter type %r in settings file", filter_type)
input_data = await annotator.annotate(imported_data)
track_filter = ChainFilter(
RequiredFieldsFilter(),
@@ -137,7 +156,7 @@ async def process(args):
user_id_mode=AnonymizationMode.REMOVE,
measurement_id_mode=AnonymizationMode.REMOVE,
),
*filters_from_settings,
# TODO: load user privacy zones and create a PrivacyZonesFilter() from them
)
measurements_filter = DistanceMeasuredFilter()
overtaking_events_filter = ConfirmedFilter()
@@ -164,54 +183,69 @@
},
}
statistics_json = {
"recordedAt": statistics["t_min"].isoformat()
if statistics["t_min"] is not None
else None,
"recordedUntil": statistics["t_max"].isoformat()
if statistics["t_max"] is not None
else None,
"duration": statistics["t"],
"length": statistics["d"],
"segments": statistics["n_segments"],
"numEvents": statistics["n_confirmed"],
"numMeasurements": statistics["n_measurements"],
"numValid": statistics["n_valid"],
}
for output_filename, data in [
("measurements.json", measurements_json),
("overtakingEvents.json", overtaking_events_json),
("track.json", track_json),
("statistics.json", statistics_json),
]:
with open(os.path.join(args.output, output_filename), "w") as fp:
target = join(output_dir, output_filename)
log.debug("Writing file %s", target)
with open(target, "w") as fp:
json.dump(data, fp, indent=4)
log.info("Importing to database.")
async with make_session() as session:
await clear_track_data(session, settings["trackId"])
await import_overtaking_events(session, settings["trackId"], overtaking_events)
log.info("Import events into database...")
await clear_track_data(session, track)
await import_overtaking_events(session, track, overtaking_events)
log.info("Write track statistics and update status...")
track.recorded_at = to_naive_utc(statistics["t_min"])
track.recorded_until = to_naive_utc(statistics["t_max"])
track.duration = statistics["t"]
track.length = statistics["d"]
track.segments = statistics["n_segments"]
track.num_events = statistics["n_confirmed"]
track.num_measurements = statistics["n_measurements"]
track.num_valid = statistics["n_valid"]
track.processing_status = "complete"
track.processed_at = datetime.utcnow()
await session.commit()
log.info("Track %s imported.", track.slug)
except BaseException as e:
await clear_track_data(session, track)
track.processing_status = "error"
track.processing_log = str(e)
track.processed_at = datetime.utcnow()
await session.commit()
raise
async def clear_track_data(session, track):
track.recorded_at = None
track.recorded_until = None
track.duration = None
track.length = None
track.segments = None
track.num_events = None
track.num_measurements = None
track.num_valid = None
async def clear_track_data(session, track_id):
await session.execute(
delete(OvertakingEvent).where(OvertakingEvent.track_id == track_id)
delete(OvertakingEvent).where(OvertakingEvent.track_id == track.id)
)
async def import_overtaking_events(session, track_id, overtaking_events):
async def import_overtaking_events(session, track, overtaking_events):
event_models = []
for m in overtaking_events:
sha = hashlib.sha256()
sha.update(track_id.encode("utf-8"))
sha.update(struct.pack("Q", int(m["time"].timestamp())))
hex_hash = sha.hexdigest()
hex_hash = hashlib.sha256(
struct.pack("QQ", track.id, int(m["time"].timestamp()))
).hexdigest()
event_models.append(
OvertakingEvent(
track_id=track_id,
track_id=track.id,
hex_hash=hex_hash,
way_id=m["OSM_way_id"],
direction_reversed=m["OSM_way_orientation"] < 0,

View file

@@ -1,19 +0,0 @@
require('../src/db');
const { Track } = require('../src/models');
async function main() {
for (const track of await Track.find()) {
console.log('queuing', track.slug);
await track.queueProcessing();
}
}
main()
.then(() => {
process.exit(1);
})
.catch((e) => {
console.error(e);
process.exit(1);
});

View file

@@ -4,7 +4,7 @@ import logging
import os
import asyncio
from obs.face.db import init_models, connect_db
from obs.api.db import init_models, connect_db
log = logging.getLogger(__name__)

View file

@@ -16,13 +16,6 @@ services:
- '27017:27017'
restart: on-failure
redis:
image: redis
volumes:
- ./local/redis:/data
command: redis-server --appendonly yes
restart: on-failure
postgres:
image: "openmaptiles/postgis:6.0"
environment:
@@ -37,51 +30,41 @@ services:
api:
image: obs-api
build:
context: ./api
context: ./api/
dockerfile: Dockerfile
volumes:
- ./api/src:/opt/obs/api/src
- ./api/obs:/opt/obs/api/obs
- ./api/scripts/obs:/opt/obs/scripts/obs
- ./api/tools:/opt/obs/api/tools
- ./api/scripts/obs:/opt/obs/api/scripts/obs
- ./api/views:/opt/obs/api/views
- ./api/config.dev.py:/opt/obs/api/config.py
- ./local/api-data:/data
- ./api/.migrations.js:/opt/obs/api/.migrations.js
- ./api/migrations:/opt/obs/api/migrations/
- ./api/config.dev.json:/opt/obs/api/config.json
environment:
- PORT=3000
- MONGODB_URL=mongodb://mongo/obsTest
- DATA_DIR=/data
- POSTGRES_URL="postgresql+asyncpg://obs:obs@localhost/obs"
links:
- mongo
- redis
- postgres
- keycloak
ports:
- '3000:3000'
restart: on-failure
command:
- npm
- run
- dev
- obs-api
worker:
image: obs-api
build:
context: ./api
context: ./api/
dockerfile: Dockerfile
volumes:
- ./api/src:/opt/obs/api/src
- ./api/obs:/opt/obs/api/obs
- ./api/scripts/obs:/opt/obs/scripts/obs
- ./api/tools:/opt/obs/api/tools
- ./api/scripts/obs:/opt/obs/api/scripts/obs
- ./api/views:/opt/obs/api/views
- ./api/config.dev.py:/opt/obs/api/config.py
- ./local/api-data:/data
- ./api/config.dev.json:/opt/obs/api/config.json
environment:
- DATA_DIR=/data
links:
- mongo
- redis
- postgres
- keycloak
restart: on-failure
command: 'bash -c "pip install geopy && npm run dev:worker"'
command:
- python
- tools/process_track.py
frontend:
image: obs-frontend
@@ -99,6 +82,7 @@ services:
- PORT=3000
links:
- api
- tileserver
ports:
- '3001:3000'
restart: on-failure
@@ -107,8 +91,7 @@
- start
openmaptiles-tools:
image: "openmaptiles/openmaptiles-tools:${TOOLS_VERSION}"
env_file: tile-generator/.env
image: openmaptiles/openmaptiles-tools:6.0
environment:
# Must match the version of this file (first line)
# download-osm will use it when generating a composer file
@@ -138,13 +121,11 @@ services:
- ./tile-generator/cache:/cache
generate-vectortiles:
image: "openmaptiles/generate-vectortiles:${TOOLS_VERSION}"
image: openmaptiles/generate-vectortiles:6.0
volumes:
- ./tile-generator/data:/export
- ./tile-generator/build/openmaptiles.tm2source:/tm2source
env_file: tile-generator/.env
environment:
FILTER_MAPNIK_OUTPUT: ${FILTER_MAPNIK_OUTPUT}
MBTILES_NAME: ${MBTILES_FILE}
BBOX: ${BBOX}
MIN_ZOOM: ${MIN_ZOOM}
@@ -168,4 +149,20 @@ services:
command:
- --config
- /config/tileserver.json
# - /data/tiles.mbtiles
keycloak:
image: jboss/keycloak
ports:
- 3003:8080
links:
- postgres
environment:
KEYCLOAK_USER: admin
KEYCLOAK_PASSWORD: admin
KEYCLOAK_FRONTEND_URL: http://localhost:3003/auth/
DB_VENDOR: postgres
DB_ADDR: postgres
DB_DATABASE: obs
DB_USER: obs
DB_PASSWORD: obs
# DB_SCHEMA: keycloak

View file

@@ -1,11 +1,5 @@
{
"apiUrl": "http://localhost:3001",
"auth": {
"server": "http://localhost:3000",
"clientId": "b730f8d2-d93c-4c68-9ff0-dfac8da76ee2",
"scope": "*",
"redirectUri": "http://localhost:3001/redirect"
},
"apiUrl": "http://localhost:3000",
"imprintUrl": "https://example.com/imprint",
"privacyPolicyUrl": "https://example.com/privacy",
"mapTileset": {

View file

@@ -42,6 +42,10 @@ const App = connect((state) => ({login: state.login}))(function App({login}) {
const config = useConfig()
const apiVersion = useObservable(() => from(api.get('/info')).pipe(pluck('version')))
React.useEffect(() => {
api.loadUser()
}, [])
return (
<Router basename={process.env.PUBLIC_URL || '/'}>
<Menu fixed="top">

View file

@@ -1,9 +1,7 @@
import {stringifyParams} from 'query'
import globalStore from 'store'
import {setAuth, invalidateAccessToken, resetAuth} from 'reducers/auth'
import {setLogin} from 'reducers/login'
import configPromise from 'config'
import {create as createPkce} from 'pkce'
import download from 'downloadjs'
function getFileNameFromContentDispostionHeader(contentDisposition: string): string | undefined {
@ -29,196 +27,42 @@ class RequestError extends Error {
class API {
constructor(store) {
this.store = store
this._getValidAccessTokenPromise = null
}
/**
* Fetches or directly returns from cache the metadata information from the
* authorization server, according to https://tools.ietf.org/html/rfc8414.
* Also validates compatibility with this metadata server, i.e. checking that
* it supports PKCE.
*/
async getAuthorizationServerMetadata() {
const config = await configPromise
const url = new URL(config.auth.server)
const pathSuffix = url.pathname.replace(/^\/+|\/+$/, '')
url.pathname = '/.well-known/oauth-authorization-server' + (pathSuffix ? '/' + pathSuffix : '')
const response = await window.fetch(url.toString())
const metadata = await response.json()
const {
authorization_endpoint: authorizationEndpoint,
token_endpoint: tokenEndpoint,
response_types_supported: responseTypesSupported,
code_challenge_methods_supported: codeChallengeMethodsSupported,
} = metadata
if (!authorizationEndpoint) {
throw new Error('No authorization endpoint')
async loadUser() {
try {
const result = await this.get('/user')
this.store.dispatch(setLogin(result))
} catch {
this.store.dispatch(setLogin(null))
}
if (!authorizationEndpoint.startsWith(config.auth.server)) {
throw new Error('Invalid authorization endpoint')
}
if (!tokenEndpoint) {
throw new Error('No token endpoint')
}
if (!tokenEndpoint.startsWith(config.auth.server)) {
throw new Error('Invalid token endpoint')
}
if (!Array.isArray(responseTypesSupported) || !responseTypesSupported.includes('code')) {
throw new Error('Authorization code flow not supported or no support advertised.')
}
if (!Array.isArray(codeChallengeMethodsSupported) || !codeChallengeMethodsSupported.includes('S256')) {
throw new Error('PKCE with S256 not supported or no support advertised.')
}
return {authorizationEndpoint, tokenEndpoint}
}
/**
* Return an access token, if it is (still) valid. If not, and a refresh
* token exists, use the refresh token to issue a new access token. If that
* fails, or neither is available, return `null`. This should usually result
* in a redirect to login.
*/
async getValidAccessToken() {
// prevent multiple parallel refresh processes
if (this._getValidAccessTokenPromise) {
return await this._getValidAccessTokenPromise
} else {
this._getValidAccessTokenPromise = this._getValidAccessToken()
const result = await this._getValidAccessTokenPromise
this._getValidAccessTokenPromise = null
return result
}
}
async _getValidAccessToken() {
let {auth} = this.store.getState()
if (!auth) {
return null
}
const {tokenType, accessToken, refreshToken, expiresAt} = auth
// access token is valid
if (accessToken && expiresAt > new Date().getTime()) {
return `${tokenType} ${accessToken}`
}
if (!refreshToken) {
return null
}
// Try to use the refresh token
const {tokenEndpoint} = await this.getAuthorizationServerMetadata()
const config = await configPromise
const url = new URL(tokenEndpoint)
url.searchParams.append('refresh_token', refreshToken)
url.searchParams.append('grant_type', 'refresh_token')
url.searchParams.append('scope', config.auth.scope)
const response = await window.fetch(url.toString())
const json = await response.json()
if (response.status === 200 && json != null && json.error == null) {
auth = this.getAuthFromTokenResponse(json)
this.store.dispatch(setAuth(auth))
return `${auth.tokenType} ${auth.accessToken}`
} else {
console.warn('Could not use refresh token, error response:', json)
this.store.dispatch(resetAuth())
return null
}
}
async exchangeAuthorizationCode(code) {
const codeVerifier = localStorage.getItem('codeVerifier')
if (!codeVerifier) {
throw new Error('No code verifier found')
}
const {tokenEndpoint} = await this.getAuthorizationServerMetadata()
const config = await configPromise
const url = new URL(tokenEndpoint)
url.searchParams.append('code', code)
url.searchParams.append('grant_type', 'authorization_code')
url.searchParams.append('client_id', config.auth.clientId)
url.searchParams.append('redirect_uri', config.auth.redirectUri)
url.searchParams.append('code_verifier', codeVerifier)
const response = await window.fetch(url.toString())
const json = await response.json()
if (json.error) {
return json
}
const auth = api.getAuthFromTokenResponse(json)
this.store.dispatch(setAuth(auth))
const {user} = await this.get('/user')
this.store.dispatch(setLogin(user))
return true
}
async logout() {
// 1. Tell the store to forget that we're logged in.
this.store.dispatch(resetAuth())
// 2. Log out session in API.
const {tokenEndpoint} = await this.getAuthorizationServerMetadata()
const url = new URL(tokenEndpoint.replace(/\/token$/, '/logout'))
url.searchParams.append('redirectTo', window.location.href) // bring us back to the current page
const config = await configPromise
const url = new URL(config.apiUrl + '/logout')
url.searchParams.append('next', window.location.href) // bring us back to the current page
window.location.href = url.toString()
}
async makeLoginUrl() {
const {authorizationEndpoint} = await this.getAuthorizationServerMetadata()
const config = await configPromise
const {codeVerifier, codeChallenge} = createPkce()
localStorage.setItem('codeVerifier', codeVerifier)
const loginUrl = new URL(authorizationEndpoint)
loginUrl.searchParams.append('client_id', config.auth.clientId)
loginUrl.searchParams.append('scope', config.auth.scope)
loginUrl.searchParams.append('redirect_uri', config.auth.redirectUri)
loginUrl.searchParams.append('response_type', 'code')
loginUrl.searchParams.append('code_challenge', codeChallenge)
loginUrl.searchParams.append('code_challenge_method', 'S256')
// TODO: Implement PKCE
return loginUrl.toString()
const url = new URL(config.apiUrl + '/login')
url.searchParams.append('next', window.location.href) // bring us back to the current page
return url.toString()
}
async fetch(url, options = {}) {
const accessToken = await this.getValidAccessToken()
const config = await configPromise
const {returnResponse = false, ...fetchOptions} = options
const response = await window.fetch(config.apiUrl + '/api' + url, {
const response = await window.fetch(config.apiUrl + url, {
...fetchOptions,
headers: {
...(fetchOptions.headers || {}),
Authorization: accessToken,
},
credentials: 'include',
})
if (response.status === 401) {
// Unset login, since 401 means that we're not logged in. On the next
// request with `getValidAccessToken()`, this will be detected and the
// refresh token is used (if still valid).
this.store.dispatch(invalidateAccessToken())
throw new Error('401 Unauthorized')
}
@@ -278,16 +122,6 @@ class API {
return await this.post(url, {...options, method: 'put'})
}
getAuthFromTokenResponse(tokenResponse) {
return {
tokenType: tokenResponse.token_type,
accessToken: tokenResponse.access_token,
refreshToken: tokenResponse.refresh_token,
expiresAt: new Date().getTime() + tokenResponse.expires_in * 1000,
scope: tokenResponse.scope,
}
}
async downloadFile(url, options = {}) {
const res = await this.fetch(url, {returnResponse: true, ...options})
const blob = await res.blob()
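With the OAuth/PKCE machinery removed, authentication rides entirely on the session cookie (credentials: 'include' above), and login and logout become plain redirects to the API. A minimal sketch of how a caller can drive the new flow; loadUser, makeLoginUrl, and logout are the methods from this diff, while ensureLoggedIn itself is a hypothetical helper:

async function ensureLoggedIn() {
  // GET /user fills (or clears) the login reducer via setLogin
  await api.loadUser()
  if (!globalStore.getState().login) {
    // Redirect to the API's login endpoint; ?next= brings us back here
    window.location.href = await api.makeLoginUrl()
  }
}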

@@ -7,9 +7,11 @@ export default function LoginButton(props) {
const [busy, setBusy] = React.useState(false)
const onClick = React.useCallback(async (e) => {
console.log('aa')
e.preventDefault()
setBusy(true)
const url = await api.makeLoginUrl()
console.log('go', url)
window.location.href = url
setBusy(false)
}, [setBusy])

@@ -6,7 +6,7 @@ import styles from './Page.module.scss'
export default function Page({small, children, fullScreen}: {small?: boolean, children: ReactNode, fullScreen?: boolean}) {
return (
<main className={classnames({page: true, small, fullScreen})}>
<main className={classnames(styles.page, small && styles.small, fullScreen && styles.fullScreen)}>
{fullScreen ? children : <Container>{children}</Container>}
</main>
)
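The change only swaps global class names for the CSS module that was already imported, so call sites stay the same. A hypothetical usage, for illustration:

function SettingsPage() {
  return (
    <Page small>
      <h1>Settings</h1>
    </Page>
  )
}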

@@ -8,6 +8,13 @@ export interface Config {
zoom: number
}
obsMapSource?: string
imprintUrl?: string
privacyPolicyUrl?: string
mapTileset?: {
url?: string
minZoom?: number
maxZoom?: number
}
}
async function loadConfig(): Promise<Config> {
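The new optional fields mirror the slimmed-down frontend config above. A matching object for local development could look like the following sketch; Partial<Config> is used because the interface is only partially visible here, apiUrl is assumed to be declared in the elided part (the config.json above ships it), and the tileset URL is a placeholder:

const devConfig: Partial<Config> = {
  apiUrl: 'http://localhost:3000',
  imprintUrl: 'https://example.com/imprint',
  privacyPolicyUrl: 'https://example.com/privacy',
  mapTileset: {
    url: 'http://localhost:3002/data/v3.json', // placeholder tileserver URL
    minZoom: 0,
    maxZoom: 14,
  },
}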

@@ -10,8 +10,10 @@ const LogoutPage = connect(
)(function LogoutPage({loggedIn}) {
React.useEffect(() => {
// no await, just trigger it
if (loggedIn) {
api.logout()
})
}
}, [loggedIn])
return loggedIn ? <Loader active /> : <Redirect to="/" />
})

@@ -37,10 +37,10 @@ export default function TrackDetails({track, isAuthor}) {
</List.Item>
)}
{track.statistics?.duration != null && (
{track.duration != null && (
<List.Item>
<List.Header>Duration</List.Header>
{formatDuration(track.statistics.duration)}
{formatDuration(track.duration)}
</List.Item>
)}
@@ -51,24 +51,32 @@ export default function TrackDetails({track, isAuthor}) {
</List.Item>
)}
{track?.statistics?.recordedAt != null && (
{track?.recordedAt != null && (
<List.Item>
<List.Header>Recorded on</List.Header>
<FormattedDate date={track?.statistics.recordedAt} />
<FormattedDate date={track?.recordedAt} />
</List.Item>
)}
{track?.statistics?.numEvents != null && (
{track?.numEvents != null && (
<List.Item>
<List.Header>Confirmed events</List.Header>
{track?.statistics.numEvents}
{track?.numEvents}
</List.Item>
)}
{track?.statistics?.trackLength != null && (
{track?.length != null && (
<List.Item>
<List.Header>Length</List.Header>
{(track?.statistics.trackLength / 1000).toFixed(2)} km
{(track?.length / 1000).toFixed(2)} km
</List.Item>
)}
{track?.processingStatus != null && (
<List.Item>
<List.Header>Processing</List.Header>
{track.processingStatus}
</List.Item>
)}
</List>

@@ -57,7 +57,6 @@ const TrackPage = connect((state) => ({login: state.login}))(function TrackPage(
of(undefined),
from(api.get(url)).pipe(
catchError(() => {
// history.replace('/tracks')
return of(null)
})
)
@@ -72,7 +71,7 @@ const TrackPage = connect((state) => ({login: state.login}))(function TrackPage(
switchMap((url) =>
from(api.get(url)).pipe(
catchError(() => {
history.replace('/tracks')
return of(null)
})
)
),
@@ -117,8 +116,10 @@ const TrackPage = connect((state) => ({login: state.login}))(function TrackPage(
const {track, trackData, comments} = data || {}
console.log({track, trackData})
const loading = track == null || trackData === undefined
const processing = ['processing', 'pending'].includes(track?.processingStatus)
const processing = ['processing', 'queued', 'created'].includes(track?.processingStatus)
const error = track?.processingStatus === 'error'
const [left, setLeft] = React.useState(true)
const [right, setRight] = React.useState(false)
@@ -135,6 +136,15 @@ const TrackPage = connect((state) => ({login: state.login}))(function TrackPage(
</Message>
)}
{error && (
<Message error>
<Message.Content>
The processing of this track failed. Please ask your site
administrator for help in debugging the issue.
</Message.Content>
</Message>
)}
<Grid stackable>
<Grid.Row>
<Grid.Column width={12}>

@@ -94,6 +94,14 @@ function maxLength(t, max) {
}
}
const COLOR_BY_STATUS = {
error: 'red',
complete: 'green',
created: 'gray',
queued: 'orange',
processing: 'orange',
}
export function TrackListItem({track, privateTracks = false}) {
return (
<Item key={track.slug}>
@@ -121,6 +129,10 @@ export function TrackListItem({track, privateTracks = false}) {
<Icon name="eye slash" fitted /> Private
</>
)}
<span style={{marginLeft: '1em'}}>
<Icon color={COLOR_BY_STATUS[track.processingStatus]} name="bolt" fitted /> Processing {track.processingStatus}
</span>
</Item.Extra>
)}
</Item.Content>
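Together with the statuses checked in TrackPage ('created', 'queued', 'processing') and the keys of COLOR_BY_STATUS above ('error', 'complete'), the worker's track lifecycle can be summed up as a union; this is an inferred sketch, not a type this commit exports:

type ProcessingStatus = 'created' | 'queued' | 'processing' | 'complete' | 'error'

// COLOR_BY_STATUS above could then be typed as Record<ProcessingStatus, string>.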

@@ -63,6 +63,7 @@ export function FileUploadStatus({
formData.append('body', file)
xhr = new XMLHttpRequest()
xhr.withCredentials = true
const onProgress = (e) => {
const progress = (e.loaded || 0) / (e.total || 1)
@@ -79,14 +80,14 @@
const config = await configPromise
if (slug) {
xhr.open('PUT', `${config.apiUrl}/api/tracks/${slug}`)
xhr.open('PUT', `${config.apiUrl}/tracks/${slug}`)
} else {
xhr.open('POST', `${config.apiUrl}/api/tracks`)
xhr.open('POST', `${config.apiUrl}/tracks`)
}
const accessToken = await api.getValidAccessToken()
// const accessToken = await api.getValidAccessToken()
xhr.setRequestHeader('Authorization', accessToken)
// xhr.setRequestHeader('Authorization', accessToken)
xhr.send(formData)
}
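The upload now authenticates the same way: withCredentials = true sends the session cookie, replacing the Authorization header that is left commented out above. XHR is kept because fetch cannot report upload progress; for comparison, a rough fetch-based equivalent of the same request would be (a sketch, not code from this commit):

async function uploadTrack(file: File, config: Config) {
  const formData = new FormData()
  formData.append('body', file)
  // The session cookie replaces the Authorization header
  await window.fetch(`${config.apiUrl}/tracks`, {
    method: 'POST',
    body: formData,
    credentials: 'include',
  })
}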

@@ -1,30 +0,0 @@
const initialState = null
export function setAuth(auth) {
return {type: 'AUTH.SET', payload: {auth}}
}
export function resetAuth() {
return {type: 'AUTH.RESET'}
}
export function invalidateAccessToken() {
return {type: 'AUTH.INVALIDATE_ACCESS_TOKEN'}
}
export default function loginReducer(state = initialState, action) {
switch (action.type) {
case 'AUTH.SET':
return action.payload.auth
case 'AUTH.INVALIDATE_ACCESS_TOKEN':
return state && {
...state,
accessToken: null,
expiresAt: 0,
}
case 'AUTH.RESET':
return null
default:
return state
}
}

@@ -1,6 +1,5 @@
import {combineReducers} from 'redux'
import login from './login'
import auth from './auth'
export default combineReducers({login, auth})
export default combineReducers({login})

@@ -4,12 +4,16 @@ export function setLogin(user) {
return {type: 'LOGIN.SET', payload: {user}}
}
export function resetLogin() {
return {type: 'LOGIN.RESET'}
}
export default function loginReducer(state = initialState, action) {
switch (action.type) {
case 'LOGIN.SET':
return action.payload.user
case 'AUTH.RESET': // cross reducer action :)
case 'LOGIN.RESET':
return null
default:
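With the auth reducer deleted, the cross-reducer 'AUTH.RESET' hook gives way to an explicit 'LOGIN.RESET'. Hypothetical usage of the two action creators above:

store.dispatch(setLogin(user))   // after a successful GET /user
store.dispatch(resetLogin())     // e.g. from a 401 handler or the logout page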

@@ -3,7 +3,7 @@ import persistState from 'redux-localstorage'
import rootReducer from './reducers'
const enhancer = compose(persistState(['login', 'auth']))
const enhancer = compose(persistState(['login']))
const store = createStore(rootReducer, undefined, enhancer)

@@ -12,7 +12,13 @@ export type TrackData = {
overtakingEvents: FeatureCollection,
}
export type TrackStatistics = {
export type Track = {
slug: string
author: UserProfile
title: string
description?: string
createdAt: string
public?: boolean
recordedAt?: Date
recordedUntil?: Date
duration?: number
@@ -23,16 +29,6 @@
numValid?: number
}
export type Track = {
slug: string
author: UserProfile
title: string
description?: string
createdAt: string
public?: boolean
statistics?: TrackStatistics
}
export type TrackPoint = {
type: 'Feature',
geometry: Point,
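Since the statistics now live directly on Track, a track from the new API might look like the sketch below. Values are illustrative, and length, numEvents, and processingStatus are assumed to be declared in the elided part of the type, since TrackDetails reads them:

const example: Partial<Track> = {
  slug: 'abc123',
  title: 'Evening commute',
  createdAt: '2021-11-04T18:13:24Z',
  public: false,
  duration: 1840,               // consumed by formatDuration in TrackDetails
  numEvents: 7,
  length: 5230,                 // metres; TrackDetails divides by 1000 for km
  processingStatus: 'complete',
}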

old-api/Dockerfile (new file)

@@ -0,0 +1,31 @@
FROM node:15.14-buster
# Install python3, pip3, and make them the default for `python` and `pip` commands
RUN apt-get update && apt-get install -y python3 python3-pip
RUN ln -s $(which python3) /usr/local/bin/python
RUN ln -s $(which pip3) /usr/local/bin/pip
WORKDIR /opt/obs/api
ADD package.json package-lock.json /opt/obs/api/
RUN echo update-notifier=false >> ~/.npmrc
RUN npm ci
ADD scripts /opt/obs/api/scripts/
ADD tools /opt/obs/api/tools/
ADD requirements.txt /opt/obs/api/
RUN pip install -r requirements.txt
RUN pip install -e ./scripts
ADD views /opt/obs/api/views/
ADD src /opt/obs/api/src/
#ADD .migrations.js .
#ADD migrations .
EXPOSE 3000
ENV PORT=3000
ENV DATA_DIR=/data
CMD ["npm", "run", "start"]

old-api/requirements.txt (new file)

@@ -0,0 +1,3 @@
./scripts
sqlalchemy[asyncio]
asyncpg

@@ -7,7 +7,7 @@ SHELL = /bin/bash
.SHELLFLAGS = -o pipefail -c
# Make all .env variables available for make targets
include .env
include ../.env
# Layers definition and meta data
TILESET_FILE ?= openmaptiles.yaml