Merge pull request #343 from openbikesensor/next-document-upgrade
Next document upgrade
commit c897412f99
CHANGELOG.md (23 lines changed)

@@ -1,5 +1,28 @@
 # Changelog
 
+## 0.8.0
+
+### Features
+
+* Bulk actions on a user's own tracks (reprocess, download, make private, make public, delete) (#269, #38)
+* Easy sorting by device for "multi-device users" (e.g. a group lending out OBSes)
+* Region display at higher zoom levels to easily find interesting areas (#112)
+* Export of road statistics on top of the already-existing event statistics (#341)
+
+### Improvements
+
+* Refactored database access to hopefully combat portal crashes (#337)
+* New infrastructure for map imports that makes import of larger maps possible on small VMs (#334)
+* Reference current PostgreSQL and PostGIS versions in the docker-compose.yaml files (#286)
+* Configurable terms-and-conditions link (#320)
+* French translation by @cbiteau (#303)
+
+### Bug Fixes
+
+* Logout not working (#285)
+* Duplicate road usage hashes (#335, #253)
+* "cannot import name ..." (#338)
+
 ## 0.7.0
 
 ### Features
UPGRADING.md (25 lines changed)

@@ -1,10 +1,33 @@
 # Upgrading
 
 This document describes the general steps to upgrade between major changes.
 Simple migrations, e.g. for adding schema changes, are not documented
 explicitly. Their general usage is described in the [README](./README.md) (for
 development) and [docs/production-deployment.md](docs/production-deployment.md) (for production).
 
+
+## 0.8.0
+
+Upgrade to `0.7.x` first. See below for details. Then follow these steps:
+
+> **Warning** The update includes a reprocessing of tracks after import. Depending on the number of tracks, this can take a few hours. The portal stays reachable during that time, but events disappear and incrementally reappear during the reimport.
+
+> **Info** With this version the import process for OpenStreetMap data has changed: the [new process](docs/osm-import.md) is easier on resources and finally permits importing a full country on a low-end VM.
+
+- Do your [usual backup](docs/production-deployment.md).
+- Get the release in your source folder (``git pull; git checkout 0.8.0``) and update the submodules (``git submodule update --recursive``).
+- Rebuild the images: ``docker-compose build``
+- Stop your portal and worker services: ``docker-compose stop worker portal``
+- Run the upgrade:
+  ```bash
+  docker-compose run --rm portal tools/upgrade.py
+  ```
+  This automatically does the following:
+  - Migration of the database schema using alembic.
+  - Upgrade of the SQL tile schema to the new schema.
+  - Import of the NUTS regions from the web into the database.
+  - Triggering a re-import of all tracks.
+- Start your portal and worker services: ``docker-compose up -d worker portal``
+
 
 ## 0.7.0
 
 Upgrade to `0.6.x` first. See below for details. Then follow these steps:
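Taken together, the 0.8.0 steps above boil down to roughly this shell session (a sketch built only from the commands listed in UPGRADING.md; adjust paths and service names to your deployment):

```bash
# Back up first (see docs/production-deployment.md), then fetch the release:
git pull
git checkout 0.8.0
git submodule update --recursive

# Rebuild images and stop the services that write to the database:
docker-compose build
docker-compose stop worker portal

# Run the upgrade script (schema migration, SQL tile upgrade,
# NUTS region import, scheduling of the track re-import):
docker-compose run --rm portal tools/upgrade.py

# Bring the services back up:
docker-compose up -d worker portal
```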
@@ -88,7 +88,7 @@ async def mapdetails_road(req):
 
     data, mask = arrays[:-1], arrays[-1]
     data = data.astype(numpy.float64)
-    mask = mask.astype(numpy.bool)
+    mask = mask.astype(bool)
 
     def partition(arr, cond):
         return arr[:, cond], arr[:, ~cond]
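The one-line change above is needed because the `numpy.bool` alias was deprecated in NumPy 1.20 and removed in 1.24; the built-in `bool` (or `numpy.bool_`) is the drop-in replacement. A minimal illustration:

```python
import numpy

values = numpy.array([0, 1, 2])

# values.astype(numpy.bool)  # AttributeError on NumPy >= 1.24
mask = values.astype(bool)   # works on old and new NumPy versions
print(mask)                  # [False  True  True]
```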
@@ -71,21 +71,25 @@ async def import_osm(connection, filename, import_group=None):
 
     # Pass 2: Import
     log.info("Pass 2: Import roads")
-    async with cursor.copy(
-        "COPY road (way_id, name, zone, directionality, oneway, geometry, import_group) FROM STDIN"
-    ) as copy:
-        for item in read_file(filename):
-            await copy.write_row(
-                (
-                    item.way_id,
-                    item.name,
-                    item.zone,
-                    item.directionality,
-                    item.oneway,
-                    bytes.hex(item.geometry),
-                    import_group,
-                )
-            )
+    amount = 0
+    for items in chunk(read_file(filename), 10000):
+        amount += 10000
+        log.info(f"...{amount}/{len(road_ids)} ({100*amount/len(road_ids)}%)")
+        async with cursor.copy(
+            "COPY road (way_id, name, zone, directionality, oneway, geometry, import_group) FROM STDIN"
+        ) as copy:
+            for item in items:
+                await copy.write_row(
+                    (
+                        item.way_id,
+                        item.name,
+                        item.zone,
+                        item.directionality,
+                        item.oneway,
+                        bytes.hex(item.geometry),
+                        import_group,
+                    )
+                )
 
 
 async def main():
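The rewritten import loops over batches produced by a `chunk()` helper, which is not shown in this diff. A minimal generator along the following lines would satisfy the call site (a sketch, not necessarily the project's actual implementation):

```python
from itertools import islice


def chunk(iterable, size):
    """Yield lists of at most `size` items until `iterable` is exhausted."""
    iterator = iter(iterable)
    while True:
        batch = list(islice(iterator, size))
        if not batch:
            return
        yield batch
```

Writing each batch of 10,000 rows in its own COPY block keeps memory use bounded and allows progress logging, which is what makes importing larger OSM extracts feasible on small VMs (#334).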
api/tools/reimport_tracks.py (new executable file, 30 lines)

@@ -0,0 +1,30 @@
+#!/usr/bin/env python3
+import logging
+import asyncio
+
+from sqlalchemy import text
+
+from obs.api.app import app
+from obs.api.db import connect_db, make_session
+
+log = logging.getLogger(__name__)
+
+
+async def main():
+    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
+    await reimport_tracks()
+
+
+async def reimport_tracks():
+    async with connect_db(
+        app.config.POSTGRES_URL,
+        app.config.POSTGRES_POOL_SIZE,
+        app.config.POSTGRES_MAX_OVERFLOW,
+    ):
+        async with make_session() as session:
+            await session.execute(text("UPDATE track SET processing_status = 'queued';"))
+            await session.commit()
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
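The script simply marks every track as `queued`; the worker service then re-processes them one by one. Normally `tools/upgrade.py` calls it for you, but assuming the docker-compose setup used elsewhere in this PR, it could also be run by hand:

```bash
docker-compose run --rm portal tools/reimport_tracks.py
```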
@@ -1,14 +1,15 @@
 #!/usr/bin/env python3
 import logging
 import asyncio
 from alembic.config import Config
 from alembic import command
 from os.path import join, dirname
-import logging
 
 log = logging.getLogger(__name__)
 
 from prepare_sql_tiles import prepare_sql_tiles, _run
 
 from import_regions import main as import_nuts
 
+from reimport_tracks import main as reimport_tracks
+
 
 async def _migrate():
     await _run("alembic upgrade head")

@@ -20,7 +21,11 @@ async def main():
     await _migrate()
     log.info("Preparing SQL tiles...")
     await prepare_sql_tiles()
-    log.info("Upgraded")
+    log.info("Importing nuts regions...")
+    await import_nuts()
+    log.info("Nuts regions imported, scheduling reimport of tracks")
+    await reimport_tracks()
+
 
 
 if __name__ == "__main__":
@@ -14,7 +14,7 @@ services:
   ############################################################
 
   postgres:
-    image: "openmaptiles/postgis:6.0"
+    image: "openmaptiles/postgis:7.0"
     environment:
       - POSTGRES_DB=${OBS_POSTGRES_DB}
       - POSTGRES_USER=${OBS_POSTGRES_USER}

@@ -136,7 +136,7 @@ services:
       - "traefik.docker.network=gateway"
 
   postgres-keycloak:
-    image: postgres:13.3
+    image: postgres:15
     restart: always
     networks:
       - backend
@@ -8,7 +8,7 @@ version: '3'
 
 services:
   postgres:
-    image: "openmaptiles/postgis:6.0"
+    image: "openmaptiles/postgis:7.0"
     environment:
       POSTGRES_USER: obs
       POSTGRES_PASSWORD: obs
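One caveat on the image bumps above, noted here because the PR does not spell it out: PostgreSQL data directories are not compatible across major versions, so switching `postgres:13.3` to `postgres:15` (or `openmaptiles/postgis:6.0` to `7.0`) on an existing volume will not start cleanly. A dump-and-restore along these lines is one way to migrate; the user name and file name are placeholders, not values from this repository:

```bash
# While the old image is still configured and running, dump everything:
docker-compose exec postgres-keycloak pg_dumpall -U postgres > keycloak-dump.sql

# Switch the image tag, recreate the service with a fresh data volume, then restore:
docker-compose up -d postgres-keycloak
docker-compose exec -T postgres-keycloak psql -U postgres < keycloak-dump.sql
```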