Add a first shot at upgrade documentation (WIP)
parent e1763e0d3c
commit 497e1b739a

UPGRADING.md (24 changed lines)

@@ -1,10 +1,32 @@
# Upgrading

This document describes the general steps to upgrade between major changes.
Simple migrations, e.g. for adding schema changes, are not documented
explicitly. Their general usage is described in the [README](./README.md) (for
development) and [docs/production-deployment.md](docs/production-deployment.md) (for production).

## 0.8.0

Upgrade to `0.7.x` first. See below for details. Then follow these steps:

> **Warning** The update includes a reprocessing of tracks after the import. Depending on the number of tracks, this can take a few hours. The portal is reachable during that time, but events disappear and incrementally reappear during the reimport.

> **Info** With this version, the import process for OpenStreetMap data has changed: the [new process](docs/osm-import.md) is easier on resources and finally permits importing a full country on a low-end VM.

- Do your [usual backup](docs/production-deployment.md)
- Rebuild images
- Stop your portal and worker services
- Run the upgrade:

  ```bash
  docker-compose run --rm portal tools/upgrade.py
  ```

  This automatically does the following:

  - Migration of the database schema using alembic.
  - Upgrade of the SQL tile schema to the new schema.
  - Import of the NUTS regions from the web into the database.
  - Triggering a re-import of all tracks.

- Start your portal and worker services.

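For reference, on a typical docker-compose deployment the whole sequence above could look roughly like this. This is only a sketch: the service names `portal` and `worker` are assumptions and may differ in your setup.

```bash
# Sketch only: adjust service names to match your docker-compose setup.
docker-compose stop portal worker                  # stop portal and worker services
docker-compose build portal worker                 # rebuild images
docker-compose run --rm portal tools/upgrade.py    # run the upgrade
docker-compose up -d portal worker                 # start services again
```
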
## 0.7.0

Upgrade to `0.6.x` first. See below for details. Then follow these steps:

api/tools/reimport_tracks.py (new executable file, 30 lines)

@@ -0,0 +1,30 @@
#!/usr/bin/env python3
import logging
import asyncio

from sqlalchemy import text

from obs.api.app import app
from obs.api.db import connect_db, make_session

log = logging.getLogger(__name__)

async def main():
    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s")
    await reimport_tracks()


async def reimport_tracks():

    async with connect_db(
        app.config.POSTGRES_URL,
        app.config.POSTGRES_POOL_SIZE,
        app.config.POSTGRES_MAX_OVERFLOW,
    ):
        async with make_session() as session:
            await session.execute(text("UPDATE track SET processing_status = 'queued';"))
            await session.commit()


if __name__ == "__main__":
    asyncio.run(main())
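
If the reimport ever needs to be queued again on its own, this new script can presumably be invoked the same way as `tools/upgrade.py`; the service name `portal` below is an assumption about your docker-compose setup.

```bash
# Sketch: re-queue all tracks for processing without running the full upgrade.
docker-compose run --rm portal tools/reimport_tracks.py
```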

api/tools/upgrade.py

@@ -1,14 +1,15 @@
#!/usr/bin/env python3
import logging
import asyncio
from alembic.config import Config
from alembic import command
from os.path import join, dirname
import logging

log = logging.getLogger(__name__)

from prepare_sql_tiles import prepare_sql_tiles, _run

from import_regions import main as import_nuts

from reimport_tracks import main as reimport_tracks


async def _migrate():
    await _run("alembic upgrade head")

@@ -20,7 +21,11 @@ async def main():
    await _migrate()
    log.info("Preparing SQL tiles...")
    await prepare_sql_tiles()
    log.info("Upgraded")
    log.info("Importing nuts regions...")
    await import_nuts()
    log.info("Nuts regions imported, scheduling reimport of tracks")
    await reimport_tracks()


if __name__ == "__main__":
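
Once the upgrade has queued all tracks, the worker reprocesses them in the background. One possible way to watch progress is to count tracks per `processing_status` (the `track` table and column come from the script above); the compose service name `postgres` and the database/user name `obs` below are assumptions that may differ in your deployment.

```bash
# Sketch: show how many tracks are still queued vs. already processed.
docker-compose exec postgres psql -U obs -d obs \
  -c "SELECT processing_status, count(*) FROM track GROUP BY processing_status;"
```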