From 4d0002e6d8071acacbe12dd143cb6bd07464a9d9 Mon Sep 17 00:00:00 2001 From: Paul Bienkowski Date: Sat, 2 Apr 2022 20:43:20 +0200 Subject: [PATCH] Create all-in-one upgrade script (fixes #220) --- CHANGELOG.md | 16 ++++++++++++++++ README.md | 31 +++++++++++++++++++------------ UPGRADING.md | 5 ++++- api/obs/api/db.py | 6 +++++- api/tools/prepare_sql_tiles.py | 9 ++++++++- api/tools/reset_database.py | 20 ++++++++++++++++++-- api/tools/upgrade.py | 27 +++++++++++++++++++++++++++ deployment/README.md | 3 +-- 8 files changed, 98 insertions(+), 19 deletions(-) create mode 100755 api/tools/upgrade.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 694aaa3..534a9c4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,21 @@ # Changelog +## 0.5.2 + +Starting in this version, the database schema is created through migrations +instead of using the `reset_database.py` script. This means that for both the +initial setup, as well as for upgrades, only the migrations have to be run. + +After updating and migrating, it is good practice to regenerate the SQL tile +functions (`api/tools/prepare_sql_tiles.py`) as well. It doesn't matter if you +do this when it is not required, so we've written a simple all-in-one update +script that you can run to do all upgrade tasks. This is now in +`api/tools/upgrade.py`. + +Please check [`UPGRADING.md`](./UPGRADING.md) for more details if you're +upgrading an existing installation. It contains an important note for this +upgrade in particular. + ## 0.5.1 Maintenance release, only includes build, deployment and documentation changes. diff --git a/README.md b/README.md index 1246abe..5315aa0 100644 --- a/README.md +++ b/README.md @@ -62,9 +62,22 @@ its documentation for help. 
Most of the time, running this command will do all the migrations you need: ```bash -docker-compose run --rm api alembic upgrade head +docker-compose run --rm api tools/upgrade.py ``` +This command is equivalent to running migrations through *alembic*, then +regenerating the SQL functions that compute vector tiles directly in the +database: + +```bash +# equivalent to the above command, you don't usually run these +docker-compose run --rm api alembic upgrade head +docker-compose run --rm api tools/prepare_sql_tiles.py +``` + + + + ### Upgrading from v0.2 to v0.3 After v0.2 we switched the underlying technology of the API and the database. @@ -141,21 +154,15 @@ If you don't wait long enough, the following commands might fail. In this case, you can always stop the container, remove the data directory (`local/postgres`) and restart the process. -Next, initialize an empty database, which applies the database schema for the -application: +Next, run the upgrade command to generate the database schema: ```bash -docker-compose run --rm api tools/reset_database.py +docker-compose run --rm api tools/upgrade.py ``` -To be able serve dynamic vector tiles from the API, run the following command once: - -```bash -docker-compose run --rm api tools/prepare_sql_tiles.py -``` - -You might need to re-run this command after updates, to (re-)create the -functions in the SQL database that are used when generating vector tiles. +You will need to re-run this command after updates, to migrate the database and +(re-)create the functions in the SQL database that are used when generating +vector tiles. You should also import OpenStreetMap data now, see below for instructions. 
diff --git a/UPGRADING.md b/UPGRADING.md index 2e599eb..940515f 100644 --- a/UPGRADING.md +++ b/UPGRADING.md @@ -18,7 +18,10 @@ For this update, run these steps: - Build new images - Stop portal and worker services -- Migrate with alembic (see note above, this should be a no-op if done right) +- Run the new upgrade tool: + ```bash + docker-compose run --rm portal tools/upgrade.py + ``` - Start portal and worker services ## 0.5.0 diff --git a/api/obs/api/db.py b/api/obs/api/db.py index d0796fb..00f849d 100644 --- a/api/obs/api/db.py +++ b/api/obs/api/db.py @@ -50,9 +50,13 @@ async def make_session(): yield session -async def init_models(): +async def drop_all(): async with engine.begin() as conn: await conn.run_sync(Base.metadata.drop_all) + + +async def init_models(): + async with engine.begin() as conn: await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "hstore";')) await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "postgis";')) await conn.execute(text('CREATE EXTENSION IF NOT EXISTS "uuid-ossp";')) diff --git a/api/tools/prepare_sql_tiles.py b/api/tools/prepare_sql_tiles.py index 993205f..36d96dd 100755 --- a/api/tools/prepare_sql_tiles.py +++ b/api/tools/prepare_sql_tiles.py @@ -39,7 +39,10 @@ def parse_pg_url(url=app.config.POSTGRES_URL): async def main(): logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s") + await prepare_sql_tiles() + +async def prepare_sql_tiles(): with tempfile.TemporaryDirectory() as build_dir: await generate_data_yml(build_dir) sql_snippets = await generate_sql(build_dir) @@ -121,7 +124,11 @@ async def generate_sql(build_dir): async def import_sql(sql_snippets): statements = sum(map(sqlparse.split, sql_snippets), []) - async with connect_db(app.config.POSTGRES_URL, app.config.POSTGRES_POOL_SIZE, app.config.POSTGRES_MAX_OVERFLOW): + async with connect_db( + app.config.POSTGRES_URL, + app.config.POSTGRES_POOL_SIZE, + app.config.POSTGRES_MAX_OVERFLOW, + ): for i, statement in enumerate(statements): 
clean_statement = sqlparse.format( statement, diff --git a/api/tools/reset_database.py b/api/tools/reset_database.py index 2c5e08c..01cb0ad 100755 --- a/api/tools/reset_database.py +++ b/api/tools/reset_database.py @@ -1,18 +1,34 @@ #!/usr/bin/env python3 import logging import asyncio +import argparse -from obs.api.db import init_models, connect_db +from obs.api.db import drop_all, init_models, connect_db from obs.api.app import app log = logging.getLogger(__name__) async def main(): + parser = argparse.ArgumentParser( + description="drops the whole database, and possibly creates new table schema" + ) + + parser.add_argument( + "-s", + "--create-schema", + action="store_true", + help="create the schema", + ) + + args = parser.parse_args() + logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s") async with connect_db(app.config.POSTGRES_URL): - await init_models() + await drop_all() + if args.create_schema: + await init_models() log.info("Database initialized.") diff --git a/api/tools/upgrade.py new file mode 100755 index 0000000..86ff1d7 --- /dev/null +++ b/api/tools/upgrade.py @@ -0,0 +1,27 @@ +#!/usr/bin/env python3 +import logging +import asyncio +# NOTE: migrations run through the alembic CLI in _migrate() below, +# so no alembic Python-API imports (Config/command) are needed here, +# and the previously imported os.path helpers were unused as well. + +log = logging.getLogger(__name__) + +from prepare_sql_tiles import prepare_sql_tiles, _run + + +async def _migrate(): + await _run("alembic upgrade head") + + +async def main(): + logging.basicConfig(level=logging.DEBUG, format="%(levelname)s: %(message)s") + log.info("Running migrations...") + await _migrate() + log.info("Preparing SQL tiles...") + await prepare_sql_tiles() + log.info("Upgraded") + + +if __name__ == "__main__": + asyncio.run(main()) diff --git a/deployment/README.md index a248cf5..12a290d 100644 --- a/deployment/README.md +++ b/deployment/README.md @@ -241,8 +241,7 @@ wget 
https://download.geofabrik.de/europe/germany/schleswig-holstein-latest.osm. Run the following scripts to prepare the database: ```bash -docker-compose run --rm portal tools/reset_database.py -docker-compose run --rm portal tools/prepare_sql_tiles.py +docker-compose run --rm portal tools/upgrade.py ``` For more details, see [README.md](../README.md) under "Prepare database".