matrix-docker-ansible-deploy/roles/matrix-postgres/tasks/import_sqlite_db.yml
Ugurtan 5ace3f4a1c
fix for importing SQLite database
The current version fails the import, because the volume for the media store is missing. It still fails if the optional shared secret password provider is enabled, so that might need another mount; commenting out the password provider in homeserver.yaml during the run works as well.
2020-05-27 18:13:36 +02:00

---
# Pre-checks
- name: Fail if Postgres not enabled
  fail:
    msg: "Postgres via the matrix-postgres role is not enabled (`matrix_postgres_enabled`). Cannot import."
  when: "not matrix_postgres_enabled|bool"

- name: Fail if playbook called incorrectly
  fail:
    msg: "The `server_path_homeserver_db` variable needs to be provided to this playbook, via --extra-vars"
  when: "server_path_homeserver_db is not defined or server_path_homeserver_db.startswith('<')"
- name: Check if the provided SQLite homeserver.db file exists
  stat:
    path: "{{ server_path_homeserver_db }}"
  register: result_server_path_homeserver_db_stat

- name: Fail if provided SQLite homeserver.db file doesn't exist
  fail:
    msg: "File cannot be found on the server at {{ server_path_homeserver_db }}"
  when: "not result_server_path_homeserver_db_stat.stat.exists"
# Defaults
- name: Set postgres_start_wait_time, if not provided
  set_fact:
    postgres_start_wait_time: 15
  when: "postgres_start_wait_time|default('') == ''"
# Actual import work
- name: Ensure matrix-postgres is stopped
  service:
    name: matrix-postgres
    state: stopped
    daemon_reload: yes
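# NOTE: the next two tasks wipe out any existing Postgres data, so that the subsequent
# restart starts from a freshly-initialized database for synapse_port_db to import into.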
- name: Ensure postgres data is wiped out
  file:
    path: "{{ matrix_postgres_data_path }}"
    state: absent

- name: Ensure postgres data path exists
  file:
    path: "{{ matrix_postgres_data_path }}"
    state: directory
    mode: 0700
    owner: "{{ matrix_user_username }}"
    group: "{{ matrix_user_groupname }}"
- name: Ensure matrix-postgres is started
  service:
    name: matrix-postgres
    state: restarted
    daemon_reload: yes
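# The wait below runs on the Ansible controller (hence `delegate_to: 127.0.0.1` and
# `become: false`) and simply gives the freshly-started Postgres some time to initialize.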
- name: Wait a bit, so that Postgres can start
  wait_for:
    timeout: "{{ postgres_start_wait_time }}"
  delegate_to: 127.0.0.1
  become: false
# We don't use the `docker_container` module, because using it with `cap_drop` requires
# a very recent version, which is not available for a lot of people yet.
#
# Also, some old `docker_container` versions were buggy and would leave containers behind
# on failure, which we had to work around to allow retries (by re-running the playbook).
- name: Import SQLite database into Postgres
  command: |
    docker run
    --rm
    --name=matrix-synapse-migrate
    --user={{ matrix_user_uid }}:{{ matrix_user_gid }}
    --cap-drop=ALL
    --network={{ matrix_docker_network }}
    --entrypoint=python
    -v {{ matrix_synapse_config_dir_path }}:/data
    -v {{ matrix_synapse_media_store_path }}:/matrix-media-store-parent/media-store
    -v {{ server_path_homeserver_db }}:/{{ server_path_homeserver_db|basename }}:ro
    {{ matrix_synapse_docker_image }}
    /usr/local/bin/synapse_port_db --sqlite-database /{{ server_path_homeserver_db|basename }} --postgres-config /data/homeserver.yaml
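# For reference, a rough `docker_container`-module equivalent of the task above.
# This is only a sketch, kept disabled for the reasons described in the comment
# further up (a `docker_container` version new enough to support `cap_drop` is assumed):
#
# - name: Import SQLite database into Postgres (docker_container variant)
#   docker_container:
#     name: matrix-synapse-migrate
#     image: "{{ matrix_synapse_docker_image }}"
#     detach: no
#     cleanup: yes
#     user: "{{ matrix_user_uid }}:{{ matrix_user_gid }}"
#     cap_drop: ['ALL']
#     networks:
#       - name: "{{ matrix_docker_network }}"
#     entrypoint: ["python"]
#     command: "/usr/local/bin/synapse_port_db --sqlite-database /{{ server_path_homeserver_db|basename }} --postgres-config /data/homeserver.yaml"
#     volumes:
#       - "{{ matrix_synapse_config_dir_path }}:/data"
#       - "{{ matrix_synapse_media_store_path }}:/matrix-media-store-parent/media-store"
#       - "{{ server_path_homeserver_db }}:/{{ server_path_homeserver_db|basename }}:ro"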