diff --git a/changelog.d/8264.misc b/changelog.d/8264.misc
new file mode 100644
index 000000000000..600b1756552d
--- /dev/null
+++ b/changelog.d/8264.misc
@@ -0,0 +1 @@
+Add more logging to debug slow startup.
diff --git a/synapse/storage/databases/__init__.py b/synapse/storage/databases/__init__.py
index 7f08bd82857b..985b12df9132
--- a/synapse/storage/databases/__init__.py
+++ b/synapse/storage/databases/__init__.py
@@ -47,9 +47,14 @@ def __init__(self, main_store_class, hs):
             engine = create_engine(database_config.config)
 
             with make_conn(database_config, engine) as db_conn:
-                logger.info("Preparing database %r...", db_name)
-
+                logger.info("[database config %r]: Checking database server", db_name)
                 engine.check_database(db_conn)
+
+                logger.info(
+                    "[database config %r]: Preparing for databases %r",
+                    db_name,
+                    database_config.databases,
+                )
                 prepare_database(
                     db_conn, engine, hs.config, databases=database_config.databases,
                 )
@@ -57,7 +62,9 @@ def __init__(self, main_store_class, hs):
                 database = DatabasePool(hs, database_config, engine)
 
                 if "main" in database_config.databases:
-                    logger.info("Starting 'main' data store")
+                    logger.info(
+                        "[database config %r]: Starting 'main' database", db_name
+                    )
 
                     # Sanity check we don't try and configure the main store on
                     # multiple databases.
@@ -72,7 +79,9 @@ def __init__(self, main_store_class, hs):
                         persist_events = PersistEventsStore(hs, database, main)
 
                 if "state" in database_config.databases:
-                    logger.info("Starting 'state' data store")
+                    logger.info(
+                        "[database config %r]: Starting 'state' database", db_name
+                    )
 
                     # Sanity check we don't try and configure the state store on
                     # multiple databases.
@@ -85,7 +94,7 @@ def __init__(self, main_store_class, hs):
 
                 self.databases.append(database)
 
-                logger.info("Database %r prepared", db_name)
+                logger.info("[database config %r]: prepared", db_name)
 
             # Closing the context manager doesn't close the connection.
             # psycopg will close the connection when the object gets GCed, but *only*
@@ -98,10 +107,10 @@ def __init__(self, main_store_class, hs):
 
         # Sanity check that we have actually configured all the required stores.
         if not main:
-            raise Exception("No 'main' data store configured")
+            raise Exception("No 'main' database configured")
 
         if not state:
-            raise Exception("No 'state' data store configured")
+            raise Exception("No 'state' database configured")
 
         # We use local variables here to ensure that the databases do not have
         # optional types.
diff --git a/synapse/storage/databases/main/__init__.py b/synapse/storage/databases/main/__init__.py
index 99890ffbf32f..b315ae66d279
--- a/synapse/storage/databases/main/__init__.py
+++ b/synapse/storage/databases/main/__init__.py
@@ -591,6 +591,7 @@ def check_database_before_upgrade(cur, database_engine, config: HomeServerConfig
     """Called before upgrading an existing database to check that it is broadly
     sane compared with the configuration.
     """
+    logger.info("Running sanity-checks on database...")
     domain = config.server_name
 
     sql = database_engine.convert_param_style(
diff --git a/synapse/storage/prepare_database.py b/synapse/storage/prepare_database.py
index 964d8d9eb876..a03b37a4361f
--- a/synapse/storage/prepare_database.py
+++ b/synapse/storage/prepare_database.py
@@ -66,10 +66,18 @@ def prepare_database(db_conn, database_engine, config, databases=["main", "state
 
     try:
         cur = db_conn.cursor()
+
+        logger.info("%r: Checking existing schema version", databases)
         version_info = _get_or_create_schema_state(cur, database_engine)
 
         if version_info:
             user_version, delta_files, upgraded = version_info
+            logger.info(
+                "%r: Existing schema is %i (+%i deltas)",
+                databases,
+                user_version,
+                len(delta_files),
+            )
 
             if config is None:
                 if user_version != SCHEMA_VERSION:
@@ -90,6 +98,7 @@ def prepare_database(db_conn, database_engine, config, databases=["main", "state
                     databases=databases,
                 )
         else:
+            logger.info("%r: Initialising new database", databases)
             _setup_new_database(cur, database_engine, databases=databases)
 
         # check if any of our configured dynamic modules want a database
diff --git a/synapse/storage/util/id_generators.py b/synapse/storage/util/id_generators.py
index 76bc3afdfae8..b7eb4f8ac90e
--- a/synapse/storage/util/id_generators.py
+++ b/synapse/storage/util/id_generators.py
@@ -15,6 +15,7 @@
 
 import contextlib
 import heapq
+import logging
 import threading
 from collections import deque
 from typing import Dict, List, Set
@@ -24,6 +25,8 @@
 from synapse.storage.database import DatabasePool, LoggingTransaction
 from synapse.storage.util.sequence import PostgresSequenceGenerator
 
+logger = logging.getLogger(__name__)
+
 
 class IdGenerator:
     def __init__(self, db_conn, table, column):
@@ -48,6 +51,8 @@ def _load_current_id(db_conn, table, column, step=1):
     Returns:
         int
     """
+    # debug logging for https://github.com/matrix-org/synapse/issues/7968
+    logger.info("initialising stream generator for %s(%s)", table, column)
     cur = db_conn.cursor()
     if step == 1:
         cur.execute("SELECT MAX(%s) FROM %s" % (column, table))