Mark AS users with their AS's ID

Daniel Wagner-Hall committed 8 years ago · commit 763360594d

+ 7 - 2
scripts/synapse_port_db

@@ -309,8 +309,8 @@ class Porter(object):
                 **self.postgres_config["args"]
             )
 
-            sqlite_engine = create_engine("sqlite3")
-            postgres_engine = create_engine("psycopg2")
+            sqlite_engine = create_engine(FakeConfig(sqlite_config))
+            postgres_engine = create_engine(FakeConfig(postgres_config))
 
             self.sqlite_store = Store(sqlite_db_pool, sqlite_engine)
             self.postgres_store = Store(postgres_db_pool, postgres_engine)
@@ -792,3 +792,8 @@ if __name__ == "__main__":
     if end_error_exec_info:
         exc_type, exc_value, exc_traceback = end_error_exec_info
         traceback.print_exception(exc_type, exc_value, exc_traceback)
+
+
+class FakeConfig:
+    def __init__(self, database_config):
+        self.database_config = database_config
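
Since create_engine() now expects a config object rather than an engine name, the port script wraps its plain database dicts in the minimal FakeConfig shim above, which exposes only the database_config attribute the engines read. A rough sketch of the pattern, with illustrative connection values (the real script builds these dicts elsewhere):

    # illustrative dicts only
    sqlite_config = {"name": "sqlite3", "args": {"database": "homeserver.db"}}
    postgres_config = {"name": "psycopg2", "args": {"user": "synapse", "database": "synapse"}}

    sqlite_engine = create_engine(FakeConfig(sqlite_config))
    postgres_engine = create_engine(FakeConfig(postgres_config))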

+ 1 - 1
synapse/app/homeserver.py

@@ -382,7 +382,7 @@ def setup(config_options):
 
     tls_server_context_factory = context_factory.ServerContextFactory(config)
 
-    database_engine = create_engine(config.database_config["name"])
+    database_engine = create_engine(config)
     config.database_config["args"]["cp_openfun"] = database_engine.on_new_connection
 
     hs = SynapseHomeServer(

+ 21 - 13
synapse/storage/appservice.py

@@ -34,8 +34,8 @@ class ApplicationServiceStore(SQLBaseStore):
     def __init__(self, hs):
         super(ApplicationServiceStore, self).__init__(hs)
         self.hostname = hs.hostname
-        self.services_cache = []
-        self._populate_appservice_cache(
+        self.services_cache = ApplicationServiceStore.load_appservices(
+            hs.hostname,
             hs.config.app_service_config_files
         )
 
@@ -144,21 +144,23 @@ class ApplicationServiceStore(SQLBaseStore):
 
         return rooms_for_user_matching_user_id
 
-    def _load_appservice(self, as_info):
+    @classmethod
+    def _load_appservice(cls, hostname, as_info, config_filename):
         required_string_fields = [
-            # TODO: Add id here when it's stable to release
-            "url", "as_token", "hs_token", "sender_localpart"
+            "id", "url", "as_token", "hs_token", "sender_localpart"
         ]
         for field in required_string_fields:
             if not isinstance(as_info.get(field), basestring):
-                raise KeyError("Required string field: '%s'", field)
+                raise KeyError("Required string field: '%s' (%s)" % (
+                    field, config_filename,
+                ))
 
         localpart = as_info["sender_localpart"]
         if urllib.quote(localpart) != localpart:
             raise ValueError(
                 "sender_localpart needs characters which are not URL encoded."
             )
-        user = UserID(localpart, self.hostname)
+        user = UserID(localpart, hostname)
         user_id = user.to_string()
 
         # namespace checks
@@ -188,25 +190,30 @@ class ApplicationServiceStore(SQLBaseStore):
             namespaces=as_info["namespaces"],
             hs_token=as_info["hs_token"],
             sender=user_id,
-            id=as_info["id"] if "id" in as_info else as_info["as_token"],
+            id=as_info["id"],
         )
 
-    def _populate_appservice_cache(self, config_files):
-        """Populates a cache of Application Services from the config files."""
+    @classmethod
+    def load_appservices(cls, hostname, config_files):
+        """Returns a list of Application Services from the config files."""
         if not isinstance(config_files, list):
             logger.warning(
                 "Expected %s to be a list of AS config files.", config_files
             )
-            return
+            return []
 
         # Dicts of value -> filename
         seen_as_tokens = {}
         seen_ids = {}
 
+        appservices = []
+
         for config_file in config_files:
             try:
                 with open(config_file, 'r') as f:
-                    appservice = self._load_appservice(yaml.load(f))
+                    appservice = ApplicationServiceStore._load_appservice(
+                        hostname, yaml.load(f), config_file
+                    )
                     if appservice.id in seen_ids:
                         raise ConfigError(
                             "Cannot reuse ID across application services: "
@@ -226,11 +233,12 @@ class ApplicationServiceStore(SQLBaseStore):
                         )
                     seen_as_tokens[appservice.token] = config_file
                     logger.info("Loaded application service: %s", appservice)
-                    self.services_cache.append(appservice)
+                    appservices.append(appservice)
             except Exception as e:
                 logger.error("Failed to load appservice from '%s'", config_file)
                 logger.exception(e)
                 raise
+        return appservices
 
 
 class ApplicationServiceTransactionStore(SQLBaseStore):
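
With id promoted to a required field and the loader turned into a classmethod, application service registrations can be parsed without constructing a store. A minimal sketch of the new entry point; the hostname and registration file path are hypothetical:

    appservices = ApplicationServiceStore.load_appservices(
        "example.com",                           # the homeserver's hostname
        ["/etc/synapse/appservice-irc.yaml"],    # hypothetical AS registration file
    )
    # each entry is an ApplicationService whose id is now guaranteed to be
    # present and unique across the supplied config files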

+ 3 - 2
synapse/storage/engines/__init__.py

@@ -26,12 +26,13 @@ SUPPORTED_MODULE = {
 }
 
 
-def create_engine(name):
+def create_engine(config):
+    name = config.database_config["name"]
     engine_class = SUPPORTED_MODULE.get(name, None)
 
     if engine_class:
         module = importlib.import_module(name)
-        return engine_class(module)
+        return engine_class(module, config=config)
 
     raise RuntimeError(
         "Unsupported database engine '%s'" % (name,)

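
create_engine() now receives the whole config object and looks up database_config["name"] itself, so a caller only needs something exposing that attribute. A minimal sketch assuming the sqlite3 engine; StubConfig is a stand-in for illustration, not a real Synapse class:

    from synapse.storage.engines import create_engine

    class StubConfig(object):
        database_config = {"name": "sqlite3", "args": {}}

    # returns a Sqlite3Engine wrapping the stdlib sqlite3 module
    engine = create_engine(StubConfig())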
+ 3 - 2
synapse/storage/engines/postgres.py

@@ -21,9 +21,10 @@ from ._base import IncorrectDatabaseSetup
 class PostgresEngine(object):
     single_threaded = False
 
-    def __init__(self, database_module):
+    def __init__(self, database_module, config):
         self.module = database_module
         self.module.extensions.register_type(self.module.extensions.UNICODE)
+        self.config = config
 
     def check_database(self, txn):
         txn.execute("SHOW SERVER_ENCODING")
@@ -44,7 +45,7 @@ class PostgresEngine(object):
         )
 
     def prepare_database(self, db_conn):
-        prepare_database(db_conn, self)
+        prepare_database(db_conn, self, config=self.config)
 
     def is_deadlock(self, error):
         if isinstance(error, self.module.DatabaseError):

+ 3 - 2
synapse/storage/engines/sqlite3.py

@@ -23,8 +23,9 @@ import struct
 class Sqlite3Engine(object):
     single_threaded = True
 
-    def __init__(self, database_module):
+    def __init__(self, database_module, config):
         self.module = database_module
+        self.config = config
 
     def check_database(self, txn):
         pass
@@ -38,7 +39,7 @@ class Sqlite3Engine(object):
 
     def prepare_database(self, db_conn):
         prepare_sqlite3_database(db_conn)
-        prepare_database(db_conn, self)
+        prepare_database(db_conn, self, config=self.config)
 
     def is_deadlock(self, error):
         return False

+ 8 - 7
synapse/storage/prepare_database.py

@@ -25,7 +25,7 @@ logger = logging.getLogger(__name__)
 
 # Remember to update this number every time a change is made to database
 # schema files, so the users will be informed on server restarts.
-SCHEMA_VERSION = 29
+SCHEMA_VERSION = 30
 
 dir_path = os.path.abspath(os.path.dirname(__file__))
 
@@ -50,7 +50,7 @@ class UpgradeDatabaseException(PrepareDatabaseException):
     pass
 
 
-def prepare_database(db_conn, database_engine):
+def prepare_database(db_conn, database_engine, config):
     """Prepares a database for usage. Will either create all necessary tables
     or upgrade from an older schema version.
     """
@@ -61,10 +61,10 @@ def prepare_database(db_conn, database_engine):
         if version_info:
             user_version, delta_files, upgraded = version_info
             _upgrade_existing_database(
-                cur, user_version, delta_files, upgraded, database_engine
+                cur, user_version, delta_files, upgraded, database_engine, config
             )
         else:
-            _setup_new_database(cur, database_engine)
+            _setup_new_database(cur, database_engine, config)
 
         # cur.execute("PRAGMA user_version = %d" % (SCHEMA_VERSION,))
 
@@ -75,7 +75,7 @@ def prepare_database(db_conn, database_engine):
         raise
 
 
-def _setup_new_database(cur, database_engine):
+def _setup_new_database(cur, database_engine, config):
     """Sets up the database by finding a base set of "full schemas" and then
     applying any necessary deltas.
 
@@ -148,11 +148,12 @@ def _setup_new_database(cur, database_engine):
         applied_delta_files=[],
         upgraded=False,
         database_engine=database_engine,
+        config=config,
     )
 
 
 def _upgrade_existing_database(cur, current_version, applied_delta_files,
-                               upgraded, database_engine):
+                               upgraded, database_engine, config):
     """Upgrades an existing database.
 
     Delta files can either be SQL stored in *.sql files, or python modules
@@ -245,7 +246,7 @@ def _upgrade_existing_database(cur, current_version, applied_delta_files,
                         module_name, absolute_path, python_file
                     )
                 logger.debug("Running script %s", relative_path)
-                module.run_upgrade(cur, database_engine)
+                module.run_upgrade(cur, database_engine, config=config)
             elif ext == ".pyc":
                 # Sometimes .pyc files turn up anyway even though we've
                 # disabled their generation; e.g. from distribution package
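
Threading config through prepare_database() means Python schema deltas are now invoked as run_upgrade(cur, database_engine, config=config) and can read homeserver settings. A minimal sketch of a delta module compatible with the new call; the file name and body are illustrative:

    # synapse/storage/schema/delta/NN/example.py (hypothetical)
    def run_upgrade(cur, database_engine, config, *args, **kwargs):
        # deltas can now consult homeserver settings, e.g. config.server_name
        cur.execute("SELECT 1")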

+ 59 - 0
synapse/storage/schema/delta/30/as_users.py

@@ -0,0 +1,59 @@
+# Copyright 2016 OpenMarket Ltd
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+from synapse.storage.appservice import ApplicationServiceStore
+
+
+logger = logging.getLogger(__name__)
+
+
+def run_upgrade(cur, database_engine, config, *args, **kwargs):
+    # NULL indicates user was not registered by an appservice.
+    cur.execute("ALTER TABLE users ADD COLUMN appservice_id TEXT")
+
+    cur.execute("SELECT name FROM users")
+    rows = cur.fetchall()
+
+    config_files = []
+    try:
+        config_files = config.app_service_config_files
+    except AttributeError:
+        logger.warning("Could not get app_service_config_files from config")
+        pass
+
+    appservices = ApplicationServiceStore.load_appservices(
+        config.server_name, config_files
+    )
+
+    owned = {}
+
+    for row in rows:
+        user_id = row[0]
+        for appservice in appservices:
+            if appservice.is_exclusive_user(user_id):
+                if user_id in owned.keys():
+                    logger.error(
+                        "user_id %s was owned by more than one application"
+                        " service (IDs %s and %s); assigning arbitrarily to %s" %
+                        (user_id, owned[user_id], appservice.id, appservice.id)
+                    )
+                owned[user_id] = appservice.id
+
+    for user_id, as_id in owned.items():
+        cur.execute(
+            database_engine.convert_param_style(
+                "UPDATE users SET appservice_id = ? WHERE name = ?"
+            ),
+            (as_id, user_id)
+        )
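
Once the migration has run, appservice-owned users can be selected via the new column. A rough sketch of the kind of query this enables, with an illustrative application service ID:

    cur.execute(
        database_engine.convert_param_style(
            "SELECT name FROM users WHERE appservice_id = ?"
        ),
        ("irc-bridge",)  # illustrative AS ID taken from a registration's id field
    )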

+ 2 - 1
tests/storage/test_base.py

@@ -48,11 +48,12 @@ class SQLBaseStoreTestCase(unittest.TestCase):
 
         config = Mock()
         config.event_cache_size = 1
+        config.database_config = {"name": "sqlite3"}
         hs = HomeServer(
             "test",
             db_pool=self.db_pool,
             config=config,
-            database_engine=create_engine("sqlite3"),
+            database_engine=create_engine(config),
         )
 
         self.datastore = SQLBaseStore(hs)

+ 14 - 6
tests/utils.py

@@ -51,6 +51,8 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
         config.server_name = "server.under.test"
         config.trusted_third_party_id_servers = []
 
+    config.database_config = {"name": "sqlite3"}
+
     if "clock" not in kargs:
         kargs["clock"] = MockClock()
 
@@ -60,7 +62,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
         hs = HomeServer(
             name, db_pool=db_pool, config=config,
             version_string="Synapse/tests",
-            database_engine=create_engine("sqlite3"),
+            database_engine=create_engine(config),
             get_db_conn=db_pool.get_db_conn,
             **kargs
         )
@@ -69,7 +71,7 @@ def setup_test_homeserver(name="test", datastore=None, config=None, **kargs):
         hs = HomeServer(
             name, db_pool=None, datastore=datastore, config=config,
             version_string="Synapse/tests",
-            database_engine=create_engine("sqlite3"),
+            database_engine=create_engine(config),
             **kargs
         )
 
@@ -277,18 +279,24 @@ class SQLiteMemoryDbPool(ConnectionPool, object):
             cp_max=1,
         )
 
+        self.config = Mock()
+        self.config.database_config = {"name": "sqlite3"}
+
     def prepare(self):
-        engine = create_engine("sqlite3")
+        engine = self.create_engine()
         return self.runWithConnection(
-            lambda conn: prepare_database(conn, engine)
+            lambda conn: prepare_database(conn, engine, self.config)
         )
 
     def get_db_conn(self):
         conn = self.connect()
-        engine = create_engine("sqlite3")
-        prepare_database(conn, engine)
+        engine = self.create_engine()
+        prepare_database(conn, engine, self.config)
         return conn
 
+    def create_engine(self):
+        return create_engine(self.config)
+
 
 class MemoryDataStore(object):