
Move logging utilities out of the side drawer of util/ and into logging/ (#5606)

Amber Brown 4 years ago
commit 463b072b12
98 changed files with 249 additions and 233 deletions
  1. 1 0
      changelog.d/5606.misc
  2. 4 4
      contrib/example_log_config.yaml
  3. 1 1
      contrib/experiments/test_messaging.py
  4. 1 1
      contrib/systemd/log_config.yaml
  5. 3 0
      debian/changelog
  6. 1 1
      debian/log.yaml
  7. 1 1
      docker/conf/log.config
  8. 19 19
      docs/log_contexts.rst
  9. 1 1
      synapse/app/_base.py
  10. 1 1
      synapse/app/appservice.py
  11. 1 1
      synapse/app/client_reader.py
  12. 1 1
      synapse/app/event_creator.py
  13. 1 1
      synapse/app/federation_reader.py
  14. 1 1
      synapse/app/federation_sender.py
  15. 1 1
      synapse/app/frontend_proxy.py
  16. 1 1
      synapse/app/homeserver.py
  17. 1 1
      synapse/app/media_repository.py
  18. 1 1
      synapse/app/pusher.py
  19. 1 1
      synapse/app/synchrotron.py
  20. 1 1
      synapse/app/user_dir.py
  21. 1 1
      synapse/appservice/scheduler.py
  22. 2 2
      synapse/config/logger.py
  23. 8 7
      synapse/crypto/keyring.py
  24. 1 1
      synapse/events/snapshot.py
  25. 15 9
      synapse/federation/federation_base.py
  26. 4 4
      synapse/federation/federation_client.py
  27. 2 2
      synapse/federation/federation_server.py
  28. 1 1
      synapse/federation/persistence.py
  29. 8 4
      synapse/federation/sender/__init__.py
  30. 1 1
      synapse/federation/transport/client.py
  31. 1 1
      synapse/federation/transport/server.py
  32. 1 1
      synapse/groups/attestations.py
  33. 1 1
      synapse/handlers/account_validity.py
  34. 1 1
      synapse/handlers/appservice.py
  35. 3 3
      synapse/handlers/auth.py
  36. 1 1
      synapse/handlers/e2e_keys.py
  37. 1 1
      synapse/handlers/events.py
  38. 22 21
      synapse/handlers/federation.py
  39. 1 1
      synapse/handlers/initial_sync.py
  40. 1 1
      synapse/handlers/message.py
  41. 1 1
      synapse/handlers/pagination.py
  42. 2 2
      synapse/handlers/presence.py
  43. 1 1
      synapse/handlers/sync.py
  44. 1 1
      synapse/handlers/typing.py
  45. 1 1
      synapse/http/client.py
  46. 1 1
      synapse/http/federation/matrix_federation_agent.py
  47. 1 1
      synapse/http/federation/srv_resolver.py
  48. 1 1
      synapse/http/matrixfederationclient.py
  49. 1 1
      synapse/http/request_metrics.py
  50. 1 1
      synapse/http/server.py
  51. 1 1
      synapse/http/site.py
  52. 0 0
      synapse/logging/__init__.py
  53. 0 0
      synapse/util/logcontext.py → synapse/logging/context.py
  54. 0 0
      synapse/util/logformatter.py → synapse/logging/formatter.py
  55. 0 0
      synapse/util/logutils.py → synapse/logging/utils.py
  56. 1 1
      synapse/metrics/background_process_metrics.py
  57. 2 2
      synapse/notifier.py
  58. 1 1
      synapse/push/mailer.py
  59. 1 1
      synapse/replication/tcp/protocol.py
  60. 1 1
      synapse/rest/client/transactions.py
  61. 2 4
      synapse/rest/media/v1/_base.py
  62. 6 6
      synapse/rest/media/v1/media_repository.py
  63. 2 3
      synapse/rest/media/v1/media_storage.py
  64. 1 1
      synapse/rest/media/v1/preview_url_resource.py
  65. 2 3
      synapse/rest/media/v1/storage_provider.py
  66. 1 1
      synapse/state/__init__.py
  67. 1 1
      synapse/storage/_base.py
  68. 2 2
      synapse/storage/events.py
  69. 3 3
      synapse/storage/events_worker.py
  70. 1 1
      synapse/storage/stream.py
  71. 8 4
      synapse/util/__init__.py
  72. 4 5
      synapse/util/async_helpers.py
  73. 6 5
      synapse/util/caches/descriptors.py
  74. 2 2
      synapse/util/caches/response_cache.py
  75. 1 1
      synapse/util/distributor.py
  76. 1 1
      synapse/util/file_consumer.py
  77. 1 1
      synapse/util/metrics.py
  78. 1 1
      synapse/util/ratelimitutils.py
  79. 2 2
      synapse/util/retryutils.py
  80. 1 1
      tests/appservice/test_scheduler.py
  81. 8 5
      tests/crypto/test_keyring.py
  82. 1 1
      tests/http/federation/test_matrix_federation_agent.py
  83. 1 1
      tests/http/federation/test_srv_resolver.py
  84. 1 1
      tests/http/test_fedclient.py
  85. 1 1
      tests/patch_inline_callbacks.py
  86. 1 1
      tests/push/test_http.py
  87. 1 1
      tests/rest/client/test_transactions.py
  88. 1 1
      tests/rest/media/v1/test_media_storage.py
  89. 1 1
      tests/test_federation.py
  90. 1 1
      tests/test_server.py
  91. 1 1
      tests/test_utils/logging_setup.py
  92. 1 1
      tests/unittest.py
  93. 20 29
      tests/util/caches/test_descriptors.py
  94. 3 4
      tests/util/test_async_utils.py
  95. 5 4
      tests/util/test_linearizer.py
  96. 15 9
      tests/util/test_logcontext.py
  97. 1 1
      tests/util/test_logformatter.py
  98. 1 1
      tests/utils.py

+ 1 - 0
changelog.d/5606.misc

@@ -0,0 +1 @@
+Move logging code out of `synapse.util` and into `synapse.logging`.
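
The changelog entry above sums up the whole change: three modules move out of synapse/util/ into a new synapse/logging/ package, and every call site switches to the new import path. A rough before/after of a typical call site, using only symbol names that appear in the hunks below (not an exhaustive list):

    # Module renames in this commit:
    #   synapse.util.logcontext   -> synapse.logging.context
    #   synapse.util.logutils     -> synapse.logging.utils
    #   synapse.util.logformatter -> synapse.logging.formatter
    #
    # so a typical call site changes from
    #   from synapse.util.logcontext import LoggingContext, run_in_background
    # to:
    from synapse.logging.context import LoggingContext, run_in_background
    from synapse.logging.utils import log_function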

+ 4 - 4
contrib/example_log_config.yaml

@@ -1,7 +1,7 @@
-# Example log_config file for synapse. To enable, point `log_config` to it in 
+# Example log_config file for synapse. To enable, point `log_config` to it in
 # `homeserver.yaml`, and restart synapse.
 #
-# This configuration will produce similar results to the defaults within 
+# This configuration will produce similar results to the defaults within
 # synapse, but can be edited to give more flexibility.
 
 version: 1
@@ -12,7 +12,7 @@ formatters:
 
 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""
 
 handlers:
@@ -35,7 +35,7 @@ handlers:
 root:
     level: INFO
     handlers: [console] # to use file handler instead, switch to [file]
-    
+
 loggers:
     synapse:
         level: INFO

+ 1 - 1
contrib/experiments/test_messaging.py

@@ -36,7 +36,7 @@ from synapse.util import origin_from_ucid
 
 from synapse.app.homeserver import SynapseHomeServer
 
-# from synapse.util.logutils import log_function
+# from synapse.logging.utils import log_function
 
 from twisted.internet import reactor, defer
 from twisted.python import log

+ 1 - 1
contrib/systemd/log_config.yaml

@@ -8,7 +8,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:

+ 3 - 0
debian/changelog

@@ -3,6 +3,9 @@ matrix-synapse-py3 (1.0.0+nmu1) UNRELEASED; urgency=medium
   [ Silke Hofstra ]
   * Include systemd-python to allow logging to the systemd journal.
 
+  [ Amber Brown ]
+  * Update logging config defaults to match API changes in Synapse.
+
  -- Silke Hofstra <silke@slxh.eu>  Wed, 29 May 2019 09:45:29 +0200
 
 matrix-synapse-py3 (1.0.0) stable; urgency=medium

+ 1 - 1
debian/log.yaml

@@ -7,7 +7,7 @@ formatters:
 
 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""
 
 handlers:

+ 1 - 1
docker/conf/log.config

@@ -6,7 +6,7 @@ formatters:
 
 filters:
   context:
-    (): synapse.util.logcontext.LoggingContextFilter
+    (): synapse.logging.context.LoggingContextFilter
     request: ""
 
 handlers:

+ 19 - 19
docs/log_contexts.rst

@@ -1,4 +1,4 @@
-Log contexts
+Log Contexts
 ============
 
 .. contents::
@@ -12,7 +12,7 @@ record.
 Logcontexts are also used for CPU and database accounting, so that we can track
 which requests were responsible for high CPU use or database activity.
 
-The ``synapse.util.logcontext`` module provides a facilities for managing the
+The ``synapse.logging.context`` module provides a facilities for managing the
 current log context (as well as providing the ``LoggingContextFilter`` class).
 
 Deferreds make the whole thing complicated, so this document describes how it
@@ -27,19 +27,19 @@ found them:
 
 .. code:: python
 
-    from synapse.util import logcontext         # omitted from future snippets
+    from synapse.logging import context         # omitted from future snippets
 
     def handle_request(request_id):
-        request_context = logcontext.LoggingContext()
+        request_context = context.LoggingContext()
 
-        calling_context = logcontext.LoggingContext.current_context()
-        logcontext.LoggingContext.set_current_context(request_context)
+        calling_context = context.LoggingContext.current_context()
+        context.LoggingContext.set_current_context(request_context)
         try:
             request_context.request = request_id
             do_request_handling()
             logger.debug("finished")
         finally:
-            logcontext.LoggingContext.set_current_context(calling_context)
+            context.LoggingContext.set_current_context(calling_context)
 
     def do_request_handling():
         logger.debug("phew")  # this will be logged against request_id
@@ -51,7 +51,7 @@ written much more succinctly as:
 .. code:: python
 
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             do_request_handling()
             logger.debug("finished")
@@ -74,7 +74,7 @@ blocking operation, and returns a deferred:
 
     @defer.inlineCallbacks
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             yield do_request_handling()
             logger.debug("finished")
@@ -179,7 +179,7 @@ though, we need to make up a new Deferred, or we get a Deferred back from
 external code. We need to make it follow our rules.
 
 The easy way to do it is with a combination of ``defer.inlineCallbacks``, and
-``logcontext.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
+``context.PreserveLoggingContext``. Suppose we want to implement ``sleep``,
 which returns a deferred which will run its callbacks after a given number of
 seconds. That might look like:
 
@@ -204,13 +204,13 @@ That doesn't follow the rules, but we can fix it by wrapping it with
 This technique works equally for external functions which return deferreds,
 or deferreds we have made ourselves.
 
-You can also use ``logcontext.make_deferred_yieldable``, which just does the
+You can also use ``context.make_deferred_yieldable``, which just does the
 boilerplate for you, so the above could be written:
 
 .. code:: python
 
     def sleep(seconds):
-        return logcontext.make_deferred_yieldable(get_sleep_deferred(seconds))
+        return context.make_deferred_yieldable(get_sleep_deferred(seconds))
 
 
 Fire-and-forget
@@ -279,7 +279,7 @@ Obviously that option means that the operations done in
 that might be fixed by setting a different logcontext via a ``with
 LoggingContext(...)`` in ``background_operation``).
 
-The second option is to use ``logcontext.run_in_background``, which wraps a
+The second option is to use ``context.run_in_background``, which wraps a
 function so that it doesn't reset the logcontext even when it returns an
 incomplete deferred, and adds a callback to the returned deferred to reset the
 logcontext. In other words, it turns a function that follows the Synapse rules
@@ -293,7 +293,7 @@ It can be used like this:
     def do_request_handling():
         yield foreground_operation()
 
-        logcontext.run_in_background(background_operation)
+        context.run_in_background(background_operation)
 
         # this will now be logged against the request context
         logger.debug("Request handling complete")
@@ -332,7 +332,7 @@ gathered:
             result = yield defer.gatherResults([d1, d2])
 
 In this case particularly, though, option two, of using
-``logcontext.preserve_fn`` almost certainly makes more sense, so that
+``context.preserve_fn`` almost certainly makes more sense, so that
 ``operation1`` and ``operation2`` are both logged against the original
 logcontext. This looks like:
 
@@ -340,8 +340,8 @@ logcontext. This looks like:
 
     @defer.inlineCallbacks
     def do_request_handling():
-        d1 = logcontext.preserve_fn(operation1)()
-        d2 = logcontext.preserve_fn(operation2)()
+        d1 = context.preserve_fn(operation1)()
+        d2 = context.preserve_fn(operation2)()
 
         with PreserveLoggingContext():
             result = yield defer.gatherResults([d1, d2])
@@ -381,7 +381,7 @@ off the background process, and then leave the ``with`` block to wait for it:
 .. code:: python
 
     def handle_request(request_id):
-        with logcontext.LoggingContext() as request_context:
+        with context.LoggingContext() as request_context:
             request_context.request = request_id
             d = do_request_handling()
 
@@ -414,7 +414,7 @@ runs its callbacks in the original logcontext, all is happy.
 
 The business of a Deferred which runs its callbacks in the original logcontext
 isn't hard to achieve — we have it today, in the shape of
-``logcontext._PreservingContextDeferred``:
+``context._PreservingContextDeferred``:
 
 .. code:: python
 
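The documentation above keeps its original examples and only switches the module alias from logcontext to context. Pulling the renamed pieces together, a minimal handler in the style of those docs might look like the following sketch (post-commit import paths; background_operation is illustrative):

    import logging

    from twisted.internet import defer

    from synapse.logging.context import LoggingContext, run_in_background

    logger = logging.getLogger(__name__)


    @defer.inlineCallbacks
    def background_operation():
        # Deferred-returning work; run_in_background starts it in the caller's
        # logcontext, so its log lines are attributed to the request.
        yield defer.succeed(None)
        logger.debug("background work done")


    def handle_request(request_id):
        # Everything logged inside this block is attributed to request_id.
        with LoggingContext() as request_context:
            request_context.request = request_id
            # Fire-and-forget, as in the docs: the returned deferred is not
            # yielded on here.
            run_in_background(background_operation)
            logger.debug("finished")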

+ 1 - 1
synapse/app/_base.py

@@ -27,7 +27,7 @@ from twisted.protocols.tls import TLSMemoryBIOFactory
 import synapse
 from synapse.app import check_bind_error
 from synapse.crypto import context_factory
-from synapse.util import PreserveLoggingContext
+from synapse.logging.context import PreserveLoggingContext
 from synapse.util.async_helpers import Linearizer
 from synapse.util.rlimit import change_resource_limit
 from synapse.util.versionstring import get_version_string

+ 1 - 1
synapse/app/appservice.py

@@ -26,6 +26,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage.appservice import SlavedApplicationServiceStore
@@ -36,7 +37,6 @@ from synapse.replication.tcp.client import ReplicationClientHandler
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/app/client_reader.py

@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -64,7 +65,6 @@ from synapse.rest.client.versions import VersionsRestServlet
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/app/event_creator.py

@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -59,7 +60,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/app/federation_reader.py

@@ -28,6 +28,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -48,7 +49,6 @@ from synapse.rest.key.v2 import KeyApiV2Resource
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/app/federation_sender.py

@@ -27,6 +27,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.federation import send_queue
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -44,7 +45,6 @@ from synapse.storage.engines import create_engine
 from synapse.types import ReadReceipt
 from synapse.util.async_helpers import Linearizer
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/app/frontend_proxy.py

@@ -29,6 +29,7 @@ from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.servlet import RestServlet, parse_json_object_from_request
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -41,7 +42,6 @@ from synapse.rest.client.v2_alpha._base import client_patterns
 from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/app/homeserver.py

@@ -54,6 +54,7 @@ from synapse.federation.transport.server import TransportLayerServer
 from synapse.http.additional_resource import AdditionalResource
 from synapse.http.server import RootRedirect
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
@@ -72,7 +73,6 @@ from synapse.storage.engines import IncorrectDatabaseSetup, create_engine
 from synapse.storage.prepare_database import UpgradeDatabaseException, prepare_database
 from synapse.util.caches import CACHE_SIZE_FACTOR
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.module_loader import load_module
 from synapse.util.rlimit import change_resource_limit

+ 1 - 1
synapse/app/media_repository.py

@@ -27,6 +27,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -40,7 +41,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.media_repository import MediaRepositoryStore
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/app/pusher.py

@@ -26,6 +26,7 @@ from synapse.config._base import ConfigError
 from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import __func__
@@ -38,7 +39,6 @@ from synapse.server import HomeServer
 from synapse.storage import DataStore
 from synapse.storage.engines import create_engine
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/app/synchrotron.py

@@ -31,6 +31,7 @@ from synapse.config.logger import setup_logging
 from synapse.handlers.presence import PresenceHandler, get_interested_parties
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore, __func__
@@ -57,7 +58,6 @@ from synapse.server import HomeServer
 from synapse.storage.engines import create_engine
 from synapse.storage.presence import UserPresenceState
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.stringutils import random_string
 from synapse.util.versionstring import get_version_string

+ 1 - 1
synapse/app/user_dir.py

@@ -28,6 +28,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.logger import setup_logging
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite
+from synapse.logging.context import LoggingContext, run_in_background
 from synapse.metrics import RegistryProxy
 from synapse.metrics.resource import METRICS_PREFIX, MetricsResource
 from synapse.replication.slave.storage._base import BaseSlavedStore
@@ -46,7 +47,6 @@ from synapse.storage.engines import create_engine
 from synapse.storage.user_directory import UserDirectoryStore
 from synapse.util.caches.stream_change_cache import StreamChangeCache
 from synapse.util.httpresourcetree import create_resource_tree
-from synapse.util.logcontext import LoggingContext, run_in_background
 from synapse.util.manhole import manhole
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/appservice/scheduler.py

@@ -53,8 +53,8 @@ import logging
 from twisted.internet import defer
 
 from synapse.appservice import ApplicationServiceState
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.logcontext import run_in_background
 
 logger = logging.getLogger(__name__)
 

+ 2 - 2
synapse/config/logger.py

@@ -24,7 +24,7 @@ from twisted.logger import STDLibLogObserver, globalLogBeginner
 
 import synapse
 from synapse.app import _base as appbase
-from synapse.util.logcontext import LoggingContextFilter
+from synapse.logging.context import LoggingContextFilter
 from synapse.util.versionstring import get_version_string
 
 from ._base import Config
@@ -40,7 +40,7 @@ formatters:
 
 filters:
     context:
-        (): synapse.util.logcontext.LoggingContextFilter
+        (): synapse.logging.context.LoggingContextFilter
         request: ""
 
 handlers:

+ 8 - 7
synapse/crypto/keyring.py

@@ -44,15 +44,16 @@ from synapse.api.errors import (
     RequestSendFailed,
     SynapseError,
 )
-from synapse.storage.keys import FetchKeyResult
-from synapse.util import logcontext, unwrapFirstError
-from synapse.util.async_helpers import yieldable_gather_results
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     LoggingContext,
     PreserveLoggingContext,
+    make_deferred_yieldable,
     preserve_fn,
     run_in_background,
 )
+from synapse.storage.keys import FetchKeyResult
+from synapse.util import unwrapFirstError
+from synapse.util.async_helpers import yieldable_gather_results
 from synapse.util.metrics import Measure
 from synapse.util.retryutils import NotRetryingDestination
 
@@ -140,7 +141,7 @@ class Keyring(object):
         """
         req = VerifyJsonRequest(server_name, json_object, validity_time, request_name)
         requests = (req,)
-        return logcontext.make_deferred_yieldable(self._verify_objects(requests)[0])
+        return make_deferred_yieldable(self._verify_objects(requests)[0])
 
     def verify_json_objects_for_server(self, server_and_json):
         """Bulk verifies signatures of json objects, bulk fetching keys as
@@ -557,7 +558,7 @@ class BaseV2KeyFetcher(object):
 
         signed_key_json_bytes = encode_canonical_json(signed_key_json)
 
-        yield logcontext.make_deferred_yieldable(
+        yield make_deferred_yieldable(
             defer.gatherResults(
                 [
                     run_in_background(
@@ -612,7 +613,7 @@ class PerspectivesKeyFetcher(BaseV2KeyFetcher):
 
             defer.returnValue({})
 
-        results = yield logcontext.make_deferred_yieldable(
+        results = yield make_deferred_yieldable(
             defer.gatherResults(
                 [run_in_background(get_key, server) for server in self.key_servers],
                 consumeErrors=True,

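The keyring hunks above are typical of a pattern repeated across the files below: code that previously went through the logcontext module alias now imports make_deferred_yieldable and run_in_background directly. A short sketch of that pattern with the new imports (fetchers is an illustrative argument, not a Synapse API):

    from twisted.internet import defer

    from synapse.logging.context import make_deferred_yieldable, run_in_background


    @defer.inlineCallbacks
    def fetch_all(fetchers):
        # Kick off each deferred-returning callable without yielding on it,
        # gather the results, then wait via make_deferred_yieldable so the
        # logcontext rules described in docs/log_contexts.rst are respected.
        results = yield make_deferred_yieldable(
            defer.gatherResults(
                [run_in_background(f) for f in fetchers], consumeErrors=True
            )
        )
        defer.returnValue(results)
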
+ 1 - 1
synapse/events/snapshot.py

@@ -19,7 +19,7 @@ from frozendict import frozendict
 
 from twisted.internet import defer
 
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 
 
 class EventContext(object):

+ 15 - 9
synapse/federation/federation_base.py

@@ -27,8 +27,14 @@ from synapse.crypto.event_signing import check_event_content_hash
 from synapse.events import event_type_from_format_version
 from synapse.events.utils import prune_event
 from synapse.http.servlet import assert_params_in_dict
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+    preserve_fn,
+)
 from synapse.types import get_domain_from_id
-from synapse.util import logcontext, unwrapFirstError
+from synapse.util import unwrapFirstError
 
 logger = logging.getLogger(__name__)
 
@@ -73,7 +79,7 @@ class FederationBase(object):
         @defer.inlineCallbacks
         def handle_check_result(pdu, deferred):
             try:
-                res = yield logcontext.make_deferred_yieldable(deferred)
+                res = yield make_deferred_yieldable(deferred)
             except SynapseError:
                 res = None
 
@@ -102,10 +108,10 @@ class FederationBase(object):
 
             defer.returnValue(res)
 
-        handle = logcontext.preserve_fn(handle_check_result)
+        handle = preserve_fn(handle_check_result)
         deferreds2 = [handle(pdu, deferred) for pdu, deferred in zip(pdus, deferreds)]
 
-        valid_pdus = yield logcontext.make_deferred_yieldable(
+        valid_pdus = yield make_deferred_yieldable(
             defer.gatherResults(deferreds2, consumeErrors=True)
         ).addErrback(unwrapFirstError)
 
@@ -115,7 +121,7 @@ class FederationBase(object):
             defer.returnValue([p for p in valid_pdus if p])
 
     def _check_sigs_and_hash(self, room_version, pdu):
-        return logcontext.make_deferred_yieldable(
+        return make_deferred_yieldable(
             self._check_sigs_and_hashes(room_version, [pdu])[0]
         )
 
@@ -133,14 +139,14 @@ class FederationBase(object):
               * returns a redacted version of the event (if the signature
                 matched but the hash did not)
               * throws a SynapseError if the signature check failed.
-            The deferreds run their callbacks in the sentinel logcontext.
+            The deferreds run their callbacks in the sentinel
         """
         deferreds = _check_sigs_on_pdus(self.keyring, room_version, pdus)
 
-        ctx = logcontext.LoggingContext.current_context()
+        ctx = LoggingContext.current_context()
 
         def callback(_, pdu):
-            with logcontext.PreserveLoggingContext(ctx):
+            with PreserveLoggingContext(ctx):
                 if not check_event_content_hash(pdu):
                     # let's try to distinguish between failures because the event was
                     # redacted (which are somewhat expected) vs actual ball-tampering
@@ -178,7 +184,7 @@ class FederationBase(object):
 
         def errback(failure, pdu):
             failure.trap(SynapseError)
-            with logcontext.PreserveLoggingContext(ctx):
+            with PreserveLoggingContext(ctx):
                 logger.warn(
                     "Signature check failed for %s: %s",
                     pdu.event_id,

+ 4 - 4
synapse/federation/federation_client.py

@@ -39,10 +39,10 @@ from synapse.api.room_versions import (
 )
 from synapse.events import builder, room_version_to_event_format
 from synapse.federation.federation_base import FederationBase, event_from_pdu_json
-from synapse.util import logcontext, unwrapFirstError
+from synapse.logging.context import make_deferred_yieldable, run_in_background
+from synapse.logging.utils import log_function
+from synapse.util import unwrapFirstError
 from synapse.util.caches.expiringcache import ExpiringCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
-from synapse.util.logutils import log_function
 from synapse.util.retryutils import NotRetryingDestination
 
 logger = logging.getLogger(__name__)
@@ -207,7 +207,7 @@ class FederationClient(FederationBase):
         ]
 
         # FIXME: We should handle signature failures more gracefully.
-        pdus[:] = yield logcontext.make_deferred_yieldable(
+        pdus[:] = yield make_deferred_yieldable(
             defer.gatherResults(
                 self._check_sigs_and_hashes(room_version, pdus), consumeErrors=True
             ).addErrback(unwrapFirstError)

+ 2 - 2
synapse/federation/federation_server.py

@@ -42,6 +42,8 @@ from synapse.federation.federation_base import FederationBase, event_from_pdu_js
 from synapse.federation.persistence import TransactionActions
 from synapse.federation.units import Edu, Transaction
 from synapse.http.endpoint import parse_server_name
+from synapse.logging.context import nested_logging_context
+from synapse.logging.utils import log_function
 from synapse.replication.http.federation import (
     ReplicationFederationSendEduRestServlet,
     ReplicationGetQueryRestServlet,
@@ -50,8 +52,6 @@ from synapse.types import get_domain_from_id
 from synapse.util import glob_to_regex
 from synapse.util.async_helpers import Linearizer, concurrently_execute
 from synapse.util.caches.response_cache import ResponseCache
-from synapse.util.logcontext import nested_logging_context
-from synapse.util.logutils import log_function
 
 # when processing incoming transactions, we try to handle multiple rooms in
 # parallel, up to this limit.

+ 1 - 1
synapse/federation/persistence.py

@@ -23,7 +23,7 @@ import logging
 
 from twisted.internet import defer
 
-from synapse.util.logutils import log_function
+from synapse.logging.utils import log_function
 
 logger = logging.getLogger(__name__)
 
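log_function keeps its behaviour and only changes its home to synapse.logging.utils; throughout these hunks it is used as a plain decorator that debug-logs calls to the wrapped function. A small sketch with the new import (the decorated function here is hypothetical):

    import logging

    from synapse.logging.utils import log_function

    logger = logging.getLogger(__name__)


    @log_function
    def get_transaction(transaction_id):
        # The decorator logs the invocation at DEBUG level; the body itself
        # is untouched by this commit.
        logger.debug("looking up %s", transaction_id)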

+ 8 - 4
synapse/federation/sender/__init__.py

@@ -26,6 +26,11 @@ from synapse.federation.sender.per_destination_queue import PerDestinationQueue
 from synapse.federation.sender.transaction_manager import TransactionManager
 from synapse.federation.units import Edu
 from synapse.handlers.presence import get_interested_remotes
+from synapse.logging.context import (
+    make_deferred_yieldable,
+    preserve_fn,
+    run_in_background,
+)
 from synapse.metrics import (
     LaterGauge,
     event_processing_loop_counter,
@@ -33,7 +38,6 @@ from synapse.metrics import (
     events_processed_counter,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util import logcontext
 from synapse.util.metrics import measure_func
 
 logger = logging.getLogger(__name__)
@@ -210,10 +214,10 @@ class FederationSender(object):
                 for event in events:
                     events_by_room.setdefault(event.room_id, []).append(event)
 
-                yield logcontext.make_deferred_yieldable(
+                yield make_deferred_yieldable(
                     defer.gatherResults(
                         [
-                            logcontext.run_in_background(handle_room_events, evs)
+                            run_in_background(handle_room_events, evs)
                             for evs in itervalues(events_by_room)
                         ],
                         consumeErrors=True,
@@ -360,7 +364,7 @@ class FederationSender(object):
         for queue in queues:
             queue.flush_read_receipts_for_room(room_id)
 
-    @logcontext.preserve_fn  # the caller should not yield on this
+    @preserve_fn  # the caller should not yield on this
     @defer.inlineCallbacks
     def send_presence(self, states):
         """Send the new presence states to the appropriate destinations.

+ 1 - 1
synapse/federation/transport/client.py

@@ -22,7 +22,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import Membership
 from synapse.api.urls import FEDERATION_V1_PREFIX, FEDERATION_V2_PREFIX
-from synapse.util.logutils import log_function
+from synapse.logging.utils import log_function
 
 logger = logging.getLogger(__name__)
 

+ 1 - 1
synapse/federation/transport/server.py

@@ -36,8 +36,8 @@ from synapse.http.servlet import (
     parse_json_object_from_request,
     parse_string_from_args,
 )
+from synapse.logging.context import run_in_background
 from synapse.types import ThirdPartyInstanceID, get_domain_from_id
-from synapse.util.logcontext import run_in_background
 from synapse.util.ratelimitutils import FederationRateLimiter
 from synapse.util.versionstring import get_version_string
 

+ 1 - 1
synapse/groups/attestations.py

@@ -43,9 +43,9 @@ from signedjson.sign import sign_json
 from twisted.internet import defer
 
 from synapse.api.errors import HttpResponseException, RequestSendFailed, SynapseError
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import get_domain_from_id
-from synapse.util.logcontext import run_in_background
 
 logger = logging.getLogger(__name__)
 

+ 1 - 1
synapse/handlers/account_validity.py

@@ -22,10 +22,10 @@ from email.mime.text import MIMEText
 from twisted.internet import defer
 
 from synapse.api.errors import StoreError
+from synapse.logging.context import make_deferred_yieldable
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import UserID
 from synapse.util import stringutils
-from synapse.util.logcontext import make_deferred_yieldable
 
 try:
     from synapse.push.mailer import load_jinja2_templates

+ 1 - 1
synapse/handlers/appservice.py

@@ -23,13 +23,13 @@ from twisted.internet import defer
 
 import synapse
 from synapse.api.constants import EventTypes
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics import (
     event_processing_loop_counter,
     event_processing_loop_room_count,
 )
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.util import log_failure
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)

+ 3 - 3
synapse/handlers/auth.py

@@ -36,9 +36,9 @@ from synapse.api.errors import (
     SynapseError,
 )
 from synapse.api.ratelimiting import Ratelimiter
+from synapse.logging.context import defer_to_thread
 from synapse.module_api import ModuleApi
 from synapse.types import UserID
-from synapse.util import logcontext
 from synapse.util.caches.expiringcache import ExpiringCache
 
 from ._base import BaseHandler
@@ -987,7 +987,7 @@ class AuthHandler(BaseHandler):
                 bcrypt.gensalt(self.bcrypt_rounds),
             ).decode("ascii")
 
-        return logcontext.defer_to_thread(self.hs.get_reactor(), _do_hash)
+        return defer_to_thread(self.hs.get_reactor(), _do_hash)
 
     def validate_hash(self, password, stored_hash):
         """Validates that self.hash(password) == stored_hash.
@@ -1013,7 +1013,7 @@ class AuthHandler(BaseHandler):
             if not isinstance(stored_hash, bytes):
                 stored_hash = stored_hash.encode("ascii")
 
-            return logcontext.defer_to_thread(self.hs.get_reactor(), _do_validate_hash)
+            return defer_to_thread(self.hs.get_reactor(), _do_validate_hash)
         else:
             return defer.succeed(False)
 
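The password-hashing hunks above swap logcontext.defer_to_thread for a direct import of defer_to_thread; the call shape, reactor first and then the blocking callable plus its arguments, is unchanged. A sketch of that shape with the new import (blocking_hash and hs are illustrative stand-ins):

    from synapse.logging.context import defer_to_thread


    def blocking_hash(password):
        # Stand-in for CPU-bound work (bcrypt in the real handler) that must
        # not run on the reactor thread.
        return password[::-1]


    def hash_off_reactor(hs, password):
        # defer_to_thread(reactor, fn, *args) runs fn in a worker thread and
        # returns a Deferred that follows Synapse's logcontext rules.
        return defer_to_thread(hs.get_reactor(), blocking_hash, password)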

+ 1 - 1
synapse/handlers/e2e_keys.py

@@ -23,8 +23,8 @@ from canonicaljson import encode_canonical_json, json
 from twisted.internet import defer
 
 from synapse.api.errors import CodeMessageException, FederationDeniedError, SynapseError
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.types import UserID, get_domain_from_id
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.retryutils import NotRetryingDestination
 
 logger = logging.getLogger(__name__)

+ 1 - 1
synapse/handlers/events.py

@@ -21,8 +21,8 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, SynapseError
 from synapse.events import EventBase
+from synapse.logging.utils import log_function
 from synapse.types import UserID
-from synapse.util.logutils import log_function
 from synapse.visibility import filter_events_for_client
 
 from ._base import BaseHandler

+ 22 - 21
synapse/handlers/federation.py

@@ -45,6 +45,13 @@ from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, RoomVersions
 from synapse.crypto.event_signing import compute_event_signature
 from synapse.event_auth import auth_types_for_event
 from synapse.events.validator import EventValidator
+from synapse.logging.context import (
+    make_deferred_yieldable,
+    nested_logging_context,
+    preserve_fn,
+    run_in_background,
+)
+from synapse.logging.utils import log_function
 from synapse.replication.http.federation import (
     ReplicationCleanRoomRestServlet,
     ReplicationFederationSendEventsRestServlet,
@@ -52,10 +59,9 @@ from synapse.replication.http.federation import (
 from synapse.replication.http.membership import ReplicationUserJoinedLeftRoomRestServlet
 from synapse.state import StateResolutionStore, resolve_events_with_store
 from synapse.types import UserID, get_domain_from_id
-from synapse.util import logcontext, unwrapFirstError
+from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import Linearizer
 from synapse.util.distributor import user_joined_room
-from synapse.util.logutils import log_function
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.visibility import filter_events_for_server
 
@@ -338,7 +344,7 @@ class FederationHandler(BaseHandler):
 
                         room_version = yield self.store.get_room_version(room_id)
 
-                        with logcontext.nested_logging_context(p):
+                        with nested_logging_context(p):
                             # note that if any of the missing prevs share missing state or
                             # auth events, the requests to fetch those events are deduped
                             # by the get_pdu_cache in federation_client.
@@ -532,7 +538,7 @@ class FederationHandler(BaseHandler):
                 event_id,
                 ev.event_id,
             )
-            with logcontext.nested_logging_context(ev.event_id):
+            with nested_logging_context(ev.event_id):
                 try:
                     yield self.on_receive_pdu(origin, ev, sent_to_us_directly=False)
                 except FederationError as e:
@@ -725,10 +731,10 @@ class FederationHandler(BaseHandler):
                     missing_auth - failed_to_fetch,
                 )
 
-                results = yield logcontext.make_deferred_yieldable(
+                results = yield make_deferred_yieldable(
                     defer.gatherResults(
                         [
-                            logcontext.run_in_background(
+                            run_in_background(
                                 self.federation_client.get_pdu,
                                 [dest],
                                 event_id,
@@ -994,10 +1000,8 @@ class FederationHandler(BaseHandler):
         event_ids = list(extremities.keys())
 
         logger.debug("calling resolve_state_groups in _maybe_backfill")
-        resolve = logcontext.preserve_fn(
-            self.state_handler.resolve_state_groups_for_events
-        )
-        states = yield logcontext.make_deferred_yieldable(
+        resolve = preserve_fn(self.state_handler.resolve_state_groups_for_events)
+        states = yield make_deferred_yieldable(
             defer.gatherResults(
                 [resolve(room_id, [e]) for e in event_ids], consumeErrors=True
             )
@@ -1171,7 +1175,7 @@ class FederationHandler(BaseHandler):
             # lots of requests for missing prev_events which we do actually
             # have. Hence we fire off the deferred, but don't wait for it.
 
-            logcontext.run_in_background(self._handle_queued_pdus, room_queue)
+            run_in_background(self._handle_queued_pdus, room_queue)
 
         defer.returnValue(True)
 
@@ -1191,7 +1195,7 @@ class FederationHandler(BaseHandler):
                     p.event_id,
                     p.room_id,
                 )
-                with logcontext.nested_logging_context(p.event_id):
+                with nested_logging_context(p.event_id):
                     yield self.on_receive_pdu(origin, p, sent_to_us_directly=True)
             except Exception as e:
                 logger.warn(
@@ -1610,7 +1614,7 @@ class FederationHandler(BaseHandler):
             success = True
         finally:
             if not success:
-                logcontext.run_in_background(
+                run_in_background(
                     self.store.remove_push_actions_from_staging, event.event_id
                 )
 
@@ -1629,7 +1633,7 @@ class FederationHandler(BaseHandler):
         @defer.inlineCallbacks
         def prep(ev_info):
             event = ev_info["event"]
-            with logcontext.nested_logging_context(suffix=event.event_id):
+            with nested_logging_context(suffix=event.event_id):
                 res = yield self._prep_event(
                     origin,
                     event,
@@ -1639,12 +1643,9 @@ class FederationHandler(BaseHandler):
                 )
             defer.returnValue(res)
 
-        contexts = yield logcontext.make_deferred_yieldable(
+        contexts = yield make_deferred_yieldable(
             defer.gatherResults(
-                [
-                    logcontext.run_in_background(prep, ev_info)
-                    for ev_info in event_infos
-                ],
+                [run_in_background(prep, ev_info) for ev_info in event_infos],
                 consumeErrors=True,
             )
         )
@@ -2106,10 +2107,10 @@ class FederationHandler(BaseHandler):
 
         room_version = yield self.store.get_room_version(event.room_id)
 
-        different_events = yield logcontext.make_deferred_yieldable(
+        different_events = yield make_deferred_yieldable(
             defer.gatherResults(
                 [
-                    logcontext.run_in_background(
+                    run_in_background(
                         self.store.get_event, d, allow_none=True, allow_rejected=False
                     )
                     for d in different_auth

+ 1 - 1
synapse/handlers/initial_sync.py

@@ -21,12 +21,12 @@ from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError, Codes, SynapseError
 from synapse.events.validator import EventValidator
 from synapse.handlers.presence import format_user_presence_state
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.streams.config import PaginationConfig
 from synapse.types import StreamToken, UserID
 from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.snapshot_cache import SnapshotCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.visibility import filter_events_for_client
 
 from ._base import BaseHandler

+ 1 - 1
synapse/handlers/message.py

@@ -34,13 +34,13 @@ from synapse.api.errors import (
 from synapse.api.room_versions import RoomVersions
 from synapse.api.urls import ConsentURIBuilder
 from synapse.events.validator import EventValidator
+from synapse.logging.context import run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.replication.http.send_event import ReplicationSendEventRestServlet
 from synapse.storage.state import StateFilter
 from synapse.types import RoomAlias, UserID, create_requester
 from synapse.util.async_helpers import Linearizer
 from synapse.util.frozenutils import frozendict_json_encoder
-from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import measure_func
 from synapse.visibility import filter_events_for_client
 

+ 1 - 1
synapse/handlers/pagination.py

@@ -20,10 +20,10 @@ from twisted.python.failure import Failure
 
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import SynapseError
+from synapse.logging.context import run_in_background
 from synapse.storage.state import StateFilter
 from synapse.types import RoomStreamToken
 from synapse.util.async_helpers import ReadWriteLock
-from synapse.util.logcontext import run_in_background
 from synapse.util.stringutils import random_string
 from synapse.visibility import filter_events_for_client
 

+ 2 - 2
synapse/handlers/presence.py

@@ -34,14 +34,14 @@ from twisted.internet import defer
 import synapse.metrics
 from synapse.api.constants import EventTypes, Membership, PresenceState
 from synapse.api.errors import SynapseError
+from synapse.logging.context import run_in_background
+from synapse.logging.utils import log_function
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.presence import UserPresenceState
 from synapse.types import UserID, get_domain_from_id
 from synapse.util.async_helpers import Linearizer
 from synapse.util.caches.descriptors import cachedInlineCallbacks
-from synapse.util.logcontext import run_in_background
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 from synapse.util.wheel_timer import WheelTimer
 

+ 1 - 1
synapse/handlers/sync.py

@@ -25,6 +25,7 @@ from prometheus_client import Counter
 from twisted.internet import defer
 
 from synapse.api.constants import EventTypes, Membership
+from synapse.logging.context import LoggingContext
 from synapse.push.clientformat import format_push_rules_for_user
 from synapse.storage.roommember import MemberSummary
 from synapse.storage.state import StateFilter
@@ -33,7 +34,6 @@ from synapse.util.async_helpers import concurrently_execute
 from synapse.util.caches.expiringcache import ExpiringCache
 from synapse.util.caches.lrucache import LruCache
 from synapse.util.caches.response_cache import ResponseCache
-from synapse.util.logcontext import LoggingContext
 from synapse.util.metrics import Measure, measure_func
 from synapse.visibility import filter_events_for_client
 

+ 1 - 1
synapse/handlers/typing.py

@@ -19,9 +19,9 @@ from collections import namedtuple
 from twisted.internet import defer
 
 from synapse.api.errors import AuthError, SynapseError
+from synapse.logging.context import run_in_background
 from synapse.types import UserID, get_domain_from_id
 from synapse.util.caches.stream_change_cache import StreamChangeCache
-from synapse.util.logcontext import run_in_background
 from synapse.util.metrics import Measure
 from synapse.util.wheel_timer import WheelTimer
 

+ 1 - 1
synapse/http/client.py

@@ -45,9 +45,9 @@ from synapse.http import (
     cancelled_to_request_timed_out_error,
     redact_uri,
 )
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util.async_helpers import timeout_deferred
 from synapse.util.caches import CACHE_SIZE_FACTOR
-from synapse.util.logcontext import make_deferred_yieldable
 
 logger = logging.getLogger(__name__)
 

+ 1 - 1
synapse/http/federation/matrix_federation_agent.py

@@ -30,9 +30,9 @@ from twisted.web.http_headers import Headers
 from twisted.web.iweb import IAgent
 
 from synapse.http.federation.srv_resolver import SrvResolver, pick_server_from_list
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util import Clock
 from synapse.util.caches.ttlcache import TTLCache
-from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.metrics import Measure
 
 # period to cache .well-known results for by default

+ 1 - 1
synapse/http/federation/srv_resolver.py

@@ -25,7 +25,7 @@ from twisted.internet.error import ConnectError
 from twisted.names import client, dns
 from twisted.names.error import DNSNameError, DomainError
 
-from synapse.util.logcontext import make_deferred_yieldable
+from synapse.logging.context import make_deferred_yieldable
 
 logger = logging.getLogger(__name__)
 

+ 1 - 1
synapse/http/matrixfederationclient.py

@@ -48,8 +48,8 @@ from synapse.api.errors import (
 from synapse.http import QuieterFileBodyProducer
 from synapse.http.client import BlacklistingAgentWrapper, IPBlacklistingResolver
 from synapse.http.federation.matrix_federation_agent import MatrixFederationAgent
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util.async_helpers import timeout_deferred
-from synapse.util.logcontext import make_deferred_yieldable
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)

+ 1 - 1
synapse/http/request_metrics.py

@@ -19,8 +19,8 @@ import threading
 
 from prometheus_client.core import Counter, Histogram
 
+from synapse.logging.context import LoggingContext
 from synapse.metrics import LaterGauge
-from synapse.util.logcontext import LoggingContext
 
 logger = logging.getLogger(__name__)
 

+ 1 - 1
synapse/http/server.py

@@ -39,8 +39,8 @@ from synapse.api.errors import (
     SynapseError,
     UnrecognizedRequestError,
 )
+from synapse.logging.context import preserve_fn
 from synapse.util.caches import intern_dict
-from synapse.util.logcontext import preserve_fn
 
 logger = logging.getLogger(__name__)
 

+ 1 - 1
synapse/http/site.py

@@ -19,7 +19,7 @@ from twisted.web.server import Request, Site
 
 from synapse.http import redact_uri
 from synapse.http.request_metrics import RequestMetrics, requests_counter
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 
 logger = logging.getLogger(__name__)
 

+ 0 - 0
synapse/logging/__init__.py


+ 0 - 0
synapse/util/logcontext.py → synapse/logging/context.py


+ 0 - 0
synapse/util/logformatter.py → synapse/logging/formatter.py


+ 0 - 0
synapse/util/logutils.py → synapse/logging/utils.py


+ 1 - 1
synapse/metrics/background_process_metrics.py

@@ -22,7 +22,7 @@ from prometheus_client.core import REGISTRY, Counter, GaugeMetricFamily
 
 from twisted.internet import defer
 
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 
 logger = logging.getLogger(__name__)
 

+ 2 - 2
synapse/notifier.py

@@ -23,12 +23,12 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes, Membership
 from synapse.api.errors import AuthError
 from synapse.handlers.presence import format_user_presence_state
+from synapse.logging.context import PreserveLoggingContext
+from synapse.logging.utils import log_function
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.types import StreamToken
 from synapse.util.async_helpers import ObservableDeferred, timeout_deferred
-from synapse.util.logcontext import PreserveLoggingContext
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 from synapse.visibility import filter_events_for_client
 

+ 1 - 1
synapse/push/mailer.py

@@ -29,6 +29,7 @@ from twisted.internet import defer
 
 from synapse.api.constants import EventTypes
 from synapse.api.errors import StoreError
+from synapse.logging.context import make_deferred_yieldable
 from synapse.push.presentable_names import (
     calculate_room_name,
     descriptor_from_member_events,
@@ -36,7 +37,6 @@ from synapse.push.presentable_names import (
 )
 from synapse.types import UserID
 from synapse.util.async_helpers import concurrently_execute
-from synapse.util.logcontext import make_deferred_yieldable
 from synapse.visibility import filter_events_for_client
 
 logger = logging.getLogger(__name__)

+ 1 - 1
synapse/replication/tcp/protocol.py

@@ -62,9 +62,9 @@ from twisted.internet import defer
 from twisted.protocols.basic import LineOnlyReceiver
 from twisted.python.failure import Failure
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics import LaterGauge
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.stringutils import random_string
 
 from .commands import (

+ 1 - 1
synapse/rest/client/transactions.py

@@ -17,8 +17,8 @@
 to ensure idempotency when performing PUTs using the REST API."""
 import logging
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.util.async_helpers import ObservableDeferred
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 

+ 2 - 4
synapse/rest/media/v1/_base.py

@@ -25,7 +25,7 @@ from twisted.protocols.basic import FileSender
 
 from synapse.api.errors import Codes, SynapseError, cs_error
 from synapse.http.server import finish_request, respond_with_json
-from synapse.util import logcontext
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util.stringutils import is_ascii
 
 logger = logging.getLogger(__name__)
@@ -75,9 +75,7 @@ def respond_with_file(request, media_type, file_path, file_size=None, upload_nam
         add_file_headers(request, media_type, file_size, upload_name)
 
         with open(file_path, "rb") as f:
-            yield logcontext.make_deferred_yieldable(
-                FileSender().beginFileTransfer(f, request)
-            )
+            yield make_deferred_yieldable(FileSender().beginFileTransfer(f, request))
 
         finish_request(request)
     else:

+ 6 - 6
synapse/rest/media/v1/media_repository.py

@@ -33,8 +33,8 @@ from synapse.api.errors import (
     RequestSendFailed,
     SynapseError,
 )
+from synapse.logging.context import defer_to_thread
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util import logcontext
 from synapse.util.async_helpers import Linearizer
 from synapse.util.retryutils import NotRetryingDestination
 from synapse.util.stringutils import random_string
@@ -463,7 +463,7 @@ class MediaRepository(object):
         )
 
         thumbnailer = Thumbnailer(input_path)
-        t_byte_source = yield logcontext.defer_to_thread(
+        t_byte_source = yield defer_to_thread(
             self.hs.get_reactor(),
             self._generate_thumbnail,
             thumbnailer,
@@ -511,7 +511,7 @@ class MediaRepository(object):
         )
 
         thumbnailer = Thumbnailer(input_path)
-        t_byte_source = yield logcontext.defer_to_thread(
+        t_byte_source = yield defer_to_thread(
             self.hs.get_reactor(),
             self._generate_thumbnail,
             thumbnailer,
@@ -596,7 +596,7 @@ class MediaRepository(object):
             return
 
         if thumbnailer.transpose_method is not None:
-            m_width, m_height = yield logcontext.defer_to_thread(
+            m_width, m_height = yield defer_to_thread(
                 self.hs.get_reactor(), thumbnailer.transpose
             )
 
@@ -616,11 +616,11 @@ class MediaRepository(object):
         for (t_width, t_height, t_type), t_method in iteritems(thumbnails):
             # Generate the thumbnail
             if t_method == "crop":
-                t_byte_source = yield logcontext.defer_to_thread(
+                t_byte_source = yield defer_to_thread(
                     self.hs.get_reactor(), thumbnailer.crop, t_width, t_height, t_type
                 )
             elif t_method == "scale":
-                t_byte_source = yield logcontext.defer_to_thread(
+                t_byte_source = yield defer_to_thread(
                     self.hs.get_reactor(), thumbnailer.scale, t_width, t_height, t_type
                 )
             else:

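For readers unfamiliar with the helper being re-imported above: roughly speaking, defer_to_thread(reactor, f, *args) runs the blocking callable f in the reactor's thread pool and returns a Deferred, which the thumbnailing code then yields on. A minimal sketch (the _read_file helper is hypothetical, not from the codebase):

    from twisted.internet import defer, reactor

    from synapse.logging.context import defer_to_thread


    def _read_file(path):
        # Blocking I/O, so keep it off the reactor thread.
        with open(path, "rb") as f:
            return f.read()


    @defer.inlineCallbacks
    def read_in_thread(path):
        data = yield defer_to_thread(reactor, _read_file, path)
        defer.returnValue(data)
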
+ 2 - 3
synapse/rest/media/v1/media_storage.py

@@ -24,9 +24,8 @@ import six
 from twisted.internet import defer
 from twisted.protocols.basic import FileSender
 
-from synapse.util import logcontext
+from synapse.logging.context import defer_to_thread, make_deferred_yieldable
 from synapse.util.file_consumer import BackgroundFileConsumer
-from synapse.util.logcontext import make_deferred_yieldable
 
 from ._base import Responder
 
@@ -65,7 +64,7 @@ class MediaStorage(object):
 
         with self.store_into_file(file_info) as (f, fname, finish_cb):
             # Write to the main repository
-            yield logcontext.defer_to_thread(
+            yield defer_to_thread(
                 self.hs.get_reactor(), _write_file_synchronously, source, f
             )
             yield finish_cb()

+ 1 - 1
synapse/rest/media/v1/preview_url_resource.py

@@ -42,11 +42,11 @@ from synapse.http.server import (
     wrap_json_request_handler,
 )
 from synapse.http.servlet import parse_integer, parse_string
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.rest.media.v1._base import get_filename_from_headers
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.expiringcache import ExpiringCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 from synapse.util.stringutils import random_string
 
 from ._base import FileInfo

+ 2 - 3
synapse/rest/media/v1/storage_provider.py

@@ -20,8 +20,7 @@ import shutil
 from twisted.internet import defer
 
 from synapse.config._base import Config
-from synapse.util import logcontext
-from synapse.util.logcontext import run_in_background
+from synapse.logging.context import defer_to_thread, run_in_background
 
 from .media_storage import FileResponder
 
@@ -125,7 +124,7 @@ class FileStorageProviderBackend(StorageProvider):
         if not os.path.exists(dirname):
             os.makedirs(dirname)
 
-        return logcontext.defer_to_thread(
+        return defer_to_thread(
             self.hs.get_reactor(), shutil.copyfile, primary_fname, backup_fname
         )
 

+ 1 - 1
synapse/state/__init__.py

@@ -28,11 +28,11 @@ from twisted.internet import defer
 from synapse.api.constants import EventTypes
 from synapse.api.room_versions import KNOWN_ROOM_VERSIONS, StateResolutionVersions
 from synapse.events.snapshot import EventContext
+from synapse.logging.utils import log_function
 from synapse.state import v1, v2
 from synapse.util.async_helpers import Linearizer
 from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.expiringcache import ExpiringCache
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)

+ 1 - 1
synapse/storage/_base.py

@@ -30,12 +30,12 @@ from prometheus_client import Histogram
 from twisted.internet import defer
 
 from synapse.api.errors import StoreError
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.storage.engines import PostgresEngine, Sqlite3Engine
 from synapse.types import get_domain_from_id
 from synapse.util import batch_iter
 from synapse.util.caches.descriptors import Cache
-from synapse.util.logcontext import LoggingContext, PreserveLoggingContext
 from synapse.util.stringutils import exception_to_unicode
 
 # import a function which will return a monotonic time, in seconds

+ 2 - 2
synapse/storage/events.py

@@ -33,6 +33,8 @@ from synapse.api.constants import EventTypes
 from synapse.api.errors import SynapseError
 from synapse.events import EventBase  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
+from synapse.logging.context import PreserveLoggingContext, make_deferred_yieldable
+from synapse.logging.utils import log_function
 from synapse.metrics import BucketCollector
 from synapse.metrics.background_process_metrics import run_as_background_process
 from synapse.state import StateResolutionStore
@@ -45,8 +47,6 @@ from synapse.util import batch_iter
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches.descriptors import cached, cachedInlineCallbacks
 from synapse.util.frozenutils import frozendict_json_encoder
-from synapse.util.logcontext import PreserveLoggingContext, make_deferred_yieldable
-from synapse.util.logutils import log_function
 from synapse.util.metrics import Measure
 
 logger = logging.getLogger(__name__)

+ 3 - 3
synapse/storage/events_worker.py

@@ -29,14 +29,14 @@ from synapse.api.room_versions import EventFormatVersions
 from synapse.events import FrozenEvent, event_type_from_format_version  # noqa: F401
 from synapse.events.snapshot import EventContext  # noqa: F401
 from synapse.events.utils import prune_event
-from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.types import get_domain_from_id
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     LoggingContext,
     PreserveLoggingContext,
     make_deferred_yieldable,
     run_in_background,
 )
+from synapse.metrics.background_process_metrics import run_as_background_process
+from synapse.types import get_domain_from_id
 from synapse.util.metrics import Measure
 
 from ._base import SQLBaseStore

+ 1 - 1
synapse/storage/stream.py

@@ -41,12 +41,12 @@ from six.moves import range
 
 from twisted.internet import defer
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.storage._base import SQLBaseStore
 from synapse.storage.engines import PostgresEngine
 from synapse.storage.events_worker import EventsWorkerStore
 from synapse.types import RoomStreamToken
 from synapse.util.caches.stream_change_cache import StreamChangeCache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 

+ 8 - 4
synapse/util/__init__.py

@@ -21,10 +21,14 @@ import attr
 
 from twisted.internet import defer, task
 
-from synapse.util.logcontext import PreserveLoggingContext
+from synapse.logging import context, formatter
 
 logger = logging.getLogger(__name__)
 
+# Compatibility alias, for existing logconfigs.
+logcontext = context
+logformatter = formatter
+
 
 def unwrapFirstError(failure):
     # defer.gatherResults and DeferredLists wrap failures.
@@ -46,7 +50,7 @@ class Clock(object):
     @defer.inlineCallbacks
     def sleep(self, seconds):
         d = defer.Deferred()
-        with PreserveLoggingContext():
+        with context.PreserveLoggingContext():
             self._reactor.callLater(seconds, d.callback, seconds)
             res = yield d
         defer.returnValue(res)
@@ -91,10 +95,10 @@ class Clock(object):
         """
 
         def wrapped_callback(*args, **kwargs):
-            with PreserveLoggingContext():
+            with context.PreserveLoggingContext():
                 callback(*args, **kwargs)
 
-        with PreserveLoggingContext():
+        with context.PreserveLoggingContext():
             return self._reactor.callLater(delay, wrapped_callback, *args, **kwargs)
 
     def cancel_call_later(self, timer, ignore_errs=False):

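Illustrative sketch (not part of the diff) of what the compatibility aliases buy: dotted lookups through the old attribute names keep resolving, so an existing log config that names, say, synapse.util.logcontext.LoggingContextFilter still finds the relocated class.

    import synapse.logging.context
    import synapse.util

    # The old attribute path now points at the relocated module...
    assert synapse.util.logcontext is synapse.logging.context
    # ...so a filter entry in a log config that uses the old dotted path
    # still resolves to the same class.
    filter_cls = synapse.util.logcontext.LoggingContextFilter
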
+ 4 - 5
synapse/util/async_helpers.py

@@ -23,13 +23,12 @@ from twisted.internet import defer
 from twisted.internet.defer import CancelledError
 from twisted.python import failure
 
-from synapse.util import Clock, logcontext, unwrapFirstError
-
-from .logcontext import (
+from synapse.logging.context import (
     PreserveLoggingContext,
     make_deferred_yieldable,
     run_in_background,
 )
+from synapse.util import Clock, unwrapFirstError
 
 logger = logging.getLogger(__name__)
 
@@ -153,7 +152,7 @@ def concurrently_execute(func, args, limit):
         except StopIteration:
             pass
 
-    return logcontext.make_deferred_yieldable(
+    return make_deferred_yieldable(
         defer.gatherResults(
             [run_in_background(_concurrently_execute_inner) for _ in range(limit)],
             consumeErrors=True,
@@ -174,7 +173,7 @@ def yieldable_gather_results(func, iter, *args, **kwargs):
         Deferred[list]: Resolved when all functions have been invoked, or errors if
         one of the function calls fails.
     """
-    return logcontext.make_deferred_yieldable(
+    return make_deferred_yieldable(
         defer.gatherResults(
             [run_in_background(func, item, *args, **kwargs) for item in iter],
             consumeErrors=True,

+ 6 - 5
synapse/util/caches/descriptors.py

@@ -24,7 +24,8 @@ from six import itervalues, string_types
 
 from twisted.internet import defer
 
-from synapse.util import logcontext, unwrapFirstError
+from synapse.logging.context import make_deferred_yieldable, preserve_fn
+from synapse.util import unwrapFirstError
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import get_cache_factor_for
 from synapse.util.caches.lrucache import LruCache
@@ -388,7 +389,7 @@ class CacheDescriptor(_CacheDescriptorBase):
 
             except KeyError:
                 ret = defer.maybeDeferred(
-                    logcontext.preserve_fn(self.function_to_call), obj, *args, **kwargs
+                    preserve_fn(self.function_to_call), obj, *args, **kwargs
                 )
 
                 def onErr(f):
@@ -408,7 +409,7 @@ class CacheDescriptor(_CacheDescriptorBase):
                 observer = result_d.observe()
 
             if isinstance(observer, defer.Deferred):
-                return logcontext.make_deferred_yieldable(observer)
+                return make_deferred_yieldable(observer)
             else:
                 return observer
 
@@ -563,7 +564,7 @@ class CacheListDescriptor(_CacheDescriptorBase):
 
                 cached_defers.append(
                     defer.maybeDeferred(
-                        logcontext.preserve_fn(self.function_to_call), **args_to_call
+                        preserve_fn(self.function_to_call), **args_to_call
                     ).addCallbacks(complete_all, errback)
                 )
 
@@ -571,7 +572,7 @@ class CacheListDescriptor(_CacheDescriptorBase):
                 d = defer.gatherResults(cached_defers, consumeErrors=True).addCallbacks(
                     lambda _: results, unwrapFirstError
                 )
-                return logcontext.make_deferred_yieldable(d)
+                return make_deferred_yieldable(d)
             else:
                 return results
 

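The descriptor changes above only swap import paths, but for context (an illustrative sketch, with a hypothetical fetch function): preserve_fn(f) roughly wraps f so that each call is started via run_in_background, returning a deferred whose callbacks do not follow the caller's logcontext.

    from twisted.internet import defer

    from synapse.logging.context import preserve_fn, run_in_background


    def fetch(key):
        return defer.succeed({"key": key})


    wrapped = preserve_fn(fetch)
    d1 = wrapped("some-key")                   # roughly equivalent to ...
    d2 = run_in_background(fetch, "some-key")  # ... calling run_in_background directly
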
+ 2 - 2
synapse/util/caches/response_cache.py

@@ -16,9 +16,9 @@ import logging
 
 from twisted.internet import defer
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.util.async_helpers import ObservableDeferred
 from synapse.util.caches import register_cache
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 
@@ -78,7 +78,7 @@ class ResponseCache(object):
 
         *deferred* should run its callbacks in the sentinel logcontext (ie,
         you should wrap normal synapse deferreds with
-        logcontext.run_in_background).
+        synapse.logging.context.run_in_background).
 
         Can return either a new Deferred (which also doesn't follow the synapse
         logcontext rules), or, if *deferred* was already complete, the actual

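A minimal sketch of the wrapping pattern the updated docstring asks of callers (the cache interface used here, set(key, deferred), and the _do_request callable are assumptions for illustration only):

    from synapse.logging.context import run_in_background


    def cache_request(cache, key, _do_request):
        # run_in_background kicks off _do_request in the caller's logcontext and
        # hands back a deferred whose callbacks fire in the sentinel context,
        # which is what the docstring above requires of *deferred*.
        return cache.set(key, run_in_background(_do_request))
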
+ 1 - 1
synapse/util/distributor.py

@@ -17,8 +17,8 @@ import logging
 
 from twisted.internet import defer
 
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 from synapse.metrics.background_process_metrics import run_as_background_process
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
 
 logger = logging.getLogger(__name__)
 

+ 1 - 1
synapse/util/file_consumer.py

@@ -17,7 +17,7 @@ from six.moves import queue
 
 from twisted.internet import threads
 
-from synapse.util.logcontext import make_deferred_yieldable, run_in_background
+from synapse.logging.context import make_deferred_yieldable, run_in_background
 
 
 class BackgroundFileConsumer(object):

+ 1 - 1
synapse/util/metrics.py

@@ -20,8 +20,8 @@ from prometheus_client import Counter
 
 from twisted.internet import defer
 
+from synapse.logging.context import LoggingContext
 from synapse.metrics import InFlightGauge
-from synapse.util.logcontext import LoggingContext
 
 logger = logging.getLogger(__name__)
 

+ 1 - 1
synapse/util/ratelimitutils.py

@@ -20,7 +20,7 @@ import logging
 from twisted.internet import defer
 
 from synapse.api.errors import LimitExceededError
-from synapse.util.logcontext import (
+from synapse.logging.context import (
     PreserveLoggingContext,
     make_deferred_yieldable,
     run_in_background,

+ 2 - 2
synapse/util/retryutils.py

@@ -17,7 +17,7 @@ import random
 
 from twisted.internet import defer
 
-import synapse.util.logcontext
+import synapse.logging.context
 from synapse.api.errors import CodeMessageException
 
 logger = logging.getLogger(__name__)
@@ -225,4 +225,4 @@ class RetryDestinationLimiter(object):
                 logger.exception("Failed to store destination_retry_timings")
 
         # we deliberately do this in the background.
-        synapse.util.logcontext.run_in_background(store_retry_timings)
+        synapse.logging.context.run_in_background(store_retry_timings)

+ 1 - 1
tests/appservice/test_scheduler.py

@@ -22,7 +22,7 @@ from synapse.appservice.scheduler import (
     _ServiceQueuer,
     _TransactionController,
 )
-from synapse.util.logcontext import make_deferred_yieldable
+from synapse.logging.context import make_deferred_yieldable
 
 from tests import unittest
 

+ 8 - 5
tests/crypto/test_keyring.py

@@ -30,9 +30,12 @@ from synapse.crypto.keyring import (
     ServerKeyFetcher,
     StoreKeyFetcher,
 )
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+)
 from synapse.storage.keys import FetchKeyResult
-from synapse.util import logcontext
-from synapse.util.logcontext import LoggingContext
 
 from tests import unittest
 
@@ -131,7 +134,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
         @defer.inlineCallbacks
         def get_perspectives(**kwargs):
             self.assertEquals(LoggingContext.current_context().request, "11")
-            with logcontext.PreserveLoggingContext():
+            with PreserveLoggingContext():
                 yield persp_deferred
             defer.returnValue(persp_resp)
 
@@ -158,7 +161,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
                 self.assertFalse(res_deferreds[0].called)
                 res_deferreds[0].addBoth(self.check_context, None)
 
-                yield logcontext.make_deferred_yieldable(res_deferreds[0])
+                yield make_deferred_yieldable(res_deferreds[0])
 
                 # let verify_json_objects_for_server finish its work before we kill the
                 # logcontext
@@ -184,7 +187,7 @@ class KeyringTestCase(unittest.HomeserverTestCase):
                     [("server10", json1, 0, "test")]
                 )
                 res_deferreds_2[0].addBoth(self.check_context, None)
-                yield logcontext.make_deferred_yieldable(res_deferreds_2[0])
+                yield make_deferred_yieldable(res_deferreds_2[0])
 
                 # let verify_json_objects_for_server finish its work before we kill the
                 # logcontext

+ 1 - 1
tests/http/federation/test_matrix_federation_agent.py

@@ -36,8 +36,8 @@ from synapse.http.federation.matrix_federation_agent import (
     _cache_period_from_headers,
 )
 from synapse.http.federation.srv_resolver import Server
+from synapse.logging.context import LoggingContext
 from synapse.util.caches.ttlcache import TTLCache
-from synapse.util.logcontext import LoggingContext
 
 from tests.http import TestServerTLSConnectionFactory, get_test_ca_cert_file
 from tests.server import FakeTransport, ThreadedMemoryReactorClock

+ 1 - 1
tests/http/federation/test_srv_resolver.py

@@ -22,7 +22,7 @@ from twisted.internet.error import ConnectError
 from twisted.names import dns, error
 
 from synapse.http.federation.srv_resolver import SrvResolver
-from synapse.util.logcontext import LoggingContext
+from synapse.logging.context import LoggingContext
 
 from tests import unittest
 from tests.utils import MockClock

+ 1 - 1
tests/http/test_fedclient.py

@@ -29,7 +29,7 @@ from synapse.http.matrixfederationclient import (
     MatrixFederationHttpClient,
     MatrixFederationRequest,
 )
-from synapse.util.logcontext import LoggingContext
+from synapse.logging.context import LoggingContext
 
 from tests.server import FakeTransport
 from tests.unittest import HomeserverTestCase

+ 1 - 1
tests/patch_inline_callbacks.py

@@ -28,7 +28,7 @@ def do_patch():
     Patch defer.inlineCallbacks so that it checks the state of the logcontext on exit
     """
 
-    from synapse.util.logcontext import LoggingContext
+    from synapse.logging.context import LoggingContext
 
     orig_inline_callbacks = defer.inlineCallbacks
 

+ 1 - 1
tests/push/test_http.py

@@ -18,8 +18,8 @@ from mock import Mock
 from twisted.internet.defer import Deferred
 
 import synapse.rest.admin
+from synapse.logging.context import make_deferred_yieldable
 from synapse.rest.client.v1 import login, room
-from synapse.util.logcontext import make_deferred_yieldable
 
 from tests.unittest import HomeserverTestCase
 

+ 1 - 1
tests/rest/client/test_transactions.py

@@ -2,9 +2,9 @@ from mock import Mock, call
 
 from twisted.internet import defer, reactor
 
+from synapse.logging.context import LoggingContext
 from synapse.rest.client.transactions import CLEANUP_PERIOD_MS, HttpTransactionCache
 from synapse.util import Clock
-from synapse.util.logcontext import LoggingContext
 
 from tests import unittest
 from tests.utils import MockClock

+ 1 - 1
tests/rest/media/v1/test_media_storage.py

@@ -24,11 +24,11 @@ from six.moves.urllib import parse
 
 from twisted.internet.defer import Deferred
 
+from synapse.logging.context import make_deferred_yieldable
 from synapse.rest.media.v1._base import FileInfo
 from synapse.rest.media.v1.filepath import MediaFilePaths
 from synapse.rest.media.v1.media_storage import MediaStorage
 from synapse.rest.media.v1.storage_provider import FileStorageProviderBackend
-from synapse.util.logcontext import make_deferred_yieldable
 
 from tests import unittest
 

+ 1 - 1
tests/test_federation.py

@@ -3,9 +3,9 @@ from mock import Mock
 from twisted.internet.defer import maybeDeferred, succeed
 
 from synapse.events import FrozenEvent
+from synapse.logging.context import LoggingContext
 from synapse.types import Requester, UserID
 from synapse.util import Clock
-from synapse.util.logcontext import LoggingContext
 
 from tests import unittest
 from tests.server import ThreadedMemoryReactorClock, setup_test_homeserver

+ 1 - 1
tests/test_server.py

@@ -26,8 +26,8 @@ from twisted.web.server import NOT_DONE_YET
 from synapse.api.errors import Codes, SynapseError
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseSite, logger
+from synapse.logging.context import make_deferred_yieldable
 from synapse.util import Clock
-from synapse.util.logcontext import make_deferred_yieldable
 
 from tests import unittest
 from tests.server import (

+ 1 - 1
tests/test_utils/logging_setup.py

@@ -17,7 +17,7 @@ import os
 
 import twisted.logger
 
-from synapse.util.logcontext import LoggingContextFilter
+from synapse.logging.context import LoggingContextFilter
 
 
 class ToTwistedHandler(logging.Handler):

+ 1 - 1
tests/unittest.py

@@ -33,9 +33,9 @@ from synapse.api.constants import EventTypes
 from synapse.config.homeserver import HomeServerConfig
 from synapse.http.server import JsonResource
 from synapse.http.site import SynapseRequest
+from synapse.logging.context import LoggingContext
 from synapse.server import HomeServer
 from synapse.types import Requester, UserID, create_requester
-from synapse.util.logcontext import LoggingContext
 
 from tests.server import get_clock, make_request, render, setup_test_homeserver
 from tests.test_utils.logging_setup import setup_logging

+ 20 - 29
tests/util/caches/test_descriptors.py

@@ -21,7 +21,11 @@ import mock
 from twisted.internet import defer, reactor
 
 from synapse.api.errors import SynapseError
-from synapse.util import logcontext
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+)
 from synapse.util.caches import descriptors
 
 from tests import unittest
@@ -32,7 +36,7 @@ logger = logging.getLogger(__name__)
 def run_on_reactor():
     d = defer.Deferred()
     reactor.callLater(0, d.callback, 0)
-    return logcontext.make_deferred_yieldable(d)
+    return make_deferred_yieldable(d)
 
 
 class CacheTestCase(unittest.TestCase):
@@ -153,7 +157,7 @@ class DescriptorTestCase(unittest.TestCase):
             def fn(self, arg1):
                 @defer.inlineCallbacks
                 def inner_fn():
-                    with logcontext.PreserveLoggingContext():
+                    with PreserveLoggingContext():
                         yield complete_lookup
                     defer.returnValue(1)
 
@@ -161,10 +165,10 @@ class DescriptorTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def do_lookup():
-            with logcontext.LoggingContext() as c1:
+            with LoggingContext() as c1:
                 c1.name = "c1"
                 r = yield obj.fn(1)
-                self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+                self.assertEqual(LoggingContext.current_context(), c1)
             defer.returnValue(r)
 
         def check_result(r):
@@ -174,18 +178,12 @@ class DescriptorTestCase(unittest.TestCase):
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
         d1.addCallback(check_result)
 
         # and another
         d2 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
         d2.addCallback(check_result)
 
         # let the lookup complete
@@ -210,29 +208,25 @@ class DescriptorTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def do_lookup():
-            with logcontext.LoggingContext() as c1:
+            with LoggingContext() as c1:
                 c1.name = "c1"
                 try:
                     d = obj.fn(1)
                     self.assertEqual(
-                        logcontext.LoggingContext.current_context(),
-                        logcontext.LoggingContext.sentinel,
+                        LoggingContext.current_context(), LoggingContext.sentinel
                     )
                     yield d
                     self.fail("No exception thrown")
                 except SynapseError:
                     pass
 
-                self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+                self.assertEqual(LoggingContext.current_context(), c1)
 
         obj = Cls()
 
         # set off a deferred which will do a cache lookup
         d1 = do_lookup()
-        self.assertEqual(
-            logcontext.LoggingContext.current_context(),
-            logcontext.LoggingContext.sentinel,
-        )
+        self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
 
         return d1
 
@@ -288,23 +282,20 @@ class CachedListDescriptorTestCase(unittest.TestCase):
 
             @descriptors.cachedList("fn", "args1", inlineCallbacks=True)
             def list_fn(self, args1, arg2):
-                assert logcontext.LoggingContext.current_context().request == "c1"
+                assert LoggingContext.current_context().request == "c1"
                 # we want this to behave like an asynchronous function
                 yield run_on_reactor()
-                assert logcontext.LoggingContext.current_context().request == "c1"
+                assert LoggingContext.current_context().request == "c1"
                 defer.returnValue(self.mock(args1, arg2))
 
-        with logcontext.LoggingContext() as c1:
+        with LoggingContext() as c1:
             c1.request = "c1"
             obj = Cls()
             obj.mock.return_value = {10: "fish", 20: "chips"}
             d1 = obj.list_fn([10, 20], 2)
-            self.assertEqual(
-                logcontext.LoggingContext.current_context(),
-                logcontext.LoggingContext.sentinel,
-            )
+            self.assertEqual(LoggingContext.current_context(), LoggingContext.sentinel)
             r = yield d1
-            self.assertEqual(logcontext.LoggingContext.current_context(), c1)
+            self.assertEqual(LoggingContext.current_context(), c1)
             obj.mock.assert_called_once_with([10, 20], 2)
             self.assertEqual(r, {10: "fish", 20: "chips"})
             obj.mock.reset_mock()

+ 3 - 4
tests/util/test_async_utils.py

@@ -16,9 +16,8 @@ from twisted.internet import defer
 from twisted.internet.defer import CancelledError, Deferred
 from twisted.internet.task import Clock
 
-from synapse.util import logcontext
+from synapse.logging.context import LoggingContext, PreserveLoggingContext
 from synapse.util.async_helpers import timeout_deferred
-from synapse.util.logcontext import LoggingContext
 
 from tests.unittest import TestCase
 
@@ -69,14 +68,14 @@ class TimeoutDeferredTest(TestCase):
         @defer.inlineCallbacks
         def blocking():
             non_completing_d = Deferred()
-            with logcontext.PreserveLoggingContext():
+            with PreserveLoggingContext():
                 try:
                     yield non_completing_d
                 except CancelledError:
                     blocking_was_cancelled[0] = True
                     raise
 
-        with logcontext.LoggingContext("one") as context_one:
+        with LoggingContext("one") as context_one:
             # the errbacks should be run in the test logcontext
             def errback(res, deferred_name):
                 self.assertIs(

+ 5 - 4
tests/util/test_linearizer.py

@@ -19,7 +19,8 @@ from six.moves import range
 from twisted.internet import defer, reactor
 from twisted.internet.defer import CancelledError
 
-from synapse.util import Clock, logcontext
+from synapse.logging.context import LoggingContext
+from synapse.util import Clock
 from synapse.util.async_helpers import Linearizer
 
 from tests import unittest
@@ -51,13 +52,13 @@ class LinearizerTestCase(unittest.TestCase):
 
         @defer.inlineCallbacks
         def func(i, sleep=False):
-            with logcontext.LoggingContext("func(%s)" % i) as lc:
+            with LoggingContext("func(%s)" % i) as lc:
                 with (yield linearizer.queue("")):
-                    self.assertEqual(logcontext.LoggingContext.current_context(), lc)
+                    self.assertEqual(LoggingContext.current_context(), lc)
                     if sleep:
                         yield Clock(reactor).sleep(0)
 
-                self.assertEqual(logcontext.LoggingContext.current_context(), lc)
+                self.assertEqual(LoggingContext.current_context(), lc)
 
         func(0, sleep=True)
         for i in range(1, 100):

+ 15 - 9
tests/util/test_logcontext.py

@@ -1,8 +1,14 @@
 import twisted.python.failure
 from twisted.internet import defer, reactor
 
-from synapse.util import Clock, logcontext
-from synapse.util.logcontext import LoggingContext
+from synapse.logging.context import (
+    LoggingContext,
+    PreserveLoggingContext,
+    make_deferred_yieldable,
+    nested_logging_context,
+    run_in_background,
+)
+from synapse.util import Clock
 
 from .. import unittest
 
@@ -43,7 +49,7 @@ class LoggingContextTestCase(unittest.TestCase):
             context_one.request = "one"
 
             # fire off function, but don't wait on it.
-            d2 = logcontext.run_in_background(function)
+            d2 = run_in_background(function)
 
             def cb(res):
                 callback_completed[0] = True
@@ -85,7 +91,7 @@ class LoggingContextTestCase(unittest.TestCase):
     def test_run_in_background_with_non_blocking_fn(self):
         @defer.inlineCallbacks
         def nonblocking_function():
-            with logcontext.PreserveLoggingContext():
+            with PreserveLoggingContext():
                 yield defer.succeed(None)
 
         return self._test_run_in_background(nonblocking_function)
@@ -94,7 +100,7 @@ class LoggingContextTestCase(unittest.TestCase):
         # a function which returns a deferred which looks like it has been
         # called, but is actually paused
         def testfunc():
-            return logcontext.make_deferred_yieldable(_chained_deferred_function())
+            return make_deferred_yieldable(_chained_deferred_function())
 
         return self._test_run_in_background(testfunc)
 
@@ -128,7 +134,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable(blocking_function())
+            d1 = make_deferred_yieldable(blocking_function())
             # make sure that the context was reset by make_deferred_yieldable
             self.assertIs(LoggingContext.current_context(), sentinel_context)
 
@@ -144,7 +150,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable(_chained_deferred_function())
+            d1 = make_deferred_yieldable(_chained_deferred_function())
             # make sure that the context was reset by make_deferred_yieldable
             self.assertIs(LoggingContext.current_context(), sentinel_context)
 
@@ -161,7 +167,7 @@ class LoggingContextTestCase(unittest.TestCase):
         with LoggingContext() as context_one:
             context_one.request = "one"
 
-            d1 = logcontext.make_deferred_yieldable("bum")
+            d1 = make_deferred_yieldable("bum")
             self._check_test_key("one")
 
             r = yield d1
@@ -170,7 +176,7 @@ class LoggingContextTestCase(unittest.TestCase):
 
     def test_nested_logging_context(self):
         with LoggingContext(request="foo"):
-            nested_context = logcontext.nested_logging_context(suffix="bar")
+            nested_context = nested_logging_context(suffix="bar")
             self.assertEqual(nested_context.request, "foo-bar")
 
 

+ 1 - 1
tests/util/test_logformatter.py

@@ -14,7 +14,7 @@
 # limitations under the License.
 import sys
 
-from synapse.util.logformatter import LogFormatter
+from synapse.logging.formatter import LogFormatter
 
 from tests import unittest
 

+ 1 - 1
tests/utils.py

@@ -34,6 +34,7 @@ from synapse.config.homeserver import HomeServerConfig
 from synapse.config.server import DEFAULT_ROOM_VERSION
 from synapse.federation.transport import server as federation_server
 from synapse.http.server import HttpServer
+from synapse.logging.context import LoggingContext
 from synapse.server import HomeServer
 from synapse.storage import DataStore
 from synapse.storage.engines import PostgresEngine, create_engine
@@ -42,7 +43,6 @@ from synapse.storage.prepare_database import (
     _setup_new_database,
     prepare_database,
 )
-from synapse.util.logcontext import LoggingContext
 from synapse.util.ratelimitutils import FederationRateLimiter
 
 # set this to True to run the tests against postgres instead of sqlite.