repository.py 11 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304
  1. # -*- coding: utf-8 -*-
  2. # Copyright 2014, 2015 matrix.org
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. import os
  16. from collections import namedtuple
  17. from synapse.util.module_loader import load_module
  18. from ._base import Config, ConfigError
  19. MISSING_NETADDR = (
  20. "Missing netaddr library. This is required for URL preview API."
  21. )
  22. MISSING_LXML = (
  23. """Missing lxml library. This is required for URL preview API.
  24. Install by running:
  25. pip install lxml
  26. Requires libxslt1-dev system package.
  27. """
  28. )
  29. ThumbnailRequirement = namedtuple(
  30. "ThumbnailRequirement", ["width", "height", "method", "media_type"]
  31. )
  32. MediaStorageProviderConfig = namedtuple(
  33. "MediaStorageProviderConfig", (
  34. "store_local", # Whether to store newly uploaded local files
  35. "store_remote", # Whether to store newly downloaded remote files
  36. "store_synchronous", # Whether to wait for successful storage for local uploads
  37. ),
  38. )
  39. def parse_thumbnail_requirements(thumbnail_sizes):
  40. """ Takes a list of dictionaries with "width", "height", and "method" keys
  41. and creates a map from image media types to the thumbnail size, thumbnailing
  42. method, and thumbnail media type to precalculate
  43. Args:
  44. thumbnail_sizes(list): List of dicts with "width", "height", and
  45. "method" keys
  46. Returns:
  47. Dictionary mapping from media type string to list of
  48. ThumbnailRequirement tuples.
  49. """
  50. requirements = {}
  51. for size in thumbnail_sizes:
  52. width = size["width"]
  53. height = size["height"]
  54. method = size["method"]
  55. jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg")
  56. png_thumbnail = ThumbnailRequirement(width, height, method, "image/png")
  57. requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail)
  58. requirements.setdefault("image/gif", []).append(png_thumbnail)
  59. requirements.setdefault("image/png", []).append(png_thumbnail)
  60. return {
  61. media_type: tuple(thumbnails)
  62. for media_type, thumbnails in requirements.items()
  63. }
class ContentRepositoryConfig(Config):
    """Configuration for the content (media) repository.

    Covers upload/thumbnail size limits, on-disk media locations, pluggable
    media storage providers, precalculated thumbnail sizes, and the URL
    preview ("spider") API settings.
    """

    def read_config(self, config):
        """Parse media repository settings out of ``config``.

        Args:
            config (dict): the parsed configuration file contents.

        Raises:
            ConfigError: if 'backup_media_store_path' is combined with
                'media_storage_providers', if URL previews are enabled
                without an IP range blacklist, or if the lxml/netaddr
                libraries are missing when URL previews are enabled.
        """
        # Size options are strings such as "10M"; parse_size (inherited from
        # the Config base class) converts them to integers.
        self.max_upload_size = self.parse_size(config["max_upload_size"])
        self.max_image_pixels = self.parse_size(config["max_image_pixels"])
        self.max_spider_size = self.parse_size(config["max_spider_size"])
        self.media_store_path = self.ensure_directory(config["media_store_path"])

        # Legacy option: a single backup directory for media. It is rewritten
        # below into an equivalent "file_system" storage provider, and so is
        # mutually exclusive with an explicit 'media_storage_providers' list.
        backup_media_store_path = config.get("backup_media_store_path")
        synchronous_backup_media_store = config.get(
            "synchronous_backup_media_store", False
        )
        storage_providers = config.get("media_storage_providers", [])
        if backup_media_store_path:
            if storage_providers:
                raise ConfigError(
                    "Cannot use both 'backup_media_store_path' and 'storage_providers'"
                )
            # Translate the legacy backup options into a provider that stores
            # both local and remote media in the backup directory.
            storage_providers = [{
                "module": "file_system",
                "store_local": True,
                "store_synchronous": synchronous_backup_media_store,
                "store_remote": True,
                "config": {
                    "directory": backup_media_store_path,
                }
            }]

        # This is a list of config that can be used to create the storage
        # providers. The entries are tuples of (Class, class_config,
        # MediaStorageProviderConfig), where Class is the class of the provider,
        # the class_config the config to pass to it, and
        # MediaStorageProviderConfig are options for StorageProviderWrapper.
        #
        # We don't create the storage providers here as not all workers need
        # them to be started.
        self.media_storage_providers = []
        for provider_config in storage_providers:
            # We special case the module "file_system" so as not to need to
            # expose FileStorageProviderBackend
            if provider_config["module"] == "file_system":
                # NOTE: mutates the provider's config dict in place, replacing
                # the shorthand with the fully-qualified class path.
                provider_config["module"] = (
                    "synapse.rest.media.v1.storage_provider"
                    ".FileStorageProviderBackend"
                )
            provider_class, parsed_config = load_module(provider_config)
            # Options consumed by the wrapper around the provider (whether and
            # when to store), as opposed to parsed_config, which is passed to
            # the provider class itself.
            wrapper_config = MediaStorageProviderConfig(
                provider_config.get("store_local", False),
                provider_config.get("store_remote", False),
                provider_config.get("store_synchronous", False),
            )
            self.media_storage_providers.append(
                (provider_class, parsed_config, wrapper_config,)
            )

        self.uploads_path = self.ensure_directory(config["uploads_path"])
        self.dynamic_thumbnails = config["dynamic_thumbnails"]
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config["thumbnail_sizes"]
        )
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            # lxml and netaddr are only needed for URL previews, so they are
            # imported lazily here and turned into friendly ConfigErrors if
            # absent, rather than being hard module-level dependencies.
            try:
                import lxml
                lxml  # To stop unused lint.
            except ImportError:
                raise ConfigError(MISSING_LXML)
            try:
                from netaddr import IPSet
            except ImportError:
                raise ConfigError(MISSING_NETADDR)
            # An explicit IP range blacklist is mandatory when previews are
            # enabled: otherwise the spider could be pointed at internal
            # services (see the default config text below).
            if "url_preview_ip_range_blacklist" in config:
                self.url_preview_ip_range_blacklist = IPSet(
                    config["url_preview_ip_range_blacklist"]
                )
            else:
                raise ConfigError(
                    "For security, you must specify an explicit target IP address "
                    "blacklist in url_preview_ip_range_blacklist for url previewing "
                    "to work"
                )
            # Exceptions to the blacklist: ranges listed here are allowed
            # even if they also appear in the blacklist.
            self.url_preview_ip_range_whitelist = IPSet(
                config.get("url_preview_ip_range_whitelist", ())
            )
            # Optional URL-pattern blacklist; matching rules are described in
            # the default config text below.
            self.url_preview_url_blacklist = config.get(
                "url_preview_url_blacklist", ()
            )

    def default_config(self, data_dir_path, **kwargs):
        """Return the default YAML text for this config section.

        Args:
            data_dir_path (str): base data directory; the media store and
                uploads directories default to subdirectories of it.
        """
        media_store = os.path.join(data_dir_path, "media_store")
        uploads_path = os.path.join(data_dir_path, "uploads")
        # %(media_store)s / %(uploads_path)s in the template are filled in
        # from the local variables above via the "% locals()" at the end.
        return r"""
# Directory where uploaded images and attachments are stored.
media_store_path: "%(media_store)s"
# Media storage providers allow media to be stored in different
# locations.
# media_storage_providers:
# - module: file_system
# # Whether to write new local files.
# store_local: false
# # Whether to write new remote media
# store_remote: false
# # Whether to block upload requests waiting for write to this
# # provider to complete
# store_synchronous: false
# config:
# directory: /mnt/some/other/directory
# Directory where in-progress uploads are stored.
uploads_path: "%(uploads_path)s"
# The largest allowed upload size in bytes
max_upload_size: "10M"
# Maximum number of pixels that will be thumbnailed
max_image_pixels: "32M"
# Whether to generate new thumbnails on the fly to precisely match
# the resolution requested by the client. If true then whenever
# a new resolution is requested by the client the server will
# generate a new thumbnail. If false the server will pick a thumbnail
# from a precalculated list.
dynamic_thumbnails: false
# List of thumbnail to precalculate when an image is uploaded.
thumbnail_sizes:
- width: 32
  height: 32
  method: crop
- width: 96
  height: 96
  method: crop
- width: 320
  height: 240
  method: scale
- width: 640
  height: 480
  method: scale
- width: 800
  height: 600
  method: scale
# Is the preview URL API enabled? If enabled, you *must* specify
# an explicit url_preview_ip_range_blacklist of IPs that the spider is
# denied from accessing.
url_preview_enabled: False
# List of IP address CIDR ranges that the URL preview spider is denied
# from accessing. There are no defaults: you must explicitly
# specify a list for URL previewing to work. You should specify any
# internal services in your network that you do not want synapse to try
# to connect to, otherwise anyone in any Matrix room could cause your
# synapse to issue arbitrary GET requests to your internal services,
# causing serious security issues.
#
# url_preview_ip_range_blacklist:
# - '127.0.0.0/8'
# - '10.0.0.0/8'
# - '172.16.0.0/12'
# - '192.168.0.0/16'
# - '100.64.0.0/10'
# - '169.254.0.0/16'
# - '::1/128'
# - 'fe80::/64'
# - 'fc00::/7'
#
# List of IP address CIDR ranges that the URL preview spider is allowed
# to access even if they are specified in url_preview_ip_range_blacklist.
# This is useful for specifying exceptions to wide-ranging blacklisted
# target IP ranges - e.g. for enabling URL previews for a specific private
# website only visible in your network.
#
# url_preview_ip_range_whitelist:
# - '192.168.1.1'
# Optional list of URL matches that the URL preview spider is
# denied from accessing. You should use url_preview_ip_range_blacklist
# in preference to this, otherwise someone could define a public DNS
# entry that points to a private IP address and circumvent the blacklist.
# This is more useful if you know there is an entire shape of URL that
# you know that will never want synapse to try to spider.
#
# Each list entry is a dictionary of url component attributes as returned
# by urlparse.urlsplit as applied to the absolute form of the URL. See
# https://docs.python.org/2/library/urlparse.html#urlparse.urlsplit
# The values of the dictionary are treated as an filename match pattern
# applied to that component of URLs, unless they start with a ^ in which
# case they are treated as a regular expression match. If all the
# specified component matches for a given list item succeed, the URL is
# blacklisted.
#
# url_preview_url_blacklist:
# # blacklist any URL with a username in its URI
# - username: '*'
#
# # blacklist all *.google.com URLs
# - netloc: 'google.com'
# - netloc: '*.google.com'
#
# # blacklist all plain HTTP URLs
# - scheme: 'http'
#
# # blacklist http(s)://www.acme.com/foo
# - netloc: 'www.acme.com'
#   path: '/foo'
#
# # blacklist any URL with a literal IPv4 address
# - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'
# The largest allowed URL preview spidering size in bytes
max_spider_size: "10M"
""" % locals()