repository.py 8.2 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218
  1. # -*- coding: utf-8 -*-
  2. # Copyright 2014, 2015 matrix.org
  3. #
  4. # Licensed under the Apache License, Version 2.0 (the "License");
  5. # you may not use this file except in compliance with the License.
  6. # You may obtain a copy of the License at
  7. #
  8. # http://www.apache.org/licenses/LICENSE-2.0
  9. #
  10. # Unless required by applicable law or agreed to in writing, software
  11. # distributed under the License is distributed on an "AS IS" BASIS,
  12. # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  13. # See the License for the specific language governing permissions and
  14. # limitations under the License.
  15. from ._base import Config, ConfigError
  16. from collections import namedtuple
# Error message raised (via ConfigError) when URL previewing is enabled
# but the optional netaddr dependency is not installed.
MISSING_NETADDR = (
    "Missing netaddr library. This is required for URL preview API."
)

# Error message raised (via ConfigError) when URL previewing is enabled
# but the optional lxml dependency is not installed.
MISSING_LXML = (
    """Missing lxml library. This is required for URL preview API.
Install by running:
pip install lxml
Requires libxslt1-dev system package.
"""
)
  27. ThumbnailRequirement = namedtuple(
  28. "ThumbnailRequirement", ["width", "height", "method", "media_type"]
  29. )
  30. def parse_thumbnail_requirements(thumbnail_sizes):
  31. """ Takes a list of dictionaries with "width", "height", and "method" keys
  32. and creates a map from image media types to the thumbnail size, thumbnailing
  33. method, and thumbnail media type to precalculate
  34. Args:
  35. thumbnail_sizes(list): List of dicts with "width", "height", and
  36. "method" keys
  37. Returns:
  38. Dictionary mapping from media type string to list of
  39. ThumbnailRequirement tuples.
  40. """
  41. requirements = {}
  42. for size in thumbnail_sizes:
  43. width = size["width"]
  44. height = size["height"]
  45. method = size["method"]
  46. jpeg_thumbnail = ThumbnailRequirement(width, height, method, "image/jpeg")
  47. png_thumbnail = ThumbnailRequirement(width, height, method, "image/png")
  48. requirements.setdefault("image/jpeg", []).append(jpeg_thumbnail)
  49. requirements.setdefault("image/gif", []).append(png_thumbnail)
  50. requirements.setdefault("image/png", []).append(png_thumbnail)
  51. return {
  52. media_type: tuple(thumbnails)
  53. for media_type, thumbnails in requirements.items()
  54. }
class ContentRepositoryConfig(Config):
    """Config for the content (media) repository: uploads, thumbnailing,
    and the URL preview spider."""

    def read_config(self, config):
        """Read media repository settings from the parsed config dict.

        Args:
            config (dict): the parsed configuration.

        Raises:
            ConfigError: if URL previewing is enabled but lxml or netaddr
                is not installed, or if no
                url_preview_ip_range_blacklist was configured.
        """
        self.max_upload_size = self.parse_size(config["max_upload_size"])
        self.max_image_pixels = self.parse_size(config["max_image_pixels"])
        self.max_spider_size = self.parse_size(config["max_spider_size"])
        self.media_store_path = self.ensure_directory(config["media_store_path"])
        self.uploads_path = self.ensure_directory(config["uploads_path"])
        self.dynamic_thumbnails = config["dynamic_thumbnails"]
        self.thumbnail_requirements = parse_thumbnail_requirements(
            config["thumbnail_sizes"]
        )
        self.url_preview_enabled = config.get("url_preview_enabled", False)
        if self.url_preview_enabled:
            # lxml and netaddr are optional dependencies: only check for
            # them when URL previewing is actually turned on.
            try:
                import lxml
                lxml  # To stop unused lint.
            except ImportError:
                raise ConfigError(MISSING_LXML)
            try:
                from netaddr import IPSet
            except ImportError:
                raise ConfigError(MISSING_NETADDR)
            if "url_preview_ip_range_blacklist" in config:
                self.url_preview_ip_range_blacklist = IPSet(
                    config["url_preview_ip_range_blacklist"]
                )
            else:
                # Refuse to enable the spider without an explicit blacklist,
                # so previews cannot be used to probe internal services.
                raise ConfigError(
                    "For security, you must specify an explicit target IP address "
                    "blacklist in url_preview_ip_range_blacklist for url previewing "
                    "to work"
                )
            # NOTE: these attributes are only set when url_preview_enabled is
            # true (IPSet is only imported on this path).
            self.url_preview_ip_range_whitelist = IPSet(
                config.get("url_preview_ip_range_whitelist", ())
            )
            self.url_preview_url_blacklist = config.get(
                "url_preview_url_blacklist", ()
            )

    def default_config(self, **kwargs):
        """Return the default YAML config section for the media repository.

        The returned string is %%-formatted with the computed default
        media_store and uploads paths.
        """
        media_store = self.default_path("media_store")
        uploads_path = self.default_path("uploads")
        return """
        # Directory where uploaded images and attachments are stored.
        media_store_path: "%(media_store)s"

        # Directory where in-progress uploads are stored.
        uploads_path: "%(uploads_path)s"

        # The largest allowed upload size in bytes
        max_upload_size: "10M"

        # Maximum number of pixels that will be thumbnailed
        max_image_pixels: "32M"

        # Whether to generate new thumbnails on the fly to precisely match
        # the resolution requested by the client. If true then whenever
        # a new resolution is requested by the client the server will
        # generate a new thumbnail. If false the server will pick a thumbnail
        # from a precalculated list.
        dynamic_thumbnails: false

        # List of thumbnail to precalculate when an image is uploaded.
        thumbnail_sizes:
        - width: 32
          height: 32
          method: crop
        - width: 96
          height: 96
          method: crop
        - width: 320
          height: 240
          method: scale
        - width: 640
          height: 480
          method: scale
        - width: 800
          height: 600
          method: scale

        # Is the preview URL API enabled? If enabled, you *must* specify
        # an explicit url_preview_ip_range_blacklist of IPs that the spider is
        # denied from accessing.
        url_preview_enabled: False

        # List of IP address CIDR ranges that the URL preview spider is denied
        # from accessing. There are no defaults: you must explicitly
        # specify a list for URL previewing to work. You should specify any
        # internal services in your network that you do not want synapse to try
        # to connect to, otherwise anyone in any Matrix room could cause your
        # synapse to issue arbitrary GET requests to your internal services,
        # causing serious security issues.
        #
        # url_preview_ip_range_blacklist:
        # - '127.0.0.0/8'
        # - '10.0.0.0/8'
        # - '172.16.0.0/12'
        # - '192.168.0.0/16'
        #
        # List of IP address CIDR ranges that the URL preview spider is allowed
        # to access even if they are specified in url_preview_ip_range_blacklist.
        # This is useful for specifying exceptions to wide-ranging blacklisted
        # target IP ranges - e.g. for enabling URL previews for a specific private
        # website only visible in your network.
        #
        # url_preview_ip_range_whitelist:
        # - '192.168.1.1'

        # Optional list of URL matches that the URL preview spider is
        # denied from accessing. You should use url_preview_ip_range_blacklist
        # in preference to this, otherwise someone could define a public DNS
        # entry that points to a private IP address and circumvent the blacklist.
        # This is more useful if you know there is an entire shape of URL that
        # you know that will never want synapse to try to spider.
        #
        # Each list entry is a dictionary of url component attributes as returned
        # by urlparse.urlsplit as applied to the absolute form of the URL. See
        # https://docs.python.org/2/library/urlparse.html#urlparse.urlsplit
        # The values of the dictionary are treated as an filename match pattern
        # applied to that component of URLs, unless they start with a ^ in which
        # case they are treated as a regular expression match. If all the
        # specified component matches for a given list item succeed, the URL is
        # blacklisted.
        #
        # url_preview_url_blacklist:
        # # blacklist any URL with a username in its URI
        # - username: '*'
        #
        # # blacklist all *.google.com URLs
        # - netloc: 'google.com'
        # - netloc: '*.google.com'
        #
        # # blacklist all plain HTTP URLs
        # - scheme: 'http'
        #
        # # blacklist http(s)://www.acme.com/foo
        # - netloc: 'www.acme.com'
        #   path: '/foo'
        #
        # # blacklist any URL with a literal IPv4 address
        # - netloc: '^[0-9]+\.[0-9]+\.[0-9]+\.[0-9]+$'

        # The largest allowed URL preview spidering size in bytes
        max_spider_size: "10M"
        """ % locals()