# TestSiteDownload.py

import time

import pytest
import mock
import gevent

from Connection import ConnectionServer
from Config import config
from File import FileRequest
from File import FileServer
from Site import Site
import Spy


@pytest.mark.usefixtures("resetTempSettings")
@pytest.mark.usefixtures("resetSettings")
class TestSiteDownload:
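    """
    Download tests between two in-process peers: the source site is served by
    the file_server fixture (port 1544), while the client copy (site_temp)
    gets its own server on port 1545. FileRequest.route is spied on to
    observe which requests the client actually sends.
    """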
    def testDownload(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer("127.0.0.1", 1545)
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        site_temp.addPeer("127.0.0.1", 1544)
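        # Note: judging by how `requests` is indexed below, Spy.Spy (a test
        # helper) records the positional arguments of every FileRequest.route
        # call; item [0] is the command name and item [2] the request params.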
        with Spy.Spy(FileRequest, "route") as requests:
            def boostRequest(inner_path):
                # I really want these files
                if inner_path == "index.html":
                    site_temp.needFile("data/img/multiuser.png", priority=15, blocking=False)
                    site_temp.needFile("data/img/direct_domains.png", priority=15, blocking=False)
            site_temp.onFileDone.append(boostRequest)
            site_temp.download(blind_includes=True).join(timeout=5)
            file_requests = [request[2]["inner_path"] for request in requests if request[0] in ("getFile", "streamFile")]

            # Test priority
            assert file_requests[0:2] == ["content.json", "index.html"]  # Must-have files
            assert file_requests[2:4] == ["data/img/multiuser.png", "data/img/direct_domains.png"]  # Directly requested files
            assert file_requests[4:6] == ["css/all.css", "js/all.js"]  # Important assets
            assert file_requests[6] == "dbschema.json"  # Database map
            assert "-default" in file_requests[-1]  # Default files for cloning go to the end

        # Check files
        bad_files = site_temp.storage.verifyFiles(quick_check=True)
        # -1 because the data/users/1J6... user has an invalid cert
        assert len(site_temp.content_manager.contents) == len(site.content_manager.contents) - 1
        assert not bad_files

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]

    def testArchivedDownload(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client

        # Download normally
        site_temp.addPeer("127.0.0.1", 1544)
        site_temp.download(blind_includes=True).join(timeout=5)
        bad_files = site_temp.storage.verifyFiles(quick_check=True)
        assert not bad_files
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" in site_temp.content_manager.contents
        assert site_temp.storage.isFile("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 2

        # Add archived data
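        # The "archived" map in user_contents appears to map a user directory
        # address to a timestamp: user content signed at or before that time
        # is treated as archived and gets deleted by peers, while content
        # signed later is accepted again (see the boundary asserts below).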
        assert "archived" not in site.content_manager.contents["data/users/content.json"]["user_contents"]
        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", time.time() - 1)

        site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"] = {"1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q": time.time()}
        site.content_manager.sign("data/users/content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")

        date_archived = site.content_manager.contents["data/users/content.json"]["user_contents"]["archived"]["1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q"]
        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived - 1)
        assert site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived)
        assert not site.content_manager.isArchived("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json", date_archived + 1)  # Allow the user to update archived data later

        # Push the archived update
        assert "archived" not in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
        site.publish()
        site_temp.download(blind_includes=True).join(timeout=5)  # Wait for download

        # The archived user content should disappear from the remote client
        assert "archived" in site_temp.content_manager.contents["data/users/content.json"]["user_contents"]
        assert "data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q/content.json" not in site_temp.content_manager.contents
        assert not site_temp.storage.isDir("data/users/1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q")
        assert len(list(site_temp.storage.query("SELECT * FROM comment"))) == 1
        assert len(list(site_temp.storage.query("SELECT * FROM json WHERE directory LIKE '%1C5sgvWaSgfaTpV5kjBCnCiKtENNMYo69q%'"))) == 0

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]

    # Test when the connected peer has the optional file
    def testOptionalDownload(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = ConnectionServer("127.0.0.1", 1545)
        site_temp.connection_server = client
        site_temp.announce = mock.MagicMock(return_value=True)  # Don't try to find peers from the net

        site_temp.addPeer("127.0.0.1", 1544)

        # Download site
        site_temp.download(blind_includes=True).join(timeout=5)

        # Download optional data/optional.txt
        site.storage.verifyFiles(quick_check=True)  # Find out what optional files we have
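        # The hashfield seems to be a compact set of hash ids for the optional
        # files a peer actually holds; verifyFiles() populates it from what is
        # on disk, and hasHash() checks membership.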
        optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
        assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
        assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

        assert not site_temp.storage.isFile("data/optional.txt")
        assert site.storage.isFile("data/optional.txt")
        site_temp.needFile("data/optional.txt")
        assert site_temp.storage.isFile("data/optional.txt")

        # Optional user file
        assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        optional_file_info = site_temp.content_manager.getFileInfo(
            "data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif"
        )
        assert site.content_manager.hashfield.hasHash(optional_file_info["sha512"])
        assert not site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

        site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert site_temp.content_manager.hashfield.hasHash(optional_file_info["sha512"])

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]

    # Test when the connected peer does not have the file, so we ask it
    # whether it knows someone who does
    def testFindOptional(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init full source server (has optional files)
        site_full = Site("1TeSTvb4w2PWE81S2rEELgmX2GCCExQGT")
        file_server_full = FileServer("127.0.0.1", 1546)
        site_full.connection_server = file_server_full
        gevent.spawn(lambda: ConnectionServer.start(file_server_full))
        time.sleep(0.001)  # Port opening
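        # The spawned greenlet runs the full server's accept loop in the
        # background; the short sleep gives it time to bind its port before
        # anyone connects.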
        file_server_full.sites[site_full.address] = site_full  # Add site
        site_full.storage.verifyFiles(quick_check=True)  # Check optional files
        site_full_peer = site.addPeer("127.0.0.1", 1546)  # Add it to the source server
        hashfield = site_full_peer.updateHashfield()  # Update hashfield
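        # updateHashfield() presumably asks the remote peer for its optional
        # file hashfield, so the source server now knows which optional files
        # the full peer can serve (checked via site_full_peer.hashfield below).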
        assert len(site_full.content_manager.hashfield) == 8
        assert hashfield
        assert site_full.storage.isFile("data/optional.txt")
        assert site_full.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert len(site_full_peer.hashfield) == 8

        # Remove hashes from the source server
        for hash in list(site.content_manager.hashfield):
            site.content_manager.hashfield.remove(hash)
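        # With its own hashfield emptied, the source server can no longer
        # claim to have the optional files itself, which forces the client
        # below to resolve them via a findHashIds request instead of a
        # direct download.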
        # Init client server
        site_temp.connection_server = ConnectionServer("127.0.0.1", 1545)
        site_temp.addPeer("127.0.0.1", 1544)  # Add the source server

        # Download normal files
        site_temp.log.info("Start downloading site")
        site_temp.download(blind_includes=True).join(timeout=5)

        # Download optional data/optional.txt
        optional_file_info = site_temp.content_manager.getFileInfo("data/optional.txt")
        optional_file_info2 = site_temp.content_manager.getFileInfo("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert not site_temp.storage.isFile("data/optional.txt")
        assert not site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert not site.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # The source server doesn't know it has the file
        assert not site.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # The source server doesn't know it has the file
        assert site_full_peer.hashfield.hasHash(optional_file_info["sha512"])  # The full peer registered on the source server has the file
        assert site_full_peer.hashfield.hasHash(optional_file_info2["sha512"])  # The full peer registered on the source server has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info["sha512"])  # The full server knows it has the file
        assert site_full.content_manager.hashfield.hasHash(optional_file_info2["sha512"])  # The full server knows it has the file

        site_temp.log.info("Request optional files")
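        # Expected flow: the source peer has neither hash, so the client sends
        # a single findHashIds request covering both files, learns about the
        # full peer on port 1546, connects to it and downloads from there.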
        with Spy.Spy(FileRequest, "route") as requests:
            # Request the 2 files at the same time
            threads = []
            threads.append(site_temp.needFile("data/optional.txt", blocking=False))
            threads.append(site_temp.needFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif", blocking=False))
            gevent.joinall(threads)

        assert len([request for request in requests if request[0] == "findHashIds"]) == 1  # findHashIds should be called only once

        assert site_temp.storage.isFile("data/optional.txt")
        assert site_temp.storage.isFile("data/users/1CjfbrbwtP8Y2QjPy12vpTATkUT7oSiPQ9/peanut-butter-jelly-time.gif")
        assert site_temp.storage.deleteFiles()
        file_server_full.stop()
        [connection.close() for connection in file_server.connections]

    def testUpdate(self, file_server, site, site_temp):
        file_server.ip_incoming = {}  # Reset flood protection
        assert site.storage.directory == config.data_dir + "/" + site.address
        assert site_temp.storage.directory == config.data_dir + "-temp/" + site.address

        # Init source server
        site.connection_server = file_server
        file_server.sites[site.address] = site

        # Init client server
        client = FileServer("127.0.0.1", 1545)
        client.sites[site_temp.address] = site_temp
        site_temp.connection_server = client

        # Don't try to find peers from the net
        site.announce = mock.MagicMock(return_value=True)
        site_temp.announce = mock.MagicMock(return_value=True)

        # Connect peers
        site_temp.addPeer("127.0.0.1", 1544)

        # Download site from site to site_temp
        site_temp.download(blind_includes=True).join(timeout=5)

        # Update a file
        data_original = site.storage.open("data/data.json").read()
        data_new = data_original.replace('"ZeroBlog"', '"UpdatedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json", "wb").write(data_new)

        assert site.storage.open("data/data.json").read() == data_new
        assert site_temp.storage.open("data/data.json").read() == data_original

        site.log.info("Publish new data.json without patch")
        # Publish without patch
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            site.publish()
            time.sleep(0.1)
            site_temp.download(blind_includes=True).join(timeout=5)
            assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 1

        assert site_temp.storage.open("data/data.json").read() == data_new

        # Close the connection to avoid the update spam limit
        site.peers.values()[0].remove()
        site.addPeer("127.0.0.1", 1545)
        site_temp.peers.values()[0].ping()  # Connect back
        time.sleep(0.1)
        # Update with patch
        data_new = data_original.replace('"ZeroBlog"', '"PatchedZeroBlog"')
        assert data_original != data_new

        site.storage.open("data/data.json-new", "wb").write(data_new)

        assert site.storage.open("data/data.json-new").read() == data_new
        assert site_temp.storage.open("data/data.json").read() != data_new

        # Generate diff
        diffs = site.content_manager.getDiffs("content.json")
        assert not site.storage.isFile("data/data.json-new")  # The new data file is removed
        assert site.storage.open("data/data.json").read() == data_new  # The -new suffix is removed
        assert "data/data.json" in diffs
        assert diffs["data/data.json"] == [('=', 2), ('-', 29), ('+', ['\t"title": "PatchedZeroBlog",\n']), ('=', 31102)]
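        # Reading off the assertion above, a diff is a list of opcodes:
        # ('=', n) keeps n bytes unchanged, ('-', n) deletes n bytes, and
        # ('+', [lines]) inserts the given lines. getDiffs() also appears to
        # pick up "-new" files and swap them into place.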

        # Publish with patch
        site.log.info("Publish new data.json with patch")
        with Spy.Spy(FileRequest, "route") as requests:
            site.content_manager.sign("content.json", privatekey="5KUh3PvNm5HUWoCfSUfcYvfQ2g3PrRNJWr6Q9eqdBGu23mtMntv")
            site.publish(diffs=diffs)
            site_temp.download(blind_includes=True).join(timeout=5)
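            # The diff travels with the update request itself, so the client
            # patches its local data.json instead of re-downloading it: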
            assert len([request for request in requests if request[0] in ("getFile", "streamFile")]) == 0

        assert site_temp.storage.open("data/data.json").read() == data_new

        assert site_temp.storage.deleteFiles()
        [connection.close() for connection in file_server.connections]