Browse Source

version 0.2.8, Namecoin domains using internal resolver site, --disable_zeromq option to skip backward compatibility layer and save some memory, connectionserver firstchar error fixes, missing unpacker crash fix, sitemanager class to allow extensions, add loaded plugin list to websocket api, faster content publishing, mark updating file as bad, remove coppersurfer tracker add eddie4, internal server error with error displaying, allow site domains in UiRequest, better progress bar, wait for siteinfo before using localstorage, csslater hide only if opacity is 0

HelloZeroNet 9 years ago
parent
commit
b122f47100

+ 72 - 0
plugins/Zeroname/SiteManagerPlugin.py

@@ -0,0 +1,72 @@
+import logging, json, os, re, sys, time
+import gevent
+from Plugin import PluginManager
+from Config import config
+from Debug import Debug
+
+allow_reload = False # No reload supported
+
+log = logging.getLogger("ZeronamePlugin")
+
+
+@PluginManager.registerTo("SiteManager")
+class SiteManagerPlugin(object):
+	zeroname_address = "1Name2NXVi1RDPDgf5617UoW7xA6YrhM9F"
+	site_zeroname = None
+
+	# Checks if it's a valid address
+	def isAddress(self, address):
+		if self.isDomain(address): 
+			return True
+		else:
+			return super(SiteManagerPlugin, self).isAddress(address)
+
+
+	# Return: True if the address is domain
+	def isDomain(self, address):
+		return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
+
+
+	# Resolve domain
+	# Return: The address or None
+	def resolveDomain(self, domain):
+		domain = domain.lower()
+		if not self.site_zeroname:
+			self.site_zeroname = self.need(self.zeroname_address)
+		self.site_zeroname.needFile("data/names.json", priority=10)
+		db = self.site_zeroname.storage.loadJson("data/names.json")
+		return db.get(domain)
+
+
+	# Return or create site and start download site files
+	# Return: Site or None if dns resolve failed
+	def need(self, address, all_file=True):
+		if self.isDomain(address): # It looks like a domain
+			address_resolved = self.resolveDomain(address)
+			if address_resolved:
+				address = address_resolved
+			else:
+				return None
+		
+		return super(SiteManagerPlugin, self).need(address, all_file)
+
+
+	# Return: Site object or None if not found
+	def get(self, address):
+		if self.sites == None: # Not loaded yet
+			self.load()
+		if self.isDomain(address): # It looks like a domain
+			address_resolved = self.resolveDomain(address)
+			if address_resolved: # Domain found
+				site = self.sites.get(address_resolved)
+				if site:
+					site_domain = site.settings.get("domain")
+					if site_domain != address:
+						site.settings["domain"] = address
+			else: # Domain not found
+				site = self.sites.get(address)
+
+		else: # Access by site address
+			site = self.sites.get(address)
+		return site
+

+ 34 - 0
plugins/Zeroname/UiRequestPlugin.py

@@ -0,0 +1,34 @@
+import re
+from Plugin import PluginManager
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+	def __init__(self, server = None):
+		from Site import SiteManager
+		self.site_manager = SiteManager.site_manager
+		super(UiRequestPlugin, self).__init__(server)
+
+
+	# Media request
+	def actionSiteMedia(self, path):
+		match = re.match("/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path)
+		if match: # Its a valid domain, resolve first
+			domain = match.group("address")
+			address = self.site_manager.resolveDomain(domain)
+			if address:
+				path = "/media/"+address+match.group("inner_path")
+		return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output
+
+
+	# Is mediarequest allowed from that referer
+	def isMediaRequestAllowed(self, site_address, referer):
+		referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
+		referer_site_address = re.match("/(?P<address>[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", referer_path).group("address")
+
+		if referer_site_address == site_address: # Referer site address as simple address
+			return True
+		elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns
+			return True
+		else: # Invalid referer
+			return False
+

+ 2 - 0
plugins/Zeroname/__init__.py

@@ -0,0 +1,2 @@
+import UiRequestPlugin
+import SiteManagerPlugin

+ 115 - 0
plugins/Zeroname/updater/zeroname_updater.py

@@ -0,0 +1,115 @@
+from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
+import time, json, os, sys, re
+
+
+def publish():
+	print "* Signing..."
+	os.system("python zeronet.py siteSign %s %s" % (config["site"], config["privatekey"]))
+	print "* Publishing..."
+	os.system("python zeronet.py sitePublish %s" % config["site"])
+
+
+
+def processNameOp(domain, value):
+	if not value.startswith("{"): return False
+	try:
+		data = json.loads(value)
+	except Exception, err:
+		print "Json load error: %s" % err
+		return False
+	if "zeronet" not in data: 
+		print "No zeronet in ", data.keys()
+		return False
+
+	if "slave" in sys.argv: 
+		print "Waiting for master update arrive"
+		time.sleep(30) # Wait 30 sec to allow master updater
+
+	names_raw = open(names_path, "rb").read()
+	names = json.loads(names_raw)
+	for subdomain, address in data["zeronet"].items():
+		print subdomain, domain, "->", address
+		if subdomain:
+			names["%s.%s.bit" % (subdomain, domain)] = address
+		else:
+			names["%s.bit" % domain] = address
+
+	new_names_raw = json.dumps(names, indent=2)
+	if new_names_raw != names_raw:
+		open(names_path, "wb").write(new_names_raw)
+		return True
+	else:
+		print "names not changed"
+		return False
+
+
+
+
+def processBlock(block_id):
+	print "Processing block #%s..." % block_id
+	block_hash = rpc.getblockhash(block_id)
+	block = rpc.getblock(block_hash)
+
+	print "Checking %s tx" % len(block["tx"])
+	updated = 0
+	for tx in block["tx"]:
+		transaction = rpc.getrawtransaction(tx, 1)
+		for vout in transaction.get("vout",[]):
+			if "scriptPubKey" in vout and "nameOp" in vout["scriptPubKey"] and "name" in vout["scriptPubKey"]["nameOp"]:
+				name_op = vout["scriptPubKey"]["nameOp"]
+				updated += processNameOp(name_op["name"].replace("d/", ""), name_op["value"])
+	print "Done (updated %s)." % updated
+	if updated:
+		publish()
+
+
+# Loading config...
+config_path = os.path.expanduser("~/.namecoin/zeroname_config.json")
+if not os.path.isfile(config_path): # Create sample config
+	open(config_path, "w").write(
+		json.dumps({'site': 'site', 'zeronet_path': '/home/zeronet/', 'privatekey': '', 'lastprocessed': None}, indent=2)
+	)
+	print "Example config written to %s" % config_path
+	sys.exit(0)
+
+config = json.load(open(config_path))
+names_path = "%s/data/%s/data/names.json" % (config["zeronet_path"], config["site"])
+os.chdir(config["zeronet_path"]) # Change working dir
+
+# Getting rpc connect details
+namecoin_conf = open(os.path.expanduser("~/.namecoin/namecoin.conf")).read()
+
+# Connecting to RPC
+rpc_user = re.search("rpcuser=(.*)$", namecoin_conf, re.M).group(1)
+rpc_pass = re.search("rpcpassword=(.*)$", namecoin_conf, re.M).group(1)
+rpc_url = "http://%s:%s@127.0.0.1:8336" % (rpc_user, rpc_pass)
+rpc = AuthServiceProxy(rpc_url, timeout=60*5)
+
+last_block = int(rpc.getinfo()["blocks"])
+
+if not config["lastprocessed"]: # Start processing from last block
+	config["lastprocessed"] = last_block
+
+# Processing skipped blocks
+print "Processing block from #%s to #%s..." % (config["lastprocessed"], last_block)
+for block_id in range(config["lastprocessed"], last_block+1):
+	processBlock(block_id)
+
+#processBlock(223911) # Testing
+
+while 1:
+	print "Waiting for new block..."
+	while 1:
+		try:
+			rpc = AuthServiceProxy(rpc_url, timeout=60*5)
+			if (int(rpc.getinfo()["blocks"]) > last_block): break
+			time.sleep(1)
+			rpc.waitforblock()
+			break # Block found
+		except Exception, err: # Timeout
+			pass
+	last_block = int(rpc.getinfo()["blocks"])
+	processBlock(last_block)
+
+	config["lastprocessed"] = last_block
+	open(config_path, "w").write(json.dumps(config, indent=2))

+ 153 - 0
plugins/disabled-Dnschain/SiteManagerPlugin.py

@@ -0,0 +1,153 @@
+import logging, json, os, re, sys, time
+import gevent
+from Plugin import PluginManager
+from Config import config
+from util import Http
+from Debug import Debug
+
+allow_reload = False # No reload supported
+
+log = logging.getLogger("DnschainPlugin")
+
+@PluginManager.registerTo("SiteManager")
+class SiteManagerPlugin(object):
+	dns_cache_path = "data/dns_cache.json"
+	dns_cache = None
+
+	# Checks if it's a valid address
+	def isAddress(self, address):
+		if self.isDomain(address): 
+			return True
+		else:
+			return super(SiteManagerPlugin, self).isAddress(address)
+
+
+	# Return: True if the address is domain
+	def isDomain(self, address):
+		return re.match("(.*?)([A-Za-z0-9_-]+\.[A-Za-z0-9]+)$", address)
+
+
+	# Load dns entries from data/dns_cache.json
+	def loadDnsCache(self):
+		if os.path.isfile(self.dns_cache_path):
+			self.dns_cache = json.load(open(self.dns_cache_path))
+		else:
+			self.dns_cache = {}
+		log.debug("Loaded dns cache, entries: %s" % len(self.dns_cache))
+
+
+	# Save dns entries to data/dns_cache.json
+	def saveDnsCache(self):
+		json.dump(self.dns_cache, open(self.dns_cache_path, "wb"), indent=2)
+
+
+	# Resolve domain using dnschain.net
+	# Return: The address or None
+	def resolveDomainDnschainNet(self, domain):
+		try:
+			match = self.isDomain(domain)
+			sub_domain = match.group(1).strip(".")
+			top_domain = match.group(2)
+			if not sub_domain: sub_domain = "@"
+			address = None
+			with gevent.Timeout(5, Exception("Timeout: 5s")):
+				res = Http.get("https://api.dnschain.net/v1/namecoin/key/%s" % top_domain).read()
+				data = json.loads(res)["data"]["value"]
+				if "zeronet" in data:
+					for key, val in data["zeronet"].iteritems():
+						self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours
+					self.saveDnsCache()
+					return data["zeronet"].get(sub_domain)
+			# Not found
+			return address
+		except Exception, err:
+			log.debug("Dnschain.net %s resolve error: %s" % (domain, Debug.formatException(err)))
+
+
+	# Resolve domain using dnschain.info
+	# Return: The address or None
+	def resolveDomainDnschainInfo(self, domain):
+		try:
+			match = self.isDomain(domain)
+			sub_domain = match.group(1).strip(".")
+			top_domain = match.group(2)
+			if not sub_domain: sub_domain = "@"
+			address = None
+			with gevent.Timeout(5, Exception("Timeout: 5s")):
+				res = Http.get("https://dnschain.info/bit/d/%s" % re.sub("\.bit$", "", top_domain)).read()
+				data = json.loads(res)["value"]
+				for key, val in data["zeronet"].iteritems():
+					self.dns_cache[key+"."+top_domain] = [val, time.time()+60*60*5] # Cache for 5 hours
+				self.saveDnsCache()
+				return data["zeronet"].get(sub_domain)
+			# Not found
+			return address
+		except Exception, err:
+			log.debug("Dnschain.info %s resolve error: %s" % (domain, Debug.formatException(err)))
+
+
+	# Resolve domain
+	# Return: The address or None
+	def resolveDomain(self, domain):
+		domain = domain.lower()
+		if self.dns_cache == None:
+			self.loadDnsCache()
+		if domain.count(".") < 2: # It's a top-level request, prepend @. to it
+			domain = "@."+domain
+
+		domain_details = self.dns_cache.get(domain)
+		if domain_details and time.time() < domain_details[1]: # Found in cache and its not expired
+			return domain_details[0]
+		else:
+			# Resolve dns using dnschain
+			thread_dnschain_info = gevent.spawn(self.resolveDomainDnschainInfo, domain)
+			thread_dnschain_net = gevent.spawn(self.resolveDomainDnschainNet, domain)
+			gevent.joinall([thread_dnschain_net, thread_dnschain_info]) # Wait for finish
+
+			if thread_dnschain_info.value and thread_dnschain_net.value: # Both successful
+				if thread_dnschain_info.value == thread_dnschain_net.value: # Same returned value
+					return thread_dnschain_info.value 
+				else:
+					log.error("Dns %s missmatch: %s != %s" % (domain, thread_dnschain_info.value, thread_dnschain_net.value))
+
+			# Problem during resolve
+			if domain_details: # Resolve failed, but we have it in the cache
+				domain_details[1] = time.time()+60*60 # Dont try again for 1 hour
+				return domain_details[0]
+			else: # Not found in cache
+				self.dns_cache[domain] = [None, time.time()+60] # Don't check again for 1 min
+				return None
+
+
+	# Return or create site and start download site files
+	# Return: Site or None if dns resolve failed
+	def need(self, address, all_file=True):
+		if self.isDomain(address): # It looks like a domain
+			address_resolved = self.resolveDomain(address)
+			if address_resolved:
+				address = address_resolved
+			else:
+				return None
+		
+		return super(SiteManagerPlugin, self).need(address, all_file)
+
+
+	# Return: Site object or None if not found
+	def get(self, address):
+		if self.sites == None: # Not loaded yet
+			self.load()
+		if self.isDomain(address): # It looks like a domain
+			address_resolved = self.resolveDomain(address)
+			if address_resolved: # Domain found
+				site = self.sites.get(address_resolved)
+				if site:
+					site_domain = site.settings.get("domain")
+					if site_domain != address:
+						site.settings["domain"] = address
+			else: # Domain not found
+				site = self.sites.get(address)
+
+		else: # Access by site address
+			site = self.sites.get(address)
+		return site
+

+ 34 - 0
plugins/disabled-Dnschain/UiRequestPlugin.py

@@ -0,0 +1,34 @@
+import re
+from Plugin import PluginManager
+
+@PluginManager.registerTo("UiRequest")
+class UiRequestPlugin(object):
+	def __init__(self, server = None):
+		from Site import SiteManager
+		self.site_manager = SiteManager.site_manager
+		super(UiRequestPlugin, self).__init__(server)
+
+
+	# Media request
+	def actionSiteMedia(self, path):
+		match = re.match("/media/(?P<address>[A-Za-z0-9]+\.[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", path)
+		if match: # Its a valid domain, resolve first
+			domain = match.group("address")
+			address = self.site_manager.resolveDomain(domain)
+			if address:
+				path = "/media/"+address+match.group("inner_path")
+		return super(UiRequestPlugin, self).actionSiteMedia(path) # Get the wrapper frame output
+
+
+	# Is mediarequest allowed from that referer
+	def isMediaRequestAllowed(self, site_address, referer):
+		referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
+		referer_site_address = re.match("/(?P<address>[A-Za-z0-9\.]+)(?P<inner_path>/.*|$)", referer_path).group("address")
+
+		if referer_site_address == site_address: # Referer site address as simple address
+			return True
+		elif self.site_manager.resolveDomain(referer_site_address) == site_address: # Referer site address as dns
+			return True
+		else: # Invalid referer
+			return False
+

+ 2 - 0
plugins/disabled-Dnschain/__init__.py

@@ -0,0 +1,2 @@
+import DnschainPlugin
+import SiteManagerPlugin

+ 2 - 1
src/Config.py

@@ -3,7 +3,7 @@ import ConfigParser
 
 class Config(object):
 	def __init__(self):
-		self.version = "0.2.7"
+		self.version = "0.2.8"
 		self.parser = self.createArguments()
 		argv = sys.argv[:] # Copy command line arguments
 		argv = self.parseConfig(argv) # Add arguments from config file
@@ -86,6 +86,7 @@ class Config(object):
 
 		parser.add_argument('--fileserver_ip', 	help='FileServer bind address', default="*", metavar='ip')
 		parser.add_argument('--fileserver_port',help='FileServer bind port', default=15441, type=int, metavar='port')
+		parser.add_argument('--disable_zeromq', help='Disable compatibility with old clients', action='store_true')
 
 		parser.add_argument('--ip_external',	help='External ip (tested on start if None)', metavar='ip')
 

+ 12 - 6
src/Connection/Connection.py

@@ -3,10 +3,14 @@ from cStringIO import StringIO
 import gevent, msgpack
 from Config import config
 from Debug import Debug
-try:
-	import zmq.green as zmq
-except:
-	zmq = None
+zmq = None
+if not config.disable_zeromq:
+	try:
+		import zmq.green as zmq
+	except:
+		zmq = None
+
+
 
 class Connection:
 	def __init__(self, server, ip, port, sock=None):
@@ -75,7 +79,8 @@ class Connection:
 		try:
 			firstchar = sock.recv(1) # Find out if pure socket or zeromq
 		except Exception, err:
-			self.log.debug("Socket firstchar error: %s" % Debug.formatException(err))
+			if self.log:
+				self.log.debug("Socket firstchar error: %s" % Debug.formatException(err))
 			self.close()
 			return False
 		if firstchar == "\xff": # Backward compatibility: forward data to zmq
@@ -106,7 +111,7 @@ class Connection:
 		try:
 			if not firstchar: firstchar = sock.recv(1)
 		except Exception, err:
-			self.log.debug("Socket firstchar error: %s" % Debug.formatException(err))
+			if self.log: self.log.debug("Socket firstchar error: %s" % Debug.formatException(err))
 			self.close()
 			return False
 		if firstchar == "\xff": # Backward compatibility to zmq
@@ -294,3 +299,4 @@ class Connection:
 		del self.log
 		del self.unpacker
 		del self.sock
+		self.unpacker = None

+ 4 - 1
src/Connection/ConnectionServer.py

@@ -26,7 +26,7 @@ class ConnectionServer:
 		self.zmq_last_connection = None # Last incoming message client
 
 		self.peer_id = "-ZN0"+config.version.replace(".", "")+"-"+''.join(random.choice(string.ascii_uppercase + string.ascii_lowercase + string.digits) for _ in range(12)) # Bittorrent style peerid
-		
+
 		if port: # Listen server on a port
 			self.zmq_port = port-1
 			self.pool = Pool(1000) # do not accept more than 1000 connections
@@ -144,6 +144,9 @@ class ConnectionServer:
 
 
 	def zmqServer(self):
+		if config.disable_zeromq:
+			self.log.debug("ZeroMQ disabled by config")
+			return False
 		self.log.debug("Starting ZeroMQ on: tcp://127.0.0.1:%s..." % self.zmq_port)
 		try:
 			import zmq.green as zmq

+ 1 - 2
src/File/FileServer.py

@@ -1,6 +1,5 @@
 import os, logging, urllib2, re, time
 import gevent, msgpack
-import zmq.green as zmq
 from Config import config
 from FileRequest import FileRequest
 from Site import SiteManager
@@ -17,7 +16,7 @@ class FileServer(ConnectionServer):
 			SiteManager.peer_blacklist.append((config.ip_external, self.port)) # Add myself to peer blacklist
 		else:
 			self.port_opened = None # Is file server opened on router
-		self.sites = SiteManager.list()
+		self.sites = SiteManager.site_manager.list()
 
 
 	# Handle request to fileserver

+ 0 - 1
src/Peer/Peer.py

@@ -1,5 +1,4 @@
 import os, logging, gevent, time, msgpack, sys
-import zmq.green as zmq
 from cStringIO import StringIO
 from Config import config
 from Debug import Debug

+ 2 - 0
src/Plugin/PluginManager.py

@@ -7,6 +7,7 @@ class PluginManager:
 		self.log = logging.getLogger("PluginManager")
 		self.plugin_path = "plugins" # Plugin directory
 		self.plugins = {} # Registered plugins (key: class name, value: list of plugins for class)
+		self.plugin_names = [] # Loaded plugin names
 
 		sys.path.append(self.plugin_path)
 
@@ -30,6 +31,7 @@ class PluginManager:
 				__import__(dir_name)
 			except Exception, err:
 				self.log.error("Plugin %s load error: %s" % (dir_name, Debug.formatException(err)))
+			if dir_name not in self.plugin_names: self.plugin_names.append(dir_name)
 
 
 	# Reload all plugins

+ 12 - 6
src/Site/Site.py

@@ -181,10 +181,12 @@ class Site:
 
 
 	# Publish worker
-	def publisher(self, inner_path, peers, published, limit):
+	def publisher(self, inner_path, peers, published, limit, event_done):
 		timeout = 5+int(self.storage.getSize(inner_path)/1024) # Timeout: 5sec + size in kb
 		while 1:
-			if not peers or len(published) >= limit: break # All peers done, or published engouht
+			if not peers or len(published) >= limit:
+				event_done.set(True)
+				break # All peers done, or published enough
 			peer = peers.pop(0)
 			result = {"exception": "Timeout"}
 
@@ -216,11 +218,14 @@ class Site:
 		peers = self.peers.values()
 
 		random.shuffle(peers)
-		for i in range(limit):
-			publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit)
+		event_done = gevent.event.AsyncResult()
+		for i in range(min(1+len(self.peers), limit)/2):
+			publisher = gevent.spawn(self.publisher, inner_path, peers, published, limit, event_done)
 			publishers.append(publisher)
 
-		gevent.joinall(publishers) # Wait for all publishers
+		event_done.get() # Wait for done
+		if len(published) < min(len(self.peers), limit): time.sleep(0.2) # If less than we need sleep a bit
+		if len(published) == 0: gevent.join(publishers) # No successful publish, wait for all publisher
 
 		self.log.info("Successfuly published to %s peers" % len(published))
 		return len(published)
@@ -233,6 +238,7 @@ class Site:
 		elif self.settings["serving"] == False: # Site not serving
 			return False
 		else: # Wait until file downloaded
+			self.bad_files[inner_path] = True # Mark as bad file
 			if not self.content_manager.contents.get("content.json"): # No content.json, download it first!
 				self.log.debug("Need content.json first")
 				self.announce()
@@ -348,7 +354,7 @@ class Site:
 	def fileDone(self, inner_path):
 		# File downloaded, remove it from bad files
 		if inner_path in self.bad_files:
-			self.log.info("Bad file solved: %s" % inner_path)
+			self.log.debug("Bad file solved: %s" % inner_path)
 			del(self.bad_files[inner_path])
 
 		# Update content.json last download time

+ 78 - 63
src/Site/SiteManager.py

@@ -1,75 +1,90 @@
 import json, logging, time, re, os
 import gevent
+from Plugin import PluginManager
 
 TRACKERS = [
 	("udp", "open.demonii.com", 1337),
 	("udp", "sugoi.pomf.se", 2710),
-	("udp", "tracker.coppersurfer.tk", 80),
+	#("udp", "tracker.coppersurfer.tk", 80),
 	("udp", "tracker.leechers-paradise.org", 6969),
 	("udp", "9.rarbg.com", 2710),
-	#("udp", "www.eddie4.nl", 6969), Backup trackers
-	#("udp", "trackr.sytes.net", 80),
+	("udp", "www.eddie4.nl", 6969), 
+	#("udp", "trackr.sytes.net", 80), #Backup trackers
 	#("udp", "tracker4.piratux.com", 6969)
 ]
 
-# Load all sites from data/sites.json
-def load():
-	from Site import Site
-	global sites
-	if not sites: sites = {}
-	address_found = []
-	added = 0
-	# Load new adresses
-	for address in json.load(open("data/sites.json")):
-		if address not in sites and os.path.isfile("data/%s/content.json" % address):
-			sites[address] = Site(address)
-			added += 1
-		address_found.append(address)
-
-	# Remove deleted adresses
-	for address in sites.keys():
-		if address not in address_found: 
-			del(sites[address])
-			logging.debug("Removed site: %s" % address)
-
-	if added: logging.debug("SiteManager added %s sites" % added)
-
-
-# Checks if its a valid address
-def isAddress(address):
-	return re.match("^[A-Za-z0-9]{26,35}$", address)
-
-
-# Return site and start download site files
-def need(address, all_file=True):
-	from Site import Site
-	new = False
-	if address not in sites: # Site not exits yet
-		if not isAddress(address): return False # Not address: %s % address
-		logging.debug("Added new site: %s" % address)
-		sites[address] = Site(address)
-		if not sites[address].settings["serving"]: # Maybe it was deleted before
-			sites[address].settings["serving"] = True
-			sites[address].saveSettings()
-		new = True
-			
-	site = sites[address]
-	if all_file: site.download()
-	return site
-
-
-def delete(address):
-	global sites
-	logging.debug("SiteManager deleted site: %s" % address)
-	del(sites[address])
-
-
-# Lazy load sites
-def list():
-	if sites == None: # Not loaded yet
-		load()
-	return sites
-
-
-sites = None
+
+@PluginManager.acceptPlugins
+class SiteManager(object):
+	def __init__(self):
+		self.sites = None
+
+	# Load all sites from data/sites.json
+	def load(self):
+		from Site import Site
+		if not self.sites: self.sites = {}
+		address_found = []
+		added = 0
+		# Load new addresses
+		for address in json.load(open("data/sites.json")):
+			if address not in self.sites and os.path.isfile("data/%s/content.json" % address):
+				self.sites[address] = Site(address)
+				added += 1
+			address_found.append(address)
+
+		# Remove deleted addresses
+		for address in self.sites.keys():
+			if address not in address_found: 
+				del(self.sites[address])
+				logging.debug("Removed site: %s" % address)
+
+		if added: logging.debug("SiteManager added %s sites" % added)
+
+
+	# Checks if it's a valid address
+	def isAddress(self, address):
+		return re.match("^[A-Za-z0-9]{26,35}$", address)
+
+
+	# Return: Site object or None if not found
+	def get(self, address):
+		if self.sites == None: # Not loaded yet
+			self.load()
+		return self.sites.get(address)
+
+
+	# Return or create site and start download site files
+	def need(self, address, all_file=True):
+		from Site import Site
+		new = False
+		site = self.get(address)
+		if not site: # Site doesn't exist yet
+			if not self.isAddress(address): return False # Not address: %s % address
+			logging.debug("Added new site: %s" % address)
+			site = Site(address)
+			self.sites[address] = site
+			if not site.settings["serving"]: # Maybe it was deleted before
+				site.settings["serving"] = True
+				site.saveSettings()
+			new = True
+
+		if all_file: site.download()
+		return site
+
+
+	def delete(self, address):
+		logging.debug("SiteManager deleted site: %s" % address)
+		del(self.sites[address])
+
+
+	# Lazy load sites
+	def list(self):
+		if self.sites == None: # Not loaded yet
+			self.load()
+		return self.sites
+
+
+
+site_manager = SiteManager() # Singleton
+
 peer_blacklist = [] # Don't download from these peers

+ 39 - 20
src/Ui/UiRequest.py

@@ -10,6 +10,7 @@ status_texts = {
 	400: "400 Bad Request",
 	403: "403 Forbidden",
 	404: "404 Not Found",
+	500: "500 Internal Server Error",
 }
 
 
@@ -125,27 +126,32 @@ class UiRequest(object):
 
 
 	# Render a file from media with iframe site wrapper
-	def actionWrapper(self, path, extra_headers=[]):
-		if "." in path and not path.endswith(".html"): return self.actionSiteMedia("/media"+path) # Only serve html files with frame
+	def actionWrapper(self, path, extra_headers=None):
+		if not extra_headers: extra_headers = []
 		if self.get.get("wrapper") == "False": return self.actionSiteMedia("/media"+path) # Only serve html files with frame
-		if self.env.get("HTTP_X_REQUESTED_WITH"): return self.error403() # No ajax allowed on wrapper
 
-		match = re.match("/(?P<site>[A-Za-z0-9]+)(?P<inner_path>/.*|$)", path)
+		match = re.match("/(?P<address>[A-Za-z0-9\._-]+)(?P<inner_path>/.*|$)", path)
 		if match:
+			address = match.group("address")
 			inner_path = match.group("inner_path").lstrip("/")
+			if "." in inner_path and not inner_path.endswith(".html"): return self.actionSiteMedia("/media"+path) # Only serve html files with frame
+			if self.env.get("HTTP_X_REQUESTED_WITH"): return self.error403("Ajax request not allowed to load wrapper") # No ajax allowed on wrapper
+
 			if not inner_path: inner_path = "index.html" # If inner path defaults to index.html
 
-			site = self.server.sites.get(match.group("site"))
+			site = SiteManager.site_manager.get(address)
+
 			if site and site.content_manager.contents.get("content.json") and (not site.getReachableBadFiles() or site.settings["own"]): # Its downloaded or own
 				title = site.content_manager.contents["content.json"]["title"]
 			else:
-				title = "Loading %s..." % match.group("site")
-				site = SiteManager.need(match.group("site")) # Start download site
+				title = "Loading %s..." % address
+				site = SiteManager.site_manager.need(address) # Start download site
+					
 				if not site: return False
 
 			extra_headers.append(("X-Frame-Options", "DENY"))
 
-			self.sendHeader(extra_headers=extra_headers)
+			self.sendHeader(extra_headers=extra_headers[:])
 
 			# Wrapper variable inits
 			query_string = ""
@@ -162,7 +168,7 @@ class UiRequest(object):
 
 			return self.render("src/Ui/template/wrapper.html", 
 				inner_path=inner_path, 
-				address=match.group("site"), 
+				address=address, 
 				title=title, 
 				body_style=body_style,
 				meta_tags=meta_tags,
@@ -177,33 +183,39 @@ class UiRequest(object):
 			return False
 
 
+	# Returns if media request allowed from that referer
+	def isMediaRequestAllowed(self, site_address, referer):
+		referer_path = re.sub("http[s]{0,1}://.*?/", "/", referer).replace("/media", "") # Remove site address
+		return referer_path.startswith("/"+site_address)
+
+
 	# Serve a media for site
 	def actionSiteMedia(self, path):
 		path = path.replace("/index.html/", "/") # Base Backward compatibility fix
 		
-		match = re.match("/media/(?P<site>[A-Za-z0-9]+)/(?P<inner_path>.*)", path)
+		match = re.match("/media/(?P<address>[A-Za-z0-9\._-]+)/(?P<inner_path>.*)", path)
 
 		referer = self.env.get("HTTP_REFERER")
-		if referer: # Only allow same site to receive media
-			referer = re.sub("http://.*?/", "/", referer) # Remove server address
-			referer = referer.replace("/media", "") # Media
-			if not referer.startswith("/"+match.group("site")): return self.error403() # Referer not starts same address as requested path
+		if referer and match: # Only allow same site to receive media
+			if not self.isMediaRequestAllowed(match.group("address"), referer):
+				return self.error403("Media referer error") # Referer not starts same address as requested path				
 
 		if match: # Looks like a valid path
-			file_path = "data/%s/%s" % (match.group("site"), match.group("inner_path"))
-			allowed_dir = os.path.abspath("data/%s" % match.group("site")) # Only files within data/sitehash allowed
+			address = match.group("address")
+			file_path = "data/%s/%s" % (address, match.group("inner_path"))
+			allowed_dir = os.path.abspath("data/%s" % address) # Only files within data/sitehash allowed
 			if ".." in file_path or not os.path.dirname(os.path.abspath(file_path)).startswith(allowed_dir): # File not in allowed path
 				return self.error403()
 			else:
 				if config.debug and file_path.split("/")[-1].startswith("all."): # When debugging merge *.css to all.css and *.js to all.js
-					site = self.server.sites.get(match.group("site"))
+					site = self.server.sites.get(address)
 					if site.settings["own"]:
 						from Debug import DebugMedia
 						DebugMedia.merge(file_path)
 				if os.path.isfile(file_path): # File exists
 					return self.actionFile(file_path)
 				else: # File doesn't exist, try to download
-					site = SiteManager.need(match.group("site"), all_file=False)
+					site = SiteManager.site_manager.need(address, all_file=False)
 					self.sendHeader(content_type=self.getContentType(file_path)) # ?? Get Exception without this
 					result = site.needFile(match.group("inner_path"), priority=1) # Wait until file downloads
 					return self.actionFile(file_path)
@@ -323,9 +335,9 @@ class UiRequest(object):
 
 
 	# You are not allowed to access this
-	def error403(self):
+	def error403(self, message="Forbidden"):
 		self.sendHeader(403)
-		return "Forbidden"
+		return message
 
 
 	# Send file not found error
@@ -333,6 +345,13 @@ class UiRequest(object):
 		self.sendHeader(404)
 		return "Not Found: %s" % path
 
+
+	# Internal server error
+	def error500(self, message = ":("):
+		self.sendHeader(500)
+		return "<h1>Server error</h1>%s" % cgi.escape(message)
+
+
 	# - Reload for easier developing -
 	def reload(self):
 		import imp, sys

+ 10 - 5
src/Ui/UiServer.py

@@ -1,4 +1,3 @@
-from gevent import monkey; monkey.patch_all(thread = False)
 import logging, time, cgi, string, random
 from gevent.pywsgi import WSGIServer
 from gevent.pywsgi import WSGIHandler
@@ -29,7 +28,7 @@ class UiWSGIHandler(WSGIHandler):
 			try:
 				return super(UiWSGIHandler, self).run_application()
 			except Exception, err:
-				logging.debug("UiWSGIHandler error: %s" % Debug.formatException(err))
+				logging.error("UiWSGIHandler error: %s" % Debug.formatException(err))
 				if config.debug: # Allow websocket errors to appear on /Debug 
 					import sys
 					sys.modules["main"].DebugHook.handleError() 
@@ -43,7 +42,7 @@ class UiServer:
 		if self.ip == "*": self.ip = "" # Bind all
 		#self.sidebar_websockets = [] # Sidebar websocket connections
 		#self.auth_key = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(12)) # Global admin auth key
-		self.sites = SiteManager.list()
+		self.sites = SiteManager.site_manager.list()
 		self.log = logging.getLogger(__name__)
 		
 		self.ui_request = UiRequest(self)
@@ -58,8 +57,14 @@ class UiServer:
 			self.ui_request.get = dict(cgi.parse_qsl(env['QUERY_STRING']))
 		else:
 			self.ui_request.get = {}
-		return self.ui_request.route(path)
-
+		if config.debug: # Let the exception be caught by werkzeug
+			return self.ui_request.route(path)
+		else: # Catch and display the error
+			try:
+				return self.ui_request.route(path)
+			except Exception, err:
+				logging.debug("UiRequest error: %s" % Debug.formatException(err))
+				return self.ui_request.error500("Err: %s" % Debug.formatException(err))
 
 	# Reload the UiRequest class to prevent restarts in debug mode
 	def reload(self):

+ 5 - 4
src/Ui/UiWebsocket.py

@@ -220,7 +220,8 @@ class UiWebsocket(object):
 			"ui_ip": config.ui_ip,
 			"ui_port": config.ui_port,
 			"version": config.version,
-			"debug": config.debug
+			"debug": config.debug,
+			"plugins": PluginManager.plugin_manager.plugin_names
 		}
 
 
@@ -327,10 +328,10 @@ class UiWebsocket(object):
 	# List all site info
 	def actionSiteList(self, to):
 		ret = []
-		SiteManager.load() # Reload sites
+		SiteManager.site_manager.load() # Reload sites
 		for site in self.server.sites.values():
 			if not site.content_manager.contents.get("content.json"): continue # Broken site
-			ret.append(self.formatSiteInfo(site, create_user=False))
+			ret.append(self.formatSiteInfo(site, create_user=False)) # Don't generate the auth_address on listing
 		self.response(to, ret)
 
 
@@ -386,7 +387,7 @@ class UiWebsocket(object):
 			site.worker_manager.running = False
 			site.worker_manager.stopWorkers()
 			site.storage.deleteFiles()
-			SiteManager.delete(address)
+			SiteManager.site_manager.delete(address)
 			site.updateWebsocket()
 		else:
 			self.response(to, {"error": "Unknown site: %s" % address})

+ 2 - 1
src/Ui/media/Loading.coffee

@@ -7,7 +7,8 @@ class Loading
 		$(".progressbar").css("width", percent*100+"%").css("opacity", "1").css("display", "block")
 
 	hideProgress: ->
-		$(".progressbar").css("width", "100%").css("opacity", "0").cssLater("display", "none", 1000)
+		console.log "hideProgress"
+		$(".progressbar").css("width", "100%").css("opacity", "0").hideLater(1000)
 
 
 	showScreen: ->

+ 11 - 5
src/Ui/media/Wrapper.coffee

@@ -17,10 +17,12 @@ class Wrapper
 		@ws_error = null # Ws error message
 
 		@site_info = null # Hold latest site info
+		@event_site_info =  $.Deferred() # Event when site_info received
 		@inner_loaded = false # If iframe loaded or not
 		@inner_ready = false # Inner frame ready to receive messages
 		@wrapperWsInited = false # Wrapper notified on websocket open
 		@site_error = null # Latest failed file download
+		@address = null
 
 		window.onload = @onLoad # On iframe loaded
 		$(window).on "hashchange", => # On hash change
@@ -47,7 +49,7 @@ class Wrapper
 				@ws.response message.id, res
 		else if cmd == "setSiteInfo"
 			@sendInner message # Pass to inner frame
-			if message.params.address == window.address # Current page
+			if message.params.address == @address # Current page
 				@setSiteInfo message.params
 		else if cmd == "updating" # Close connection
 			@ws.ws.close()
@@ -159,13 +161,14 @@ class Wrapper
 
 
 	actionGetLocalStorage: (message) ->
-		data = localStorage.getItem "site.#{window.address}"
-		if data then data = JSON.parse(data)
-		@sendInner {"cmd": "response", "to": message.id, "result": data}
+		$.when(@event_site_info).done => 
+			data = localStorage.getItem "site.#{@site_info.address}"
+			if data then data = JSON.parse(data)
+			@sendInner {"cmd": "response", "to": message.id, "result": data}
 
 
 	actionSetLocalStorage: (message) ->
-		back = localStorage.setItem "site.#{window.address}", JSON.stringify(message.params)
+		back = localStorage.setItem "site.#{@site_info.address}", JSON.stringify(message.params)
 
 
 	# EOF actions
@@ -221,7 +224,9 @@ class Wrapper
 	# Get site info from UiServer
 	reloadSiteInfo: ->
 		@ws.cmd "siteInfo", {}, (site_info) =>
+			@address = site_info.address
 			@setSiteInfo site_info
+
 			window.document.title = site_info.content.title+" - ZeroNet"
 			@log "Setting title to", window.document.title
 
@@ -282,6 +287,7 @@ class Wrapper
 			@loading.hideProgress()
 
 		@site_info = site_info
+		@event_site_info.resolve()
 
 
 	toHtmlSafe: (values) ->

+ 27 - 17
src/Ui/media/all.js

@@ -214,7 +214,9 @@ jQuery.fx.step.scale = function(fx) {
     }
     elem = this;
     setTimeout((function() {
-      return elem.css("display", "none");
+      if (elem.css("opacity") === 0) {
+        return elem.css("display", "none");
+      }
     }), time);
     return this;
   };
@@ -474,7 +476,8 @@ jQuery.extend( jQuery.easing,
     };
 
     Loading.prototype.hideProgress = function() {
-      return $(".progressbar").css("width", "100%").css("opacity", "0").cssLater("display", "none", 1000);
+      console.log("hideProgress");
+      return $(".progressbar").css("width", "100%").css("opacity", "0").hideLater(1000);
     };
 
     Loading.prototype.showScreen = function() {
@@ -660,7 +663,6 @@ jQuery.extend( jQuery.easing,
 }).call(this);
 
 
-
 /* ---- src/Ui/media/Sidebar.coffee ---- */
 
 
@@ -756,10 +758,12 @@ jQuery.extend( jQuery.easing,
       this.ws.connect();
       this.ws_error = null;
       this.site_info = null;
+      this.event_site_info = $.Deferred();
       this.inner_loaded = false;
       this.inner_ready = false;
       this.wrapperWsInited = false;
       this.site_error = null;
+      this.address = null;
       window.onload = this.onLoad;
       $(window).on("hashchange", (function(_this) {
         return function() {
@@ -794,7 +798,7 @@ jQuery.extend( jQuery.easing,
         })(this));
       } else if (cmd === "setSiteInfo") {
         this.sendInner(message);
-        if (message.params.address === window.address) {
+        if (message.params.address === this.address) {
           return this.setSiteInfo(message.params);
         }
       } else if (cmd === "updating") {
@@ -947,21 +951,25 @@ jQuery.extend( jQuery.easing,
     };
 
     Wrapper.prototype.actionGetLocalStorage = function(message) {
-      var data;
-      data = localStorage.getItem("site." + window.address);
-      if (data) {
-        data = JSON.parse(data);
-      }
-      return this.sendInner({
-        "cmd": "response",
-        "to": message.id,
-        "result": data
-      });
+      return $.when(this.event_site_info).done((function(_this) {
+        return function() {
+          var data;
+          data = localStorage.getItem("site." + _this.site_info.address);
+          if (data) {
+            data = JSON.parse(data);
+          }
+          return _this.sendInner({
+            "cmd": "response",
+            "to": message.id,
+            "result": data
+          });
+        };
+      })(this));
     };
 
     Wrapper.prototype.actionSetLocalStorage = function(message) {
       var back;
-      return back = localStorage.setItem("site." + window.address, JSON.stringify(message.params));
+      return back = localStorage.setItem("site." + this.site_info.address, JSON.stringify(message.params));
     };
 
     Wrapper.prototype.onOpenWebsocket = function(e) {
@@ -1032,6 +1040,7 @@ jQuery.extend( jQuery.easing,
     Wrapper.prototype.reloadSiteInfo = function() {
       return this.ws.cmd("siteInfo", {}, (function(_this) {
         return function(site_info) {
+          _this.address = site_info.address;
           _this.setSiteInfo(site_info);
           window.document.title = site_info.content.title + " - ZeroNet";
           return _this.log("Setting title to", window.document.title);
@@ -1108,7 +1117,8 @@ jQuery.extend( jQuery.easing,
       } else {
         this.loading.hideProgress();
       }
-      return this.site_info = site_info;
+      this.site_info = site_info;
+      return this.event_site_info.resolve();
     };
 
     Wrapper.prototype.toHtmlSafe = function(values) {
@@ -1154,4 +1164,4 @@ jQuery.extend( jQuery.easing,
 
   window.wrapper = new Wrapper(ws_url);
 
-}).call(this);
+}).call(this);

+ 2 - 1
src/Ui/media/lib/jquery.csslater.coffee

@@ -16,7 +16,8 @@ jQuery.fn.removeLater = (time = 500) ->
 jQuery.fn.hideLater = (time = 500) ->
 	elem = @
 	setTimeout ( ->
-		elem.css("display", "none")
+		if elem.css("opacity") == 0
+			elem.css("display", "none")
 	), time
 	return @
 

+ 6 - 2
src/Worker/WorkerManager.py

@@ -156,7 +156,7 @@ class WorkerManager:
 				peers = None
 			task = {"evt": evt, "workers_num": 0, "site": self.site, "inner_path": inner_path, "done": False, "time_added": time.time(), "time_started": None, "peers": peers, "priority": priority, "failed": []}
 			self.tasks.append(task)
-			self.started_task_num = len(self.tasks)
+			self.started_task_num += 1
 			self.log.debug("New task: %s, peer lock: %s, priority: %s, tasks: %s" % (task["inner_path"], peers, priority, self.started_task_num))
 			self.startWorkers(peers)
 			return evt
@@ -176,6 +176,8 @@ class WorkerManager:
 		self.tasks.remove(task) # Remove from queue
 		self.site.onFileFail(task["inner_path"])
 		task["evt"].set(False)
+		if not self.tasks:
+			self.started_task_num = 0
 
 
 	# Mark a task done
@@ -184,5 +186,7 @@ class WorkerManager:
 		self.tasks.remove(task) # Remove from queue
 		self.site.onFileDone(task["inner_path"])
 		task["evt"].set(True)
-		if not self.tasks: self.site.onComplete() # No more task trigger site complete
+		if not self.tasks: 
+			self.started_task_num = 0
+			self.site.onComplete() # No more task trigger site complete
 

+ 0 - 1
src/main.py

@@ -41,7 +41,6 @@ else:
 	console_log.setLevel(logging.INFO) # Display only important info to console
 	from gevent import monkey; monkey.patch_all() # Make time, thread, socket gevent compatible
 
-
 import gevent
 import time
 

+ 49 - 0
src/util/GeventSslPatch.py

@@ -0,0 +1,49 @@
+# Re-add sslwrap to Python 2.7.9
+# https://github.com/gevent/gevent/issues/477
+ 
+import inspect
+__ssl__ = __import__('ssl')
+ 
+try:
+    _ssl = __ssl__._ssl
+except AttributeError:
+    _ssl = __ssl__._ssl2
+ 
+ 
+OldSSLSocket = __ssl__.SSLSocket
+ 
+class NewSSLSocket(OldSSLSocket):
+    """Fix SSLSocket constructor."""
+    def __init__(
+        self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=0,
+        ssl_version=2, ca_certs=None, do_handshake_on_connect=True,
+        suppress_ragged_eofs=True, ciphers=None,
+        server_hostname=None, _context=None
+    ):
+        OldSSLSocket.__init__(
+            self, sock, keyfile=None, certfile=None, server_side=False, cert_reqs=0,
+            ssl_version=2, ca_certs=None, do_handshake_on_connect=True,
+            suppress_ragged_eofs=True, ciphers=None
+        )
+ 
+ 
+def new_sslwrap(
+    sock, server_side=False, keyfile=None, certfile=None,
+    cert_reqs=__ssl__.CERT_NONE, ssl_version=__ssl__.PROTOCOL_SSLv23,
+    ca_certs=None, ciphers=None
+):
+    context = __ssl__.SSLContext(ssl_version)
+    context.verify_mode = cert_reqs or __ssl__.CERT_NONE
+    if ca_certs:
+        context.load_verify_locations(ca_certs)
+    if certfile:
+        context.load_cert_chain(certfile, keyfile)
+    if ciphers:
+        context.set_ciphers(ciphers)
+ 
+    caller_self = inspect.currentframe().f_back.f_locals['self']
+    return context._wrap_socket(sock, server_side=server_side, ssl_sock=caller_self)
+ 
+if not hasattr(_ssl, 'sslwrap'):
+    _ssl.sslwrap = new_sslwrap
+    __ssl__.SSLSocket = NewSSLSocket

+ 11 - 0
src/util/Http.py

@@ -0,0 +1,11 @@
+import urllib2, logging
+import GeventSslPatch
+from Config import config
+
+def get(url):
+	logging.debug("Get %s" % url)
+	req = urllib2.Request(url)
+	req.add_header('User-Agent', "ZeroNet %s (https://github.com/HelloZeroNet/ZeroNet)" % config.version)
+	req.add_header('Accept', 'application/json')
+	return urllib2.urlopen(req)
+