# http_pipe.py
#!/usr/bin/python
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Modified by Linus Nielsen Feltzing for inclusion in the libcurl test
# framework
#
  19. try:
  20. import socketserver
  21. except:
  22. import SocketServer as socketserver
  23. import argparse
  24. import re
  25. import select
  26. import socket
  27. import time
  28. import pprint
  29. import os
  30. INFO_MESSAGE = '''
  31. This is a test server to test the libcurl pipelining functionality.
  32. It is a modified version if Google's HTTP pipelining test server. More
  33. information can be found here:
  34. https://dev.chromium.org/developers/design-documents/network-stack/http-pipelining
  35. Source code can be found here:
  36. https://code.google.com/archive/p/http-pipelining-test/
  37. '''
  38. MAX_REQUEST_SIZE = 1024 # bytes
  39. MIN_POLL_TIME = 0.01 # seconds. Minimum time to poll, in order to prevent
  40. # excessive looping because Python refuses to poll for
  41. # small timeouts.
  42. SEND_BUFFER_TIME = 0.5 # seconds
  43. TIMEOUT = 30 # seconds
  44. class Error(Exception):
  45. pass
  46. class RequestTooLargeError(Error):
  47. pass
  48. class ServeIndexError(Error):
  49. pass
  50. class UnexpectedMethodError(Error):
  51. pass
  52. class RequestParser(object):
  53. """Parses an input buffer looking for HTTP GET requests."""
  54. global logfile
  55. LOOKING_FOR_GET = 1
  56. READING_HEADERS = 2
  57. HEADER_RE = re.compile('([^:]+):(.*)\n')
  58. REQUEST_RE = re.compile('([^ ]+) ([^ ]+) HTTP/(\d+)\.(\d+)\n')
  59. def __init__(self):
  60. """Initializer."""
  61. self._buffer = ""
  62. self._pending_headers = {}
  63. self._pending_request = ""
  64. self._state = self.LOOKING_FOR_GET
  65. self._were_all_requests_http_1_1 = True
  66. self._valid_requests = []
  67. def ParseAdditionalData(self, data):
  68. """Finds HTTP requests in |data|.
  69. Args:
  70. data: (String) Newly received input data from the socket.
  71. Returns:
  72. (List of Tuples)
  73. (String) The request path.
  74. (Map of String to String) The header name and value.
  75. Raises:
  76. RequestTooLargeError: If the request exceeds MAX_REQUEST_SIZE.
  77. UnexpectedMethodError: On a non-GET method.
  78. Error: On a programming error.
  79. """
  80. logfile = open('log/server.input', 'a')
  81. logfile.write(data)
  82. logfile.close()
  83. self._buffer += data.replace('\r', '')
  84. should_continue_parsing = True
  85. while should_continue_parsing:
  86. if self._state == self.LOOKING_FOR_GET:
  87. should_continue_parsing = self._DoLookForGet()
  88. elif self._state == self.READING_HEADERS:
  89. should_continue_parsing = self._DoReadHeader()
  90. else:
  91. raise Error('Unexpected state: ' + self._state)
  92. if len(self._buffer) > MAX_REQUEST_SIZE:
  93. raise RequestTooLargeError(
  94. 'Request is at least %d bytes' % len(self._buffer))
  95. valid_requests = self._valid_requests
  96. self._valid_requests = []
  97. return valid_requests
  98. @property
  99. def were_all_requests_http_1_1(self):
  100. return self._were_all_requests_http_1_1
  101. def _DoLookForGet(self):
  102. """Tries to parse an HTTTP request line.
  103. Returns:
  104. (Boolean) True if a request was found.
  105. Raises:
  106. UnexpectedMethodError: On a non-GET method.
  107. """
  108. m = self.REQUEST_RE.match(self._buffer)
  109. if not m:
  110. return False
  111. method, path, http_major, http_minor = m.groups()
  112. if method != 'GET':
  113. raise UnexpectedMethodError('Unexpected method: ' + method)
  114. if path in ['/', '/index.htm', '/index.html']:
  115. raise ServeIndexError()
  116. if http_major != '1' or http_minor != '1':
  117. self._were_all_requests_http_1_1 = False
  118. # print method, path
  119. self._pending_request = path
  120. self._buffer = self._buffer[m.end():]
  121. self._state = self.READING_HEADERS
  122. return True
  123. def _DoReadHeader(self):
  124. """Tries to parse a HTTP header.
  125. Returns:
  126. (Boolean) True if it found the end of the request or a HTTP header.
  127. """
  128. if self._buffer.startswith('\n'):
  129. self._buffer = self._buffer[1:]
  130. self._state = self.LOOKING_FOR_GET
  131. self._valid_requests.append((self._pending_request,
  132. self._pending_headers))
  133. self._pending_headers = {}
  134. self._pending_request = ""
  135. return True
  136. m = self.HEADER_RE.match(self._buffer)
  137. if not m:
  138. return False
  139. header = m.group(1).lower()
  140. value = m.group(2).strip().lower()
  141. if header not in self._pending_headers:
  142. self._pending_headers[header] = value
  143. self._buffer = self._buffer[m.end():]
  144. return True
  145. class ResponseBuilder(object):
  146. """Builds HTTP responses for a list of accumulated requests."""
  147. def __init__(self):
  148. """Initializer."""
  149. self._max_pipeline_depth = 0
  150. self._requested_paths = []
  151. self._processed_end = False
  152. self._were_all_requests_http_1_1 = True
  153. def QueueRequests(self, requested_paths, were_all_requests_http_1_1):
  154. """Adds requests to the queue of requests.
  155. Args:
  156. requested_paths: (List of Strings) Requested paths.
  157. """
  158. self._requested_paths.extend(requested_paths)
  159. self._were_all_requests_http_1_1 = were_all_requests_http_1_1
  160. def Chunkify(self, data, chunksize):
  161. """ Divides a string into chunks
  162. """
  163. return [hex(chunksize)[2:] + "\r\n" + data[i:i+chunksize] + "\r\n" for i in range(0, len(data), chunksize)]
  164. def BuildResponses(self):
  165. """Converts the queue of requests into responses.
  166. Returns:
  167. (String) Buffer containing all of the responses.
  168. """
  169. result = ""
  170. self._max_pipeline_depth = max(self._max_pipeline_depth,
  171. len(self._requested_paths))
  172. for path, headers in self._requested_paths:
  173. if path == '/verifiedserver':
  174. body = "WE ROOLZ: {}\r\n".format(os.getpid());
  175. result += self._BuildResponse(
  176. '200 OK', ['Server: Apache',
  177. 'Content-Length: {}'.format(len(body)),
  178. 'Cache-Control: no-store'], body)
  179. elif path == '/alphabet.txt':
  180. body = 'abcdefghijklmnopqrstuvwxyz'
  181. result += self._BuildResponse(
  182. '200 OK', ['Server: Apache',
  183. 'Content-Length: 26',
  184. 'Cache-Control: no-store'], body)
  185. elif path == '/reverse.txt':
  186. body = 'zyxwvutsrqponmlkjihgfedcba'
  187. result += self._BuildResponse(
  188. '200 OK', ['Content-Length: 26', 'Cache-Control: no-store'], body)
  189. elif path == '/chunked.txt':
  190. body = ('7\r\nchunked\r\n'
  191. '8\r\nencoding\r\n'
  192. '2\r\nis\r\n'
  193. '3\r\nfun\r\n'
  194. '0\r\n\r\n')
  195. result += self._BuildResponse(
  196. '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'],
  197. body)
  198. elif path == '/cached.txt':
  199. body = 'azbycxdwevfugthsirjqkplomn'
  200. result += self._BuildResponse(
  201. '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60'], body)
  202. elif path == '/connection_close.txt':
  203. body = 'azbycxdwevfugthsirjqkplomn'
  204. result += self._BuildResponse(
  205. '200 OK', ['Content-Length: 26', 'Cache-Control: max-age=60', 'Connection: close'], body)
  206. self._processed_end = True
  207. elif path == '/1k.txt':
  208. body = '0123456789abcdef' * 64
  209. result += self._BuildResponse(
  210. '200 OK', ['Server: Apache',
  211. 'Content-Length: 1024',
  212. 'Cache-Control: max-age=60'], body)
  213. elif path == '/10k.txt':
  214. body = '0123456789abcdef' * 640
  215. result += self._BuildResponse(
  216. '200 OK', ['Server: Apache',
  217. 'Content-Length: 10240',
  218. 'Cache-Control: max-age=60'], body)
  219. elif path == '/100k.txt':
  220. body = '0123456789abcdef' * 6400
  221. result += self._BuildResponse(
  222. '200 OK',
  223. ['Server: Apache',
  224. 'Content-Length: 102400',
  225. 'Cache-Control: max-age=60'],
  226. body)
  227. elif path == '/100k_chunked.txt':
  228. body = self.Chunkify('0123456789abcdef' * 6400, 20480)
  229. body.append('0\r\n\r\n')
  230. body = ''.join(body)
  231. result += self._BuildResponse(
  232. '200 OK', ['Transfer-Encoding: chunked', 'Cache-Control: no-store'], body)
  233. elif path == '/stats.txt':
  234. results = {
  235. 'max_pipeline_depth': self._max_pipeline_depth,
  236. 'were_all_requests_http_1_1': int(self._were_all_requests_http_1_1),
  237. }
  238. body = ','.join(['%s:%s' % (k, v) for k, v in results.items()])
  239. result += self._BuildResponse(
  240. '200 OK',
  241. ['Content-Length: %s' % len(body), 'Cache-Control: no-store'], body)
  242. self._processed_end = True
  243. else:
  244. result += self._BuildResponse('404 Not Found', ['Content-Length: 7'], 'Go away')
  245. if self._processed_end:
  246. break
  247. self._requested_paths = []
  248. return result
  249. def WriteError(self, status, error):
  250. """Returns an HTTP response for the specified error.
  251. Args:
  252. status: (String) Response code and descrtion (e.g. "404 Not Found")
  253. Returns:
  254. (String) Text of HTTP response.
  255. """
  256. return self._BuildResponse(
  257. status, ['Connection: close', 'Content-Type: text/plain'], error)
  258. @property
  259. def processed_end(self):
  260. return self._processed_end
  261. def _BuildResponse(self, status, headers, body):
  262. """Builds an HTTP response.
  263. Args:
  264. status: (String) Response code and descrtion (e.g. "200 OK")
  265. headers: (List of Strings) Headers (e.g. "Connection: close")
  266. body: (String) Response body.
  267. Returns:
  268. (String) Text of HTTP response.
  269. """
  270. return ('HTTP/1.1 %s\r\n'
  271. '%s\r\n'
  272. '\r\n'
  273. '%s' % (status, '\r\n'.join(headers), body))
  274. class PipelineRequestHandler(socketserver.BaseRequestHandler):
  275. """Called on an incoming TCP connection."""
  276. def _GetTimeUntilTimeout(self):
  277. return self._start_time + TIMEOUT - time.time()
  278. def _GetTimeUntilNextSend(self):
  279. if not self._last_queued_time:
  280. return TIMEOUT
  281. return self._last_queued_time + SEND_BUFFER_TIME - time.time()
  282. def handle(self):
  283. self._request_parser = RequestParser()
  284. self._response_builder = ResponseBuilder()
  285. self._last_queued_time = 0
  286. self._num_queued = 0
  287. self._num_written = 0
  288. self._send_buffer = ""
  289. self._start_time = time.time()
  290. try:
  291. while not self._response_builder.processed_end or self._send_buffer:
  292. time_left = self._GetTimeUntilTimeout()
  293. time_until_next_send = self._GetTimeUntilNextSend()
  294. max_poll_time = min(time_left, time_until_next_send) + MIN_POLL_TIME
  295. rlist, wlist, xlist = [], [], []
  296. fileno = self.request.fileno()
  297. if max_poll_time > 0:
  298. rlist.append(fileno)
  299. if self._send_buffer:
  300. wlist.append(fileno)
  301. rlist, wlist, xlist = select.select(rlist, wlist, xlist, max_poll_time)
  302. if self._GetTimeUntilTimeout() <= 0:
  303. return
  304. if self._GetTimeUntilNextSend() <= 0:
  305. self._send_buffer += self._response_builder.BuildResponses()
  306. self._num_written = self._num_queued
  307. self._last_queued_time = 0
  308. if fileno in rlist:
  309. self.request.setblocking(False)
  310. new_data = self.request.recv(MAX_REQUEST_SIZE)
  311. self.request.setblocking(True)
  312. if not new_data:
  313. return
  314. new_requests = self._request_parser.ParseAdditionalData(new_data)
  315. self._response_builder.QueueRequests(
  316. new_requests, self._request_parser.were_all_requests_http_1_1)
  317. self._num_queued += len(new_requests)
  318. self._last_queued_time = time.time()
  319. elif fileno in wlist:
  320. num_bytes_sent = self.request.send(self._send_buffer[0:4096])
  321. self._send_buffer = self._send_buffer[num_bytes_sent:]
  322. time.sleep(0.05)
  323. except RequestTooLargeError as e:
  324. self.request.send(self._response_builder.WriteError(
  325. '413 Request Entity Too Large', e))
  326. raise
  327. except UnexpectedMethodError as e:
  328. self.request.send(self._response_builder.WriteError(
  329. '405 Method Not Allowed', e))
  330. raise
  331. except ServeIndexError:
  332. self.request.send(self._response_builder.WriteError(
  333. '200 OK', INFO_MESSAGE))
  334. except Exception as e:
  335. print(e)
  336. self.request.close()
  337. class PipelineServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
  338. pass
  339. parser = argparse.ArgumentParser()
  340. parser.add_argument("--port", action="store", default=0,
  341. type=int, help="port to listen on")
  342. parser.add_argument("--verbose", action="store", default=0,
  343. type=int, help="verbose output")
  344. parser.add_argument("--pidfile", action="store", default=0,
  345. help="file name for the PID")
  346. parser.add_argument("--logfile", action="store", default=0,
  347. help="file name for the log")
  348. parser.add_argument("--srcdir", action="store", default=0,
  349. help="test directory")
  350. parser.add_argument("--id", action="store", default=0,
  351. help="server ID")
  352. parser.add_argument("--ipv4", action="store_true", default=0,
  353. help="IPv4 flag")
  354. args = parser.parse_args()
  355. if args.pidfile:
  356. pid = os.getpid()
  357. f = open(args.pidfile, 'w')
  358. f.write('{}'.format(pid))
  359. f.close()
  360. server = PipelineServer(('0.0.0.0', args.port), PipelineRequestHandler)
  361. server.allow_reuse_address = True
  362. server.serve_forever()