#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import argparse
import json
import logging
import os
import re
import sys
from statistics import mean
from typing import Dict, Any, Optional, List

from testenv import Env, Httpd, Nghttpx, CurlClient, Caddy, ExecResult, NghttpxQuic, RunProfile

log = logging.getLogger(__name__)


class ScoreCardException(Exception):
    pass
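
# ScoreCard drives the measurements for one protocol run: TLS handshake
# timing against a few public hosts, download throughput against the local
# test servers (httpd/nghttpx, optionally caddy), and request-per-second
# rates at several levels of parallelism. Results are collected into a
# nested dict that print_score() renders as text (or that main() dumps
# as JSON).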
class ScoreCard:

    def __init__(self, env: Env,
                 httpd: Optional[Httpd],
                 nghttpx: Optional[Nghttpx],
                 caddy: Optional[Caddy],
                 verbose: int,
                 curl_verbose: int,
                 download_parallel: int = 0):
        self.verbose = verbose
        self.env = env
        self.httpd = httpd
        self.nghttpx = nghttpx
        self.caddy = caddy
        self._silent_curl = not curl_verbose
        self._download_parallel = download_parallel

    def info(self, msg):
        if self.verbose > 0:
            sys.stderr.write(msg)
            sys.stderr.flush()
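
    # Sample connect/handshake times for h2 or h3 against a fixed set of
    # public hosts, over IPv4 and IPv6. Note that this reaches out to the
    # live internet; the reported values are means of curl's time_connect
    # and time_appconnect stats, with -1 meaning "no successful sample".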
    def handshakes(self, proto: str) -> Dict[str, Any]:
        props = {}
        sample_size = 5
        self.info(f'TLS Handshake\n')
        for authority in [
            'curl.se', 'google.com', 'cloudflare.com', 'nghttp2.org'
        ]:
            self.info(f' {authority}...')
            props[authority] = {}
            for ipv in ['ipv4', 'ipv6']:
                self.info(f'{ipv}...')
                c_samples = []
                hs_samples = []
                errors = []
                for i in range(sample_size):
                    curl = CurlClient(env=self.env, silent=self._silent_curl)
                    args = [
                        '--http3-only' if proto == 'h3' else '--http2',
                        f'--{ipv}', f'https://{authority}/'
                    ]
                    r = curl.run_direct(args=args, with_stats=True)
                    if r.exit_code == 0 and len(r.stats) == 1:
                        c_samples.append(r.stats[0]['time_connect'])
                        hs_samples.append(r.stats[0]['time_appconnect'])
                    else:
                        errors.append(f'exit={r.exit_code}')
                props[authority][f'{ipv}-connect'] = mean(c_samples) \
                    if len(c_samples) else -1
                props[authority][f'{ipv}-handshake'] = mean(hs_samples) \
                    if len(hs_samples) else -1
                props[authority][f'{ipv}-errors'] = errors
            self.info('ok.\n')
        return props
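
    # Helpers: _make_docs_file() writes a throwaway document of (at least)
    # fsize bytes into a server's docs directory; _check_downloads() flags
    # a curl run whose exit code, stat count or response codes look wrong.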
    def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
        fpath = os.path.join(docs_dir, fname)
        data1k = 1024*'x'
        flen = 0
        with open(fpath, 'w') as fd:
            while flen < fsize:
                fd.write(data1k)
                flen += len(data1k)
        return flen
    def _check_downloads(self, r: ExecResult, count: int):
        error = ''
        if r.exit_code != 0:
            error += f'exit={r.exit_code} '
        if r.exit_code != 0 or len(r.stats) != count:
            error += f'stats={len(r.stats)}/{count} '
        fails = [s for s in r.stats if s['response_code'] != 200]
        if len(fails) > 0:
            error += f'{len(fails)} failed'
        return error if len(error) > 0 else None
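
    # The transfer_* methods measure download throughput in bytes/second:
    # 'single' repeats one URL `count` times in separate curl runs, 'serial'
    # fetches `count` URLs back to back in one run, and 'parallel' does the
    # same with curl's --parallel, capped by --download-parallel if given.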
    def transfer_single(self, url: str, proto: str, count: int):
        sample_size = count
        count = 1
        samples = []
        errors = []
        profiles = []
        self.info(f'single...')
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False, with_profile=True)
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles),
        }
    def transfer_serial(self, url: str, proto: str, count: int):
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        url = f'{url}?[0-{count - 1}]'
        self.info(f'serial...')
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False, with_profile=True)
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles),
        }
    def transfer_parallel(self, url: str, proto: str, count: int):
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        max_parallel = self._download_parallel if self._download_parallel > 0 else count
        url = f'{url}?[0-{count - 1}]'
        self.info(f'parallel...')
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False,
                                   with_profile=True,
                                   extra_args=['--parallel',
                                               '--parallel-max', str(max_parallel)])
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': max_parallel,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles),
        }
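
    # Bundle the three transfer measurements for one file size. 'single' is
    # always sampled with 10 repetitions; 'serial' and 'parallel' only run
    # when more than one transfer was requested.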
    def download_url(self, label: str, url: str, proto: str, count: int):
        self.info(f' {count}x{label}: ')
        props = {
            'single': self.transfer_single(url=url, proto=proto, count=10),
        }
        if count > 1:
            props['serial'] = self.transfer_serial(url=url, proto=proto,
                                                   count=count)
            props['parallel'] = self.transfer_parallel(url=url, proto=proto,
                                                       count=count)
        self.info(f'ok.\n')
        return props
    def downloads(self, proto: str, count: int,
                  fsizes: List[int]) -> Dict[str, Any]:
        scores = {}
        if self.httpd:
            if proto == 'h3':
                port = self.env.h3_port
                via = 'nghttpx'
                descr = f'port {port}, proxying httpd'
            else:
                port = self.env.https_port
                via = 'httpd'
                descr = f'port {port}'
            self.info(f'{via} downloads\n')
            scores[via] = {
                'description': descr,
            }
            for fsize in fsizes:
                label = self.fmt_size(fsize)
                fname = f'score{label}.data'
                self._make_docs_file(docs_dir=self.httpd.docs_dir,
                                     fname=fname, fsize=fsize)
                url = f'https://{self.env.domain1}:{port}/{fname}'
                results = self.download_url(label=label, url=url,
                                            proto=proto, count=count)
                scores[via][label] = results
        if self.caddy:
            port = self.caddy.port
            via = 'caddy'
            descr = f'port {port}'
            self.info('caddy downloads\n')
            scores[via] = {
                'description': descr,
            }
            for fsize in fsizes:
                label = self.fmt_size(fsize)
                fname = f'score{label}.data'
                self._make_docs_file(docs_dir=self.caddy.docs_dir,
                                     fname=fname, fsize=fsize)
                url = f'https://{self.env.domain1}:{port}/{fname}'
                results = self.download_url(label=label, url=url,
                                            proto=proto, count=count)
                scores[via][label] = results
        return scores
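
    # Issue `count` requests for a small document in a single curl run and
    # report the request rate in requests/second. With max_parallel > 1,
    # curl is run with --parallel and --parallel-max to limit concurrent
    # transfers.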
    def do_requests(self, url: str, proto: str, count: int,
                    max_parallel: int = 1):
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        url = f'{url}?[0-{count - 1}]'
        extra_args = ['--parallel', '--parallel-max', str(max_parallel)] \
            if max_parallel > 1 else []
        self.info(f'{max_parallel}...')
        for i in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl)
            r = curl.http_download(urls=[url], alpn_proto=proto, no_save=True,
                                   with_headers=False, with_profile=True,
                                   extra_args=extra_args)
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                for _ in r.stats:
                    samples.append(count / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles),
        }
    def requests_url(self, url: str, proto: str, count: int):
        self.info(f' {url}: ')
        props = {
            '1': self.do_requests(url=url, proto=proto, count=count),
            '6': self.do_requests(url=url, proto=proto, count=count,
                                  max_parallel=6),
            '25': self.do_requests(url=url, proto=proto, count=count,
                                   max_parallel=25),
            '50': self.do_requests(url=url, proto=proto, count=count,
                                   max_parallel=50),
            '100': self.do_requests(url=url, proto=proto, count=count,
                                    max_parallel=100),
        }
        self.info(f'ok.\n')
        return props
    def requests(self, proto: str, req_count) -> Dict[str, Any]:
        scores = {}
        if self.httpd:
            if proto == 'h3':
                port = self.env.h3_port
                via = 'nghttpx'
                descr = f'port {port}, proxying httpd'
            else:
                port = self.env.https_port
                via = 'httpd'
                descr = f'port {port}'
            self.info(f'{via} requests\n')
            self._make_docs_file(docs_dir=self.httpd.docs_dir,
                                 fname='reqs10.data', fsize=10*1024)
            url1 = f'https://{self.env.domain1}:{port}/reqs10.data'
            scores[via] = {
                'description': descr,
                'count': req_count,
                '10KB': self.requests_url(url=url1, proto=proto, count=req_count),
            }
        if self.caddy:
            port = self.caddy.port
            via = 'caddy'
            descr = f'port {port}'
            self.info('caddy requests\n')
            self._make_docs_file(docs_dir=self.caddy.docs_dir,
                                 fname='req10.data', fsize=10 * 1024)
            url1 = f'https://{self.env.domain1}:{port}/req10.data'
            scores[via] = {
                'description': descr,
                'count': req_count,
                '10KB': self.requests_url(url=url1, proto=proto, count=req_count),
            }
        return scores
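
    # Build the full scorecard for one protocol: figure out which HTTP
    # implementation curl uses for it (e.g. ngtcp2 or nghttp2), then run
    # the handshake, download and request measurements that were asked for.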
    def score_proto(self, proto: str,
                    handshakes: bool = True,
                    downloads: Optional[List[int]] = None,
                    download_count: int = 50,
                    req_count=5000,
                    requests: bool = True):
        self.info(f"scoring {proto}\n")
        p = {}
        if proto == 'h3':
            p['name'] = 'h3'
            if not self.env.have_h3_curl():
                raise ScoreCardException('curl does not support HTTP/3')
            for lib in ['ngtcp2', 'quiche', 'msh3', 'nghttp3']:
                if self.env.curl_uses_lib(lib):
                    p['implementation'] = lib
                    break
        elif proto == 'h2':
            p['name'] = 'h2'
            if not self.env.have_h2_curl():
                raise ScoreCardException('curl does not support HTTP/2')
            for lib in ['nghttp2', 'hyper']:
                if self.env.curl_uses_lib(lib):
                    p['implementation'] = lib
                    break
        elif proto == 'h1' or proto == 'http/1.1':
            proto = 'http/1.1'
            p['name'] = proto
            p['implementation'] = 'hyper' if self.env.curl_uses_lib('hyper')\
                else 'native'
        else:
            raise ScoreCardException(f"unknown protocol: {proto}")
        if 'implementation' not in p:
            raise ScoreCardException(f'did not recognize {p} lib')
        p['version'] = Env.curl_lib_version(p['implementation'])
        score = {
            'curl': self.env.curl_fullname(),
            'os': self.env.curl_os(),
            'protocol': p,
        }
        if handshakes:
            score['handshakes'] = self.handshakes(proto=proto)
        if downloads and len(downloads) > 0:
            score['downloads'] = self.downloads(proto=proto,
                                                count=download_count,
                                                fsizes=downloads)
        if requests:
            score['requests'] = self.requests(proto=proto, req_count=req_count)
        self.info("\n")
        return score
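
    # Formatting helpers for the text report. The '0.000f' specs use a
    # precision of zero, so sizes and rates print as whole units (e.g.
    # 10485760 bytes -> '10MB'); a value of -1 renders as '--' in
    # fmt_ms/fmt_mbs/fmt_reqs.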
    def fmt_ms(self, tval):
        return f'{int(tval*1000)} ms' if tval >= 0 else '--'

    def fmt_size(self, val):
        if val >= (1024*1024*1024):
            return f'{val / (1024*1024*1024):0.000f}GB'
        elif val >= (1024 * 1024):
            return f'{val / (1024*1024):0.000f}MB'
        elif val >= 1024:
            return f'{val / 1024:0.000f}KB'
        else:
            return f'{val:0.000f}B'

    def fmt_mbs(self, val):
        return f'{val/(1024*1024):0.000f} MB/s' if val >= 0 else '--'

    def fmt_reqs(self, val):
        return f'{val:0.000f} r/s' if val >= 0 else '--'
    def print_score(self, score):
        print(f'{score["protocol"]["name"].upper()} in {score["curl"]}')
        if 'handshakes' in score:
            print(f'{"Handshakes":<24} {"ipv4":25} {"ipv6":28}')
            print(f' {"Host":<17} {"Connect":>12} {"Handshake":>12} '
                  f'{"Connect":>12} {"Handshake":>12} {"Errors":<20}')
            for key, val in score["handshakes"].items():
                print(f' {key:<17} {self.fmt_ms(val["ipv4-connect"]):>12} '
                      f'{self.fmt_ms(val["ipv4-handshake"]):>12} '
                      f'{self.fmt_ms(val["ipv6-connect"]):>12} '
                      f'{self.fmt_ms(val["ipv6-handshake"]):>12} '
                      f'{"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
                      )
        if 'downloads' in score:
            # get the key names of all sizes and measurements made
            sizes = []
            measures = []
            m_names = {}
            mcol_width = 12
            mcol_sw = 17
            for server, server_score in score['downloads'].items():
                for sskey, ssval in server_score.items():
                    if isinstance(ssval, str):
                        continue
                    if sskey not in sizes:
                        sizes.append(sskey)
                    for mkey, mval in server_score[sskey].items():
                        if mkey not in measures:
                            measures.append(mkey)
                            m_names[mkey] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'
            print('Downloads')
            print(f' {"Server":<8} {"Size":>8}', end='')
            for m in measures:
                print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
            print(f' {"Errors":^20}')
            for server in score['downloads']:
                for size in sizes:
                    size_score = score['downloads'][server][size]
                    print(f' {server:<8} {size:>8}', end='')
                    errors = []
                    for key, val in size_score.items():
                        if 'errors' in val:
                            errors.extend(val['errors'])
                    for m in measures:
                        if m in size_score:
                            print(f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}', end='')
                            s = f'[{size_score[m]["stats"]["cpu"]:>.1f}%'\
                                f'/{self.fmt_size(size_score[m]["stats"]["rss"])}]'
                            print(f' {s:<{mcol_sw}}', end='')
                        else:
                            print(' '*mcol_width, end='')
                    if len(errors):
                        print(f' {"/".join(errors):<20}')
                    else:
                        print(f' {"-":^20}')
        if 'requests' in score:
            sizes = []
            measures = []
            m_names = {}
            mcol_width = 9
            mcol_sw = 13
            for server in score['requests']:
                server_score = score['requests'][server]
                for sskey, ssval in server_score.items():
                    if isinstance(ssval, str) or isinstance(ssval, int):
                        continue
                    if sskey not in sizes:
                        sizes.append(sskey)
                    for mkey, mval in server_score[sskey].items():
                        if mkey not in measures:
                            measures.append(mkey)
                            m_names[mkey] = f'{mkey}'
            print('Requests, max in parallel')
            print(f' {"Server":<8} {"Size":>6} {"Reqs":>6}', end='')
            for m in measures:
                print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
            print(f' {"Errors":^10}')
            for server in score['requests']:
                for size in sizes:
                    size_score = score['requests'][server][size]
                    count = score['requests'][server]['count']
                    print(f' {server:<8} {size:>6} {count:>6}', end='')
                    errors = []
                    for key, val in size_score.items():
                        if 'errors' in val:
                            errors.extend(val['errors'])
                    for m in measures:
                        if m in size_score:
                            print(f' {self.fmt_reqs(size_score[m]["speed"]):>{mcol_width}}', end='')
                            s = f'[{size_score[m]["stats"]["cpu"]:>.1f}%'\
                                f'/{self.fmt_size(size_score[m]["stats"]["rss"])}]'
                            print(f' {s:<{mcol_sw}}', end='')
                        else:
                            print(' '*mcol_width, end='')
                    if len(errors):
                        print(f' {"/".join(errors):<10}')
                    else:
                        print(f' {"-":^10}')
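
# Parse a human-readable size such as '100kb', '10mb' or '1gb' (case
# insensitive) into bytes; a bare number is taken as bytes. For example,
# parse_size('10mb') == 10 * 1024 * 1024 and parse_size('512') == 512.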
def parse_size(s):
    m = re.match(r'(\d+)(mb|kb|gb)?', s, re.IGNORECASE)
    if m is None:
        raise Exception(f'unrecognized size: {s}')
    size = int(m.group(1))
    if not m.group(2):
        pass
    elif m.group(2).lower() == 'kb':
        size *= 1024
    elif m.group(2).lower() == 'mb':
        size *= 1024 * 1024
    elif m.group(2).lower() == 'gb':
        size *= 1024 * 1024 * 1024
    return size
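
# Example invocations, assuming the script is run from curl's tests/http
# directory (where the testenv package it imports lives):
#
#   python3 scorecard.py h2                  # handshakes, downloads, requests
#   python3 scorecard.py -d --download=10mb --download-count=20 h3
#   python3 scorecard.py -r --request-count=2000 --caddy h2
#   python3 scorecard.py -j h3 > scores.json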
def main():
    parser = argparse.ArgumentParser(prog='scorecard', description="""
        Run a range of tests to give a scorecard for an HTTP protocol
        'h3' or 'h2' implementation in curl.
        """)
    parser.add_argument("-v", "--verbose", action='count', default=1,
                        help="log more output on stderr")
    parser.add_argument("-j", "--json", action='store_true',
                        default=False, help="print json instead of text")
    parser.add_argument("-H", "--handshakes", action='store_true',
                        default=False, help="evaluate handshakes only")
    parser.add_argument("-d", "--downloads", action='store_true',
                        default=False, help="evaluate downloads")
    parser.add_argument("--download", action='append', type=str,
                        default=None, help="evaluate download size")
    parser.add_argument("--download-count", action='store', type=int,
                        default=50, help="perform that many downloads")
    parser.add_argument("--download-parallel", action='store', type=int,
                        default=0, help="perform that many downloads in parallel (default all)")
    parser.add_argument("-r", "--requests", action='store_true',
                        default=False, help="evaluate requests")
    parser.add_argument("--request-count", action='store', type=int,
                        default=5000, help="perform that many requests")
    parser.add_argument("--httpd", action='store_true', default=False,
                        help="evaluate httpd server only")
    parser.add_argument("--caddy", action='store_true', default=False,
                        help="evaluate caddy server only")
    parser.add_argument("--curl-verbose", action='store_true',
                        default=False, help="run curl with `-v`")
    parser.add_argument("protocol", default='h2', nargs='?',
                        help="Name of protocol to score")
    args = parser.parse_args()

    if args.verbose > 0:
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
        logging.getLogger('').addHandler(console)

    protocol = args.protocol
    handshakes = True
    downloads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
    if args.download is not None:
        downloads = []
        for x in args.download:
            downloads.extend([parse_size(s) for s in x.split(',')])
    requests = True
    if args.downloads or args.requests or args.handshakes:
        handshakes = args.handshakes
        if not args.downloads:
            downloads = None
        requests = args.requests

    test_httpd = protocol != 'h3'
    test_caddy = True
    if args.caddy or args.httpd:
        test_caddy = args.caddy
        test_httpd = args.httpd

    rv = 0
    env = Env()
    env.setup()
    env.test_timeout = None
    httpd = None
    nghttpx = None
    caddy = None
    try:
        if test_httpd:
            print(f'httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}')
            httpd = Httpd(env=env)
            assert httpd.exists(), \
                f'httpd not found: {env.httpd}'
            httpd.clear_logs()
            assert httpd.start()
            if 'h3' == protocol:
                nghttpx = NghttpxQuic(env=env)
                nghttpx.clear_logs()
                assert nghttpx.start()
        if test_caddy and env.caddy:
            print(f'Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}')
            caddy = Caddy(env=env)
            caddy.clear_logs()
            assert caddy.start()

        card = ScoreCard(env=env, httpd=httpd, nghttpx=nghttpx, caddy=caddy,
                         verbose=args.verbose, curl_verbose=args.curl_verbose,
                         download_parallel=args.download_parallel)
        score = card.score_proto(proto=protocol,
                                 handshakes=handshakes,
                                 downloads=downloads,
                                 download_count=args.download_count,
                                 req_count=args.request_count,
                                 requests=requests)
        if args.json:
            print(json.JSONEncoder(indent=2).encode(score))
        else:
            card.print_score(score)
    except ScoreCardException as ex:
        sys.stderr.write(f"ERROR: {str(ex)}\n")
        rv = 1
    except KeyboardInterrupt:
        log.warning("aborted")
        rv = 1
    finally:
        if caddy:
            caddy.stop()
        if nghttpx:
            nghttpx.stop(wait_dead=False)
        if httpd:
            httpd.stop()
    sys.exit(rv)

if __name__ == "__main__":
    main()