#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import argparse
import json
import logging
import os
import re
import sys
from statistics import mean
from typing import Dict, Any, Optional, List

from testenv import Env, Httpd, CurlClient, Caddy, ExecResult, NghttpxQuic, RunProfile

log = logging.getLogger(__name__)


class ScoreCardError(Exception):
    pass

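
# A ScoreCard collects benchmark results for one protocol ('h1'/'http/1.1',
# 'h2' or 'h3'): TLS handshake timings against public hosts, plus download,
# upload and request-rate measurements against the configured test server.
# score() returns a dict that print_score() renders as text or that can be
# dumped as JSON.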
class ScoreCard:

    def __init__(self, env: Env,
                 protocol: str,
                 server_descr: str,
                 server_port: int,
                 verbose: int,
                 curl_verbose: int,
                 download_parallel: int = 0,
                 server_addr: Optional[str] = None):
        self.verbose = verbose
        self.env = env
        self.protocol = protocol
        self.server_descr = server_descr
        self.server_addr = server_addr
        self.server_port = server_port
        self._silent_curl = not curl_verbose
        self._download_parallel = download_parallel

    def info(self, msg):
        if self.verbose > 0:
            sys.stderr.write(msg)
            sys.stderr.flush()

    def handshakes(self) -> Dict[str, Any]:
        props = {}
        sample_size = 5
        self.info('TLS Handshake\n')
        for authority in [
            'curl.se', 'google.com', 'cloudflare.com', 'nghttp2.org'
        ]:
            self.info(f' {authority}...')
            props[authority] = {}
            for ipv in ['ipv4', 'ipv6']:
                self.info(f'{ipv}...')
                c_samples = []
                hs_samples = []
                errors = []
                for _ in range(sample_size):
                    curl = CurlClient(env=self.env, silent=self._silent_curl,
                                      server_addr=self.server_addr)
                    args = [
                        '--http3-only' if self.protocol == 'h3' else '--http2',
                        f'--{ipv}', f'https://{authority}/'
                    ]
                    r = curl.run_direct(args=args, with_stats=True)
                    if r.exit_code == 0 and len(r.stats) == 1:
                        c_samples.append(r.stats[0]['time_connect'])
                        hs_samples.append(r.stats[0]['time_appconnect'])
                    else:
                        errors.append(f'exit={r.exit_code}')
                props[authority][f'{ipv}-connect'] = mean(c_samples) \
                    if len(c_samples) else -1
                props[authority][f'{ipv}-handshake'] = mean(hs_samples) \
                    if len(hs_samples) else -1
                props[authority][f'{ipv}-errors'] = errors
            self.info('ok.\n')
        return props

    def _make_docs_file(self, docs_dir: str, fname: str, fsize: int):
        fpath = os.path.join(docs_dir, fname)
        data1k = 1024*'x'
        flen = 0
        with open(fpath, 'w') as fd:
            while flen < fsize:
                fd.write(data1k)
                flen += len(data1k)
        return fpath

    def setup_resources(self, server_docs: str,
                        downloads: Optional[List[int]] = None):
        for fsize in (downloads or []):
            label = self.fmt_size(fsize)
            fname = f'score{label}.data'
            self._make_docs_file(docs_dir=server_docs,
                                 fname=fname, fsize=fsize)
        self._make_docs_file(docs_dir=server_docs,
                             fname='reqs10.data', fsize=10*1024)

    def _check_downloads(self, r: ExecResult, count: int):
        error = ''
        if r.exit_code != 0:
            error += f'exit={r.exit_code} '
        if r.exit_code != 0 or len(r.stats) != count:
            error += f'stats={len(r.stats)}/{count} '
        fails = [s for s in r.stats if s['response_code'] != 200]
        if len(fails) > 0:
            error += f'{len(fails)} failed'
        return error if len(error) > 0 else None

    def transfer_single(self, url: str, count: int):
        sample_size = count
        count = 1
        samples = []
        errors = []
        profiles = []
        self.info('single...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl,
                              server_addr=self.server_addr)
            r = curl.http_download(urls=[url], alpn_proto=self.protocol,
                                   no_save=True, with_headers=False,
                                   with_profile=True)
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles),
        }

    def transfer_serial(self, url: str, count: int):
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        url = f'{url}?[0-{count - 1}]'
        self.info('serial...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl,
                              server_addr=self.server_addr)
            r = curl.http_download(urls=[url], alpn_proto=self.protocol,
                                   no_save=True,
                                   with_headers=False, with_profile=True)
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles),
        }

    def transfer_parallel(self, url: str, count: int):
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        max_parallel = self._download_parallel if self._download_parallel > 0 else count
        url = f'{url}?[0-{count - 1}]'
        self.info('parallel...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl,
                              server_addr=self.server_addr)
            r = curl.http_download(urls=[url], alpn_proto=self.protocol,
                                   no_save=True,
                                   with_headers=False,
                                   with_profile=True,
                                   extra_args=[
                                       '--parallel',
                                       '--parallel-max', str(max_parallel)
                                   ])
            err = self._check_downloads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_download'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': max_parallel,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles),
        }

    def download_url(self, label: str, url: str, count: int):
        self.info(f' {count}x{label}: ')
        props = {
            'single': self.transfer_single(url=url, count=10),
        }
        if count > 1:
            props['serial'] = self.transfer_serial(url=url, count=count)
            props['parallel'] = self.transfer_parallel(url=url, count=count)
        self.info('ok.\n')
        return props

    def downloads(self, count: int, fsizes: List[int]) -> Dict[str, Any]:
        scores = {}
        for fsize in fsizes:
            label = self.fmt_size(fsize)
            fname = f'score{label}.data'
            url = f'https://{self.env.domain1}:{self.server_port}/{fname}'
            scores[label] = self.download_url(label=label, url=url, count=count)
        return scores

    def _check_uploads(self, r: ExecResult, count: int):
        error = ''
        if r.exit_code != 0:
            error += f'exit={r.exit_code} '
        if r.exit_code != 0 or len(r.stats) != count:
            error += f'stats={len(r.stats)}/{count} '
        fails = [s for s in r.stats if s['response_code'] != 200]
        if len(fails) > 0:
            error += f'{len(fails)} failed'
            for f in fails:
                error += f'[{f["response_code"]}]'
        return error if len(error) > 0 else None

    def upload_single(self, url: str, fpath: str, count: int):
        sample_size = count
        count = 1
        samples = []
        errors = []
        profiles = []
        self.info('single...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl,
                              server_addr=self.server_addr)
            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
                              with_headers=False, with_profile=True)
            err = self._check_uploads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_upload'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def upload_serial(self, url: str, fpath: str, count: int):
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        url = f'{url}?id=[0-{count - 1}]'
        self.info('serial...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl,
                              server_addr=self.server_addr)
            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
                              with_headers=False, with_profile=True)
            err = self._check_uploads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_upload'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': 1,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def upload_parallel(self, url: str, fpath: str, count: int):
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        max_parallel = count
        url = f'{url}?id=[0-{count - 1}]'
        self.info('parallel...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl,
                              server_addr=self.server_addr)
            r = curl.http_put(urls=[url], fdata=fpath, alpn_proto=self.protocol,
                              with_headers=False, with_profile=True,
                              extra_args=[
                                  '--parallel',
                                  '--parallel-max', str(max_parallel)
                              ])
            err = self._check_uploads(r, count)
            if err:
                errors.append(err)
            else:
                total_size = sum([s['size_upload'] for s in r.stats])
                samples.append(total_size / r.duration.total_seconds())
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'max-parallel': max_parallel,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles) if len(profiles) else {},
        }

    def upload_url(self, label: str, url: str, fpath: str, count: int):
        self.info(f' {count}x{label}: ')
        props = {
            'single': self.upload_single(url=url, fpath=fpath, count=10),
        }
        if count > 1:
            props['serial'] = self.upload_serial(url=url, fpath=fpath, count=count)
            props['parallel'] = self.upload_parallel(url=url, fpath=fpath, count=count)
        self.info('ok.\n')
        return props

    def uploads(self, count: int, fsizes: List[int]) -> Dict[str, Any]:
        scores = {}
        url = f'https://{self.env.domain2}:{self.server_port}/curltest/put'
        fpaths = {}
        for fsize in fsizes:
            label = self.fmt_size(fsize)
            fname = f'upload{label}.data'
            fpaths[label] = self._make_docs_file(docs_dir=self.env.gen_dir,
                                                 fname=fname, fsize=fsize)
        for label, fpath in fpaths.items():
            scores[label] = self.upload_url(label=label, url=url, fpath=fpath,
                                            count=count)
        return scores

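    # Fetch the same small resource `count` times at a given maximum
    # parallelism and report the achieved requests per second.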
    def do_requests(self, url: str, count: int, max_parallel: int = 1):
        sample_size = 1
        samples = []
        errors = []
        profiles = []
        url = f'{url}?[0-{count - 1}]'
        extra_args = [
            '-w', '%{response_code},\\n',
        ]
        if max_parallel > 1:
            extra_args.extend([
                '--parallel', '--parallel-max', str(max_parallel)
            ])
        self.info(f'{max_parallel}...')
        for _ in range(sample_size):
            curl = CurlClient(env=self.env, silent=self._silent_curl,
                              server_addr=self.server_addr)
            r = curl.http_download(urls=[url], alpn_proto=self.protocol, no_save=True,
                                   with_headers=False, with_profile=True,
                                   with_stats=False, extra_args=extra_args)
            if r.exit_code != 0:
                errors.append(f'exit={r.exit_code}')
            else:
                samples.append(count / r.duration.total_seconds())
                non_200s = 0
                for line in r.stdout.splitlines():
                    if not line.startswith('200,'):
                        non_200s += 1
                if non_200s > 0:
                    errors.append(f'responses != 200: {non_200s}')
                profiles.append(r.profile)
        return {
            'count': count,
            'samples': sample_size,
            'speed': mean(samples) if len(samples) else -1,
            'errors': errors,
            'stats': RunProfile.AverageStats(profiles),
        }

    def requests_url(self, url: str, count: int):
        self.info(f' {url}: ')
        props = {}
        # 300 is max in curl, see tool_main.h
        for m in [1, 6, 25, 50, 100, 300]:
            props[str(m)] = self.do_requests(url=url, count=count, max_parallel=m)
        self.info('ok.\n')
        return props

    def requests(self, req_count) -> Dict[str, Any]:
        url = f'https://{self.env.domain1}:{self.server_port}/reqs10.data'
        return {
            'count': req_count,
            '10KB': self.requests_url(url=url, count=req_count),
        }

    def score(self,
              handshakes: bool = True,
              downloads: Optional[List[int]] = None,
              download_count: int = 50,
              uploads: Optional[List[int]] = None,
              upload_count: int = 50,
              req_count=5000,
              requests: bool = True):
        self.info(f"scoring {self.protocol} against {self.server_descr}\n")
        p = {}
        if self.protocol == 'h3':
            p['name'] = 'h3'
            if not self.env.have_h3_curl():
                raise ScoreCardError('curl does not support HTTP/3')
            for lib in ['ngtcp2', 'quiche', 'msh3', 'nghttp3']:
                if self.env.curl_uses_lib(lib):
                    p['implementation'] = lib
                    break
        elif self.protocol == 'h2':
            p['name'] = 'h2'
            if not self.env.have_h2_curl():
                raise ScoreCardError('curl does not support HTTP/2')
            for lib in ['nghttp2']:
                if self.env.curl_uses_lib(lib):
                    p['implementation'] = lib
                    break
        elif self.protocol == 'h1' or self.protocol == 'http/1.1':
            proto = 'http/1.1'
            p['name'] = proto
            p['implementation'] = 'native'
        else:
            raise ScoreCardError(f"unknown protocol: {self.protocol}")

        if 'implementation' not in p:
            raise ScoreCardError(f'did not recognize {p} lib')
        p['version'] = Env.curl_lib_version(p['implementation'])

        score = {
            'curl': self.env.curl_fullname(),
            'os': self.env.curl_os(),
            'protocol': p,
            'server': self.server_descr,
        }
        if handshakes:
            score['handshakes'] = self.handshakes()
        if downloads and len(downloads) > 0:
            score['downloads'] = self.downloads(count=download_count,
                                                fsizes=downloads)
        if uploads and len(uploads) > 0:
            score['uploads'] = self.uploads(count=upload_count,
                                            fsizes=uploads)
        if requests:
            score['requests'] = self.requests(req_count=req_count)
        self.info("\n")
        return score

    def fmt_ms(self, tval):
        return f'{int(tval*1000)} ms' if tval >= 0 else '--'

    def fmt_size(self, val):
        if val >= (1024*1024*1024):
            return f'{val / (1024*1024*1024):0.000f}GB'
        elif val >= (1024 * 1024):
            return f'{val / (1024*1024):0.000f}MB'
        elif val >= 1024:
            return f'{val / 1024:0.000f}KB'
        else:
            return f'{val:0.000f}B'

    def fmt_mbs(self, val):
        return f'{val/(1024*1024):0.000f} MB/s' if val >= 0 else '--'

    def fmt_reqs(self, val):
        return f'{val:0.000f} r/s' if val >= 0 else '--'

    def print_score(self, score):
        print(f'{score["protocol"]["name"].upper()} in {score["curl"]}')
        if 'handshakes' in score:
            print(f'{"Handshakes":<24} {"ipv4":25} {"ipv6":28}')
            print(f' {"Host":<17} {"Connect":>12} {"Handshake":>12} '
                  f'{"Connect":>12} {"Handshake":>12} {"Errors":<20}')
            for key, val in score["handshakes"].items():
                print(f' {key:<17} {self.fmt_ms(val["ipv4-connect"]):>12} '
                      f'{self.fmt_ms(val["ipv4-handshake"]):>12} '
                      f'{self.fmt_ms(val["ipv6-connect"]):>12} '
                      f'{self.fmt_ms(val["ipv6-handshake"]):>12} '
                      f'{"/".join(val["ipv4-errors"] + val["ipv6-errors"]):<20}'
                      )
        if 'downloads' in score:
            # get the key names of all sizes and measurements made
            sizes = []
            measures = []
            m_names = {}
            mcol_width = 12
            mcol_sw = 17
            for sskey, ssval in score['downloads'].items():
                if isinstance(ssval, str):
                    continue
                if sskey not in sizes:
                    sizes.append(sskey)
                for mkey, mval in score['downloads'][sskey].items():
                    if mkey not in measures:
                        measures.append(mkey)
                        m_names[mkey] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'

            print(f'Downloads from {score["server"]}')
            print(f' {"Size":>8}', end='')
            for m in measures:
                print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
            print(f' {"Errors":^20}')
            for size in score['downloads']:
                size_score = score['downloads'][size]
                print(f' {size:>8}', end='')
                errors = []
                for val in size_score.values():
                    if 'errors' in val:
                        errors.extend(val['errors'])
                for m in measures:
                    if m in size_score:
                        print(f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}', end='')
                        s = f'[{size_score[m]["stats"]["cpu"]:>.1f}%'\
                            f'/{self.fmt_size(size_score[m]["stats"]["rss"])}]'
                        print(f' {s:<{mcol_sw}}', end='')
                    else:
                        print(' '*mcol_width, end='')
                if len(errors):
                    print(f' {"/".join(errors):<20}')
                else:
                    print(f' {"-":^20}')
        if 'uploads' in score:
            # get the key names of all sizes and measurements made
            sizes = []
            measures = []
            m_names = {}
            mcol_width = 12
            mcol_sw = 17
            for sskey, ssval in score['uploads'].items():
                if isinstance(ssval, str):
                    continue
                if sskey not in sizes:
                    sizes.append(sskey)
                for mkey, mval in ssval.items():
                    if mkey not in measures:
                        measures.append(mkey)
                        m_names[mkey] = f'{mkey}({mval["count"]}x{mval["max-parallel"]})'

            print(f'Uploads to {score["server"]}')
            print(f' {"Size":>8}', end='')
            for m in measures:
                print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
            print(f' {"Errors":^20}')
            for size in sizes:
                size_score = score['uploads'][size]
                print(f' {size:>8}', end='')
                errors = []
                for val in size_score.values():
                    if 'errors' in val:
                        errors.extend(val['errors'])
                for m in measures:
                    if m in size_score:
                        print(f' {self.fmt_mbs(size_score[m]["speed"]):>{mcol_width}}', end='')
                        stats = size_score[m]["stats"]
                        if 'cpu' in stats:
                            s = f'[{stats["cpu"]:>.1f}%/{self.fmt_size(stats["rss"])}]'
                        else:
                            s = '[???/???]'
                        print(f' {s:<{mcol_sw}}', end='')
                    else:
                        print(' '*mcol_width, end='')
                if len(errors):
                    print(f' {"/".join(errors):<20}')
                else:
                    print(f' {"-":^20}')
        if 'requests' in score:
            sizes = []
            measures = []
            m_names = {}
            mcol_width = 9
            mcol_sw = 13
            for sskey, ssval in score['requests'].items():
                if isinstance(ssval, (str, int)):
                    continue
                if sskey not in sizes:
                    sizes.append(sskey)
                for mkey in score['requests'][sskey]:
                    if mkey not in measures:
                        measures.append(mkey)
                        m_names[mkey] = f'{mkey}'

            print(f'Requests (max parallel) to {score["server"]}')
            print(f' {"Size":>6} {"Reqs":>6}', end='')
            for m in measures:
                print(f' {m_names[m]:>{mcol_width}} {"[cpu/rss]":<{mcol_sw}}', end='')
            print(f' {"Errors":^10}')
            for size in sizes:
                size_score = score['requests'][size]
                count = score['requests']['count']
                print(f' {size:>6} {count:>6}', end='')
                errors = []
                for val in size_score.values():
                    if 'errors' in val:
                        errors.extend(val['errors'])
                for m in measures:
                    if m in size_score:
                        print(f' {self.fmt_reqs(size_score[m]["speed"]):>{mcol_width}}', end='')
                        s = f'[{size_score[m]["stats"]["cpu"]:>.1f}%'\
                            f'/{self.fmt_size(size_score[m]["stats"]["rss"])}]'
                        print(f' {s:<{mcol_sw}}', end='')
                    else:
                        print(' '*mcol_width, end='')
                if len(errors):
                    print(f' {"/".join(errors):<10}')
                else:
                    print(f' {"-":^10}')

def parse_size(s):
    m = re.match(r'(\d+)(mb|kb|gb)?', s, re.IGNORECASE)
    if m is None:
        raise Exception(f'unrecognized size: {s}')
    size = int(m.group(1))
    if not m.group(2):
        pass
    elif m.group(2).lower() == 'kb':
        size *= 1024
    elif m.group(2).lower() == 'mb':
        size *= 1024 * 1024
    elif m.group(2).lower() == 'gb':
        size *= 1024 * 1024 * 1024
    return size

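
# Command line entry point: parse the arguments, start the local test
# servers (httpd, nghttpx for h3, caddy) or target a --remote one, then
# run a ScoreCard against each configured server.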
def main():
    parser = argparse.ArgumentParser(prog='scorecard', description="""
        Run a range of tests to give a scorecard for an HTTP protocol
        'h3' or 'h2' implementation in curl.
        """)
    parser.add_argument("-v", "--verbose", action='count', default=1,
                        help="log more output on stderr")
    parser.add_argument("-j", "--json", action='store_true',
                        default=False, help="print json instead of text")
    parser.add_argument("-H", "--handshakes", action='store_true',
                        default=False, help="evaluate handshakes only")
    parser.add_argument("-d", "--downloads", action='store_true',
                        default=False, help="evaluate downloads")
    parser.add_argument("--download", action='append', type=str,
                        default=None, help="evaluate download size")
    parser.add_argument("--download-count", action='store', type=int,
                        default=50, help="perform that many downloads")
    parser.add_argument("--download-parallel", action='store', type=int,
                        default=0, help="perform that many downloads in parallel (default all)")
    parser.add_argument("-u", "--uploads", action='store_true',
                        default=False, help="evaluate uploads")
    parser.add_argument("--upload", action='append', type=str,
                        default=None, help="evaluate upload size")
    parser.add_argument("--upload-count", action='store', type=int,
                        default=50, help="perform that many uploads")
    parser.add_argument("-r", "--requests", action='store_true',
                        default=False, help="evaluate requests")
    parser.add_argument("--request-count", action='store', type=int,
                        default=5000, help="perform that many requests")
    parser.add_argument("--httpd", action='store_true', default=False,
                        help="evaluate httpd server only")
    parser.add_argument("--caddy", action='store_true', default=False,
                        help="evaluate caddy server only")
    parser.add_argument("--curl-verbose", action='store_true',
                        default=False, help="run curl with `-v`")
    parser.add_argument("protocol", default='h2', nargs='?',
                        help="Name of protocol to score")
    parser.add_argument("--start-only", action='store_true', default=False,
                        help="only start the servers")
    parser.add_argument("--remote", action='store', type=str,
                        default=None, help="score against the remote server at <ip>:<port>")
    args = parser.parse_args()

    if args.verbose > 0:
        console = logging.StreamHandler()
        console.setLevel(logging.INFO)
        console.setFormatter(logging.Formatter(logging.BASIC_FORMAT))
        logging.getLogger('').addHandler(console)

    protocol = args.protocol
    handshakes = True
    downloads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
    if args.download is not None:
        downloads = []
        for x in args.download:
            downloads.extend([parse_size(s) for s in x.split(',')])
    uploads = [1024 * 1024, 10 * 1024 * 1024, 100 * 1024 * 1024]
    if args.upload is not None:
        uploads = []
        for x in args.upload:
            uploads.extend([parse_size(s) for s in x.split(',')])
    requests = True
    if args.downloads or args.uploads or args.requests or args.handshakes:
        handshakes = args.handshakes
        if not args.downloads:
            downloads = None
        if not args.uploads:
            uploads = None
        requests = args.requests

    test_httpd = protocol != 'h3'
    test_caddy = True
    if args.caddy or args.httpd:
        test_caddy = args.caddy
        test_httpd = args.httpd

    rv = 0
    env = Env()
    env.setup()
    env.test_timeout = None
    httpd = None
    nghttpx = None
    caddy = None

    try:
        cards = []

        if args.remote:
            m = re.match(r'^(.+):(\d+)$', args.remote)
            if m is None:
                raise ScoreCardError(f'unable to parse ip:port from --remote {args.remote}')
            test_httpd = False
            test_caddy = False
            remote_addr = m.group(1)
            remote_port = int(m.group(2))
            card = ScoreCard(env=env,
                             protocol=protocol,
                             server_descr=f'Server at {args.remote}',
                             server_addr=remote_addr,
                             server_port=remote_port,
                             verbose=args.verbose, curl_verbose=args.curl_verbose,
                             download_parallel=args.download_parallel)
            cards.append(card)

        if test_httpd:
            httpd = Httpd(env=env)
            assert httpd.exists(), \
                f'httpd not found: {env.httpd}'
            httpd.clear_logs()
            server_docs = httpd.docs_dir
            assert httpd.start()
            if protocol == 'h3':
                nghttpx = NghttpxQuic(env=env)
                nghttpx.clear_logs()
                assert nghttpx.start()
                server_descr = f'nghttpx: https:{env.h3_port} [backend httpd: {env.httpd_version()}, https:{env.https_port}]'
                server_port = env.h3_port
            else:
                server_descr = f'httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}'
                server_port = env.https_port
            card = ScoreCard(env=env,
                             protocol=protocol,
                             server_descr=server_descr,
                             server_port=server_port,
                             verbose=args.verbose, curl_verbose=args.curl_verbose,
                             download_parallel=args.download_parallel)
            card.setup_resources(server_docs, downloads)
            cards.append(card)

        if test_caddy and env.caddy:
            backend = ''
            if uploads and httpd is None:
                backend = f' [backend httpd: {env.httpd_version()}, http:{env.http_port} https:{env.https_port}]'
                httpd = Httpd(env=env)
                assert httpd.exists(), \
                    f'httpd not found: {env.httpd}'
                httpd.clear_logs()
                assert httpd.start()
            caddy = Caddy(env=env)
            caddy.clear_logs()
            assert caddy.start()
            server_descr = f'Caddy: {env.caddy_version()}, http:{env.caddy_http_port} https:{env.caddy_https_port}{backend}'
            server_port = caddy.port
            server_docs = caddy.docs_dir
            card = ScoreCard(env=env,
                             protocol=protocol,
                             server_descr=server_descr,
                             server_port=server_port,
                             verbose=args.verbose, curl_verbose=args.curl_verbose,
                             download_parallel=args.download_parallel)
            card.setup_resources(server_docs, downloads)
            cards.append(card)

        if args.start_only:
            print('started servers:')
            for card in cards:
                print(f'{card.server_descr}')
            sys.stderr.write('press [RETURN] to finish')
            sys.stderr.flush()
            sys.stdin.readline()
        else:
            for card in cards:
                score = card.score(handshakes=handshakes,
                                   downloads=downloads,
                                   download_count=args.download_count,
                                   uploads=uploads,
                                   upload_count=args.upload_count,
                                   req_count=args.request_count,
                                   requests=requests)
                if args.json:
                    print(json.JSONEncoder(indent=2).encode(score))
                else:
                    card.print_score(score)

    except ScoreCardError as ex:
        sys.stderr.write(f"ERROR: {ex}\n")
        rv = 1
    except KeyboardInterrupt:
        log.warning("aborted")
        rv = 1
    finally:
        if caddy:
            caddy.stop()
        if nghttpx:
            nghttpx.stop(wait_dead=False)
        if httpd:
            httpd.stop()
    sys.exit(rv)


if __name__ == "__main__":
    main()