#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#***************************************************************************
#                                  _   _ ____  _
#  Project                     ___| | | |  _ \| |
#                             / __| | | | |_) | |
#                            | (__| |_| |  _ <| |___
#                             \___|\___/|_| \_\_____|
#
# Copyright (C) Daniel Stenberg, <daniel@haxx.se>, et al.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at https://curl.se/docs/copyright.html.
#
# You may opt to use, copy, modify, merge, publish, distribute and/or sell
# copies of the Software, and permit persons to whom the Software is
# furnished to do so, under the terms of the COPYING file.
#
# This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY
# KIND, either express or implied.
#
# SPDX-License-Identifier: curl
#
###########################################################################
#
import json
import logging
import os
import sys
import time
from threading import Thread
import psutil
import re
import shutil
import subprocess
from statistics import mean, fmean
from datetime import timedelta, datetime
from typing import List, Optional, Dict, Union, Any
from urllib.parse import urlparse

from .env import Env


log = logging.getLogger(__name__)


class RunProfile:
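    """CPU and memory profile of a process, sampled via psutil while it runs.

    Call `sample()` periodically and `finish()` once the process has ended;
    `stats` then holds averages of cpu, rss and vsz, weighted by sample time.
    """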

    STAT_KEYS = ['cpu', 'rss', 'vsz']

    @classmethod
    def AverageStats(cls, profiles: List['RunProfile']):
        avg = {}
        stats = [p.stats for p in profiles]
        for key in cls.STAT_KEYS:
            vals = [s[key] for s in stats]
            avg[key] = mean(vals) if len(vals) else 0.0
        return avg

    def __init__(self, pid: int, started_at: datetime, run_dir):
        self._pid = pid
        self._started_at = started_at
        self._duration = timedelta(seconds=0)
        self._run_dir = run_dir
        self._samples = []
        self._psu = None
        self._stats = None

    @property
    def duration(self) -> timedelta:
        return self._duration

    @property
    def stats(self) -> Optional[Dict[str, Any]]:
        return self._stats

    def sample(self):
        elapsed = datetime.now() - self._started_at
        try:
            if self._psu is None:
                self._psu = psutil.Process(pid=self._pid)
            mem = self._psu.memory_info()
            self._samples.append({
                'time': elapsed,
                'cpu': self._psu.cpu_percent(),
                'vsz': mem.vms,
                'rss': mem.rss,
            })
        except psutil.NoSuchProcess:
            pass

    def finish(self):
        self._duration = datetime.now() - self._started_at
        if len(self._samples) > 0:
            weights = [s['time'].total_seconds() for s in self._samples]
            self._stats = {}
            for key in self.STAT_KEYS:
                self._stats[key] = fmean([s[key] for s in self._samples], weights)
        else:
            self._stats = None
        self._psu = None

    def __repr__(self):
        return f'RunProfile[pid={self._pid}, '\
               f'duration={self.duration.total_seconds():.3f}s, '\
               f'stats={self.stats}]'


class RunTcpDump:
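    """Runs tcpdump on the loopback interface, capturing TCP RST packets
    while a test runs; `stats` returns the captured lines once finished.
    """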

    def __init__(self, env, run_dir):
        self._env = env
        self._run_dir = run_dir
        self._proc = None
        self._stdoutfile = os.path.join(self._run_dir, 'tcpdump.out')
        self._stderrfile = os.path.join(self._run_dir, 'tcpdump.err')

    @property
    def stats(self) -> Optional[List[str]]:
        if self._proc:
            raise Exception('tcpdump still running')
        return [line
                for line in open(self._stdoutfile)
                if re.match(r'.* IP 127\.0\.0\.1\.\d+ [<>] 127\.0\.0\.1\.\d+:.*', line)]

    def stats_excluding(self, src_port) -> Optional[List[str]]:
        if self._proc:
            raise Exception('tcpdump still running')
        return [line
                for line in self.stats
                if not re.match(r'.* IP 127\.0\.0\.1\.' + str(src_port) + ' >.*', line)]

    @property
    def stderr(self) -> List[str]:
        if self._proc:
            raise Exception('tcpdump still running')
        return open(self._stderrfile).readlines()

    def sample(self):
        # not sure how to make that detection reliable for all platforms
        local_if = 'lo0' if sys.platform.startswith('darwin') else 'lo'
        try:
            tcpdump = self._env.tcpdump()
            if tcpdump is None:
                raise Exception('tcpdump not available')
            # look with tcpdump for TCP RST packets which indicate
            # we did not shut down connections cleanly
            args = []
            # at least on Linux, we need root permissions to run tcpdump
            if sys.platform.startswith('linux'):
                args.append('sudo')
            args.extend([
                tcpdump, '-i', local_if, '-n', 'tcp[tcpflags] & (tcp-rst)!=0'
            ])
            with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
                self._proc = subprocess.Popen(args, stdout=cout, stderr=cerr,
                                              text=True, cwd=self._run_dir,
                                              shell=False)
                assert self._proc
                assert self._proc.returncode is None
                while self._proc:
                    try:
                        self._proc.wait(timeout=1)
                    except subprocess.TimeoutExpired:
                        pass
        except Exception:
            log.exception('Tcpdump')

    def start(self):
        def do_sample():
            self.sample()
        t = Thread(target=do_sample)
        t.start()

    def finish(self):
        if self._proc:
            time.sleep(1)
            self._proc.terminate()
            self._proc = None


class ExecResult:
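    """Outcome of a command run: exit code, stdout/stderr, per-transfer
    `%{json}` statistics (when run with_stats) or parsed responses, plus
    assertion helpers for tests.
    """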

    def __init__(self, args: List[str], exit_code: int,
                 stdout: List[str], stderr: List[str],
                 duration: Optional[timedelta] = None,
                 with_stats: bool = False,
                 exception: Optional[str] = None,
                 profile: Optional[RunProfile] = None,
                 tcpdump: Optional[RunTcpDump] = None):
        self._args = args
        self._exit_code = exit_code
        self._exception = exception
        self._stdout = stdout
        self._stderr = stderr
        self._profile = profile
        self._tcpdump = tcpdump
        self._duration = duration if duration is not None else timedelta()
        self._response = None
        self._responses = []
        self._results = {}
        self._assets = []
        self._stats = []
        self._json_out = None
        self._with_stats = with_stats
        if with_stats:
            self._parse_stats()
        else:
            # noinspection PyBroadException
            try:
                out = ''.join(self._stdout)
                self._json_out = json.loads(out)
            except:  # noqa: E722
                pass

    def __repr__(self):
        return f"ExecResult[code={self.exit_code}, exception={self._exception}, "\
               f"args={self._args}, stdout={self._stdout}, stderr={self._stderr}]"

    def _parse_stats(self):
        self._stats = []
        for line in self._stdout:
            try:
                self._stats.append(json.loads(line))
            # TODO: specify specific exceptions here
            except:  # noqa: E722
                log.exception(f'not a JSON stat: {line}')
                break

    @property
    def exit_code(self) -> int:
        return self._exit_code

    @property
    def args(self) -> List[str]:
        return self._args

    @property
    def outraw(self) -> bytes:
        return ''.join(self._stdout).encode()

    @property
    def stdout(self) -> str:
        return ''.join(self._stdout)

    @property
    def json(self) -> Optional[Dict]:
        """Output as JSON dictionary or None if not parseable."""
        return self._json_out

    @property
    def stderr(self) -> str:
        return ''.join(self._stderr)

    @property
    def trace_lines(self) -> List[str]:
        return self._stderr

    @property
    def duration(self) -> timedelta:
        return self._duration

    @property
    def profile(self) -> Optional[RunProfile]:
        return self._profile

    @property
    def tcpdump(self) -> Optional[RunTcpDump]:
        return self._tcpdump

    @property
    def response(self) -> Optional[Dict]:
        return self._response

    @property
    def responses(self) -> List[Dict]:
        return self._responses

    @property
    def results(self) -> Dict:
        return self._results

    @property
    def assets(self) -> List:
        return self._assets

    @property
    def with_stats(self) -> bool:
        return self._with_stats

    @property
    def stats(self) -> List:
        return self._stats

    @property
    def total_connects(self) -> Optional[int]:
        if len(self.stats):
            n = 0
            for stat in self.stats:
                n += stat['num_connects']
            return n
        return None

    def add_response(self, resp: Dict):
        self._response = resp
        self._responses.append(resp)

    def add_results(self, results: Dict):
        self._results.update(results)
        if 'response' in results:
            self.add_response(results['response'])

    def add_assets(self, assets: List):
        self._assets.extend(assets)

    def check_exit_code(self, code: Union[int, bool]):
        if code is True:
            assert self.exit_code == 0, f'expected exit code {code}, '\
                                        f'got {self.exit_code}\n{self.dump_logs()}'
        elif code is False:
            assert self.exit_code != 0, f'expected exit code {code}, '\
                                        f'got {self.exit_code}\n{self.dump_logs()}'
        else:
            assert self.exit_code == code, f'expected exit code {code}, '\
                                           f'got {self.exit_code}\n{self.dump_logs()}'

    def check_response(self, http_status: Optional[int] = 200,
                       count: Optional[int] = 1,
                       protocol: Optional[str] = None,
                       exitcode: Optional[int] = 0,
                       connect_count: Optional[int] = None):
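        """Assert the expected number of responses and, where given, their
        HTTP status, negotiated protocol, exit code and connect count, using
        either the parsed `%{json}` stats or the parsed response headers."""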
        if exitcode:
            self.check_exit_code(exitcode)
        if self.with_stats and isinstance(exitcode, int):
            for idx, x in enumerate(self.stats):
                if 'exitcode' in x:
                    assert int(x['exitcode']) == exitcode, \
                        f'response #{idx} exitcode: expected {exitcode}, '\
                        f'got {x["exitcode"]}\n{self.dump_logs()}'
        if self.with_stats:
            assert len(self.stats) == count, \
                f'response count: expected {count}, ' \
                f'got {len(self.stats)}\n{self.dump_logs()}'
        else:
            assert len(self.responses) == count, \
                f'response count: expected {count}, ' \
                f'got {len(self.responses)}\n{self.dump_logs()}'
        if http_status is not None:
            if self.with_stats:
                for idx, x in enumerate(self.stats):
                    assert 'http_code' in x, \
                        f'response #{idx} reports no http_code\n{self.dump_stat(x)}'
                    assert x['http_code'] == http_status, \
                        f'response #{idx} http_code: expected {http_status}, '\
                        f'got {x["http_code"]}\n{self.dump_stat(x)}'
            else:
                for idx, x in enumerate(self.responses):
                    assert x['status'] == http_status, \
                        f'response #{idx} status: expected {http_status}, '\
                        f'got {x["status"]}\n{self.dump_stat(x)}'
        if protocol is not None:
            if self.with_stats:
                http_version = None
                if protocol == 'HTTP/1.1':
                    http_version = '1.1'
                elif protocol == 'HTTP/2':
                    http_version = '2'
                elif protocol == 'HTTP/3':
                    http_version = '3'
                if http_version is not None:
                    for idx, x in enumerate(self.stats):
                        assert x['http_version'] == http_version, \
                            f'response #{idx} protocol: expected http/{http_version}, ' \
                            f'got version {x["http_version"]}\n{self.dump_stat(x)}'
            else:
                for idx, x in enumerate(self.responses):
                    assert x['protocol'] == protocol, \
                        f'response #{idx} protocol: expected {protocol}, '\
                        f'got {x["protocol"]}\n{self.dump_logs()}'
        if connect_count is not None:
            assert self.total_connects == connect_count, \
                f'expected {connect_count}, but {self.total_connects} '\
                f'were made\n{self.dump_logs()}'

    def check_stats(self, count: int, http_status: Optional[int] = None,
                    exitcode: Optional[int] = None,
                    remote_port: Optional[int] = None,
                    remote_ip: Optional[str] = None):
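        """Assert the number of `%{json}` stat entries and, if given, their
        http_code, exitcode, remote_port and remote_ip fields."""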
        if exitcode is None:
            self.check_exit_code(0)
        assert len(self.stats) == count, \
            f'stats count: expected {count}, got {len(self.stats)}\n{self.dump_logs()}'
        if http_status is not None:
            for idx, x in enumerate(self.stats):
                assert 'http_code' in x, \
                    f'status #{idx} reports no http_code\n{self.dump_stat(x)}'
                assert x['http_code'] == http_status, \
                    f'status #{idx} http_code: expected {http_status}, '\
                    f'got {x["http_code"]}\n{self.dump_stat(x)}'
        if exitcode is not None:
            for idx, x in enumerate(self.stats):
                if 'exitcode' in x:
                    assert x['exitcode'] == exitcode, \
                        f'status #{idx} exitcode: expected {exitcode}, '\
                        f'got {x["exitcode"]}\n{self.dump_stat(x)}'
        if remote_port is not None:
            for idx, x in enumerate(self.stats):
                assert 'remote_port' in x, f'remote_port missing\n{self.dump_stat(x)}'
                assert x['remote_port'] == remote_port, \
                    f'status #{idx} remote_port: expected {remote_port}, '\
                    f'got {x["remote_port"]}\n{self.dump_stat(x)}'
        if remote_ip is not None:
            for idx, x in enumerate(self.stats):
                assert 'remote_ip' in x, f'remote_ip missing\n{self.dump_stat(x)}'
                assert x['remote_ip'] == remote_ip, \
                    f'status #{idx} remote_ip: expected {remote_ip}, '\
                    f'got {x["remote_ip"]}\n{self.dump_stat(x)}'

    def dump_logs(self):
        lines = ['>>--stdout ----------------------------------------------\n']
        lines.extend(self._stdout)
        lines.append('>>--stderr ----------------------------------------------\n')
        lines.extend(self._stderr)
        lines.append('<<-------------------------------------------------------\n')
        return ''.join(lines)

    def dump_stat(self, x):
        lines = [
            'json stat from curl:',
            json.JSONEncoder(indent=2).encode(x),
        ]
        if 'xfer_id' in x:
            xfer_id = x['xfer_id']
            lines.append(f'>>--xfer {xfer_id} trace:\n')
            lines.extend(self.xfer_trace_for(xfer_id))
        else:
            lines.append('>>--full trace-------------------------------------------\n')
            lines.extend(self._stderr)
        lines.append('<<-------------------------------------------------------\n')
        return ''.join(lines)

    def xfer_trace_for(self, xfer_id) -> List[str]:
        pat = re.compile(f'^[^[]* \\[{xfer_id}-.*$')
        return [line for line in self._stderr if pat.match(line)]


class CurlClient:
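    """Thin wrapper around the curl command line tool for running transfers
    against the test servers, returning an ExecResult. Provides helpers for
    HTTP/FTP transfers, proxy arguments and default TLS/--resolve handling.
    """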

    ALPN_ARG = {
        'http/0.9': '--http0.9',
        'http/1.0': '--http1.0',
        'http/1.1': '--http1.1',
        'h2': '--http2',
        'h2c': '--http2',
        'h3': '--http3-only',
    }

    def __init__(self, env: Env,
                 run_dir: Optional[str] = None,
                 timeout: Optional[float] = None,
                 silent: bool = False,
                 run_env: Optional[Dict[str, str]] = None,
                 server_addr: Optional[str] = None):
        self.env = env
        self._timeout = timeout if timeout else env.test_timeout
        self._curl = os.environ['CURL'] if 'CURL' in os.environ else env.curl
        self._run_dir = run_dir if run_dir else os.path.join(env.gen_dir, 'curl')
        self._stdoutfile = f'{self._run_dir}/curl.stdout'
        self._stderrfile = f'{self._run_dir}/curl.stderr'
        self._headerfile = f'{self._run_dir}/curl.headers'
        self._log_path = f'{self._run_dir}/curl.log'
        self._silent = silent
        self._run_env = run_env
        self._server_addr = server_addr if server_addr else '127.0.0.1'
        self._rmrf(self._run_dir)
        self._mkpath(self._run_dir)

    @property
    def run_dir(self) -> str:
        return self._run_dir

    def download_file(self, i: int) -> str:
        return os.path.join(self.run_dir, f'download_{i}.data')

    def _rmf(self, path):
        if os.path.exists(path):
            return os.remove(path)

    def _rmrf(self, path):
        if os.path.exists(path):
            return shutil.rmtree(path)

    def _mkpath(self, path):
        if not os.path.exists(path):
            return os.makedirs(path)

    def get_proxy_args(self, proto: str = 'http/1.1',
                       proxys: bool = True, tunnel: bool = False,
                       use_ip: bool = False):
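        """Return curl arguments for the test proxy: `--proxy` plus a matching
        `--resolve` entry, `--proxy-cacert` for the HTTPS proxy and optionally
        `--proxytunnel`/`--proxy-http2`."""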
        proxy_name = self._server_addr if use_ip else self.env.proxy_domain
        if proxys:
            pport = self.env.pts_port(proto) if tunnel else self.env.proxys_port
            xargs = [
                '--proxy', f'https://{proxy_name}:{pport}/',
                '--resolve', f'{proxy_name}:{pport}:{self._server_addr}',
                '--proxy-cacert', self.env.ca.cert_file,
            ]
            if proto == 'h2':
                xargs.append('--proxy-http2')
        else:
            xargs = [
                '--proxy', f'http://{proxy_name}:{self.env.proxy_port}/',
                '--resolve', f'{proxy_name}:{self.env.proxy_port}:{self._server_addr}',
            ]
        if tunnel:
            xargs.append('--proxytunnel')
        return xargs

    def http_get(self, url: str, extra_args: Optional[List[str]] = None,
                 alpn_proto: Optional[str] = None,
                 def_tracing: bool = True,
                 with_stats: bool = False,
                 with_profile: bool = False,
                 with_tcpdump: bool = False):
        return self._raw(url, options=extra_args,
                         with_stats=with_stats,
                         alpn_proto=alpn_proto,
                         def_tracing=def_tracing,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def http_download(self, urls: List[str],
                      alpn_proto: Optional[str] = None,
                      with_stats: bool = True,
                      with_headers: bool = False,
                      with_profile: bool = False,
                      with_tcpdump: bool = False,
                      no_save: bool = False,
                      extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        if no_save:
            extra_args.extend([
                '-o', '/dev/null',
            ])
        else:
            extra_args.extend([
                '-o', 'download_#1.data',
            ])
        # remove any existing ones
        for i in range(100):
            self._rmf(self.download_file(i))
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=with_headers,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def http_upload(self, urls: List[str], data: str,
                    alpn_proto: Optional[str] = None,
                    with_stats: bool = True,
                    with_headers: bool = False,
                    with_profile: bool = False,
                    with_tcpdump: bool = False,
                    extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        extra_args.extend([
            '--data-binary', data, '-o', 'download_#1.data',
        ])
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=with_headers,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def http_delete(self, urls: List[str],
                    alpn_proto: Optional[str] = None,
                    with_stats: bool = True,
                    with_profile: bool = False,
                    extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        extra_args.extend([
            '-X', 'DELETE', '-o', '/dev/null',
        ])
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=False,
                         with_profile=with_profile)

    def http_put(self, urls: List[str], data=None, fdata=None,
                 alpn_proto: Optional[str] = None,
                 with_stats: bool = True,
                 with_headers: bool = False,
                 with_profile: bool = False,
                 extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        if fdata is not None:
            extra_args.extend(['-T', fdata])
        elif data is not None:
            extra_args.extend(['-T', '-'])
        extra_args.extend([
            '-o', 'download_#1.data',
        ])
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, intext=data,
                         alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=with_headers,
                         with_profile=with_profile)

    def http_form(self, urls: List[str], form: Dict[str, str],
                  alpn_proto: Optional[str] = None,
                  with_stats: bool = True,
                  with_headers: bool = False,
                  extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        for key, val in form.items():
            extra_args.extend(['-F', f'{key}={val}'])
        extra_args.extend([
            '-o', 'download_#1.data',
        ])
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, alpn_proto=alpn_proto, options=extra_args,
                         with_stats=with_stats,
                         with_headers=with_headers)

    def ftp_get(self, urls: List[str],
                with_stats: bool = True,
                with_profile: bool = False,
                with_tcpdump: bool = False,
                no_save: bool = False,
                extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        if no_save:
            extra_args.extend([
                '-o', '/dev/null',
            ])
        else:
            extra_args.extend([
                '-o', 'download_#1.data',
            ])
        # remove any existing ones
        for i in range(100):
            self._rmf(self.download_file(i))
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, options=extra_args,
                         with_stats=with_stats,
                         with_headers=False,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def ftp_ssl_get(self, urls: List[str],
                    with_stats: bool = True,
                    with_profile: bool = False,
                    with_tcpdump: bool = False,
                    no_save: bool = False,
                    extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        extra_args.extend([
            '--ssl-reqd',
        ])
        return self.ftp_get(urls=urls, with_stats=with_stats,
                            with_profile=with_profile, no_save=no_save,
                            with_tcpdump=with_tcpdump,
                            extra_args=extra_args)

    def ftp_upload(self, urls: List[str],
                   fupload: Optional[Any] = None,
                   updata: Optional[str] = None,
                   with_stats: bool = True,
                   with_profile: bool = False,
                   with_tcpdump: bool = False,
                   extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        if fupload is not None:
            extra_args.extend([
                '--upload-file', fupload
            ])
        elif updata is not None:
            extra_args.extend([
                '--upload-file', '-'
            ])
        else:
            raise Exception('need either file or data to upload')
        if with_stats:
            extra_args.extend([
                '-w', '%{json}\\n'
            ])
        return self._raw(urls, options=extra_args,
                         intext=updata,
                         with_stats=with_stats,
                         with_headers=False,
                         with_profile=with_profile,
                         with_tcpdump=with_tcpdump)

    def ftp_ssl_upload(self, urls: List[str],
                       fupload: Optional[Any] = None,
                       updata: Optional[str] = None,
                       with_stats: bool = True,
                       with_profile: bool = False,
                       with_tcpdump: bool = False,
                       extra_args: Optional[List[str]] = None):
        if extra_args is None:
            extra_args = []
        extra_args.extend([
            '--ssl-reqd',
        ])
        return self.ftp_upload(urls=urls, fupload=fupload, updata=updata,
                               with_stats=with_stats, with_profile=with_profile,
                               with_tcpdump=with_tcpdump,
                               extra_args=extra_args)

    def response_file(self, idx: int):
        return os.path.join(self._run_dir, f'download_{idx}.data')

    def run_direct(self, args, with_stats: bool = False, with_profile: bool = False):
        my_args = [self._curl]
        if with_stats:
            my_args.extend([
                '-w', '%{json}\\n'
            ])
        my_args.extend([
            '-o', 'download.data',
        ])
        my_args.extend(args)
        return self._run(args=my_args, with_stats=with_stats, with_profile=with_profile)

    def _run(self, args, intext='', with_stats: bool = False,
             with_profile: bool = True, with_tcpdump: bool = False):
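        """Run `args` as a subprocess in the run directory, optionally
        sampling a RunProfile and capturing tcpdump, and wrap the outcome
        in an ExecResult."""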
        self._rmf(self._stdoutfile)
        self._rmf(self._stderrfile)
        self._rmf(self._headerfile)
        exception = None
        profile = None
        tcpdump = None
        started_at = datetime.now()
        if with_tcpdump:
            tcpdump = RunTcpDump(self.env, self._run_dir)
            tcpdump.start()
        try:
            with open(self._stdoutfile, 'w') as cout, open(self._stderrfile, 'w') as cerr:
                if with_profile:
                    end_at = started_at + timedelta(seconds=self._timeout) \
                        if self._timeout else None
                    log.info(f'starting: {args}')
                    p = subprocess.Popen(args, stderr=cerr, stdout=cout,
                                         cwd=self._run_dir, shell=False,
                                         env=self._run_env)
                    profile = RunProfile(p.pid, started_at, self._run_dir)
                    if intext is not None and False:
                        p.communicate(input=intext.encode(), timeout=1)
                    ptimeout = 0.0
                    while True:
                        try:
                            p.wait(timeout=ptimeout)
                            break
                        except subprocess.TimeoutExpired:
                            if end_at and datetime.now() >= end_at:
                                p.kill()
                                raise subprocess.TimeoutExpired(cmd=args, timeout=self._timeout)
                            profile.sample()
                            ptimeout = 0.01
                    exitcode = p.returncode
                    profile.finish()
                    log.info(f'done: exit={exitcode}, profile={profile}')
                else:
                    p = subprocess.run(args, stderr=cerr, stdout=cout,
                                       cwd=self._run_dir, shell=False,
                                       input=intext.encode() if intext else None,
                                       timeout=self._timeout,
                                       env=self._run_env)
                    exitcode = p.returncode
        except subprocess.TimeoutExpired:
            now = datetime.now()
            duration = now - started_at
            log.warning(f'Timeout at {now} after {duration.total_seconds()}s '
                        f'(configured {self._timeout}s): {args}')
            exitcode = -1
            exception = 'TimeoutExpired'
        if tcpdump:
            tcpdump.finish()
        coutput = open(self._stdoutfile).readlines()
        cerrput = open(self._stderrfile).readlines()
        return ExecResult(args=args, exit_code=exitcode, exception=exception,
                          stdout=coutput, stderr=cerrput,
                          duration=datetime.now() - started_at,
                          with_stats=with_stats,
                          profile=profile, tcpdump=tcpdump)

    def _raw(self, urls, intext='', timeout=None, options=None, insecure=False,
             alpn_proto: Optional[str] = None,
             force_resolve=True,
             with_stats=False,
             with_headers=True,
             def_tracing=True,
             with_profile=False,
             with_tcpdump=False):
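        """Build the full curl command line via `_complete_args()`, run it,
        and parse the header dump file into responses when the run succeeded."""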
        args = self._complete_args(
            urls=urls, timeout=timeout, options=options, insecure=insecure,
            alpn_proto=alpn_proto, force_resolve=force_resolve,
            with_headers=with_headers, def_tracing=def_tracing)
        r = self._run(args, intext=intext, with_stats=with_stats,
                      with_profile=with_profile, with_tcpdump=with_tcpdump)
        if r.exit_code == 0 and with_headers:
            self._parse_headerfile(self._headerfile, r=r)
        return r

    def _complete_args(self, urls, timeout=None, options=None,
                       insecure=False, force_resolve=True,
                       alpn_proto: Optional[str] = None,
                       with_headers: bool = True,
                       def_tracing: bool = True):
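        """Assemble the curl argument list for the given URLs: tracing,
        header dumping, ALPN selection, CA/insecure handling and a
        `--resolve` entry pointing the hostname at the test server."""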
        if not isinstance(urls, list):
            urls = [urls]
        args = [self._curl, "-s", "--path-as-is"]
        if 'CURL_TEST_EVENT' in os.environ:
            args.append('--test-event')
        if with_headers:
            args.extend(["-D", self._headerfile])
        if def_tracing is not False and not self._silent:
            args.extend(['-v', '--trace-ids', '--trace-time'])
            if self.env.verbose > 1:
                args.extend(['--trace-config', 'http/2,http/3,h2-proxy,h1-proxy'])
        active_options = options
        if options is not None and '--next' in options:
            active_options = options[options.index('--next') + 1:]
        for url in urls:
            u = urlparse(urls[0])
            if options:
                args.extend(options)
            if alpn_proto is not None:
                if alpn_proto not in self.ALPN_ARG:
                    raise Exception(f'unknown ALPN protocol: "{alpn_proto}"')
                args.append(self.ALPN_ARG[alpn_proto])
            if u.scheme == 'http':
                pass
            elif insecure:
                args.append('--insecure')
            elif active_options and "--cacert" in active_options:
                pass
            elif u.hostname:
                args.extend(["--cacert", self.env.ca.cert_file])
            if force_resolve and u.hostname and u.hostname != 'localhost' \
                    and not re.match(r'^(\d+|\[|:).*', u.hostname):
                port = u.port if u.port else 443
                args.extend([
                    '--resolve', f'{u.hostname}:{port}:{self._server_addr}',
                ])
            if timeout is not None and int(timeout) > 0:
                args.extend(["--connect-timeout", str(int(timeout))])
            args.append(url)
        return args

    def _parse_headerfile(self, headerfile: str, r: Optional[ExecResult] = None) -> ExecResult:
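        """Parse a `-D` header dump file into response dicts (protocol,
        status, headers, trailers) and add them to the given ExecResult."""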
        lines = open(headerfile).readlines()
        if r is None:
            r = ExecResult(args=[], exit_code=0, stdout=[], stderr=[])

        response = None

        def fin_response(resp):
            if resp:
                r.add_response(resp)

        expected = ['status']
        for line in lines:
            line = line.strip()
            if re.match(r'^$', line):
                if 'trailer' in expected:
                    # end of trailers
                    fin_response(response)
                    response = None
                    expected = ['status']
                elif 'header' in expected:
                    # end of header, another status or trailers might follow
                    expected = ['status', 'trailer']
                else:
                    assert False, f"unexpected line: '{line}'"
                continue
            if 'status' in expected:
                # log.debug("reading 1st response line: %s", line)
                m = re.match(r'^(\S+) (\d+)( .*)?$', line)
                if m:
                    fin_response(response)
                    response = {
                        "protocol": m.group(1),
                        "status": int(m.group(2)),
                        "description": m.group(3),
                        "header": {},
                        "trailer": {},
                        "body": r.outraw
                    }
                    expected = ['header']
                    continue
            if 'trailer' in expected:
                m = re.match(r'^([^:]+):\s*(.*)$', line)
                if m:
                    response['trailer'][m.group(1).lower()] = m.group(2)
                    continue
            if 'header' in expected:
                m = re.match(r'^([^:]+):\s*(.*)$', line)
                if m:
                    response['header'][m.group(1).lower()] = m.group(2)
                    continue
            assert False, f"unexpected line: '{line}, expected: {expected}'"

        fin_response(response)
        return r