050-libitm-Don-t-redefine-__always_inline-in-local_atomi.patch

From 55f12fce4ccf77513644a247f9c401a5b1fa2402 Mon Sep 17 00:00:00 2001
From: torvald <torvald@138bc75d-0d04-0410-961f-82ee72b054a4>
Date: Thu, 20 Aug 2015 17:55:24 +0000
Subject: [PATCH] libitm: Don't redefine __always_inline in local_atomic.
git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@227040 138bc75d-0d04-0410-961f-82ee72b054a4
---
libitm/ChangeLog | 6 +
libitm/local_atomic | 299 ++++++++++++++++++++++----------------------
2 files changed, 155 insertions(+), 150 deletions(-)
diff --git a/libitm/ChangeLog b/libitm/ChangeLog
index 569d5bbbf14..6285c85fd44 100644
--- a/libitm/ChangeLog
+++ b/libitm/ChangeLog
@@ -1,3 +1,9 @@
+2015-08-20 Gleb Fotengauer-Malinovskiy <glebfm@altlinux.org> (tiny change)
+
+ PR libitm/61164
+ * local_atomic (__always_inline): Rename to...
+ (__libitm_always_inline): ... this.
+
2017-10-10 Release Manager
PR target/52482
diff --git a/libitm/local_atomic b/libitm/local_atomic
index 3119be40d09..e536275dc9f 100644
--- a/libitm/local_atomic
+++ b/libitm/local_atomic
@@ -41,8 +41,7 @@
#ifndef _GLIBCXX_ATOMIC
#define _GLIBCXX_ATOMIC 1
-#undef __always_inline
-#define __always_inline __attribute__((always_inline))
+#define __libitm_always_inline __attribute__((always_inline))
// #pragma GCC system_header
@@ -74,7 +73,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
memory_order_seq_cst
} memory_order;
- inline __always_inline memory_order
+ inline __libitm_always_inline memory_order
__calculate_memory_order(memory_order __m) noexcept
{
const bool __cond1 = __m == memory_order_release;
@@ -84,13 +83,13 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __mo2;
}
- inline __always_inline void
+ inline __libitm_always_inline void
atomic_thread_fence(memory_order __m) noexcept
{
__atomic_thread_fence (__m);
}
- inline __always_inline void
+ inline __libitm_always_inline void
atomic_signal_fence(memory_order __m) noexcept
{
__atomic_thread_fence (__m);
@@ -280,19 +279,19 @@ namespace std // _GLIBCXX_VISIBILITY(default)
// Conversion to ATOMIC_FLAG_INIT.
atomic_flag(bool __i) noexcept : __atomic_flag_base({ __i }) { }
- __always_inline bool
+ __libitm_always_inline bool
test_and_set(memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_test_and_set (&_M_i, __m);
}
- __always_inline bool
+ __libitm_always_inline bool
test_and_set(memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_test_and_set (&_M_i, __m);
}
- __always_inline void
+ __libitm_always_inline void
clear(memory_order __m = memory_order_seq_cst) noexcept
{
// __glibcxx_assert(__m != memory_order_consume);
@@ -302,7 +301,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_clear (&_M_i, __m);
}
- __always_inline void
+ __libitm_always_inline void
clear(memory_order __m = memory_order_seq_cst) volatile noexcept
{
// __glibcxx_assert(__m != memory_order_consume);
@@ -455,7 +454,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free (sizeof (_M_i), &_M_i); }
- __always_inline void
+ __libitm_always_inline void
store(__int_type __i, memory_order __m = memory_order_seq_cst) noexcept
{
// __glibcxx_assert(__m != memory_order_acquire);
@@ -465,7 +464,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_store_n(&_M_i, __i, __m);
}
- __always_inline void
+ __libitm_always_inline void
store(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -476,7 +475,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_store_n(&_M_i, __i, __m);
}
- __always_inline __int_type
+ __libitm_always_inline __int_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -485,7 +484,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_load_n(&_M_i, __m);
}
- __always_inline __int_type
+ __libitm_always_inline __int_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -494,21 +493,21 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_load_n(&_M_i, __m);
}
- __always_inline __int_type
+ __libitm_always_inline __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(&_M_i, __i, __m);
}
- __always_inline __int_type
+ __libitm_always_inline __int_type
exchange(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_exchange_n(&_M_i, __i, __m);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
{
@@ -519,7 +518,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -531,7 +530,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 1, __m1, __m2);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -539,7 +538,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -547,7 +546,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1, memory_order __m2) noexcept
{
@@ -558,7 +557,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -570,7 +569,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_i, &__i1, __i2, 0, __m1, __m2);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -578,7 +577,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__int_type& __i1, __int_type __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -586,52 +585,52 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_add(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_sub(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_and(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_and(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_or(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_or(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }
- __always_inline __int_type
+ __libitm_always_inline __int_type
fetch_xor(__int_type __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_xor(&_M_i, __i, __m); }
@@ -733,7 +732,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
is_lock_free() const volatile noexcept
{ return __atomic_is_lock_free (sizeof (_M_p), &_M_p); }
- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -744,7 +743,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_store_n(&_M_p, __p, __m);
}
- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -755,7 +754,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__atomic_store_n(&_M_p, __p, __m);
}
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -764,7 +763,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_load_n(&_M_p, __m);
}
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{
// __glibcxx_assert(__m != memory_order_release);
@@ -773,21 +772,21 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_load_n(&_M_p, __m);
}
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{
return __atomic_exchange_n(&_M_p, __p, __m);
}
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
return __atomic_exchange_n(&_M_p, __p, __m);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) noexcept
@@ -799,7 +798,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
@@ -811,22 +810,22 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return __atomic_compare_exchange_n(&_M_p, &__p1, __p2, 0, __m1, __m2);
}
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_add(&_M_p, __d, __m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_add(&_M_p, __d, __m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return __atomic_fetch_sub(&_M_p, __d, __m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return __atomic_fetch_sub(&_M_p, __d, __m); }
@@ -870,67 +869,67 @@ namespace std // _GLIBCXX_VISIBILITY(default)
bool
is_lock_free() const volatile noexcept { return _M_base.is_lock_free(); }
- __always_inline void
+ __libitm_always_inline void
store(bool __i, memory_order __m = memory_order_seq_cst) noexcept
{ _M_base.store(__i, __m); }
- __always_inline void
+ __libitm_always_inline void
store(bool __i, memory_order __m = memory_order_seq_cst) volatile noexcept
{ _M_base.store(__i, __m); }
- __always_inline bool
+ __libitm_always_inline bool
load(memory_order __m = memory_order_seq_cst) const noexcept
{ return _M_base.load(__m); }
- __always_inline bool
+ __libitm_always_inline bool
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{ return _M_base.load(__m); }
- __always_inline bool
+ __libitm_always_inline bool
exchange(bool __i, memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.exchange(__i, __m); }
- __always_inline bool
+ __libitm_always_inline bool
exchange(bool __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.exchange(__i, __m); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m1, __m2); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_weak(__i1, __i2, __m); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2, memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m1, __m2); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(bool& __i1, bool __i2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_base.compare_exchange_strong(__i1, __i2, __m); }
@@ -980,11 +979,11 @@ namespace std // _GLIBCXX_VISIBILITY(default)
store(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
{ __atomic_store(&_M_i, &__i, _m); }
- __always_inline void
+ __libitm_always_inline void
store(_Tp __i, memory_order _m = memory_order_seq_cst) volatile noexcept
{ __atomic_store(&_M_i, &__i, _m); }
- __always_inline _Tp
+ __libitm_always_inline _Tp
load(memory_order _m = memory_order_seq_cst) const noexcept
{
_Tp tmp;
@@ -992,7 +991,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return tmp;
}
- __always_inline _Tp
+ __libitm_always_inline _Tp
load(memory_order _m = memory_order_seq_cst) const volatile noexcept
{
_Tp tmp;
@@ -1000,7 +999,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return tmp;
}
- __always_inline _Tp
+ __libitm_always_inline _Tp
exchange(_Tp __i, memory_order _m = memory_order_seq_cst) noexcept
{
_Tp tmp;
@@ -1008,7 +1007,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return tmp;
}
- __always_inline _Tp
+ __libitm_always_inline _Tp
exchange(_Tp __i,
memory_order _m = memory_order_seq_cst) volatile noexcept
{
@@ -1017,50 +1016,50 @@ namespace std // _GLIBCXX_VISIBILITY(default)
return tmp;
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) volatile noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, true, __s, __f);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return compare_exchange_weak(__e, __i, __m, __m); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return compare_exchange_weak(__e, __i, __m, __m); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i, memory_order __s,
memory_order __f) volatile noexcept
{
return __atomic_compare_exchange(&_M_i, &__e, &__i, false, __s, __f);
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) noexcept
{ return compare_exchange_strong(__e, __i, __m, __m); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(_Tp& __e, _Tp __i,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return compare_exchange_strong(__e, __i, __m, __m); }
@@ -1153,46 -1152,46 @@ namespace std // _GLIBCXX_VISIBILITY(default)
is_lock_free() const volatile noexcept
{ return _M_b.is_lock_free(); }
- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.store(__p, __m); }
- __always_inline void
+ __libitm_always_inline void
store(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.store(__p, __m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const noexcept
{ return _M_b.load(__m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
load(memory_order __m = memory_order_seq_cst) const volatile noexcept
{ return _M_b.load(__m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.exchange(__p, __m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
exchange(__pointer_type __p,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.exchange(__p, __m); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -1200,7 +1199,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_weak(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -1208,18 +1207,18 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1, memory_order __m2) noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m1,
memory_order __m2) volatile noexcept
{ return _M_b.compare_exchange_strong(__p1, __p2, __m1, __m2); }
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) noexcept
{
@@ -1227,7 +1226,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}
- __always_inline bool
+ __libitm_always_inline bool
compare_exchange_strong(__pointer_type& __p1, __pointer_type __p2,
memory_order __m = memory_order_seq_cst) volatile noexcept
{
@@ -1235,22 +1234,22 @@ namespace std // _GLIBCXX_VISIBILITY(default)
__calculate_memory_order(__m));
}
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.fetch_add(__d, __m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_add(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.fetch_add(__d, __m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) noexcept
{ return _M_b.fetch_sub(__d, __m); }
- __always_inline __pointer_type
+ __libitm_always_inline __pointer_type
fetch_sub(ptrdiff_t __d,
memory_order __m = memory_order_seq_cst) volatile noexcept
{ return _M_b.fetch_sub(__d, __m); }
@@ -1544,98 +1543,98 @@ namespace std // _GLIBCXX_VISIBILITY(default)
// Function definitions, atomic_flag operations.
- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set_explicit(atomic_flag* __a,
memory_order __m) noexcept
{ return __a->test_and_set(__m); }
- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set_explicit(volatile atomic_flag* __a,
memory_order __m) noexcept
{ return __a->test_and_set(__m); }
- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear_explicit(atomic_flag* __a, memory_order __m) noexcept
{ __a->clear(__m); }
- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear_explicit(volatile atomic_flag* __a,
memory_order __m) noexcept
{ __a->clear(__m); }
- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set(atomic_flag* __a) noexcept
{ return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
- inline __always_inline bool
+ inline __libitm_always_inline bool
atomic_flag_test_and_set(volatile atomic_flag* __a) noexcept
{ return atomic_flag_test_and_set_explicit(__a, memory_order_seq_cst); }
- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear(atomic_flag* __a) noexcept
{ atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
- inline __always_inline void
+ inline __libitm_always_inline void
atomic_flag_clear(volatile atomic_flag* __a) noexcept
{ atomic_flag_clear_explicit(__a, memory_order_seq_cst); }
// Function templates generally applicable to atomic types.
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_is_lock_free(const atomic<_ITp>* __a) noexcept
{ return __a->is_lock_free(); }
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_is_lock_free(const volatile atomic<_ITp>* __a) noexcept
{ return __a->is_lock_free(); }
template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_init(atomic<_ITp>* __a, _ITp __i) noexcept;
template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_init(volatile atomic<_ITp>* __a, _ITp __i) noexcept;
template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store_explicit(atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ __a->store(__i, __m); }
template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store_explicit(volatile atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ __a->store(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load_explicit(const atomic<_ITp>* __a, memory_order __m) noexcept
{ return __a->load(__m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load_explicit(const volatile atomic<_ITp>* __a,
memory_order __m) noexcept
{ return __a->load(__m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange_explicit(atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->exchange(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange_explicit(volatile atomic<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->exchange(__i, __m); }
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak_explicit(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1643,7 +1642,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
{ return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak_explicit(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1651,7 +1650,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
{ return __a->compare_exchange_weak(*__i1, __i2, __m1, __m2); }
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong_explicit(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1659,7 +1658,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
{ return __a->compare_exchange_strong(*__i1, __i2, __m1, __m2); }
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong_explicit(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2,
memory_order __m1,
@@ -1668,37 +1667,37 @@ namespace std // _GLIBCXX_VISIBILITY(default)
template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store(atomic<_ITp>* __a, _ITp __i) noexcept
{ atomic_store_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline void
+ __libitm_always_inline void
atomic_store(volatile atomic<_ITp>* __a, _ITp __i) noexcept
{ atomic_store_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load(const atomic<_ITp>* __a) noexcept
{ return atomic_load_explicit(__a, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_load(const volatile atomic<_ITp>* __a) noexcept
{ return atomic_load_explicit(__a, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange(atomic<_ITp>* __a, _ITp __i) noexcept
{ return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_exchange(volatile atomic<_ITp>* __a, _ITp __i) noexcept
{ return atomic_exchange_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1708,7 +1707,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
}
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_weak(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1718,7 +1717,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
}
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong(atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1728,7 +1727,7 @@ namespace std // _GLIBCXX_VISIBILITY(default)
}
template<typename _ITp>
- __always_inline bool
+ __libitm_always_inline bool
atomic_compare_exchange_strong(volatile atomic<_ITp>* __a,
_ITp* __i1, _ITp __i2) noexcept
{
@@ -1742,158 +1741,158 @@ namespace std // _GLIBCXX_VISIBILITY(default)
// intergral types as specified in the standard, excluding address
// types.
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_add(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_add(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_sub(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_sub(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_and(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_and(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_or(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_or(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor_explicit(__atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_xor(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor_explicit(volatile __atomic_base<_ITp>* __a, _ITp __i,
memory_order __m) noexcept
{ return __a->fetch_xor(__i, __m); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_add(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_add_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_sub(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_sub_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_and(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_and_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_or(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_or_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor(__atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
template<typename _ITp>
- __always_inline _ITp
+ __libitm_always_inline _ITp
atomic_fetch_xor(volatile __atomic_base<_ITp>* __a, _ITp __i) noexcept
{ return atomic_fetch_xor_explicit(__a, __i, memory_order_seq_cst); }
// Partial specializations for pointers.
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_add(__d, __m); }
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add_explicit(volatile atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_add(__d, __m); }
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_add(__d); }
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_add(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_add(__d); }
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub_explicit(volatile atomic<_ITp*>* __a,
ptrdiff_t __d, memory_order __m) noexcept
{ return __a->fetch_sub(__d, __m); }
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub_explicit(atomic<_ITp*>* __a, ptrdiff_t __d,
memory_order __m) noexcept
{ return __a->fetch_sub(__d, __m); }
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub(volatile atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_sub(__d); }
template<typename _ITp>
- __always_inline _ITp*
+ __libitm_always_inline _ITp*
atomic_fetch_sub(atomic<_ITp*>* __a, ptrdiff_t __d) noexcept
{ return __a->fetch_sub(__d); }
// @} group atomics
--
2.19.2
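
Note (not part of the upstream patch): the rename matters because glibc's <sys/cdefs.h> also provides a macro named __always_inline, so the old #undef/#define pair in local_atomic could clobber or conflict with the system definition; that conflict is what PR libitm/61164 tracks. The sketch below only illustrates the naming pattern the patch adopts. It is compiled against <atomic> rather than libitm's local_atomic, and the helper load_relaxed and the check in main are hypothetical, not part of libitm.

#include <atomic>

// Library-prefixed macro, mirroring the patch: a private spelling is
// introduced instead of redefining the __always_inline name that
// <sys/cdefs.h> may already own. In libitm this #define lives inside the
// local_atomic header itself.
#define __libitm_always_inline __attribute__((always_inline))

// Hypothetical helper using the same "inline __libitm_always_inline"
// spelling that the patched local_atomic uses for its wrappers.
inline __libitm_always_inline int
load_relaxed(const std::atomic<int>& a) noexcept
{
  return a.load(std::memory_order_relaxed);
}

int main()
{
  std::atomic<int> v{42};
  return load_relaxed(v) == 42 ? 0 : 1;  // exits 0 when the load round-trips
}

The design point is simply that a library-private macro name can be added without #undef-ing a name owned by another header, so the glibc definition stays intact for any translation unit that later includes <sys/cdefs.h>.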