// NOTE(review): removed source-viewer extraction artifacts that preceded this
// header (a "atomic 83 KB" page title and several runs of concatenated line
// numbers). They were not part of the original libc++ <atomic> header text.
  1. // -*- C++ -*-
  2. //===--------------------------- atomic -----------------------------------===//
  3. //
  4. // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
  5. // See https://llvm.org/LICENSE.txt for license information.
  6. // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
  7. //
  8. //===----------------------------------------------------------------------===//
  9. #ifndef _LIBCPP_ATOMIC
  10. #define _LIBCPP_ATOMIC
  11. /*
  12. atomic synopsis
  13. namespace std
  14. {
  15. // feature test macro
  16. #define __cpp_lib_atomic_is_always_lock_free // as specified by SG10
  17. // order and consistency
  18. enum memory_order: unspecified // enum class in C++20
  19. {
  20. relaxed,
  21. consume, // load-consume
  22. acquire, // load-acquire
  23. release, // store-release
  24. acq_rel, // store-release load-acquire
  25. seq_cst // store-release load-acquire
  26. };
  27. inline constexpr auto memory_order_relaxed = memory_order::relaxed;
  28. inline constexpr auto memory_order_consume = memory_order::consume;
  29. inline constexpr auto memory_order_acquire = memory_order::acquire;
  30. inline constexpr auto memory_order_release = memory_order::release;
  31. inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
  32. inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;
  33. template <class T> T kill_dependency(T y) noexcept;
  34. // lock-free property
  35. #define ATOMIC_BOOL_LOCK_FREE unspecified
  36. #define ATOMIC_CHAR_LOCK_FREE unspecified
  37. #define ATOMIC_CHAR16_T_LOCK_FREE unspecified
  38. #define ATOMIC_CHAR32_T_LOCK_FREE unspecified
  39. #define ATOMIC_WCHAR_T_LOCK_FREE unspecified
  40. #define ATOMIC_SHORT_LOCK_FREE unspecified
  41. #define ATOMIC_INT_LOCK_FREE unspecified
  42. #define ATOMIC_LONG_LOCK_FREE unspecified
  43. #define ATOMIC_LLONG_LOCK_FREE unspecified
  44. #define ATOMIC_POINTER_LOCK_FREE unspecified
  45. // flag type and operations
  46. typedef struct atomic_flag
  47. {
  48. bool test_and_set(memory_order m = memory_order_seq_cst) volatile noexcept;
  49. bool test_and_set(memory_order m = memory_order_seq_cst) noexcept;
  50. void clear(memory_order m = memory_order_seq_cst) volatile noexcept;
  51. void clear(memory_order m = memory_order_seq_cst) noexcept;
  52. atomic_flag() noexcept = default;
  53. atomic_flag(const atomic_flag&) = delete;
  54. atomic_flag& operator=(const atomic_flag&) = delete;
  55. atomic_flag& operator=(const atomic_flag&) volatile = delete;
  56. } atomic_flag;
  57. bool
  58. atomic_flag_test_and_set(volatile atomic_flag* obj) noexcept;
  59. bool
  60. atomic_flag_test_and_set(atomic_flag* obj) noexcept;
  61. bool
  62. atomic_flag_test_and_set_explicit(volatile atomic_flag* obj,
  63. memory_order m) noexcept;
  64. bool
  65. atomic_flag_test_and_set_explicit(atomic_flag* obj, memory_order m) noexcept;
  66. void
  67. atomic_flag_clear(volatile atomic_flag* obj) noexcept;
  68. void
  69. atomic_flag_clear(atomic_flag* obj) noexcept;
  70. void
  71. atomic_flag_clear_explicit(volatile atomic_flag* obj, memory_order m) noexcept;
  72. void
  73. atomic_flag_clear_explicit(atomic_flag* obj, memory_order m) noexcept;
  74. #define ATOMIC_FLAG_INIT see below
  75. #define ATOMIC_VAR_INIT(value) see below
  76. template <class T>
  77. struct atomic
  78. {
  79. static constexpr bool is_always_lock_free;
  80. bool is_lock_free() const volatile noexcept;
  81. bool is_lock_free() const noexcept;
  82. void store(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  83. void store(T desr, memory_order m = memory_order_seq_cst) noexcept;
  84. T load(memory_order m = memory_order_seq_cst) const volatile noexcept;
  85. T load(memory_order m = memory_order_seq_cst) const noexcept;
  86. operator T() const volatile noexcept;
  87. operator T() const noexcept;
  88. T exchange(T desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  89. T exchange(T desr, memory_order m = memory_order_seq_cst) noexcept;
  90. bool compare_exchange_weak(T& expc, T desr,
  91. memory_order s, memory_order f) volatile noexcept;
  92. bool compare_exchange_weak(T& expc, T desr, memory_order s, memory_order f) noexcept;
  93. bool compare_exchange_strong(T& expc, T desr,
  94. memory_order s, memory_order f) volatile noexcept;
  95. bool compare_exchange_strong(T& expc, T desr,
  96. memory_order s, memory_order f) noexcept;
  97. bool compare_exchange_weak(T& expc, T desr,
  98. memory_order m = memory_order_seq_cst) volatile noexcept;
  99. bool compare_exchange_weak(T& expc, T desr,
  100. memory_order m = memory_order_seq_cst) noexcept;
  101. bool compare_exchange_strong(T& expc, T desr,
  102. memory_order m = memory_order_seq_cst) volatile noexcept;
  103. bool compare_exchange_strong(T& expc, T desr,
  104. memory_order m = memory_order_seq_cst) noexcept;
  105. atomic() noexcept = default;
  106. constexpr atomic(T desr) noexcept;
  107. atomic(const atomic&) = delete;
  108. atomic& operator=(const atomic&) = delete;
  109. atomic& operator=(const atomic&) volatile = delete;
  110. T operator=(T) volatile noexcept;
  111. T operator=(T) noexcept;
  112. };
  113. template <>
  114. struct atomic<integral>
  115. {
  116. static constexpr bool is_always_lock_free;
  117. bool is_lock_free() const volatile noexcept;
  118. bool is_lock_free() const noexcept;
  119. void store(integral desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  120. void store(integral desr, memory_order m = memory_order_seq_cst) noexcept;
  121. integral load(memory_order m = memory_order_seq_cst) const volatile noexcept;
  122. integral load(memory_order m = memory_order_seq_cst) const noexcept;
  123. operator integral() const volatile noexcept;
  124. operator integral() const noexcept;
  125. integral exchange(integral desr,
  126. memory_order m = memory_order_seq_cst) volatile noexcept;
  127. integral exchange(integral desr, memory_order m = memory_order_seq_cst) noexcept;
  128. bool compare_exchange_weak(integral& expc, integral desr,
  129. memory_order s, memory_order f) volatile noexcept;
  130. bool compare_exchange_weak(integral& expc, integral desr,
  131. memory_order s, memory_order f) noexcept;
  132. bool compare_exchange_strong(integral& expc, integral desr,
  133. memory_order s, memory_order f) volatile noexcept;
  134. bool compare_exchange_strong(integral& expc, integral desr,
  135. memory_order s, memory_order f) noexcept;
  136. bool compare_exchange_weak(integral& expc, integral desr,
  137. memory_order m = memory_order_seq_cst) volatile noexcept;
  138. bool compare_exchange_weak(integral& expc, integral desr,
  139. memory_order m = memory_order_seq_cst) noexcept;
  140. bool compare_exchange_strong(integral& expc, integral desr,
  141. memory_order m = memory_order_seq_cst) volatile noexcept;
  142. bool compare_exchange_strong(integral& expc, integral desr,
  143. memory_order m = memory_order_seq_cst) noexcept;
  144. integral
  145. fetch_add(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  146. integral fetch_add(integral op, memory_order m = memory_order_seq_cst) noexcept;
  147. integral
  148. fetch_sub(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  149. integral fetch_sub(integral op, memory_order m = memory_order_seq_cst) noexcept;
  150. integral
  151. fetch_and(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  152. integral fetch_and(integral op, memory_order m = memory_order_seq_cst) noexcept;
  153. integral
  154. fetch_or(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  155. integral fetch_or(integral op, memory_order m = memory_order_seq_cst) noexcept;
  156. integral
  157. fetch_xor(integral op, memory_order m = memory_order_seq_cst) volatile noexcept;
  158. integral fetch_xor(integral op, memory_order m = memory_order_seq_cst) noexcept;
  159. atomic() noexcept = default;
  160. constexpr atomic(integral desr) noexcept;
  161. atomic(const atomic&) = delete;
  162. atomic& operator=(const atomic&) = delete;
  163. atomic& operator=(const atomic&) volatile = delete;
  164. integral operator=(integral desr) volatile noexcept;
  165. integral operator=(integral desr) noexcept;
  166. integral operator++(int) volatile noexcept;
  167. integral operator++(int) noexcept;
  168. integral operator--(int) volatile noexcept;
  169. integral operator--(int) noexcept;
  170. integral operator++() volatile noexcept;
  171. integral operator++() noexcept;
  172. integral operator--() volatile noexcept;
  173. integral operator--() noexcept;
  174. integral operator+=(integral op) volatile noexcept;
  175. integral operator+=(integral op) noexcept;
  176. integral operator-=(integral op) volatile noexcept;
  177. integral operator-=(integral op) noexcept;
  178. integral operator&=(integral op) volatile noexcept;
  179. integral operator&=(integral op) noexcept;
  180. integral operator|=(integral op) volatile noexcept;
  181. integral operator|=(integral op) noexcept;
  182. integral operator^=(integral op) volatile noexcept;
  183. integral operator^=(integral op) noexcept;
  184. };
  185. template <class T>
  186. struct atomic<T*>
  187. {
  188. static constexpr bool is_always_lock_free;
  189. bool is_lock_free() const volatile noexcept;
  190. bool is_lock_free() const noexcept;
  191. void store(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  192. void store(T* desr, memory_order m = memory_order_seq_cst) noexcept;
  193. T* load(memory_order m = memory_order_seq_cst) const volatile noexcept;
  194. T* load(memory_order m = memory_order_seq_cst) const noexcept;
  195. operator T*() const volatile noexcept;
  196. operator T*() const noexcept;
  197. T* exchange(T* desr, memory_order m = memory_order_seq_cst) volatile noexcept;
  198. T* exchange(T* desr, memory_order m = memory_order_seq_cst) noexcept;
  199. bool compare_exchange_weak(T*& expc, T* desr,
  200. memory_order s, memory_order f) volatile noexcept;
  201. bool compare_exchange_weak(T*& expc, T* desr,
  202. memory_order s, memory_order f) noexcept;
  203. bool compare_exchange_strong(T*& expc, T* desr,
  204. memory_order s, memory_order f) volatile noexcept;
  205. bool compare_exchange_strong(T*& expc, T* desr,
  206. memory_order s, memory_order f) noexcept;
  207. bool compare_exchange_weak(T*& expc, T* desr,
  208. memory_order m = memory_order_seq_cst) volatile noexcept;
  209. bool compare_exchange_weak(T*& expc, T* desr,
  210. memory_order m = memory_order_seq_cst) noexcept;
  211. bool compare_exchange_strong(T*& expc, T* desr,
  212. memory_order m = memory_order_seq_cst) volatile noexcept;
  213. bool compare_exchange_strong(T*& expc, T* desr,
  214. memory_order m = memory_order_seq_cst) noexcept;
  215. T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
  216. T* fetch_add(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
  217. T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) volatile noexcept;
  218. T* fetch_sub(ptrdiff_t op, memory_order m = memory_order_seq_cst) noexcept;
  219. atomic() noexcept = default;
  220. constexpr atomic(T* desr) noexcept;
  221. atomic(const atomic&) = delete;
  222. atomic& operator=(const atomic&) = delete;
  223. atomic& operator=(const atomic&) volatile = delete;
  224. T* operator=(T*) volatile noexcept;
  225. T* operator=(T*) noexcept;
  226. T* operator++(int) volatile noexcept;
  227. T* operator++(int) noexcept;
  228. T* operator--(int) volatile noexcept;
  229. T* operator--(int) noexcept;
  230. T* operator++() volatile noexcept;
  231. T* operator++() noexcept;
  232. T* operator--() volatile noexcept;
  233. T* operator--() noexcept;
  234. T* operator+=(ptrdiff_t op) volatile noexcept;
  235. T* operator+=(ptrdiff_t op) noexcept;
  236. T* operator-=(ptrdiff_t op) volatile noexcept;
  237. T* operator-=(ptrdiff_t op) noexcept;
  238. };
  239. template <class T>
  240. bool
  241. atomic_is_lock_free(const volatile atomic<T>* obj) noexcept;
  242. template <class T>
  243. bool
  244. atomic_is_lock_free(const atomic<T>* obj) noexcept;
  245. template <class T>
  246. void
  247. atomic_init(volatile atomic<T>* obj, T desr) noexcept;
  248. template <class T>
  249. void
  250. atomic_init(atomic<T>* obj, T desr) noexcept;
  251. template <class T>
  252. void
  253. atomic_store(volatile atomic<T>* obj, T desr) noexcept;
  254. template <class T>
  255. void
  256. atomic_store(atomic<T>* obj, T desr) noexcept;
  257. template <class T>
  258. void
  259. atomic_store_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
  260. template <class T>
  261. void
  262. atomic_store_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
  263. template <class T>
  264. T
  265. atomic_load(const volatile atomic<T>* obj) noexcept;
  266. template <class T>
  267. T
  268. atomic_load(const atomic<T>* obj) noexcept;
  269. template <class T>
  270. T
  271. atomic_load_explicit(const volatile atomic<T>* obj, memory_order m) noexcept;
  272. template <class T>
  273. T
  274. atomic_load_explicit(const atomic<T>* obj, memory_order m) noexcept;
  275. template <class T>
  276. T
  277. atomic_exchange(volatile atomic<T>* obj, T desr) noexcept;
  278. template <class T>
  279. T
  280. atomic_exchange(atomic<T>* obj, T desr) noexcept;
  281. template <class T>
  282. T
  283. atomic_exchange_explicit(volatile atomic<T>* obj, T desr, memory_order m) noexcept;
  284. template <class T>
  285. T
  286. atomic_exchange_explicit(atomic<T>* obj, T desr, memory_order m) noexcept;
  287. template <class T>
  288. bool
  289. atomic_compare_exchange_weak(volatile atomic<T>* obj, T* expc, T desr) noexcept;
  290. template <class T>
  291. bool
  292. atomic_compare_exchange_weak(atomic<T>* obj, T* expc, T desr) noexcept;
  293. template <class T>
  294. bool
  295. atomic_compare_exchange_strong(volatile atomic<T>* obj, T* expc, T desr) noexcept;
  296. template <class T>
  297. bool
  298. atomic_compare_exchange_strong(atomic<T>* obj, T* expc, T desr) noexcept;
  299. template <class T>
  300. bool
  301. atomic_compare_exchange_weak_explicit(volatile atomic<T>* obj, T* expc,
  302. T desr,
  303. memory_order s, memory_order f) noexcept;
  304. template <class T>
  305. bool
  306. atomic_compare_exchange_weak_explicit(atomic<T>* obj, T* expc, T desr,
  307. memory_order s, memory_order f) noexcept;
  308. template <class T>
  309. bool
  310. atomic_compare_exchange_strong_explicit(volatile atomic<T>* obj,
  311. T* expc, T desr,
  312. memory_order s, memory_order f) noexcept;
  313. template <class T>
  314. bool
  315. atomic_compare_exchange_strong_explicit(atomic<T>* obj, T* expc,
  316. T desr,
  317. memory_order s, memory_order f) noexcept;
  318. template <class Integral>
  319. Integral
  320. atomic_fetch_add(volatile atomic<Integral>* obj, Integral op) noexcept;
  321. template <class Integral>
  322. Integral
  323. atomic_fetch_add(atomic<Integral>* obj, Integral op) noexcept;
  324. template <class Integral>
  325. Integral
  326. atomic_fetch_add_explicit(volatile atomic<Integral>* obj, Integral op,
  327. memory_order m) noexcept;
  328. template <class Integral>
  329. Integral
  330. atomic_fetch_add_explicit(atomic<Integral>* obj, Integral op,
  331. memory_order m) noexcept;
  332. template <class Integral>
  333. Integral
  334. atomic_fetch_sub(volatile atomic<Integral>* obj, Integral op) noexcept;
  335. template <class Integral>
  336. Integral
  337. atomic_fetch_sub(atomic<Integral>* obj, Integral op) noexcept;
  338. template <class Integral>
  339. Integral
  340. atomic_fetch_sub_explicit(volatile atomic<Integral>* obj, Integral op,
  341. memory_order m) noexcept;
  342. template <class Integral>
  343. Integral
  344. atomic_fetch_sub_explicit(atomic<Integral>* obj, Integral op,
  345. memory_order m) noexcept;
  346. template <class Integral>
  347. Integral
  348. atomic_fetch_and(volatile atomic<Integral>* obj, Integral op) noexcept;
  349. template <class Integral>
  350. Integral
  351. atomic_fetch_and(atomic<Integral>* obj, Integral op) noexcept;
  352. template <class Integral>
  353. Integral
  354. atomic_fetch_and_explicit(volatile atomic<Integral>* obj, Integral op,
  355. memory_order m) noexcept;
  356. template <class Integral>
  357. Integral
  358. atomic_fetch_and_explicit(atomic<Integral>* obj, Integral op,
  359. memory_order m) noexcept;
  360. template <class Integral>
  361. Integral
  362. atomic_fetch_or(volatile atomic<Integral>* obj, Integral op) noexcept;
  363. template <class Integral>
  364. Integral
  365. atomic_fetch_or(atomic<Integral>* obj, Integral op) noexcept;
  366. template <class Integral>
  367. Integral
  368. atomic_fetch_or_explicit(volatile atomic<Integral>* obj, Integral op,
  369. memory_order m) noexcept;
  370. template <class Integral>
  371. Integral
  372. atomic_fetch_or_explicit(atomic<Integral>* obj, Integral op,
  373. memory_order m) noexcept;
  374. template <class Integral>
  375. Integral
  376. atomic_fetch_xor(volatile atomic<Integral>* obj, Integral op) noexcept;
  377. template <class Integral>
  378. Integral
  379. atomic_fetch_xor(atomic<Integral>* obj, Integral op) noexcept;
  380. template <class Integral>
  381. Integral
  382. atomic_fetch_xor_explicit(volatile atomic<Integral>* obj, Integral op,
  383. memory_order m) noexcept;
  384. template <class Integral>
  385. Integral
  386. atomic_fetch_xor_explicit(atomic<Integral>* obj, Integral op,
  387. memory_order m) noexcept;
  388. template <class T>
  389. T*
  390. atomic_fetch_add(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
  391. template <class T>
  392. T*
  393. atomic_fetch_add(atomic<T*>* obj, ptrdiff_t op) noexcept;
  394. template <class T>
  395. T*
  396. atomic_fetch_add_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
  397. memory_order m) noexcept;
  398. template <class T>
  399. T*
  400. atomic_fetch_add_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
  401. template <class T>
  402. T*
  403. atomic_fetch_sub(volatile atomic<T*>* obj, ptrdiff_t op) noexcept;
  404. template <class T>
  405. T*
  406. atomic_fetch_sub(atomic<T*>* obj, ptrdiff_t op) noexcept;
  407. template <class T>
  408. T*
  409. atomic_fetch_sub_explicit(volatile atomic<T*>* obj, ptrdiff_t op,
  410. memory_order m) noexcept;
  411. template <class T>
  412. T*
  413. atomic_fetch_sub_explicit(atomic<T*>* obj, ptrdiff_t op, memory_order m) noexcept;
  414. // Atomics for standard typedef types
  415. typedef atomic<bool> atomic_bool;
  416. typedef atomic<char> atomic_char;
  417. typedef atomic<signed char> atomic_schar;
  418. typedef atomic<unsigned char> atomic_uchar;
  419. typedef atomic<short> atomic_short;
  420. typedef atomic<unsigned short> atomic_ushort;
  421. typedef atomic<int> atomic_int;
  422. typedef atomic<unsigned int> atomic_uint;
  423. typedef atomic<long> atomic_long;
  424. typedef atomic<unsigned long> atomic_ulong;
  425. typedef atomic<long long> atomic_llong;
  426. typedef atomic<unsigned long long> atomic_ullong;
  427. typedef atomic<char16_t> atomic_char16_t;
  428. typedef atomic<char32_t> atomic_char32_t;
  429. typedef atomic<wchar_t> atomic_wchar_t;
  430. typedef atomic<int_least8_t> atomic_int_least8_t;
  431. typedef atomic<uint_least8_t> atomic_uint_least8_t;
  432. typedef atomic<int_least16_t> atomic_int_least16_t;
  433. typedef atomic<uint_least16_t> atomic_uint_least16_t;
  434. typedef atomic<int_least32_t> atomic_int_least32_t;
  435. typedef atomic<uint_least32_t> atomic_uint_least32_t;
  436. typedef atomic<int_least64_t> atomic_int_least64_t;
  437. typedef atomic<uint_least64_t> atomic_uint_least64_t;
  438. typedef atomic<int_fast8_t> atomic_int_fast8_t;
  439. typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
  440. typedef atomic<int_fast16_t> atomic_int_fast16_t;
  441. typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
  442. typedef atomic<int_fast32_t> atomic_int_fast32_t;
  443. typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
  444. typedef atomic<int_fast64_t> atomic_int_fast64_t;
  445. typedef atomic<uint_fast64_t> atomic_uint_fast64_t;
  446. typedef atomic<int8_t> atomic_int8_t;
  447. typedef atomic<uint8_t> atomic_uint8_t;
  448. typedef atomic<int16_t> atomic_int16_t;
  449. typedef atomic<uint16_t> atomic_uint16_t;
  450. typedef atomic<int32_t> atomic_int32_t;
  451. typedef atomic<uint32_t> atomic_uint32_t;
  452. typedef atomic<int64_t> atomic_int64_t;
  453. typedef atomic<uint64_t> atomic_uint64_t;
  454. typedef atomic<intptr_t> atomic_intptr_t;
  455. typedef atomic<uintptr_t> atomic_uintptr_t;
  456. typedef atomic<size_t> atomic_size_t;
  457. typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
  458. typedef atomic<intmax_t> atomic_intmax_t;
  459. typedef atomic<uintmax_t> atomic_uintmax_t;
  460. // fences
  461. void atomic_thread_fence(memory_order m) noexcept;
  462. void atomic_signal_fence(memory_order m) noexcept;
  463. } // std
  464. */
  465. #include <__config>
  466. #include <cstddef>
  467. #include <cstdint>
  468. #include <type_traits>
  469. #include <version>
  470. #if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
  471. #pragma GCC system_header
  472. #endif
  473. #ifdef _LIBCPP_HAS_NO_THREADS
  474. # error <atomic> is not supported on this single threaded system
  475. #endif
  476. #ifdef _LIBCPP_HAS_NO_ATOMIC_HEADER
  477. # error <atomic> is not implemented
  478. #endif
  479. #ifdef kill_dependency
  480. # error C++ standard library is incompatible with <stdatomic.h>
  481. #endif
  482. #define _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m) \
  483. _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_consume || \
  484. __m == memory_order_acquire || \
  485. __m == memory_order_acq_rel, \
  486. "memory order argument to atomic operation is invalid")
  487. #define _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m) \
  488. _LIBCPP_DIAGNOSE_WARNING(__m == memory_order_release || \
  489. __m == memory_order_acq_rel, \
  490. "memory order argument to atomic operation is invalid")
  491. #define _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__m, __f) \
  492. _LIBCPP_DIAGNOSE_WARNING(__f == memory_order_release || \
  493. __f == memory_order_acq_rel, \
  494. "memory order argument to atomic operation is invalid")
  495. _LIBCPP_BEGIN_NAMESPACE_STD
  496. // Figure out what the underlying type for `memory_order` would be if it were
  497. // declared as an unscoped enum (accounting for -fshort-enums). Use this result
  498. // to pin the underlying type in C++20.
  499. enum __legacy_memory_order {
  500. __mo_relaxed,
  501. __mo_consume,
  502. __mo_acquire,
  503. __mo_release,
  504. __mo_acq_rel,
  505. __mo_seq_cst
  506. };
  507. typedef underlying_type<__legacy_memory_order>::type __memory_order_underlying_t;
#if _LIBCPP_STD_VER > 17

// C++20: memory_order is a scoped enum. Pinning the underlying type to
// __memory_order_underlying_t keeps the ABI identical to the pre-C++20
// unscoped enum below.
enum class memory_order : __memory_order_underlying_t {
  relaxed = __mo_relaxed,
  consume = __mo_consume,
  acquire = __mo_acquire,
  release = __mo_release,
  acq_rel = __mo_acq_rel,
  seq_cst = __mo_seq_cst
};

// Namespace-scope constants mirroring the pre-C++20 enumerator names, so
// `memory_order_relaxed` etc. keep working in C++20 mode.
inline constexpr auto memory_order_relaxed = memory_order::relaxed;
inline constexpr auto memory_order_consume = memory_order::consume;
inline constexpr auto memory_order_acquire = memory_order::acquire;
inline constexpr auto memory_order_release = memory_order::release;
inline constexpr auto memory_order_acq_rel = memory_order::acq_rel;
inline constexpr auto memory_order_seq_cst = memory_order::seq_cst;

#else

// Pre-C++20: memory_order is an unscoped enum sharing enumerator values with
// __legacy_memory_order.
typedef enum memory_order {
  memory_order_relaxed = __mo_relaxed,
  memory_order_consume = __mo_consume,
  memory_order_acquire = __mo_acquire,
  memory_order_release = __mo_release,
  memory_order_acq_rel = __mo_acq_rel,
  memory_order_seq_cst = __mo_seq_cst,
} memory_order;

#endif // _LIBCPP_STD_VER > 17

// If the two declarations disagreed on the underlying type, functions taking
// memory_order would silently change ABI between language dialects.
static_assert((is_same<underlying_type<memory_order>::type, __memory_order_underlying_t>::value),
    "unexpected underlying type for std::memory_order");
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP) || \
    defined(_LIBCPP_ATOMIC_ONLY_USE_BUILTINS)

// [atomics.types.generic]p1 guarantees _Tp is trivially copyable. Because
// the default operator= in an object is not volatile, a byte-by-byte copy
// is required.

// Non-volatile destination: plain assignment is sufficient.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp& __a_value, _Tv const& __val) {
  __a_value = __val;
}

// Volatile destination: the implicitly-declared operator= cannot be called on
// a volatile object, so copy through volatile char* one byte at a time.
template <typename _Tp, typename _Tv> _LIBCPP_INLINE_VISIBILITY
typename enable_if<is_assignable<_Tp&, _Tv>::value>::type
__cxx_atomic_assign_volatile(_Tp volatile& __a_value, _Tv volatile const& __val) {
  volatile char* __to = reinterpret_cast<volatile char*>(&__a_value);
  volatile char* __end = __to + sizeof(_Tp);
  volatile const char* __from = reinterpret_cast<volatile const char*>(&__val);
  while (__to != __end)
    *__to++ = *__from++;
}

#endif
#if defined(_LIBCPP_HAS_GCC_ATOMIC_IMP)

// Storage for one atomic object when the GCC __atomic_* builtins are used:
// the builtins operate directly on the plain _Tp member.
template <typename _Tp>
struct __cxx_atomic_base_impl {
  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
  __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
  // C++03 has no defaulted functions; value-initialize instead.
  __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  _Tp __a_value;
};
// Map a std::memory_order to the corresponding __ATOMIC_* constant for an
// ordinary (success-path) operation.
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELEASE:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQ_REL:
              __ATOMIC_CONSUME))));
}

// Map a std::memory_order to the __ATOMIC_* constant used as a
// compare-exchange *failure* order: release is demoted to relaxed and
// acq_rel to acquire, because a failed compare-exchange performs no store.
_LIBCPP_INLINE_VISIBILITY inline _LIBCPP_CONSTEXPR int __to_gcc_failure_order(memory_order __order) {
  // Avoid switch statement to make this a constexpr.
  return __order == memory_order_relaxed ? __ATOMIC_RELAXED:
         (__order == memory_order_acquire ? __ATOMIC_ACQUIRE:
          (__order == memory_order_release ? __ATOMIC_RELAXED:
           (__order == memory_order_seq_cst ? __ATOMIC_SEQ_CST:
            (__order == memory_order_acq_rel ? __ATOMIC_ACQUIRE:
              __ATOMIC_CONSUME))));
}
// Non-atomic initialization ([atomics.types.operations]): no ordering is
// required, but the volatile overload must still copy byte-by-byte.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

// Fences map directly onto the GCC builtins.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) {
  __atomic_thread_fence(__to_gcc_order(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) {
  __atomic_signal_fence(__to_gcc_order(__order));
}
// Store: __atomic_store takes the value by pointer, hence the local copy.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp>* __a, _Tp __val,
                        memory_order __order) {
  __atomic_store(&__a->__a_value, &__val,
                 __to_gcc_order(__order));
}

// Load: the generic __atomic_load writes the result through an out-pointer.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_base_impl<_Tp>* __a,
                      memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_base_impl<_Tp>* __a, memory_order __order) {
  _Tp __ret;
  __atomic_load(&__a->__a_value, &__ret,
                __to_gcc_order(__order));
  return __ret;
}

// Exchange: returns the previous value through the out-pointer form.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __value, memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp>* __a, _Tp __value,
                          memory_order __order) {
  _Tp __ret;
  __atomic_exchange(&__a->__a_value, &__value, &__ret,
                    __to_gcc_order(__order));
  return __ret;
}

// Compare-exchange: the 4th builtin argument selects weak (true) vs strong
// (false); on failure *__expected is updated with the observed value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   false,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    volatile __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value,
    memory_order __success, memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(
    __cxx_atomic_base_impl<_Tp>* __a, _Tp* __expected, _Tp __value, memory_order __success,
    memory_order __failure) {
  return __atomic_compare_exchange(&__a->__a_value, __expected, &__value,
                                   true,
                                   __to_gcc_order(__success),
                                   __to_gcc_failure_order(__failure));
}
// Byte multiplier applied to fetch_add/fetch_sub deltas: 1 for arithmetic
// types, sizeof(_Tp) for _Tp* so that pointer arithmetic advances by whole
// elements (the __atomic builtins count in bytes for pointers).
template <typename _Tp>
struct __skip_amt { enum {value = 1}; };

template <typename _Tp>
struct __skip_amt<_Tp*> { enum {value = sizeof(_Tp)}; };

// FIXME: Haven't figured out what the spec says about using arrays with
// atomic_fetch_add. Force a failure rather than creating bad behavior.
template <typename _Tp>
struct __skip_amt<_Tp[]> { };
template <typename _Tp, int n>
struct __skip_amt<_Tp[n]> { };
// fetch_add / fetch_sub: scale the delta by __skip_amt so pointer atomics
// step by elements, not bytes. Returns the previous value.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_add(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Td __delta, memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp>* __a, _Td __delta,
                           memory_order __order) {
  return __atomic_fetch_sub(&__a->__a_value, __delta * __skip_amt<_Tp>::value,
                            __to_gcc_order(__order));
}

// Bitwise fetch-ops: straight pass-through to the builtins; integral only,
// so no __skip_amt scaling. Each returns the previous value.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_and(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_base_impl<_Tp>* __a,
                          _Tp __pattern, memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                          memory_order __order) {
  return __atomic_fetch_or(&__a->__a_value, __pattern,
                           __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_base_impl<_Tp>* __a,
                           _Tp __pattern, memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp>* __a, _Tp __pattern,
                           memory_order __order) {
  return __atomic_fetch_xor(&__a->__a_value, __pattern,
                            __to_gcc_order(__order));
}

// The second argument (0) lets the compiler assume natural alignment.
#define __cxx_atomic_is_lock_free(__s) __atomic_is_lock_free(__s, 0)
#elif defined(_LIBCPP_HAS_C_ATOMIC_IMP)

// Storage for one atomic object when the Clang __c11_atomic_* builtins are
// used: the value is held in an _Atomic-qualified member.
template <typename _Tp>
struct __cxx_atomic_base_impl {
  _LIBCPP_INLINE_VISIBILITY
#ifndef _LIBCPP_CXX03_LANG
  __cxx_atomic_base_impl() _NOEXCEPT = default;
#else
  // C++03 has no defaulted functions; value-initialize instead.
  __cxx_atomic_base_impl() _NOEXCEPT : __a_value() {}
#endif // _LIBCPP_CXX03_LANG
  _LIBCPP_CONSTEXPR explicit __cxx_atomic_base_impl(_Tp value) _NOEXCEPT
    : __a_value(value) {}
  // _Atomic() is a C11 extension in C++; suppress the extension warning.
  _LIBCPP_DISABLE_EXTENSION_WARNING _Atomic(_Tp) __a_value;
};

#define __cxx_atomic_is_lock_free(__s) __c11_atomic_is_lock_free(__s)

// Fences map directly onto the C11 builtins; they take the raw underlying
// integer value of the memory order.
_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_thread_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_thread_fence(static_cast<__memory_order_underlying_t>(__order));
}

_LIBCPP_INLINE_VISIBILITY inline
void __cxx_atomic_signal_fence(memory_order __order) _NOEXCEPT {
  __c11_atomic_signal_fence(static_cast<__memory_order_underlying_t>(__order));
}
// Non-atomic initialization via the C11 builtin.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val) _NOEXCEPT {
  __c11_atomic_init(&__a->__a_value, __val);
}

// Store with explicit ordering.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_base_impl<_Tp> * __a, _Tp __val, memory_order __order) _NOEXCEPT {
  __c11_atomic_store(&__a->__a_value, __val, static_cast<__memory_order_underlying_t>(__order));
}

// Load: __c11_atomic_load does not accept a pointer-to-const, so cast the
// const away (the operation itself does not modify the object).
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const volatile* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(__cxx_atomic_base_impl<_Tp> const* __a, memory_order __order) _NOEXCEPT {
  using __ptr_type = typename remove_const<decltype(__a->__a_value)>::type*;
  return __c11_atomic_load(const_cast<__ptr_type>(&__a->__a_value), static_cast<__memory_order_underlying_t>(__order));
}

// Exchange: returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_base_impl<_Tp> * __a, _Tp __value, memory_order __order) _NOEXCEPT {
  return __c11_atomic_exchange(&__a->__a_value, __value, static_cast<__memory_order_underlying_t>(__order));
}

// Compare-exchange, strong and weak; on failure *__expected is updated with
// the observed value by the builtin.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_strong(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_base_impl<_Tp> * __a, _Tp* __expected, _Tp __value, memory_order __success, memory_order __failure) _NOEXCEPT {
  return __c11_atomic_compare_exchange_weak(&__a->__a_value, __expected, __value, static_cast<__memory_order_underlying_t>(__success), static_cast<__memory_order_underlying_t>(__failure));
}

// fetch_add / fetch_sub: integral overloads, plus dedicated _Tp* overloads
// taking ptrdiff_t (the C11 builtins handle element scaling for pointers).
// Each returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_add(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp> * __a, _Tp __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> volatile* __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_sub(__cxx_atomic_base_impl<_Tp*> * __a, ptrdiff_t __delta, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_sub(&__a->__a_value, __delta, static_cast<__memory_order_underlying_t>(__order));
}

// Bitwise fetch-ops; each returns the previous value.
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_and(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_or(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> volatile* __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
template<class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_base_impl<_Tp> * __a, _Tp __pattern, memory_order __order) _NOEXCEPT {
  return __c11_atomic_fetch_xor(&__a->__a_value, __pattern, static_cast<__memory_order_underlying_t>(__order));
}
  928. #endif // _LIBCPP_HAS_GCC_ATOMIC_IMP, _LIBCPP_HAS_C_ATOMIC_IMP
// [atomics.order] kill_dependency: returns its argument unchanged while
// terminating any memory_order_consume dependency chain carried by it.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp kill_dependency(_Tp __y) _NOEXCEPT
{
  return __y;
}
// Define the standard ATOMIC_*_LOCK_FREE feature macros from whichever
// compiler-provided set exists (Clang's __CLANG_* or GCC's __GCC_* names).
// Values: 0 = never lock-free, 1 = sometimes, 2 = always.
#if defined(__CLANG_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __CLANG_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __CLANG_ATOMIC_CHAR_LOCK_FREE
# define ATOMIC_CHAR16_T_LOCK_FREE  __CLANG_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __CLANG_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __CLANG_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __CLANG_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __CLANG_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __CLANG_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __CLANG_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __CLANG_ATOMIC_POINTER_LOCK_FREE
#elif defined(__GCC_ATOMIC_BOOL_LOCK_FREE)
# define ATOMIC_BOOL_LOCK_FREE      __GCC_ATOMIC_BOOL_LOCK_FREE
# define ATOMIC_CHAR_LOCK_FREE      __GCC_ATOMIC_CHAR_LOCK_FREE
# define ATOMIC_CHAR16_T_LOCK_FREE  __GCC_ATOMIC_CHAR16_T_LOCK_FREE
# define ATOMIC_CHAR32_T_LOCK_FREE  __GCC_ATOMIC_CHAR32_T_LOCK_FREE
# define ATOMIC_WCHAR_T_LOCK_FREE   __GCC_ATOMIC_WCHAR_T_LOCK_FREE
# define ATOMIC_SHORT_LOCK_FREE     __GCC_ATOMIC_SHORT_LOCK_FREE
# define ATOMIC_INT_LOCK_FREE       __GCC_ATOMIC_INT_LOCK_FREE
# define ATOMIC_LONG_LOCK_FREE      __GCC_ATOMIC_LONG_LOCK_FREE
# define ATOMIC_LLONG_LOCK_FREE     __GCC_ATOMIC_LLONG_LOCK_FREE
# define ATOMIC_POINTER_LOCK_FREE   __GCC_ATOMIC_POINTER_LOCK_FREE
#endif
#ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS

// Fallback representation for types that cannot be lock-free: the plain value
// plus a spinlock that guards every access to it.
template<typename _Tp>
struct __cxx_atomic_lock_impl {
  _LIBCPP_INLINE_VISIBILITY
  __cxx_atomic_lock_impl() _NOEXCEPT
    : __a_value(), __a_lock(0) {}
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit
  __cxx_atomic_lock_impl(_Tp value) _NOEXCEPT
    : __a_value(value), __a_lock(0) {}

  _Tp __a_value;
  // mutable: even const reads (__read) must take the lock.
  mutable __cxx_atomic_base_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_lock;

  // Spin until the exchange observes the lock previously unheld (false).
  // acquire on lock / release on unlock gives the usual critical-section
  // ordering.
  _LIBCPP_INLINE_VISIBILITY void __lock() const volatile {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
      /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __lock() const {
    while(1 == __cxx_atomic_exchange(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(true), memory_order_acquire))
      /*spin*/;
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const volatile {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }
  _LIBCPP_INLINE_VISIBILITY void __unlock() const {
    __cxx_atomic_store(&__a_lock, _LIBCPP_ATOMIC_FLAG_TYPE(false), memory_order_release);
  }

  // Copy the value out under the lock; the volatile flavor must copy
  // byte-by-byte via __cxx_atomic_assign_volatile.
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const volatile {
    __lock();
    _Tp __old;
    __cxx_atomic_assign_volatile(__old, __a_value);
    __unlock();
    return __old;
  }
  _LIBCPP_INLINE_VISIBILITY _Tp __read() const {
    __lock();
    _Tp __old = __a_value;
    __unlock();
    return __old;
  }
};
// Non-atomic initialization: no lock needed, no other thread can see the
// object yet.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_init(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val) {
  __a->__a_value = __val;
}

// Store under the lock; the requested memory_order is ignored, the spinlock's
// acquire/release already provides at least that ordering.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  __a->__lock();
  __cxx_atomic_assign_volatile(__a->__a_value, __val);
  __a->__unlock();
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
void __cxx_atomic_store(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __val, memory_order) {
  __a->__lock();
  __a->__a_value = __val;
  __a->__unlock();
}

// Load: delegate to the locking __read() helper.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const volatile __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_load(const __cxx_atomic_lock_impl<_Tp>* __a, memory_order) {
  return __a->__read();
}

// Exchange: swap in the new value and return the old one, all under the lock.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(volatile __cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __value);
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_exchange(__cxx_atomic_lock_impl<_Tp>* __a, _Tp __value, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value = __value;
  __a->__unlock();
  return __old;
}
  1050. template <typename _Tp>
  1051. _LIBCPP_INLINE_VISIBILITY
  1052. bool __cxx_atomic_compare_exchange_strong(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1053. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1054. __a->__lock();
  1055. _Tp temp;
  1056. __cxx_atomic_assign_volatile(temp, __a->__a_value);
  1057. bool __ret = temp == *__expected;
  1058. if(__ret)
  1059. __cxx_atomic_assign_volatile(__a->__a_value, __value);
  1060. else
  1061. __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  1062. __a->__unlock();
  1063. return __ret;
  1064. }
// Strong compare-exchange on the locked representation, non-volatile flavor.
// On failure, *__expected receives the observed value; orders are ignored
// (the spinlock already provides acquire/release).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_strong(__cxx_atomic_lock_impl<_Tp>* __a,
                                          _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = __a->__a_value == *__expected;
  if(__ret)
    __a->__a_value = __value;
  else
    *__expected = __a->__a_value;
  __a->__unlock();
  return __ret;
}
  1078. template <typename _Tp>
  1079. _LIBCPP_INLINE_VISIBILITY
  1080. bool __cxx_atomic_compare_exchange_weak(volatile __cxx_atomic_lock_impl<_Tp>* __a,
  1081. _Tp* __expected, _Tp __value, memory_order, memory_order) {
  1082. __a->__lock();
  1083. _Tp temp;
  1084. __cxx_atomic_assign_volatile(temp, __a->__a_value);
  1085. bool __ret = temp == *__expected;
  1086. if(__ret)
  1087. __cxx_atomic_assign_volatile(__a->__a_value, __value);
  1088. else
  1089. __cxx_atomic_assign_volatile(*__expected, __a->__a_value);
  1090. __a->__unlock();
  1091. return __ret;
  1092. }
// Weak compare-exchange, non-volatile flavor; identical semantics to the
// strong form since the lock rules out spurious failure.
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
bool __cxx_atomic_compare_exchange_weak(__cxx_atomic_lock_impl<_Tp>* __a,
                                        _Tp* __expected, _Tp __value, memory_order, memory_order) {
  __a->__lock();
  bool __ret = __a->__a_value == *__expected;
  if(__ret)
    __a->__a_value = __value;
  else
    *__expected = __a->__a_value;
  __a->__unlock();
  return __ret;
}
// fetch_add on the locked representation: read-modify-write under the lock,
// returning the previous value. Memory orders are ignored (lock suffices).
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old + __delta));
  __a->__unlock();
  return __old;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}

// Pointer specializations: the delta is a ptrdiff_t element count; ordinary
// pointer arithmetic does the scaling.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(volatile __cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, __old + __delta);
  __a->__unlock();
  return __old;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp* __cxx_atomic_fetch_add(__cxx_atomic_lock_impl<_Tp*>* __a,
                            ptrdiff_t __delta, memory_order) {
  __a->__lock();
  _Tp* __old = __a->__a_value;
  __a->__a_value += __delta;
  __a->__unlock();
  return __old;
}

// fetch_sub: same pattern with subtraction.
template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old - __delta));
  __a->__unlock();
  return __old;
}

template <typename _Tp, typename _Td>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_sub(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Td __delta, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value -= __delta;
  __a->__unlock();
  return __old;
}
// Bitwise fetch-ops on the locked representation: read-modify-write under
// the lock, returning the previous value. Orders are ignored (lock suffices).
template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old & __pattern));
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_and(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value &= __pattern;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old | __pattern));
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_or(__cxx_atomic_lock_impl<_Tp>* __a,
                          _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value |= __pattern;
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(volatile __cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old;
  __cxx_atomic_assign_volatile(__old, __a->__a_value);
  __cxx_atomic_assign_volatile(__a->__a_value, _Tp(__old ^ __pattern));
  __a->__unlock();
  return __old;
}

template <typename _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp __cxx_atomic_fetch_xor(__cxx_atomic_lock_impl<_Tp>* __a,
                           _Tp __pattern, memory_order) {
  __a->__lock();
  _Tp __old = __a->__a_value;
  __a->__a_value ^= __pattern;
  __a->__unlock();
  return __old;
}
#ifdef __cpp_lib_atomic_is_always_lock_free

// Compile-time test: is an object of this size always lock-free?  Uses the
// compiler builtin when the feature-test macro says it is available.
template<typename _Tp> struct __cxx_is_always_lock_free {
    enum { __value = __atomic_always_lock_free(sizeof(_Tp), 0) }; };

#else

// Fallback without the builtin: default to "not lock-free", then specialize
// per type from the C ATOMIC_*_LOCK_FREE macros (value 2 == always lock-free).
template<typename _Tp> struct __cxx_is_always_lock_free { enum { __value = false }; };
// Implementations must match the C ATOMIC_*_LOCK_FREE macro values.
template<> struct __cxx_is_always_lock_free<bool> { enum { __value = 2 == ATOMIC_BOOL_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<signed char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned char> { enum { __value = 2 == ATOMIC_CHAR_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char16_t> { enum { __value = 2 == ATOMIC_CHAR16_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<char32_t> { enum { __value = 2 == ATOMIC_CHAR32_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<wchar_t> { enum { __value = 2 == ATOMIC_WCHAR_T_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned short> { enum { __value = 2 == ATOMIC_SHORT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned int> { enum { __value = 2 == ATOMIC_INT_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long> { enum { __value = 2 == ATOMIC_LONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<unsigned long long> { enum { __value = 2 == ATOMIC_LLONG_LOCK_FREE }; };
template<typename _Tp> struct __cxx_is_always_lock_free<_Tp*> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };
template<> struct __cxx_is_always_lock_free<std::nullptr_t> { enum { __value = 2 == ATOMIC_POINTER_LOCK_FREE }; };

#endif //__cpp_lib_atomic_is_always_lock_free

// Storage selector for std::atomic<T>.  In the only-use-builtins build the
// base is chosen at compile time: the builtin (lock-free) implementation when
// the type is always lock-free, otherwise the lock-based fallback above.
// NOTE(review): the matching #ifdef _LIBCPP_ATOMIC_ONLY_USE_BUILTINS opens
// earlier in the file, outside this excerpt.
template <typename _Tp,
          typename _Base = typename conditional<__cxx_is_always_lock_free<_Tp>::__value,
                                                __cxx_atomic_base_impl<_Tp>,
                                                __cxx_atomic_lock_impl<_Tp> >::type>
#else
template <typename _Tp,
          typename _Base = __cxx_atomic_base_impl<_Tp> >
#endif //_LIBCPP_ATOMIC_ONLY_USE_BUILTINS
struct __cxx_atomic_impl : public _Base {

// Guarded because older GCCs lack is_trivially_copyable -- presumably GCC
// >= 5.0.1; TODO confirm the _GNUC_VER encoding.
#if _GNUC_VER >= 501
  static_assert(is_trivially_copyable<_Tp>::value,
    "std::atomic<Tp> requires that 'Tp' be a trivially copyable type");
#endif

  _LIBCPP_INLINE_VISIBILITY __cxx_atomic_impl() _NOEXCEPT _LIBCPP_DEFAULT
  _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR explicit __cxx_atomic_impl(_Tp value) _NOEXCEPT
    : _Base(value) {}
};
  1273. // general atomic<T>
// general atomic<T>

// Primary template with the operations common to every std::atomic<T>.
// The bool parameter is true for integral types other than bool; that case
// is handled by the specialization further below, which adds the
// arithmetic/bitwise operations on top of this one.
template <class _Tp, bool = is_integral<_Tp>::value && !is_same<_Tp, bool>::value>
struct __atomic_base  // false
{
    // mutable: const-qualified load() still needs to pass a non-const
    // pointer to the __cxx_atomic_* backend.
    mutable __cxx_atomic_impl<_Tp> __a_;

#if defined(__cpp_lib_atomic_is_always_lock_free)
    static _LIBCPP_CONSTEXPR bool is_always_lock_free = __atomic_always_lock_free(sizeof(__a_), 0);
#endif

    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const volatile _NOEXCEPT
        {return __cxx_atomic_is_lock_free(sizeof(_Tp));}
    // Non-volatile overload forwards to the volatile one.
    _LIBCPP_INLINE_VISIBILITY
    bool is_lock_free() const _NOEXCEPT
        {return static_cast<__atomic_base const volatile*>(this)->is_lock_free();}

    // store/load: the _LIBCPP_CHECK_*_MEMORY_ORDER macros emit a
    // compile-time diagnostic for memory orders the standard forbids for
    // the respective operation.
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    void store(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
      _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
        {__cxx_atomic_store(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const volatile _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp load(memory_order __m = memory_order_seq_cst) const _NOEXCEPT
      _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
        {return __cxx_atomic_load(&__a_, __m);}

    // Implicit conversion is a seq_cst load.
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const volatile _NOEXCEPT {return load();}
    _LIBCPP_INLINE_VISIBILITY
    operator _Tp() const _NOEXCEPT {return load();}

    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp exchange(_Tp __d, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, __d, __m);}

    // Two-order compare/exchange: __s is the success order, __f the failure
    // order.  On failure the backend writes the observed value back to __e.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) volatile _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __s, memory_order __f) _NOEXCEPT
      _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __s, __f);}

    // Single-order overloads pass __m as both the success and failure order.
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_weak(_Tp& __e, _Tp __d,
                               memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_weak(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool compare_exchange_strong(_Tp& __e, _Tp __d,
                                 memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_compare_exchange_strong(&__a_, &__e, __d, __m, __m);}

    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    __atomic_base(_Tp __d) _NOEXCEPT : __a_(__d) {}

    // Atomics are neither copyable nor copy-assignable; in C++03 mode this
    // is emulated by declaring the members private and not defining them.
#ifndef _LIBCPP_CXX03_LANG
    __atomic_base(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) = delete;
    __atomic_base& operator=(const __atomic_base&) volatile = delete;
#else
private:
    __atomic_base(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&);
    __atomic_base& operator=(const __atomic_base&) volatile;
#endif
};

#if defined(__cpp_lib_atomic_is_always_lock_free)
// Out-of-class definition of the static data member (required before C++17
// inline variables).
template <class _Tp, bool __b>
_LIBCPP_CONSTEXPR bool __atomic_base<_Tp, __b>::is_always_lock_free;
#endif
  1368. // atomic<Integral>
// atomic<Integral>

// Specialization for integral types (except bool): layers fetch_* and the
// arithmetic/bitwise operators on top of the generic base.
template <class _Tp>
struct __atomic_base<_Tp, true>
    : public __atomic_base<_Tp, false>
{
    typedef __atomic_base<_Tp, false> __base;

    _LIBCPP_INLINE_VISIBILITY
    __atomic_base() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR __atomic_base(_Tp __d) _NOEXCEPT : __base(__d) {}

    // fetch_* atomically apply the operation and return the value held
    // immediately before it; each comes in volatile/non-volatile pairs.
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_add(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_sub(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_and(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_and(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_or(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_or(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp fetch_xor(_Tp __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_xor(&this->__a_, __op, __m);}

    // Post-increment/decrement return the old value; the pre-forms and the
    // compound assignments recompute and return the new value.
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) volatile _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++(int) _NOEXCEPT {return fetch_add(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) volatile _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--(int) _NOEXCEPT {return fetch_sub(_Tp(1));}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() volatile _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator++() _NOEXCEPT {return fetch_add(_Tp(1)) + _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() volatile _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator--() _NOEXCEPT {return fetch_sub(_Tp(1)) - _Tp(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator+=(_Tp __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator-=(_Tp __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) volatile _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator&=(_Tp __op) _NOEXCEPT {return fetch_and(__op) & __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) volatile _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator|=(_Tp __op) _NOEXCEPT {return fetch_or(__op) | __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) volatile _NOEXCEPT {return fetch_xor(__op) ^ __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator^=(_Tp __op) _NOEXCEPT {return fetch_xor(__op) ^ __op;}
};
  1445. // atomic<T>
// atomic<T>

// Primary std::atomic template.  Assignment performs a seq_cst store and,
// per the standard, returns the stored value rather than a reference.
template <class _Tp>
struct atomic
    : public __atomic_base<_Tp>
{
    typedef __atomic_base<_Tp> __base;

    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp __d) _NOEXCEPT : __base(__d) {}

    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp operator=(_Tp __d) _NOEXCEPT
        {__base::store(__d); return __d;}
};
  1462. // atomic<T*>
// atomic<T*>

// Partial specialization for pointers: adds pointer arithmetic in terms of
// fetch_add/fetch_sub, which take a ptrdiff_t element offset.
template <class _Tp>
struct atomic<_Tp*>
    : public __atomic_base<_Tp*>
{
    typedef __atomic_base<_Tp*> __base;

    _LIBCPP_INLINE_VISIBILITY
    atomic() _NOEXCEPT _LIBCPP_DEFAULT
    _LIBCPP_INLINE_VISIBILITY
    _LIBCPP_CONSTEXPR atomic(_Tp* __d) _NOEXCEPT : __base(__d) {}

    // Assignment stores with seq_cst and returns the stored pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) volatile _NOEXCEPT
        {__base::store(__d); return __d;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator=(_Tp* __d) _NOEXCEPT
        {__base::store(__d); return __d;}

    // fetch_add/fetch_sub return the pointer value held before the update.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_add(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_add(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst)
                                                                        volatile _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* fetch_sub(ptrdiff_t __op, memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_fetch_sub(&this->__a_, __op, __m);}

    // Post-forms return the old pointer; pre-forms and compound assignments
    // return the updated pointer.
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) volatile _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++(int) _NOEXCEPT {return fetch_add(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) volatile _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--(int) _NOEXCEPT {return fetch_sub(1);}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() volatile _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator++() _NOEXCEPT {return fetch_add(1) + 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() volatile _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator--() _NOEXCEPT {return fetch_sub(1) - 1;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator+=(ptrdiff_t __op) _NOEXCEPT {return fetch_add(__op) + __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) volatile _NOEXCEPT {return fetch_sub(__op) - __op;}
    _LIBCPP_INLINE_VISIBILITY
    _Tp* operator-=(ptrdiff_t __op) _NOEXCEPT {return fetch_sub(__op) - __op;}
};
// atomic_is_lock_free

// C-compatible free function: forwards to the member is_lock_free().
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_is_lock_free(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->is_lock_free();
}

// atomic_init

// Non-atomically initializes *__o with __d by writing the internal
// representation directly via __cxx_atomic_init.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_init(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_init(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __cxx_atomic_init(&__o->__a_, __d);
}
// atomic_store

// Free-function form of atomic<T>::store (seq_cst ordering).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    __o->store(__d);
}

// atomic_store_explicit

// As atomic_store, with a caller-supplied memory order; the macro
// diagnoses orders that are invalid for a store.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
void
atomic_store_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_STORE_MEMORY_ORDER(__m)
{
    __o->store(__d, __m);
}

// atomic_load

// Free-function form of atomic<T>::load (seq_cst ordering).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const volatile atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load(const atomic<_Tp>* __o) _NOEXCEPT
{
    return __o->load();
}

// atomic_load_explicit

// As atomic_load, with a caller-supplied memory order; the macro diagnoses
// orders that are invalid for a load.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const volatile atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_load_explicit(const atomic<_Tp>* __o, memory_order __m) _NOEXCEPT
  _LIBCPP_CHECK_LOAD_MEMORY_ORDER(__m)
{
    return __o->load(__m);
}
// atomic_exchange

// Free-function form of atomic<T>::exchange (seq_cst); returns the value
// held before the exchange.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(volatile atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange(atomic<_Tp>* __o, _Tp __d) _NOEXCEPT
{
    return __o->exchange(__d);
}

// atomic_exchange_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(volatile atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp
atomic_exchange_explicit(atomic<_Tp>* __o, _Tp __d, memory_order __m) _NOEXCEPT
{
    return __o->exchange(__d, __m);
}

// atomic_compare_exchange_weak

// Free-function CAS; note the expected value is passed by pointer and is
// updated with the observed value on failure.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_weak(*__e, __d);
}

// atomic_compare_exchange_strong

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(volatile atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong(atomic<_Tp>* __o, _Tp* __e, _Tp __d) _NOEXCEPT
{
    return __o->compare_exchange_strong(*__e, __d);
}

// atomic_compare_exchange_weak_explicit

// Explicit success (__s) and failure (__f) orders; the macro diagnoses
// invalid order combinations at compile time.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(volatile atomic<_Tp>* __o, _Tp* __e,
                                      _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_weak_explicit(atomic<_Tp>* __o, _Tp* __e, _Tp __d,
                                      memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_weak(*__e, __d, __s, __f);
}

// atomic_compare_exchange_strong_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(volatile atomic<_Tp>* __o,
                                        _Tp* __e, _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
bool
atomic_compare_exchange_strong_explicit(atomic<_Tp>* __o, _Tp* __e,
                                        _Tp __d,
                                        memory_order __s, memory_order __f) _NOEXCEPT
  _LIBCPP_CHECK_EXCHANGE_MEMORY_ORDER(__s, __f)
{
    return __o->compare_exchange_strong(*__e, __d, __s, __f);
}
// atomic_fetch_add

// Free-function fetch_add.  SFINAE restricts the value overloads to
// integral types other than bool; separate overloads handle atomic<T*>
// with a ptrdiff_t offset.  All return the pre-update value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_add(__op);
}

// atomic_fetch_add_explicit

// Same as atomic_fetch_add with an explicit memory order.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_add_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_add_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_add(__op, __m);
}
// atomic_fetch_sub

// Free-function fetch_sub; mirrors atomic_fetch_add (integral-only value
// overloads plus ptrdiff_t pointer overloads, returning the old value).
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(volatile atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub(atomic<_Tp*>* __o, ptrdiff_t __op) _NOEXCEPT
{
    return __o->fetch_sub(__op);
}

// atomic_fetch_sub_explicit

// Same as atomic_fetch_sub with an explicit memory order.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_sub_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(volatile atomic<_Tp*>* __o, ptrdiff_t __op,
                          memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
_Tp*
atomic_fetch_sub_explicit(atomic<_Tp*>* __o, ptrdiff_t __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_sub(__op, __m);
}
// atomic_fetch_and

// Free-function fetch_and; integral types only (no pointer form for
// bitwise operations).  Returns the pre-update value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_and(__op);
}

// atomic_fetch_and_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_and_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_and(__op, __m);
}
// atomic_fetch_or

// Free-function fetch_or; integral types only, returns the pre-update value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_or(__op);
}

// atomic_fetch_or_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_or_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_or(__op, __m);
}
// atomic_fetch_xor

// Free-function fetch_xor; integral types only, returns the pre-update value.
template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(volatile atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor(atomic<_Tp>* __o, _Tp __op) _NOEXCEPT
{
    return __o->fetch_xor(__op);
}

// atomic_fetch_xor_explicit

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(volatile atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}

template <class _Tp>
_LIBCPP_INLINE_VISIBILITY
typename enable_if
<
    is_integral<_Tp>::value && !is_same<_Tp, bool>::value,
    _Tp
>::type
atomic_fetch_xor_explicit(atomic<_Tp>* __o, _Tp __op, memory_order __m) _NOEXCEPT
{
    return __o->fetch_xor(__op, __m);
}
// flag type and operations

// std::atomic_flag: a boolean flag supporting test_and_set and clear.
// The payload type is the platform-chosen _LIBCPP_ATOMIC_FLAG_TYPE.
typedef struct atomic_flag
{
    __cxx_atomic_impl<_LIBCPP_ATOMIC_FLAG_TYPE> __a_;

    // Atomically set the flag to true and return its previous state.
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}
    _LIBCPP_INLINE_VISIBILITY
    bool test_and_set(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {return __cxx_atomic_exchange(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(true), __m);}

    // Atomically reset the flag to false.
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) volatile _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}
    _LIBCPP_INLINE_VISIBILITY
    void clear(memory_order __m = memory_order_seq_cst) _NOEXCEPT
        {__cxx_atomic_store(&__a_, _LIBCPP_ATOMIC_FLAG_TYPE(false), __m);}

    _LIBCPP_INLINE_VISIBILITY
    atomic_flag() _NOEXCEPT _LIBCPP_DEFAULT
    // libc++ extension: construct with an initial value (the standard only
    // provides ATOMIC_FLAG_INIT).
    _LIBCPP_INLINE_VISIBILITY _LIBCPP_CONSTEXPR
    atomic_flag(bool __b) _NOEXCEPT : __a_(__b) {} // EXTENSION

    // Not copyable or copy-assignable (emulated via private decls in C++03).
#ifndef _LIBCPP_CXX03_LANG
    atomic_flag(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) = delete;
    atomic_flag& operator=(const atomic_flag&) volatile = delete;
#else
private:
    atomic_flag(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&);
    atomic_flag& operator=(const atomic_flag&) volatile;
#endif
} atomic_flag;
  2031. inline _LIBCPP_INLINE_VISIBILITY
  2032. bool
  2033. atomic_flag_test_and_set(volatile atomic_flag* __o) _NOEXCEPT
  2034. {
  2035. return __o->test_and_set();
  2036. }
  2037. inline _LIBCPP_INLINE_VISIBILITY
  2038. bool
  2039. atomic_flag_test_and_set(atomic_flag* __o) _NOEXCEPT
  2040. {
  2041. return __o->test_and_set();
  2042. }
  2043. inline _LIBCPP_INLINE_VISIBILITY
  2044. bool
  2045. atomic_flag_test_and_set_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
  2046. {
  2047. return __o->test_and_set(__m);
  2048. }
  2049. inline _LIBCPP_INLINE_VISIBILITY
  2050. bool
  2051. atomic_flag_test_and_set_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
  2052. {
  2053. return __o->test_and_set(__m);
  2054. }
  2055. inline _LIBCPP_INLINE_VISIBILITY
  2056. void
  2057. atomic_flag_clear(volatile atomic_flag* __o) _NOEXCEPT
  2058. {
  2059. __o->clear();
  2060. }
  2061. inline _LIBCPP_INLINE_VISIBILITY
  2062. void
  2063. atomic_flag_clear(atomic_flag* __o) _NOEXCEPT
  2064. {
  2065. __o->clear();
  2066. }
  2067. inline _LIBCPP_INLINE_VISIBILITY
  2068. void
  2069. atomic_flag_clear_explicit(volatile atomic_flag* __o, memory_order __m) _NOEXCEPT
  2070. {
  2071. __o->clear(__m);
  2072. }
  2073. inline _LIBCPP_INLINE_VISIBILITY
  2074. void
  2075. atomic_flag_clear_explicit(atomic_flag* __o, memory_order __m) _NOEXCEPT
  2076. {
  2077. __o->clear(__m);
  2078. }
  2079. // fences
  2080. inline _LIBCPP_INLINE_VISIBILITY
  2081. void
  2082. atomic_thread_fence(memory_order __m) _NOEXCEPT
  2083. {
  2084. __cxx_atomic_thread_fence(__m);
  2085. }
  2086. inline _LIBCPP_INLINE_VISIBILITY
  2087. void
  2088. atomic_signal_fence(memory_order __m) _NOEXCEPT
  2089. {
  2090. __cxx_atomic_signal_fence(__m);
  2091. }
// Atomics for standard typedef types

// Convenience aliases required by [atomics.syn]: one atomic_X typedef
// for each builtin integer/character type and each <cstdint> typedef.

// Builtin integral and character types.
typedef atomic<bool> atomic_bool;
typedef atomic<char> atomic_char;
typedef atomic<signed char> atomic_schar;
typedef atomic<unsigned char> atomic_uchar;
typedef atomic<short> atomic_short;
typedef atomic<unsigned short> atomic_ushort;
typedef atomic<int> atomic_int;
typedef atomic<unsigned int> atomic_uint;
typedef atomic<long> atomic_long;
typedef atomic<unsigned long> atomic_ulong;
typedef atomic<long long> atomic_llong;
typedef atomic<unsigned long long> atomic_ullong;
typedef atomic<char16_t> atomic_char16_t;
typedef atomic<char32_t> atomic_char32_t;
typedef atomic<wchar_t> atomic_wchar_t;

// <cstdint> least-width types.
typedef atomic<int_least8_t> atomic_int_least8_t;
typedef atomic<uint_least8_t> atomic_uint_least8_t;
typedef atomic<int_least16_t> atomic_int_least16_t;
typedef atomic<uint_least16_t> atomic_uint_least16_t;
typedef atomic<int_least32_t> atomic_int_least32_t;
typedef atomic<uint_least32_t> atomic_uint_least32_t;
typedef atomic<int_least64_t> atomic_int_least64_t;
typedef atomic<uint_least64_t> atomic_uint_least64_t;

// <cstdint> fast-width types.
typedef atomic<int_fast8_t> atomic_int_fast8_t;
typedef atomic<uint_fast8_t> atomic_uint_fast8_t;
typedef atomic<int_fast16_t> atomic_int_fast16_t;
typedef atomic<uint_fast16_t> atomic_uint_fast16_t;
typedef atomic<int_fast32_t> atomic_int_fast32_t;
typedef atomic<uint_fast32_t> atomic_uint_fast32_t;
typedef atomic<int_fast64_t> atomic_int_fast64_t;
typedef atomic<uint_fast64_t> atomic_uint_fast64_t;

// <cstdint> exact-width types.
typedef atomic< int8_t> atomic_int8_t;
typedef atomic<uint8_t> atomic_uint8_t;
typedef atomic< int16_t> atomic_int16_t;
typedef atomic<uint16_t> atomic_uint16_t;
typedef atomic< int32_t> atomic_int32_t;
typedef atomic<uint32_t> atomic_uint32_t;
typedef atomic< int64_t> atomic_int64_t;
typedef atomic<uint64_t> atomic_uint64_t;

// Pointer-sized, size, and max-width types.
typedef atomic<intptr_t> atomic_intptr_t;
typedef atomic<uintptr_t> atomic_uintptr_t;
typedef atomic<size_t> atomic_size_t;
typedef atomic<ptrdiff_t> atomic_ptrdiff_t;
typedef atomic<intmax_t> atomic_intmax_t;
typedef atomic<uintmax_t> atomic_uintmax_t;

// Standard initialization macros: ATOMIC_FLAG_INIT value-initializes an
// atomic_flag to clear; ATOMIC_VAR_INIT initializes an atomic to __v.
#define ATOMIC_FLAG_INIT {false}
#define ATOMIC_VAR_INIT(__v) {__v}
  2140. _LIBCPP_END_NAMESPACE_STD
  2141. #endif // _LIBCPP_ATOMIC