/*
 * Software MMU support (per-target)
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, see <http://www.gnu.org/licenses/>.
 */

/*
 * Generate inline load/store functions for all MMU modes (typically
 * at least _user and _kernel) as well as _data versions, for all data
 * sizes.
 *
 * Used by target op helpers.
 *
 * The syntax for the accessors is:
 *
 * load:  cpu_ld{sign}{size}{end}_{mmusuffix}(env, ptr)
 *        cpu_ld{sign}{size}{end}_{mmusuffix}_ra(env, ptr, retaddr)
 *        cpu_ld{sign}{size}{end}_mmuidx_ra(env, ptr, mmu_idx, retaddr)
 *        cpu_ld{sign}{size}{end}_mmu(env, ptr, oi, retaddr)
 *
 * store: cpu_st{size}{end}_{mmusuffix}(env, ptr, val)
 *        cpu_st{size}{end}_{mmusuffix}_ra(env, ptr, val, retaddr)
 *        cpu_st{size}{end}_mmuidx_ra(env, ptr, val, mmu_idx, retaddr)
 *        cpu_st{size}{end}_mmu(env, ptr, val, oi, retaddr)
 *
 * sign is:
 * (empty): for 32 and 64 bit sizes
 *   u    : unsigned
 *   s    : signed
 *
 * size is:
 *   b: 8 bits
 *   w: 16 bits
 *   l: 32 bits
 *   q: 64 bits
 *
 * end is:
 * (empty): for target native endian, or for 8 bit access
 *     _be: for forced big endian
 *     _le: for forced little endian
 *
 * mmusuffix is one of the generic suffixes "data" or "code", or "mmuidx".
 * The "mmuidx" suffix carries an extra mmu_idx argument that specifies
 * the index to use; the "data" and "code" suffixes take the index from
 * cpu_mmu_index().
 *
 * The "mmu" suffix carries the full MemOpIdx, with both mmu_idx and the
 * MemOp including alignment requirements.  The alignment will be enforced.
 */
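
/*
 * For example, under this naming scheme cpu_ldsw_be_data(env, ptr) is a
 * signed 16-bit big-endian load using the default data MMU index, and
 * cpu_stq_le_mmuidx_ra(env, ptr, val, mmu_idx, ra) is a 64-bit
 * little-endian store through an explicit MMU index.  A minimal sketch
 * of a target op helper combining two of these accessors (the helper
 * name is hypothetical; GETPC() supplies the host return address):
 *
 *   uint64_t helper_swap_ldst(CPUArchState *env, abi_ptr addr,
 *                             uint64_t val)
 *   {
 *       uint64_t old = cpu_ldq_data_ra(env, addr, GETPC());
 *       cpu_stq_data_ra(env, addr, val, GETPC());
 *       return old;
 *   }
 */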

#ifndef CPU_LDST_H
#define CPU_LDST_H

#ifndef CONFIG_TCG
#error Can only include this header with TCG
#endif

#include "exec/memopidx.h"
#include "exec/vaddr.h"
#include "exec/abi_ptr.h"
#include "exec/mmu-access-type.h"
#include "qemu/int128.h"

#if defined(CONFIG_USER_ONLY)
#include "user/guest-host.h"
#endif /* CONFIG_USER_ONLY */

uint32_t cpu_ldub_data(CPUArchState *env, abi_ptr ptr);
int cpu_ldsb_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_lduw_be_data(CPUArchState *env, abi_ptr ptr);
int cpu_ldsw_be_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_ldl_be_data(CPUArchState *env, abi_ptr ptr);
uint64_t cpu_ldq_be_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_lduw_le_data(CPUArchState *env, abi_ptr ptr);
int cpu_ldsw_le_data(CPUArchState *env, abi_ptr ptr);
uint32_t cpu_ldl_le_data(CPUArchState *env, abi_ptr ptr);
uint64_t cpu_ldq_le_data(CPUArchState *env, abi_ptr ptr);

uint32_t cpu_ldub_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
int cpu_ldsb_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint32_t cpu_lduw_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
int cpu_ldsw_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint32_t cpu_ldl_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint64_t cpu_ldq_be_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint32_t cpu_lduw_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
int cpu_ldsw_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint32_t cpu_ldl_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);
uint64_t cpu_ldq_le_data_ra(CPUArchState *env, abi_ptr ptr, uintptr_t ra);

void cpu_stb_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stw_be_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stl_be_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stq_be_data(CPUArchState *env, abi_ptr ptr, uint64_t val);
void cpu_stw_le_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stl_le_data(CPUArchState *env, abi_ptr ptr, uint32_t val);
void cpu_stq_le_data(CPUArchState *env, abi_ptr ptr, uint64_t val);

void cpu_stb_data_ra(CPUArchState *env, abi_ptr ptr,
                     uint32_t val, uintptr_t ra);
void cpu_stw_be_data_ra(CPUArchState *env, abi_ptr ptr,
                        uint32_t val, uintptr_t ra);
void cpu_stl_be_data_ra(CPUArchState *env, abi_ptr ptr,
                        uint32_t val, uintptr_t ra);
void cpu_stq_be_data_ra(CPUArchState *env, abi_ptr ptr,
                        uint64_t val, uintptr_t ra);
void cpu_stw_le_data_ra(CPUArchState *env, abi_ptr ptr,
                        uint32_t val, uintptr_t ra);
void cpu_stl_le_data_ra(CPUArchState *env, abi_ptr ptr,
                        uint32_t val, uintptr_t ra);
void cpu_stq_le_data_ra(CPUArchState *env, abi_ptr ptr,
                        uint64_t val, uintptr_t ra);

uint32_t cpu_ldub_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                            int mmu_idx, uintptr_t ra);
int cpu_ldsb_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                       int mmu_idx, uintptr_t ra);
uint32_t cpu_lduw_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                               int mmu_idx, uintptr_t ra);
int cpu_ldsw_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                          int mmu_idx, uintptr_t ra);
uint32_t cpu_ldl_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                              int mmu_idx, uintptr_t ra);
uint64_t cpu_ldq_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                              int mmu_idx, uintptr_t ra);
uint32_t cpu_lduw_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                               int mmu_idx, uintptr_t ra);
int cpu_ldsw_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                          int mmu_idx, uintptr_t ra);
uint32_t cpu_ldl_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                              int mmu_idx, uintptr_t ra);
uint64_t cpu_ldq_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr,
                              int mmu_idx, uintptr_t ra);

void cpu_stb_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
                       int mmu_idx, uintptr_t ra);
void cpu_stw_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
                          int mmu_idx, uintptr_t ra);
void cpu_stl_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
                          int mmu_idx, uintptr_t ra);
void cpu_stq_be_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint64_t val,
                          int mmu_idx, uintptr_t ra);
void cpu_stw_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
                          int mmu_idx, uintptr_t ra);
void cpu_stl_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint32_t val,
                          int mmu_idx, uintptr_t ra);
void cpu_stq_le_mmuidx_ra(CPUArchState *env, abi_ptr ptr, uint64_t val,
                          int mmu_idx, uintptr_t ra);
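
/*
 * A minimal sketch of a target helper that loads through an explicit
 * MMU index (helper_ldl_kernel and MMU_KERNEL_IDX are hypothetical
 * names; GETPC() supplies the host return address for unwinding):
 *
 *   uint32_t helper_ldl_kernel(CPUArchState *env, abi_ptr addr)
 *   {
 *       return cpu_ldl_mmuidx_ra(env, addr, MMU_KERNEL_IDX, GETPC());
 *   }
 */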

uint8_t cpu_ldb_mmu(CPUArchState *env, abi_ptr ptr, MemOpIdx oi, uintptr_t ra);
uint16_t cpu_ldw_mmu(CPUArchState *env, abi_ptr ptr, MemOpIdx oi, uintptr_t ra);
uint32_t cpu_ldl_mmu(CPUArchState *env, abi_ptr ptr, MemOpIdx oi, uintptr_t ra);
uint64_t cpu_ldq_mmu(CPUArchState *env, abi_ptr ptr, MemOpIdx oi, uintptr_t ra);
Int128 cpu_ld16_mmu(CPUArchState *env, abi_ptr addr, MemOpIdx oi, uintptr_t ra);

void cpu_stb_mmu(CPUArchState *env, abi_ptr ptr, uint8_t val,
                 MemOpIdx oi, uintptr_t ra);
void cpu_stw_mmu(CPUArchState *env, abi_ptr ptr, uint16_t val,
                 MemOpIdx oi, uintptr_t ra);
void cpu_stl_mmu(CPUArchState *env, abi_ptr ptr, uint32_t val,
                 MemOpIdx oi, uintptr_t ra);
void cpu_stq_mmu(CPUArchState *env, abi_ptr ptr, uint64_t val,
                 MemOpIdx oi, uintptr_t ra);
void cpu_st16_mmu(CPUArchState *env, abi_ptr addr, Int128 val,
                  MemOpIdx oi, uintptr_t ra);
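
/*
 * The MemOpIdx packs the MemOp and the mmu_idx together; for example,
 * an aligned 4-byte big-endian load could look like this (a sketch;
 * make_memop_idx() comes from "exec/memopidx.h"):
 *
 *   MemOpIdx oi = make_memop_idx(MO_BEUL | MO_ALIGN, mmu_idx);
 *   uint32_t val = cpu_ldl_mmu(env, addr, oi, GETPC());
 */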

uint32_t cpu_atomic_cmpxchgb_mmu(CPUArchState *env, abi_ptr addr,
                                 uint32_t cmpv, uint32_t newv,
                                 MemOpIdx oi, uintptr_t retaddr);
uint32_t cpu_atomic_cmpxchgw_le_mmu(CPUArchState *env, abi_ptr addr,
                                    uint32_t cmpv, uint32_t newv,
                                    MemOpIdx oi, uintptr_t retaddr);
uint32_t cpu_atomic_cmpxchgl_le_mmu(CPUArchState *env, abi_ptr addr,
                                    uint32_t cmpv, uint32_t newv,
                                    MemOpIdx oi, uintptr_t retaddr);
uint64_t cpu_atomic_cmpxchgq_le_mmu(CPUArchState *env, abi_ptr addr,
                                    uint64_t cmpv, uint64_t newv,
                                    MemOpIdx oi, uintptr_t retaddr);
uint32_t cpu_atomic_cmpxchgw_be_mmu(CPUArchState *env, abi_ptr addr,
                                    uint32_t cmpv, uint32_t newv,
                                    MemOpIdx oi, uintptr_t retaddr);
uint32_t cpu_atomic_cmpxchgl_be_mmu(CPUArchState *env, abi_ptr addr,
                                    uint32_t cmpv, uint32_t newv,
                                    MemOpIdx oi, uintptr_t retaddr);
uint64_t cpu_atomic_cmpxchgq_be_mmu(CPUArchState *env, abi_ptr addr,
                                    uint64_t cmpv, uint64_t newv,
                                    MemOpIdx oi, uintptr_t retaddr);
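
/*
 * For example, a guest 32-bit little-endian compare-and-swap in a
 * helper might be written as (a sketch; cmpv and newv are the
 * comparand and replacement values):
 *
 *   MemOpIdx oi = make_memop_idx(MO_LEUL | MO_ALIGN, mmu_idx);
 *   uint32_t old = cpu_atomic_cmpxchgl_le_mmu(env, addr, cmpv, newv,
 *                                             oi, GETPC());
 */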

#define GEN_ATOMIC_HELPER(NAME, TYPE, SUFFIX)   \
    TYPE cpu_atomic_ ## NAME ## SUFFIX ## _mmu  \
    (CPUArchState *env, abi_ptr addr, TYPE val, \
     MemOpIdx oi, uintptr_t retaddr);

#ifdef CONFIG_ATOMIC64
#define GEN_ATOMIC_HELPER_ALL(NAME)          \
    GEN_ATOMIC_HELPER(NAME, uint32_t, b)     \
    GEN_ATOMIC_HELPER(NAME, uint32_t, w_le)  \
    GEN_ATOMIC_HELPER(NAME, uint32_t, w_be)  \
    GEN_ATOMIC_HELPER(NAME, uint32_t, l_le)  \
    GEN_ATOMIC_HELPER(NAME, uint32_t, l_be)  \
    GEN_ATOMIC_HELPER(NAME, uint64_t, q_le)  \
    GEN_ATOMIC_HELPER(NAME, uint64_t, q_be)
#else
#define GEN_ATOMIC_HELPER_ALL(NAME)          \
    GEN_ATOMIC_HELPER(NAME, uint32_t, b)     \
    GEN_ATOMIC_HELPER(NAME, uint32_t, w_le)  \
    GEN_ATOMIC_HELPER(NAME, uint32_t, w_be)  \
    GEN_ATOMIC_HELPER(NAME, uint32_t, l_le)  \
    GEN_ATOMIC_HELPER(NAME, uint32_t, l_be)
#endif
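
/*
 * For example, GEN_ATOMIC_HELPER_ALL(fetch_add) below declares, among
 * its other size/endian variants:
 *
 *   uint32_t cpu_atomic_fetch_addl_le_mmu(CPUArchState *env, abi_ptr addr,
 *                                         uint32_t val,
 *                                         MemOpIdx oi, uintptr_t retaddr);
 */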

GEN_ATOMIC_HELPER_ALL(fetch_add)
GEN_ATOMIC_HELPER_ALL(fetch_sub)
GEN_ATOMIC_HELPER_ALL(fetch_and)
GEN_ATOMIC_HELPER_ALL(fetch_or)
GEN_ATOMIC_HELPER_ALL(fetch_xor)
GEN_ATOMIC_HELPER_ALL(fetch_smin)
GEN_ATOMIC_HELPER_ALL(fetch_umin)
GEN_ATOMIC_HELPER_ALL(fetch_smax)
GEN_ATOMIC_HELPER_ALL(fetch_umax)

GEN_ATOMIC_HELPER_ALL(add_fetch)
GEN_ATOMIC_HELPER_ALL(sub_fetch)
GEN_ATOMIC_HELPER_ALL(and_fetch)
GEN_ATOMIC_HELPER_ALL(or_fetch)
GEN_ATOMIC_HELPER_ALL(xor_fetch)
GEN_ATOMIC_HELPER_ALL(smin_fetch)
GEN_ATOMIC_HELPER_ALL(umin_fetch)
GEN_ATOMIC_HELPER_ALL(smax_fetch)
GEN_ATOMIC_HELPER_ALL(umax_fetch)

GEN_ATOMIC_HELPER_ALL(xchg)

#undef GEN_ATOMIC_HELPER_ALL
#undef GEN_ATOMIC_HELPER

Int128 cpu_atomic_cmpxchgo_le_mmu(CPUArchState *env, abi_ptr addr,
                                  Int128 cmpv, Int128 newv,
                                  MemOpIdx oi, uintptr_t retaddr);
Int128 cpu_atomic_cmpxchgo_be_mmu(CPUArchState *env, abi_ptr addr,
                                  Int128 cmpv, Int128 newv,
                                  MemOpIdx oi, uintptr_t retaddr);

#if TARGET_BIG_ENDIAN
# define cpu_lduw_data        cpu_lduw_be_data
# define cpu_ldsw_data        cpu_ldsw_be_data
# define cpu_ldl_data         cpu_ldl_be_data
# define cpu_ldq_data         cpu_ldq_be_data
# define cpu_lduw_data_ra     cpu_lduw_be_data_ra
# define cpu_ldsw_data_ra     cpu_ldsw_be_data_ra
# define cpu_ldl_data_ra      cpu_ldl_be_data_ra
# define cpu_ldq_data_ra      cpu_ldq_be_data_ra
# define cpu_lduw_mmuidx_ra   cpu_lduw_be_mmuidx_ra
# define cpu_ldsw_mmuidx_ra   cpu_ldsw_be_mmuidx_ra
# define cpu_ldl_mmuidx_ra    cpu_ldl_be_mmuidx_ra
# define cpu_ldq_mmuidx_ra    cpu_ldq_be_mmuidx_ra
# define cpu_stw_data         cpu_stw_be_data
# define cpu_stl_data         cpu_stl_be_data
# define cpu_stq_data         cpu_stq_be_data
# define cpu_stw_data_ra      cpu_stw_be_data_ra
# define cpu_stl_data_ra      cpu_stl_be_data_ra
# define cpu_stq_data_ra      cpu_stq_be_data_ra
# define cpu_stw_mmuidx_ra    cpu_stw_be_mmuidx_ra
# define cpu_stl_mmuidx_ra    cpu_stl_be_mmuidx_ra
# define cpu_stq_mmuidx_ra    cpu_stq_be_mmuidx_ra
#else
# define cpu_lduw_data        cpu_lduw_le_data
# define cpu_ldsw_data        cpu_ldsw_le_data
# define cpu_ldl_data         cpu_ldl_le_data
# define cpu_ldq_data         cpu_ldq_le_data
# define cpu_lduw_data_ra     cpu_lduw_le_data_ra
# define cpu_ldsw_data_ra     cpu_ldsw_le_data_ra
# define cpu_ldl_data_ra      cpu_ldl_le_data_ra
# define cpu_ldq_data_ra      cpu_ldq_le_data_ra
# define cpu_lduw_mmuidx_ra   cpu_lduw_le_mmuidx_ra
# define cpu_ldsw_mmuidx_ra   cpu_ldsw_le_mmuidx_ra
# define cpu_ldl_mmuidx_ra    cpu_ldl_le_mmuidx_ra
# define cpu_ldq_mmuidx_ra    cpu_ldq_le_mmuidx_ra
# define cpu_stw_data         cpu_stw_le_data
# define cpu_stl_data         cpu_stl_le_data
# define cpu_stq_data         cpu_stq_le_data
# define cpu_stw_data_ra      cpu_stw_le_data_ra
# define cpu_stl_data_ra      cpu_stl_le_data_ra
# define cpu_stq_data_ra      cpu_stq_le_data_ra
# define cpu_stw_mmuidx_ra    cpu_stw_le_mmuidx_ra
# define cpu_stl_mmuidx_ra    cpu_stl_le_mmuidx_ra
# define cpu_stq_mmuidx_ra    cpu_stq_le_mmuidx_ra
#endif

uint8_t cpu_ldb_code_mmu(CPUArchState *env, abi_ptr addr,
                         MemOpIdx oi, uintptr_t ra);
uint16_t cpu_ldw_code_mmu(CPUArchState *env, abi_ptr addr,
                          MemOpIdx oi, uintptr_t ra);
uint32_t cpu_ldl_code_mmu(CPUArchState *env, abi_ptr addr,
                          MemOpIdx oi, uintptr_t ra);
uint64_t cpu_ldq_code_mmu(CPUArchState *env, abi_ptr addr,
                          MemOpIdx oi, uintptr_t ra);

uint32_t cpu_ldub_code(CPUArchState *env, abi_ptr addr);
uint32_t cpu_lduw_code(CPUArchState *env, abi_ptr addr);
uint32_t cpu_ldl_code(CPUArchState *env, abi_ptr addr);
uint64_t cpu_ldq_code(CPUArchState *env, abi_ptr addr);
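
/*
 * For example, fetching a 32-bit instruction word from the guest code
 * stream (a sketch; "pc" is whatever guest address is being decoded):
 *
 *   uint32_t insn = cpu_ldl_code(env, pc);
 */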

/**
 * tlb_vaddr_to_host:
 * @env: CPUArchState
 * @addr: guest virtual address to look up
 * @access_type: access type (MMU_DATA_LOAD for read, MMU_DATA_STORE
 *               for write, MMU_INST_FETCH for execute)
 * @mmu_idx: MMU index to use for lookup
 *
 * Look up the specified guest virtual address in the TCG softmmu TLB.
 * If it can be translated to a host virtual address suitable for direct
 * RAM access, without causing a guest exception, return that address.
 * Otherwise (TLB entry is for an I/O access, guest software
 * TLB fill required, etc) return NULL.
 */
#ifdef CONFIG_USER_ONLY
static inline void *tlb_vaddr_to_host(CPUArchState *env, abi_ptr addr,
                                      MMUAccessType access_type, int mmu_idx)
{
    return g2h(env_cpu(env), addr);
}
#else
void *tlb_vaddr_to_host(CPUArchState *env, vaddr addr,
                        MMUAccessType access_type, int mmu_idx);
#endif
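
/*
 * A minimal usage sketch: try the fast path via direct host access and
 * fall back to the byte-wise accessors when the page is not directly
 * addressable (buf, len, mmu_idx and ra are assumed to come from the
 * surrounding helper):
 *
 *   void *host = tlb_vaddr_to_host(env, addr, MMU_DATA_LOAD, mmu_idx);
 *   if (host) {
 *       memcpy(buf, host, len);
 *   } else {
 *       for (size_t i = 0; i < len; i++) {
 *           buf[i] = cpu_ldub_mmuidx_ra(env, addr + i, mmu_idx, ra);
 *       }
 *   }
 */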

/*
 * For user-only, helpers that use guest to host address translation
 * must protect the actual host memory access by recording 'retaddr'
 * for the signal handler.  This is required for a race condition in
 * which another thread unmaps the page between a probe and the
 * actual access.
 */
#ifdef CONFIG_USER_ONLY
extern __thread uintptr_t helper_retaddr;

static inline void set_helper_retaddr(uintptr_t ra)
{
    helper_retaddr = ra;

    /*
     * Ensure that this write is visible to the SIGSEGV handler that
     * may be invoked due to a subsequent invalid memory operation.
     */
    signal_barrier();
}

static inline void clear_helper_retaddr(void)
{
    /*
     * Ensure that previous memory operations have succeeded before
     * removing the data visible to the signal handler.
     */
    signal_barrier();

    helper_retaddr = 0;
}
#else
#define set_helper_retaddr(ra)   do { } while (0)
#define clear_helper_retaddr()   do { } while (0)
#endif
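
/*
 * A sketch of the intended bracketing in a user-only helper (the g2h()
 * translation and the surrounding helper body are assumptions):
 *
 *   set_helper_retaddr(GETPC());
 *   val = ldl_le_p(g2h(env_cpu(env), addr));   /+ direct host access +/
 *   clear_helper_retaddr();
 */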

#endif /* CPU_LDST_H */