aes-round.h

/*
 * AES round fragments, generic version
 * SPDX-License-Identifier: GPL-2.0-or-later
 *
 * Copyright (C) 2023 Linaro, Ltd.
 */

#ifndef CRYPTO_AES_ROUND_H
#define CRYPTO_AES_ROUND_H

/* Hosts with acceleration will usually need a 16-byte vector type. */
typedef uint8_t AESStateVec __attribute__((vector_size(16)));

typedef union {
    uint8_t b[16];
    uint32_t w[4];
    uint64_t d[2];
    AESStateVec v;
} AESState;

#include "host/crypto/aes-round.h"
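
/*
 * Each operation below comes in three variants:
 *   *_accel  - host-accelerated, used when HAVE_AES_ACCEL is true;
 *   *_gen    - generic C, used when the 'be' argument matches the host
 *              byte order (HOST_BIG_ENDIAN == be);
 *   *_genrev - generic C, used when the state is stored in the opposite
 *              byte order.
 * The static inline wrappers select between them.  HAVE_AES_ACCEL and the
 * *_accel entry points are expected to be supplied by the host-specific
 * "host/crypto/aes-round.h" included above.
 */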

/*
 * Perform MixColumns.
 */
void aesenc_MC_gen(AESState *ret, const AESState *st);
void aesenc_MC_genrev(AESState *ret, const AESState *st);

static inline void aesenc_MC(AESState *r, const AESState *st, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesenc_MC_accel(r, st, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesenc_MC_gen(r, st);
    } else {
        aesenc_MC_genrev(r, st);
    }
}

/*
 * Perform SubBytes + ShiftRows + AddRoundKey.
 */
void aesenc_SB_SR_AK_gen(AESState *ret, const AESState *st,
                         const AESState *rk);
void aesenc_SB_SR_AK_genrev(AESState *ret, const AESState *st,
                            const AESState *rk);

static inline void aesenc_SB_SR_AK(AESState *r, const AESState *st,
                                   const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesenc_SB_SR_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesenc_SB_SR_AK_gen(r, st, rk);
    } else {
        aesenc_SB_SR_AK_genrev(r, st, rk);
    }
}
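
/*
 * SubBytes + ShiftRows + AddRoundKey without MixColumns is the shape of
 * the final AES encryption round; see the composition sketch further below.
 */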

/*
 * Perform SubBytes + ShiftRows + MixColumns + AddRoundKey.
 */
void aesenc_SB_SR_MC_AK_gen(AESState *ret, const AESState *st,
                            const AESState *rk);
void aesenc_SB_SR_MC_AK_genrev(AESState *ret, const AESState *st,
                               const AESState *rk);

static inline void aesenc_SB_SR_MC_AK(AESState *r, const AESState *st,
                                      const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesenc_SB_SR_MC_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesenc_SB_SR_MC_AK_gen(r, st, rk);
    } else {
        aesenc_SB_SR_MC_AK_genrev(r, st, rk);
    }
}
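
/*
 * Illustrative sketch (not part of the original header): one way the
 * fragments above could be composed into a full AES-128 encryption.
 * The function name and the 'rk' layout are assumptions for the example:
 * rk[0..10] holds the expanded round keys in the byte order selected by
 * 'be'.  Rounds 1..9 use the merged SubBytes + ShiftRows + MixColumns +
 * AddRoundKey step; the final round omits MixColumns.
 */
static inline void aesenc_rounds_128_example(AESState *out, const AESState *in,
                                             const AESState rk[11], bool be)
{
    AESState t0, t1;
    int i;

    t0.v = in->v ^ rk[0].v;                 /* initial AddRoundKey */
    for (i = 1; i < 10; i++) {
        aesenc_SB_SR_MC_AK(&t1, &t0, &rk[i], be);
        t0 = t1;
    }
    aesenc_SB_SR_AK(out, &t0, &rk[10], be); /* final round, no MixColumns */
}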

/*
 * Perform InvMixColumns.
 */
void aesdec_IMC_gen(AESState *ret, const AESState *st);
void aesdec_IMC_genrev(AESState *ret, const AESState *st);

static inline void aesdec_IMC(AESState *r, const AESState *st, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_IMC_accel(r, st, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_IMC_gen(r, st);
    } else {
        aesdec_IMC_genrev(r, st);
    }
}
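
/*
 * Besides its use inside full decryption rounds, InvMixColumns on its own
 * is what converts encryption round keys into the transformed keys expected
 * by the InvMixColumns-before-AddRoundKey ordering declared further below
 * (the "equivalent inverse cipher" of FIPS-197).
 */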

/*
 * Perform InvSubBytes + InvShiftRows + AddRoundKey.
 */
void aesdec_ISB_ISR_AK_gen(AESState *ret, const AESState *st,
                           const AESState *rk);
void aesdec_ISB_ISR_AK_genrev(AESState *ret, const AESState *st,
                              const AESState *rk);

static inline void aesdec_ISB_ISR_AK(AESState *r, const AESState *st,
                                     const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_ISB_ISR_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_ISB_ISR_AK_gen(r, st, rk);
    } else {
        aesdec_ISB_ISR_AK_genrev(r, st, rk);
    }
}
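
/*
 * As on the encryption side, InvSubBytes + InvShiftRows + AddRoundKey
 * without InvMixColumns is the shape of the final AES decryption round.
 */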

/*
 * Perform InvSubBytes + InvShiftRows + AddRoundKey + InvMixColumns.
 */
void aesdec_ISB_ISR_AK_IMC_gen(AESState *ret, const AESState *st,
                               const AESState *rk);
void aesdec_ISB_ISR_AK_IMC_genrev(AESState *ret, const AESState *st,
                                  const AESState *rk);

static inline void aesdec_ISB_ISR_AK_IMC(AESState *r, const AESState *st,
                                         const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_ISB_ISR_AK_IMC_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_ISB_ISR_AK_IMC_gen(r, st, rk);
    } else {
        aesdec_ISB_ISR_AK_IMC_genrev(r, st, rk);
    }
}
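
/*
 * The ordering above (AddRoundKey before InvMixColumns) matches the
 * straight inverse cipher, where round keys are used as-is.  The ordering
 * below (InvMixColumns before AddRoundKey) matches the equivalent inverse
 * cipher, which expects round keys already passed through InvMixColumns.
 */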

/*
 * Perform InvSubBytes + InvShiftRows + InvMixColumns + AddRoundKey.
 */
void aesdec_ISB_ISR_IMC_AK_gen(AESState *ret, const AESState *st,
                               const AESState *rk);
void aesdec_ISB_ISR_IMC_AK_genrev(AESState *ret, const AESState *st,
                                  const AESState *rk);

static inline void aesdec_ISB_ISR_IMC_AK(AESState *r, const AESState *st,
                                         const AESState *rk, bool be)
{
    if (HAVE_AES_ACCEL) {
        aesdec_ISB_ISR_IMC_AK_accel(r, st, rk, be);
    } else if (HOST_BIG_ENDIAN == be) {
        aesdec_ISB_ISR_IMC_AK_gen(r, st, rk);
    } else {
        aesdec_ISB_ISR_IMC_AK_genrev(r, st, rk);
    }
}
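
/*
 * Illustrative sketch (not part of the original header): AES-128 decryption
 * composed from the fragments above using the equivalent inverse cipher
 * ordering.  The function name and the 'rk' layout are assumptions for the
 * example: rk[0] is the last encryption round key, rk[10] is the first, and
 * rk[1..9] are the intermediate encryption round keys in reverse order, each
 * passed through InvMixColumns (aesdec_IMC can produce those), all in the
 * byte order selected by 'be'.
 */
static inline void aesdec_rounds_128_example(AESState *out, const AESState *in,
                                             const AESState rk[11], bool be)
{
    AESState t0, t1;
    int i;

    t0.v = in->v ^ rk[0].v;                   /* initial AddRoundKey */
    for (i = 1; i < 10; i++) {
        aesdec_ISB_ISR_IMC_AK(&t1, &t0, &rk[i], be);
        t0 = t1;
    }
    aesdec_ISB_ISR_AK(out, &t0, &rk[10], be); /* final round, no InvMixColumns */
}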

#endif /* CRYPTO_AES_ROUND_H */