/*
 * Copyright (c) 1998-2008, Brian Gladman, Worcester, UK.
 *
 * Copyright 2012, Ole Reinhardt <ole.reinhardt@embedded-it.de>
 *
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the copyright holders nor the names of
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
 * FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
 * COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
 * OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
 * THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * For additional information see http://www.ethernut.de/
 */
/*!
 * \file gorp/crypt/aes.c
 *
 * \brief Brian Gladman's byte-oriented implementation of the AES crypto algorithm.
 *
 * Issue 09/09/2006
 *
 * This is an AES implementation that uses only 8-bit byte operations on the
 * cipher state (there are options to use 32-bit types if available).
 *
 * The combination of mix columns and byte substitution used here is based on
 * that developed by Karl Malbrain. His contribution is acknowledged.
 *
 * \verbatim
 * $Id$
 * \endverbatim
 */
#include <compiler.h>
#include <string.h>
#include <stdlib.h>
#include <inttypes.h>

#include <gorp/aes.h>

/* define if you have a fast memcpy function on your system */
#define HAVE_MEMCPY

/* define if you have fast 32-bit types on your system */
#if !(defined(__AVR__) || defined(__H8300__) || defined(__H8300H__) || defined(__H8300S__))
# define HAVE_UINT_32T
#endif
/*!
 * \brief Macros for finite field multiplication in the AES Galois field.
 */
#define WPOLY   0x011b
#define BPOLY     0x1b
#define DPOLY   0x008d

#define f1(x)   (x)
#define f2(x)   ((x << 1) ^ (((x >> 7) & 1) * WPOLY))
#define f4(x)   ((x << 2) ^ (((x >> 6) & 1) * WPOLY) ^ (((x >> 6) & 2) * WPOLY))
#define f8(x)   ((x << 3) ^ (((x >> 5) & 1) * WPOLY) ^ (((x >> 5) & 2) * WPOLY) \
                          ^ (((x >> 5) & 4) * WPOLY))
#define d2(x)   (((x) >> 1) ^ ((x) & 1 ? DPOLY : 0))

#define f3(x)   (f2(x) ^ x)
#define f9(x)   (f8(x) ^ x)
#define fb(x)   (f8(x) ^ f2(x) ^ x)
#define fd(x)   (f8(x) ^ f4(x) ^ x)
#define fe(x)   (f8(x) ^ f4(x) ^ f2(x))
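
/*
 * A small worked example of the multipliers above, using the xtime chain
 * for x = 0x57 (the sample value used in FIPS-197):
 *
 *   f2(0x57) = 0xae    multiplication by {02}
 *   f4(0x57) = 0x47    multiplication by {04}
 *   f8(0x57) = 0x8e    multiplication by {08}
 *   f3(0x57) = f2(0x57) ^ 0x57 = 0xf9
 *
 * Each doubling is a left shift with a conditional reduction by WPOLY
 * (0x11b). Note that these are plain macros and may evaluate their
 * argument more than once, so they are only applied to simple operands.
 */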
#if defined( AES_USE_TABLES )

#define sb_data(w) { /* S Box data values */ \
    w(0x63), w(0x7c), w(0x77), w(0x7b), w(0xf2), w(0x6b), w(0x6f), w(0xc5),\
    w(0x30), w(0x01), w(0x67), w(0x2b), w(0xfe), w(0xd7), w(0xab), w(0x76),\
    w(0xca), w(0x82), w(0xc9), w(0x7d), w(0xfa), w(0x59), w(0x47), w(0xf0),\
    w(0xad), w(0xd4), w(0xa2), w(0xaf), w(0x9c), w(0xa4), w(0x72), w(0xc0),\
    w(0xb7), w(0xfd), w(0x93), w(0x26), w(0x36), w(0x3f), w(0xf7), w(0xcc),\
    w(0x34), w(0xa5), w(0xe5), w(0xf1), w(0x71), w(0xd8), w(0x31), w(0x15),\
    w(0x04), w(0xc7), w(0x23), w(0xc3), w(0x18), w(0x96), w(0x05), w(0x9a),\
    w(0x07), w(0x12), w(0x80), w(0xe2), w(0xeb), w(0x27), w(0xb2), w(0x75),\
    w(0x09), w(0x83), w(0x2c), w(0x1a), w(0x1b), w(0x6e), w(0x5a), w(0xa0),\
    w(0x52), w(0x3b), w(0xd6), w(0xb3), w(0x29), w(0xe3), w(0x2f), w(0x84),\
    w(0x53), w(0xd1), w(0x00), w(0xed), w(0x20), w(0xfc), w(0xb1), w(0x5b),\
    w(0x6a), w(0xcb), w(0xbe), w(0x39), w(0x4a), w(0x4c), w(0x58), w(0xcf),\
    w(0xd0), w(0xef), w(0xaa), w(0xfb), w(0x43), w(0x4d), w(0x33), w(0x85),\
    w(0x45), w(0xf9), w(0x02), w(0x7f), w(0x50), w(0x3c), w(0x9f), w(0xa8),\
    w(0x51), w(0xa3), w(0x40), w(0x8f), w(0x92), w(0x9d), w(0x38), w(0xf5),\
    w(0xbc), w(0xb6), w(0xda), w(0x21), w(0x10), w(0xff), w(0xf3), w(0xd2),\
    w(0xcd), w(0x0c), w(0x13), w(0xec), w(0x5f), w(0x97), w(0x44), w(0x17),\
    w(0xc4), w(0xa7), w(0x7e), w(0x3d), w(0x64), w(0x5d), w(0x19), w(0x73),\
    w(0x60), w(0x81), w(0x4f), w(0xdc), w(0x22), w(0x2a), w(0x90), w(0x88),\
    w(0x46), w(0xee), w(0xb8), w(0x14), w(0xde), w(0x5e), w(0x0b), w(0xdb),\
    w(0xe0), w(0x32), w(0x3a), w(0x0a), w(0x49), w(0x06), w(0x24), w(0x5c),\
    w(0xc2), w(0xd3), w(0xac), w(0x62), w(0x91), w(0x95), w(0xe4), w(0x79),\
    w(0xe7), w(0xc8), w(0x37), w(0x6d), w(0x8d), w(0xd5), w(0x4e), w(0xa9),\
    w(0x6c), w(0x56), w(0xf4), w(0xea), w(0x65), w(0x7a), w(0xae), w(0x08),\
    w(0xba), w(0x78), w(0x25), w(0x2e), w(0x1c), w(0xa6), w(0xb4), w(0xc6),\
    w(0xe8), w(0xdd), w(0x74), w(0x1f), w(0x4b), w(0xbd), w(0x8b), w(0x8a),\
    w(0x70), w(0x3e), w(0xb5), w(0x66), w(0x48), w(0x03), w(0xf6), w(0x0e),\
    w(0x61), w(0x35), w(0x57), w(0xb9), w(0x86), w(0xc1), w(0x1d), w(0x9e),\
    w(0xe1), w(0xf8), w(0x98), w(0x11), w(0x69), w(0xd9), w(0x8e), w(0x94),\
    w(0x9b), w(0x1e), w(0x87), w(0xe9), w(0xce), w(0x55), w(0x28), w(0xdf),\
    w(0x8c), w(0xa1), w(0x89), w(0x0d), w(0xbf), w(0xe6), w(0x42), w(0x68),\
    w(0x41), w(0x99), w(0x2d), w(0x0f), w(0xb0), w(0x54), w(0xbb), w(0x16) }
#define isb_data(w) { /* inverse S Box data values */ \
    w(0x52), w(0x09), w(0x6a), w(0xd5), w(0x30), w(0x36), w(0xa5), w(0x38),\
    w(0xbf), w(0x40), w(0xa3), w(0x9e), w(0x81), w(0xf3), w(0xd7), w(0xfb),\
    w(0x7c), w(0xe3), w(0x39), w(0x82), w(0x9b), w(0x2f), w(0xff), w(0x87),\
    w(0x34), w(0x8e), w(0x43), w(0x44), w(0xc4), w(0xde), w(0xe9), w(0xcb),\
    w(0x54), w(0x7b), w(0x94), w(0x32), w(0xa6), w(0xc2), w(0x23), w(0x3d),\
    w(0xee), w(0x4c), w(0x95), w(0x0b), w(0x42), w(0xfa), w(0xc3), w(0x4e),\
    w(0x08), w(0x2e), w(0xa1), w(0x66), w(0x28), w(0xd9), w(0x24), w(0xb2),\
    w(0x76), w(0x5b), w(0xa2), w(0x49), w(0x6d), w(0x8b), w(0xd1), w(0x25),\
    w(0x72), w(0xf8), w(0xf6), w(0x64), w(0x86), w(0x68), w(0x98), w(0x16),\
    w(0xd4), w(0xa4), w(0x5c), w(0xcc), w(0x5d), w(0x65), w(0xb6), w(0x92),\
    w(0x6c), w(0x70), w(0x48), w(0x50), w(0xfd), w(0xed), w(0xb9), w(0xda),\
    w(0x5e), w(0x15), w(0x46), w(0x57), w(0xa7), w(0x8d), w(0x9d), w(0x84),\
    w(0x90), w(0xd8), w(0xab), w(0x00), w(0x8c), w(0xbc), w(0xd3), w(0x0a),\
    w(0xf7), w(0xe4), w(0x58), w(0x05), w(0xb8), w(0xb3), w(0x45), w(0x06),\
    w(0xd0), w(0x2c), w(0x1e), w(0x8f), w(0xca), w(0x3f), w(0x0f), w(0x02),\
    w(0xc1), w(0xaf), w(0xbd), w(0x03), w(0x01), w(0x13), w(0x8a), w(0x6b),\
    w(0x3a), w(0x91), w(0x11), w(0x41), w(0x4f), w(0x67), w(0xdc), w(0xea),\
    w(0x97), w(0xf2), w(0xcf), w(0xce), w(0xf0), w(0xb4), w(0xe6), w(0x73),\
    w(0x96), w(0xac), w(0x74), w(0x22), w(0xe7), w(0xad), w(0x35), w(0x85),\
    w(0xe2), w(0xf9), w(0x37), w(0xe8), w(0x1c), w(0x75), w(0xdf), w(0x6e),\
    w(0x47), w(0xf1), w(0x1a), w(0x71), w(0x1d), w(0x29), w(0xc5), w(0x89),\
    w(0x6f), w(0xb7), w(0x62), w(0x0e), w(0xaa), w(0x18), w(0xbe), w(0x1b),\
    w(0xfc), w(0x56), w(0x3e), w(0x4b), w(0xc6), w(0xd2), w(0x79), w(0x20),\
    w(0x9a), w(0xdb), w(0xc0), w(0xfe), w(0x78), w(0xcd), w(0x5a), w(0xf4),\
    w(0x1f), w(0xdd), w(0xa8), w(0x33), w(0x88), w(0x07), w(0xc7), w(0x31),\
    w(0xb1), w(0x12), w(0x10), w(0x59), w(0x27), w(0x80), w(0xec), w(0x5f),\
    w(0x60), w(0x51), w(0x7f), w(0xa9), w(0x19), w(0xb5), w(0x4a), w(0x0d),\
    w(0x2d), w(0xe5), w(0x7a), w(0x9f), w(0x93), w(0xc9), w(0x9c), w(0xef),\
    w(0xa0), w(0xe0), w(0x3b), w(0x4d), w(0xae), w(0x2a), w(0xf5), w(0xb0),\
    w(0xc8), w(0xeb), w(0xbb), w(0x3c), w(0x83), w(0x53), w(0x99), w(0x61),\
    w(0x17), w(0x2b), w(0x04), w(0x7e), w(0xba), w(0x77), w(0xd6), w(0x26),\
    w(0xe1), w(0x69), w(0x14), w(0x63), w(0x55), w(0x21), w(0x0c), w(0x7d) }
#define mm_data(w) { /* basic data for forming finite field tables */ \
    w(0x00), w(0x01), w(0x02), w(0x03), w(0x04), w(0x05), w(0x06), w(0x07),\
    w(0x08), w(0x09), w(0x0a), w(0x0b), w(0x0c), w(0x0d), w(0x0e), w(0x0f),\
    w(0x10), w(0x11), w(0x12), w(0x13), w(0x14), w(0x15), w(0x16), w(0x17),\
    w(0x18), w(0x19), w(0x1a), w(0x1b), w(0x1c), w(0x1d), w(0x1e), w(0x1f),\
    w(0x20), w(0x21), w(0x22), w(0x23), w(0x24), w(0x25), w(0x26), w(0x27),\
    w(0x28), w(0x29), w(0x2a), w(0x2b), w(0x2c), w(0x2d), w(0x2e), w(0x2f),\
    w(0x30), w(0x31), w(0x32), w(0x33), w(0x34), w(0x35), w(0x36), w(0x37),\
    w(0x38), w(0x39), w(0x3a), w(0x3b), w(0x3c), w(0x3d), w(0x3e), w(0x3f),\
    w(0x40), w(0x41), w(0x42), w(0x43), w(0x44), w(0x45), w(0x46), w(0x47),\
    w(0x48), w(0x49), w(0x4a), w(0x4b), w(0x4c), w(0x4d), w(0x4e), w(0x4f),\
    w(0x50), w(0x51), w(0x52), w(0x53), w(0x54), w(0x55), w(0x56), w(0x57),\
    w(0x58), w(0x59), w(0x5a), w(0x5b), w(0x5c), w(0x5d), w(0x5e), w(0x5f),\
    w(0x60), w(0x61), w(0x62), w(0x63), w(0x64), w(0x65), w(0x66), w(0x67),\
    w(0x68), w(0x69), w(0x6a), w(0x6b), w(0x6c), w(0x6d), w(0x6e), w(0x6f),\
    w(0x70), w(0x71), w(0x72), w(0x73), w(0x74), w(0x75), w(0x76), w(0x77),\
    w(0x78), w(0x79), w(0x7a), w(0x7b), w(0x7c), w(0x7d), w(0x7e), w(0x7f),\
    w(0x80), w(0x81), w(0x82), w(0x83), w(0x84), w(0x85), w(0x86), w(0x87),\
    w(0x88), w(0x89), w(0x8a), w(0x8b), w(0x8c), w(0x8d), w(0x8e), w(0x8f),\
    w(0x90), w(0x91), w(0x92), w(0x93), w(0x94), w(0x95), w(0x96), w(0x97),\
    w(0x98), w(0x99), w(0x9a), w(0x9b), w(0x9c), w(0x9d), w(0x9e), w(0x9f),\
    w(0xa0), w(0xa1), w(0xa2), w(0xa3), w(0xa4), w(0xa5), w(0xa6), w(0xa7),\
    w(0xa8), w(0xa9), w(0xaa), w(0xab), w(0xac), w(0xad), w(0xae), w(0xaf),\
    w(0xb0), w(0xb1), w(0xb2), w(0xb3), w(0xb4), w(0xb5), w(0xb6), w(0xb7),\
    w(0xb8), w(0xb9), w(0xba), w(0xbb), w(0xbc), w(0xbd), w(0xbe), w(0xbf),\
    w(0xc0), w(0xc1), w(0xc2), w(0xc3), w(0xc4), w(0xc5), w(0xc6), w(0xc7),\
    w(0xc8), w(0xc9), w(0xca), w(0xcb), w(0xcc), w(0xcd), w(0xce), w(0xcf),\
    w(0xd0), w(0xd1), w(0xd2), w(0xd3), w(0xd4), w(0xd5), w(0xd6), w(0xd7),\
    w(0xd8), w(0xd9), w(0xda), w(0xdb), w(0xdc), w(0xdd), w(0xde), w(0xdf),\
    w(0xe0), w(0xe1), w(0xe2), w(0xe3), w(0xe4), w(0xe5), w(0xe6), w(0xe7),\
    w(0xe8), w(0xe9), w(0xea), w(0xeb), w(0xec), w(0xed), w(0xee), w(0xef),\
    w(0xf0), w(0xf1), w(0xf2), w(0xf3), w(0xf4), w(0xf5), w(0xf6), w(0xf7),\
    w(0xf8), w(0xf9), w(0xfa), w(0xfb), w(0xfc), w(0xfd), w(0xfe), w(0xff) }
static const uint8_t sbox[256]      = sb_data(f1);
static const uint8_t isbox[256]     = isb_data(f1);

static const uint8_t gfm2_sbox[256] = sb_data(f2);
static const uint8_t gfm3_sbox[256] = sb_data(f3);

static const uint8_t gfmul_9[256]   = mm_data(f9);
static const uint8_t gfmul_b[256]   = mm_data(fb);
static const uint8_t gfmul_d[256]   = mm_data(fd);
static const uint8_t gfmul_e[256]   = mm_data(fe);

#define s_box(x)     sbox[(x)]
#define is_box(x)    isbox[(x)]
#define gfm2_sb(x)   gfm2_sbox[(x)]
#define gfm3_sb(x)   gfm3_sbox[(x)]
#define gfm_9(x)     gfmul_9[(x)]
#define gfm_b(x)     gfmul_b[(x)]
#define gfm_d(x)     gfmul_d[(x)]
#define gfm_e(x)     gfmul_e[(x)]
#else

/* This is the high bit of x right shifted by one position.
 * Since the starting polynomial has 9 bits (0x11b), this right
 * shift keeps the values of all top bits within a byte.
 */
static uint8_t hibit(const uint8_t x)
{
    uint8_t r = (uint8_t)((x >> 1) | (x >> 2));

    r |= (r >> 2);
    r |= (r >> 4);
    return (r + 1) >> 1;
}

/* return the inverse of the finite field element x */
static uint8_t gf_inv(const uint8_t x)
{
    uint8_t p1 = x, p2 = BPOLY, n1 = hibit(x), n2 = 0x80, v1 = 1, v2 = 0;

    if(x < 2)
        return x;

    for( ; ; )
    {
        if(n1)
            while(n2 >= n1)             /* divide polynomial p2 by p1    */
            {
                n2 /= n1;               /* shift smaller polynomial left */
                p2 ^= (p1 * n2) & 0xff; /* and remove from larger one    */
                v2 ^= (v1 * n2);        /* shift accumulated value and   */
                n2 = hibit(p2);         /* add into result               */
            }
        else
            return v1;

        if(n2)                          /* repeat with values swapped    */
            while(n1 >= n2)
            {
                n1 /= n2;
                p1 ^= p2 * n1;
                v1 ^= v2 * n1;
                n1 = hibit(p1);
            }
        else
            return v2;
    }
}
/* The forward and inverse affine transformations used in the S-box */
static uint8_t fwd_affine(const uint8_t x)
{
#if defined( HAVE_UINT_32T )
    uint32_t w = x;
    w ^= (w << 1) ^ (w << 2) ^ (w << 3) ^ (w << 4);
    return 0x63 ^ ((w ^ (w >> 8)) & 0xff);
#else
    return 0x63 ^ x ^ (x << 1) ^ (x << 2) ^ (x << 3) ^ (x << 4)
                ^ (x >> 7) ^ (x >> 6) ^ (x >> 5) ^ (x >> 4);
#endif
}

static uint8_t inv_affine(const uint8_t x)
{
#if defined( HAVE_UINT_32T )
    uint32_t w = x;
    w = (w << 1) ^ (w << 3) ^ (w << 6);
    return 0x05 ^ ((w ^ (w >> 8)) & 0xff);
#else
    return 0x05 ^ (x << 1) ^ (x << 3) ^ (x << 6)
                ^ (x >> 7) ^ (x >> 5) ^ (x >> 2);
#endif
}

#define s_box(x)     fwd_affine(gf_inv(x))
#define is_box(x)    gf_inv(inv_affine(x))
#define gfm2_sb(x)   f2(s_box(x))
#define gfm3_sb(x)   f3(s_box(x))
#define gfm_9(x)     f9(x)
#define gfm_b(x)     fb(x)
#define gfm_d(x)     fd(x)
#define gfm_e(x)     fe(x)

#endif
#if defined( HAVE_MEMCPY )
# define block_copy_nn(d, s, l)  memcpy(d, s, l)
# define block_copy(d, s)        memcpy(d, s, N_BLOCK)
#else
# define block_copy_nn(d, s, l)  copy_block_nn(d, s, l)
# define block_copy(d, s)        copy_block(d, s)

static void copy_block( void *d, const void *s )
{
#if defined( HAVE_UINT_32T )
    ((uint32_t*)d)[ 0] = ((uint32_t*)s)[ 0];
    ((uint32_t*)d)[ 1] = ((uint32_t*)s)[ 1];
    ((uint32_t*)d)[ 2] = ((uint32_t*)s)[ 2];
    ((uint32_t*)d)[ 3] = ((uint32_t*)s)[ 3];
#else
    ((uint8_t*)d)[ 0] = ((uint8_t*)s)[ 0];
    ((uint8_t*)d)[ 1] = ((uint8_t*)s)[ 1];
    ((uint8_t*)d)[ 2] = ((uint8_t*)s)[ 2];
    ((uint8_t*)d)[ 3] = ((uint8_t*)s)[ 3];
    ((uint8_t*)d)[ 4] = ((uint8_t*)s)[ 4];
    ((uint8_t*)d)[ 5] = ((uint8_t*)s)[ 5];
    ((uint8_t*)d)[ 6] = ((uint8_t*)s)[ 6];
    ((uint8_t*)d)[ 7] = ((uint8_t*)s)[ 7];
    ((uint8_t*)d)[ 8] = ((uint8_t*)s)[ 8];
    ((uint8_t*)d)[ 9] = ((uint8_t*)s)[ 9];
    ((uint8_t*)d)[10] = ((uint8_t*)s)[10];
    ((uint8_t*)d)[11] = ((uint8_t*)s)[11];
    ((uint8_t*)d)[12] = ((uint8_t*)s)[12];
    ((uint8_t*)d)[13] = ((uint8_t*)s)[13];
    ((uint8_t*)d)[14] = ((uint8_t*)s)[14];
    ((uint8_t*)d)[15] = ((uint8_t*)s)[15];
#endif
}
static void copy_block_nn( void *d, const void *s, uint8_t nn )
{
    /* Use real pointer variables here: incrementing a cast expression,
     * as in *((uint8_t*)d)++, is not valid standard C. */
    uint8_t *dp = (uint8_t*)d;
    const uint8_t *sp = (const uint8_t*)s;

    while( nn-- ) {
        *dp++ = *sp++;
    }
}
#endif
void xor_block( void *d, const void *s )
{
#if defined( HAVE_UINT_32T )
    ((uint32_t*)d)[ 0] ^= ((uint32_t*)s)[ 0];
    ((uint32_t*)d)[ 1] ^= ((uint32_t*)s)[ 1];
    ((uint32_t*)d)[ 2] ^= ((uint32_t*)s)[ 2];
    ((uint32_t*)d)[ 3] ^= ((uint32_t*)s)[ 3];
#else
    ((uint8_t*)d)[ 0] ^= ((uint8_t*)s)[ 0];
    ((uint8_t*)d)[ 1] ^= ((uint8_t*)s)[ 1];
    ((uint8_t*)d)[ 2] ^= ((uint8_t*)s)[ 2];
    ((uint8_t*)d)[ 3] ^= ((uint8_t*)s)[ 3];
    ((uint8_t*)d)[ 4] ^= ((uint8_t*)s)[ 4];
    ((uint8_t*)d)[ 5] ^= ((uint8_t*)s)[ 5];
    ((uint8_t*)d)[ 6] ^= ((uint8_t*)s)[ 6];
    ((uint8_t*)d)[ 7] ^= ((uint8_t*)s)[ 7];
    ((uint8_t*)d)[ 8] ^= ((uint8_t*)s)[ 8];
    ((uint8_t*)d)[ 9] ^= ((uint8_t*)s)[ 9];
    ((uint8_t*)d)[10] ^= ((uint8_t*)s)[10];
    ((uint8_t*)d)[11] ^= ((uint8_t*)s)[11];
    ((uint8_t*)d)[12] ^= ((uint8_t*)s)[12];
    ((uint8_t*)d)[13] ^= ((uint8_t*)s)[13];
    ((uint8_t*)d)[14] ^= ((uint8_t*)s)[14];
    ((uint8_t*)d)[15] ^= ((uint8_t*)s)[15];
#endif
}
void copy_and_key( void *d, const void *s, const void *k )
{
#if defined( HAVE_UINT_32T )
    ((uint32_t*)d)[ 0] = ((uint32_t*)s)[ 0] ^ ((uint32_t*)k)[ 0];
    ((uint32_t*)d)[ 1] = ((uint32_t*)s)[ 1] ^ ((uint32_t*)k)[ 1];
    ((uint32_t*)d)[ 2] = ((uint32_t*)s)[ 2] ^ ((uint32_t*)k)[ 2];
    ((uint32_t*)d)[ 3] = ((uint32_t*)s)[ 3] ^ ((uint32_t*)k)[ 3];
#elif 1
    ((uint8_t*)d)[ 0] = ((uint8_t*)s)[ 0] ^ ((uint8_t*)k)[ 0];
    ((uint8_t*)d)[ 1] = ((uint8_t*)s)[ 1] ^ ((uint8_t*)k)[ 1];
    ((uint8_t*)d)[ 2] = ((uint8_t*)s)[ 2] ^ ((uint8_t*)k)[ 2];
    ((uint8_t*)d)[ 3] = ((uint8_t*)s)[ 3] ^ ((uint8_t*)k)[ 3];
    ((uint8_t*)d)[ 4] = ((uint8_t*)s)[ 4] ^ ((uint8_t*)k)[ 4];
    ((uint8_t*)d)[ 5] = ((uint8_t*)s)[ 5] ^ ((uint8_t*)k)[ 5];
    ((uint8_t*)d)[ 6] = ((uint8_t*)s)[ 6] ^ ((uint8_t*)k)[ 6];
    ((uint8_t*)d)[ 7] = ((uint8_t*)s)[ 7] ^ ((uint8_t*)k)[ 7];
    ((uint8_t*)d)[ 8] = ((uint8_t*)s)[ 8] ^ ((uint8_t*)k)[ 8];
    ((uint8_t*)d)[ 9] = ((uint8_t*)s)[ 9] ^ ((uint8_t*)k)[ 9];
    ((uint8_t*)d)[10] = ((uint8_t*)s)[10] ^ ((uint8_t*)k)[10];
    ((uint8_t*)d)[11] = ((uint8_t*)s)[11] ^ ((uint8_t*)k)[11];
    ((uint8_t*)d)[12] = ((uint8_t*)s)[12] ^ ((uint8_t*)k)[12];
    ((uint8_t*)d)[13] = ((uint8_t*)s)[13] ^ ((uint8_t*)k)[13];
    ((uint8_t*)d)[14] = ((uint8_t*)s)[14] ^ ((uint8_t*)k)[14];
    ((uint8_t*)d)[15] = ((uint8_t*)s)[15] ^ ((uint8_t*)k)[15];
#else
    block_copy(d, s);
    xor_block(d, k);
#endif
}
#if defined( AES_USE_VERSION_1 )

static void add_round_key( uint8_t d[N_BLOCK], const uint8_t k[N_BLOCK] )
{
    xor_block(d, k);
}

#endif
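
/*
 * Note on the state layout used by the routines below: the 16 byte state
 * is stored column by column, as in FIPS-197, so st[0..3] form the first
 * column and row r occupies st[r], st[r + 4], st[r + 8] and st[r + 12].
 * The index patterns in shift_sub_rows(), mix_sub_columns() and their
 * inverses follow from that layout, with the byte substitution folded
 * into the same pass.
 */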
void shift_sub_rows( uint8_t st[N_BLOCK] )
{
    uint8_t tt;

    st[ 0] = s_box(st[ 0]); st[ 4] = s_box(st[ 4]);
    st[ 8] = s_box(st[ 8]); st[12] = s_box(st[12]);

    tt = st[1]; st[ 1] = s_box(st[ 5]); st[ 5] = s_box(st[ 9]);
    st[ 9] = s_box(st[13]); st[13] = s_box( tt );

    tt = st[2]; st[ 2] = s_box(st[10]); st[10] = s_box( tt );
    tt = st[6]; st[ 6] = s_box(st[14]); st[14] = s_box( tt );

    tt = st[15]; st[15] = s_box(st[11]); st[11] = s_box(st[ 7]);
    st[ 7] = s_box(st[ 3]); st[ 3] = s_box( tt );
}

void inv_shift_sub_rows( uint8_t st[N_BLOCK] )
{
    uint8_t tt;

    st[ 0] = is_box(st[ 0]); st[ 4] = is_box(st[ 4]);
    st[ 8] = is_box(st[ 8]); st[12] = is_box(st[12]);

    tt = st[13]; st[13] = is_box(st[9]); st[ 9] = is_box(st[5]);
    st[ 5] = is_box(st[1]); st[ 1] = is_box( tt );

    tt = st[2]; st[ 2] = is_box(st[10]); st[10] = is_box( tt );
    tt = st[6]; st[ 6] = is_box(st[14]); st[14] = is_box( tt );

    tt = st[3]; st[ 3] = is_box(st[ 7]); st[ 7] = is_box(st[11]);
    st[11] = is_box(st[15]); st[15] = is_box( tt );
}
#if defined( AES_USE_VERSION_1 )

void mix_sub_columns( uint8_t dt[N_BLOCK] )
{
    uint8_t st[N_BLOCK];
    block_copy(st, dt);
#else

void mix_sub_columns( uint8_t dt[N_BLOCK], uint8_t st[N_BLOCK] )
{
#endif
    dt[ 0] = gfm2_sb(st[0]) ^ gfm3_sb(st[5]) ^ s_box(st[10]) ^ s_box(st[15]);
    dt[ 1] = s_box(st[0]) ^ gfm2_sb(st[5]) ^ gfm3_sb(st[10]) ^ s_box(st[15]);
    dt[ 2] = s_box(st[0]) ^ s_box(st[5]) ^ gfm2_sb(st[10]) ^ gfm3_sb(st[15]);
    dt[ 3] = gfm3_sb(st[0]) ^ s_box(st[5]) ^ s_box(st[10]) ^ gfm2_sb(st[15]);

    dt[ 4] = gfm2_sb(st[4]) ^ gfm3_sb(st[9]) ^ s_box(st[14]) ^ s_box(st[3]);
    dt[ 5] = s_box(st[4]) ^ gfm2_sb(st[9]) ^ gfm3_sb(st[14]) ^ s_box(st[3]);
    dt[ 6] = s_box(st[4]) ^ s_box(st[9]) ^ gfm2_sb(st[14]) ^ gfm3_sb(st[3]);
    dt[ 7] = gfm3_sb(st[4]) ^ s_box(st[9]) ^ s_box(st[14]) ^ gfm2_sb(st[3]);

    dt[ 8] = gfm2_sb(st[8]) ^ gfm3_sb(st[13]) ^ s_box(st[2]) ^ s_box(st[7]);
    dt[ 9] = s_box(st[8]) ^ gfm2_sb(st[13]) ^ gfm3_sb(st[2]) ^ s_box(st[7]);
    dt[10] = s_box(st[8]) ^ s_box(st[13]) ^ gfm2_sb(st[2]) ^ gfm3_sb(st[7]);
    dt[11] = gfm3_sb(st[8]) ^ s_box(st[13]) ^ s_box(st[2]) ^ gfm2_sb(st[7]);

    dt[12] = gfm2_sb(st[12]) ^ gfm3_sb(st[1]) ^ s_box(st[6]) ^ s_box(st[11]);
    dt[13] = s_box(st[12]) ^ gfm2_sb(st[1]) ^ gfm3_sb(st[6]) ^ s_box(st[11]);
    dt[14] = s_box(st[12]) ^ s_box(st[1]) ^ gfm2_sb(st[6]) ^ gfm3_sb(st[11]);
    dt[15] = gfm3_sb(st[12]) ^ s_box(st[1]) ^ s_box(st[6]) ^ gfm2_sb(st[11]);
}
#if defined( AES_USE_VERSION_1 )

void inv_mix_sub_columns( uint8_t dt[N_BLOCK] )
{
    uint8_t st[N_BLOCK];
    block_copy(st, dt);
#else

void inv_mix_sub_columns( uint8_t dt[N_BLOCK], uint8_t st[N_BLOCK] )
{
#endif
    dt[ 0] = is_box(gfm_e(st[ 0]) ^ gfm_b(st[ 1]) ^ gfm_d(st[ 2]) ^ gfm_9(st[ 3]));
    dt[ 5] = is_box(gfm_9(st[ 0]) ^ gfm_e(st[ 1]) ^ gfm_b(st[ 2]) ^ gfm_d(st[ 3]));
    dt[10] = is_box(gfm_d(st[ 0]) ^ gfm_9(st[ 1]) ^ gfm_e(st[ 2]) ^ gfm_b(st[ 3]));
    dt[15] = is_box(gfm_b(st[ 0]) ^ gfm_d(st[ 1]) ^ gfm_9(st[ 2]) ^ gfm_e(st[ 3]));

    dt[ 4] = is_box(gfm_e(st[ 4]) ^ gfm_b(st[ 5]) ^ gfm_d(st[ 6]) ^ gfm_9(st[ 7]));
    dt[ 9] = is_box(gfm_9(st[ 4]) ^ gfm_e(st[ 5]) ^ gfm_b(st[ 6]) ^ gfm_d(st[ 7]));
    dt[14] = is_box(gfm_d(st[ 4]) ^ gfm_9(st[ 5]) ^ gfm_e(st[ 6]) ^ gfm_b(st[ 7]));
    dt[ 3] = is_box(gfm_b(st[ 4]) ^ gfm_d(st[ 5]) ^ gfm_9(st[ 6]) ^ gfm_e(st[ 7]));

    dt[ 8] = is_box(gfm_e(st[ 8]) ^ gfm_b(st[ 9]) ^ gfm_d(st[10]) ^ gfm_9(st[11]));
    dt[13] = is_box(gfm_9(st[ 8]) ^ gfm_e(st[ 9]) ^ gfm_b(st[10]) ^ gfm_d(st[11]));
    dt[ 2] = is_box(gfm_d(st[ 8]) ^ gfm_9(st[ 9]) ^ gfm_e(st[10]) ^ gfm_b(st[11]));
    dt[ 7] = is_box(gfm_b(st[ 8]) ^ gfm_d(st[ 9]) ^ gfm_9(st[10]) ^ gfm_e(st[11]));

    dt[12] = is_box(gfm_e(st[12]) ^ gfm_b(st[13]) ^ gfm_d(st[14]) ^ gfm_9(st[15]));
    dt[ 1] = is_box(gfm_9(st[12]) ^ gfm_e(st[13]) ^ gfm_b(st[14]) ^ gfm_d(st[15]));
    dt[ 6] = is_box(gfm_d(st[12]) ^ gfm_9(st[13]) ^ gfm_e(st[14]) ^ gfm_b(st[15]));
    dt[11] = is_box(gfm_b(st[12]) ^ gfm_d(st[13]) ^ gfm_9(st[14]) ^ gfm_e(st[15]));
}
#if defined( AES_ENC_PREKEYED ) || defined( AES_DEC_PREKEYED )

/*!
 * \brief Set the cipher key for the pre-keyed version.
 *
 * \param key    Encryption / decryption key.
 * \param keylen Length of the key in bytes: 16, 24 or 32 for 128, 192 and
 *               256 bit keys (the bit lengths 128, 192 and 256 are accepted
 *               as well).
 * \param ctx    AES encryption context.
 */
return_type aes_set_key( const uint8_t key[], int keylen, aes_context ctx[1] )
{
    uint8_t cc, rc, hi;

    switch( keylen )
    {
    case 16:
    case 128:
        keylen = 16;
        break;
    case 24:
    case 192:
        keylen = 24;
        break;
    case 32:
    case 256:
        keylen = 32;
        break;
    default:
        ctx->rnd = 0;
        return -1;
    }
    block_copy_nn(ctx->ksch, key, keylen);
    hi = (keylen + 28) << 2;    /* key schedule size in bytes, 16 * (rounds + 1) */
    ctx->rnd = (hi >> 4) - 1;   /* number of rounds: 10, 12 or 14                */
    for( cc = keylen, rc = 1; cc < hi; cc += 4 )
    {
        uint8_t tt, t0, t1, t2, t3;

        t0 = ctx->ksch[cc - 4];
        t1 = ctx->ksch[cc - 3];
        t2 = ctx->ksch[cc - 2];
        t3 = ctx->ksch[cc - 1];
        if( cc % keylen == 0 )
        {
            tt = t0;
            t0 = s_box(t1) ^ rc;
            t1 = s_box(t2);
            t2 = s_box(t3);
            t3 = s_box(tt);
            rc = f2(rc);
        }
        else if( keylen > 24 && cc % keylen == 16 )
        {
            t0 = s_box(t0);
            t1 = s_box(t1);
            t2 = s_box(t2);
            t3 = s_box(t3);
        }
        tt = cc - keylen;
        ctx->ksch[cc + 0] = ctx->ksch[tt + 0] ^ t0;
        ctx->ksch[cc + 1] = ctx->ksch[tt + 1] ^ t1;
        ctx->ksch[cc + 2] = ctx->ksch[tt + 2] ^ t2;
        ctx->ksch[cc + 3] = ctx->ksch[tt + 3] ^ t3;
    }
    return 0;
}

#endif
#if defined( AES_ENC_PREKEYED )

/*!
 * \brief Encrypt a single block of 16 bytes.
 *
 * \param in  Input data block.
 * \param out Output data block.
 * \param ctx AES encryption context.
 */
return_type aes_encrypt( const uint8_t in[N_BLOCK], uint8_t out[N_BLOCK], const aes_context ctx[1] )
{
    if( ctx->rnd )
    {
        uint8_t s1[N_BLOCK], r;

        copy_and_key( s1, in, ctx->ksch );

        for( r = 1 ; r < ctx->rnd ; ++r )
#if defined( AES_USE_VERSION_1 )
        {
            mix_sub_columns( s1 );
            add_round_key( s1, ctx->ksch + r * N_BLOCK);
        }
#else
        {
            uint8_t s2[N_BLOCK];
            mix_sub_columns( s2, s1 );
            copy_and_key( s1, s2, ctx->ksch + r * N_BLOCK);
        }
#endif
        shift_sub_rows( s1 );
        copy_and_key( out, s1, ctx->ksch + r * N_BLOCK );
    }
    else
        return -1;
    return 0;
}
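
/*
 * Minimal usage sketch for the pre-keyed single-block routines; the key and
 * data buffers below are placeholders chosen for illustration only. Both
 * calls return 0 on success and -1 on failure:
 *
 *     aes_context ctx[1];
 *     uint8_t key[16] = { 0 };             // 16, 24 or 32 byte key
 *     uint8_t pt[N_BLOCK] = { 0 };
 *     uint8_t ct[N_BLOCK];
 *
 *     if (aes_set_key(key, sizeof(key), ctx) == 0 &&
 *         aes_encrypt(pt, ct, ctx) == 0) {
 *         // ct now holds the encrypted block
 *     }
 */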
/*!
 * \brief CBC encrypt a number of blocks (the IV is passed in and updated in place).
 *
 * \param in      Input data (must be a multiple of 16 bytes).
 * \param out     Output data (must be a multiple of 16 bytes).
 * \param n_block Number of 16 byte blocks to encrypt.
 * \param iv      Initialisation vector, updated to the last ciphertext block.
 * \param ctx     AES encryption context.
 */
return_type aes_cbc_encrypt( const uint8_t *in, uint8_t *out,
                             int n_block, uint8_t iv[N_BLOCK], const aes_context ctx[1] )
{
    while(n_block--)
    {
        xor_block(iv, in);
        if(aes_encrypt(iv, iv, ctx) != EXIT_SUCCESS)
            return EXIT_FAILURE;
        memcpy(out, iv, N_BLOCK);
        in += N_BLOCK;
        out += N_BLOCK;
    }
    return EXIT_SUCCESS;
}
#endif

#if defined( AES_DEC_PREKEYED )

/*!
 * \brief Decrypt a single block of 16 bytes.
 *
 * \param in  Input data block.
 * \param out Output data block.
 * \param ctx AES encryption context.
 */
return_type aes_decrypt( const uint8_t in[N_BLOCK], uint8_t out[N_BLOCK], const aes_context ctx[1] )
{
    if( ctx->rnd )
    {
        uint8_t s1[N_BLOCK], r;

        copy_and_key( s1, in, ctx->ksch + ctx->rnd * N_BLOCK );
        inv_shift_sub_rows( s1 );

        for( r = ctx->rnd ; --r ; )
#if defined( AES_USE_VERSION_1 )
        {
            add_round_key( s1, ctx->ksch + r * N_BLOCK );
            inv_mix_sub_columns( s1 );
        }
#else
        {
            uint8_t s2[N_BLOCK];
            copy_and_key( s2, s1, ctx->ksch + r * N_BLOCK );
            inv_mix_sub_columns( s1, s2 );
        }
#endif
        copy_and_key( out, s1, ctx->ksch );
    }
    else
        return -1;
    return 0;
}
/*!
 * \brief CBC decrypt a number of blocks (the IV is passed in and updated in place).
 *
 * \param in      Input data (must be a multiple of 16 bytes).
 * \param out     Output data (must be a multiple of 16 bytes).
 * \param n_block Number of 16 byte blocks to decrypt.
 * \param iv      Initialisation vector, updated to the last ciphertext block.
 * \param ctx     AES encryption context.
 */
return_type aes_cbc_decrypt( const uint8_t *in, uint8_t *out,
                             int n_block, uint8_t iv[N_BLOCK], const aes_context ctx[1] )
{
    while(n_block--)
    {
        uint8_t tmp[N_BLOCK];

        memcpy(tmp, in, N_BLOCK);
        if(aes_decrypt(in, out, ctx) != EXIT_SUCCESS)
            return EXIT_FAILURE;
        xor_block(out, iv);
        memcpy(iv, tmp, N_BLOCK);
        in += N_BLOCK;
        out += N_BLOCK;
    }
    return EXIT_SUCCESS;
}
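
/*
 * CBC usage sketch: encrypting and then decrypting a small buffer with the
 * two helpers above. The buffers and the all-zero IV are placeholders; in
 * real use the IV should be unpredictable and agreed with the peer. The
 * routines update the IV in place, so it is re-initialised before each call:
 *
 *     aes_context ctx[1];
 *     uint8_t key[16] = { 0 };
 *     uint8_t iv[N_BLOCK];
 *     uint8_t msg[4 * N_BLOCK] = { 0 }, enc[4 * N_BLOCK], dec[4 * N_BLOCK];
 *
 *     aes_set_key(key, sizeof(key), ctx);
 *
 *     memset(iv, 0, N_BLOCK);
 *     aes_cbc_encrypt(msg, enc, 4, iv, ctx);
 *
 *     memset(iv, 0, N_BLOCK);
 *     aes_cbc_decrypt(enc, dec, 4, iv, ctx);   // dec now equals msg
 *
 * The same aes_set_key() context serves both directions, since decryption
 * walks the forward key schedule in reverse.
 */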
#endif
#if defined( AES_ENC_128_OTFK )

/* The 'on the fly' encryption key update for 128 bit keys */
static void update_encrypt_key_128( uint8_t k[N_BLOCK], uint8_t *rc )
{
    uint8_t cc;

    k[0] ^= s_box(k[13]) ^ *rc;
    k[1] ^= s_box(k[14]);
    k[2] ^= s_box(k[15]);
    k[3] ^= s_box(k[12]);
    *rc = f2( *rc );

    for(cc = 4; cc < 16; cc += 4 )
    {
        k[cc + 0] ^= k[cc - 4];
        k[cc + 1] ^= k[cc - 3];
        k[cc + 2] ^= k[cc - 2];
        k[cc + 3] ^= k[cc - 1];
    }
}

/*!
 * \brief Encrypt a single block of 16 bytes with 'on the fly' 128 bit keying.
 *
 * \param in    Input data block (16 bytes).
 * \param out   Output data block (16 bytes).
 * \param key   Key data (16 bytes).
 * \param o_key Output key (16 bytes).
 */
void aes_encrypt_128( const uint8_t in[N_BLOCK], uint8_t out[N_BLOCK],
                      const uint8_t key[N_BLOCK], uint8_t o_key[N_BLOCK] )
{
    uint8_t s1[N_BLOCK], r, rc = 1;

    if(o_key != key)
        block_copy( o_key, key );
    copy_and_key( s1, in, o_key );

    for( r = 1 ; r < 10 ; ++r )
#if defined( AES_USE_VERSION_1 )
    {
        mix_sub_columns( s1 );
        update_encrypt_key_128( o_key, &rc );
        add_round_key( s1, o_key );
    }
#else
    {
        uint8_t s2[N_BLOCK];
        mix_sub_columns( s2, s1 );
        update_encrypt_key_128( o_key, &rc );
        copy_and_key( s1, s2, o_key );
    }
#endif
    shift_sub_rows( s1 );
    update_encrypt_key_128( o_key, &rc );
    copy_and_key( out, s1, o_key );
}

#endif
#if defined( AES_DEC_128_OTFK )

/* The 'on the fly' decryption key update for 128 bit keys */
static void update_decrypt_key_128( uint8_t k[N_BLOCK], uint8_t *rc )
{
    uint8_t cc;

    for( cc = 12; cc > 0; cc -= 4 )
    {
        k[cc + 0] ^= k[cc - 4];
        k[cc + 1] ^= k[cc - 3];
        k[cc + 2] ^= k[cc - 2];
        k[cc + 3] ^= k[cc - 1];
    }
    *rc = d2(*rc);
    k[0] ^= s_box(k[13]) ^ *rc;
    k[1] ^= s_box(k[14]);
    k[2] ^= s_box(k[15]);
    k[3] ^= s_box(k[12]);
}

/*!
 * \brief Decrypt a single block of 16 bytes with 'on the fly' 128 bit keying.
 *
 * \param in    Input data block (16 bytes).
 * \param out   Output data block (16 bytes).
 * \param key   Key data (16 bytes).
 * \param o_key Output key (16 bytes).
 */
void aes_decrypt_128( const uint8_t in[N_BLOCK], uint8_t out[N_BLOCK],
                      const uint8_t key[N_BLOCK], uint8_t o_key[N_BLOCK] )
{
    uint8_t s1[N_BLOCK], r, rc = 0x6c;

    if(o_key != key)
        block_copy( o_key, key );
    copy_and_key( s1, in, o_key );
    inv_shift_sub_rows( s1 );

    for( r = 10 ; --r ; )
#if defined( AES_USE_VERSION_1 )
    {
        update_decrypt_key_128( o_key, &rc );
        add_round_key( s1, o_key );
        inv_mix_sub_columns( s1 );
    }
#else
    {
        uint8_t s2[N_BLOCK];
        update_decrypt_key_128( o_key, &rc );
        copy_and_key( s2, s1, o_key );
        inv_mix_sub_columns( s1, s2 );
    }
#endif
    update_decrypt_key_128( o_key, &rc );
    copy_and_key( out, s1, o_key );
}
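
/*
 * Usage note for the 'on the fly' 128 bit routines: the key passed to
 * aes_decrypt_128() is the final round key, i.e. the value left behind in
 * o_key by aes_encrypt_128(), not the original cipher key. A sketch with
 * placeholder buffer names:
 *
 *     uint8_t key[N_BLOCK] = { 0 };         // original 128 bit cipher key
 *     uint8_t ekey[N_BLOCK], dkey[N_BLOCK];
 *     uint8_t pt[N_BLOCK] = { 0 }, ct[N_BLOCK], rt[N_BLOCK];
 *
 *     aes_encrypt_128(pt, ct, key, ekey);   // ekey ends as the last round key
 *     aes_decrypt_128(ct, rt, ekey, dkey);  // rt equals pt, dkey reverts to key
 */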
#endif
#if defined( AES_ENC_256_OTFK )

/* The 'on the fly' encryption key update for 256 bit keys */
static void update_encrypt_key_256( uint8_t k[2 * N_BLOCK], uint8_t *rc )
{
    uint8_t cc;

    k[0] ^= s_box(k[29]) ^ *rc;
    k[1] ^= s_box(k[30]);
    k[2] ^= s_box(k[31]);
    k[3] ^= s_box(k[28]);
    *rc = f2( *rc );

    for(cc = 4; cc < 16; cc += 4)
    {
        k[cc + 0] ^= k[cc - 4];
        k[cc + 1] ^= k[cc - 3];
        k[cc + 2] ^= k[cc - 2];
        k[cc + 3] ^= k[cc - 1];
    }
    k[16] ^= s_box(k[12]);
    k[17] ^= s_box(k[13]);
    k[18] ^= s_box(k[14]);
    k[19] ^= s_box(k[15]);

    for( cc = 20; cc < 32; cc += 4 )
    {
        k[cc + 0] ^= k[cc - 4];
        k[cc + 1] ^= k[cc - 3];
        k[cc + 2] ^= k[cc - 2];
        k[cc + 3] ^= k[cc - 1];
    }
}

/*!
 * \brief Encrypt a single block of 16 bytes with 'on the fly' 256 bit keying.
 *
 * \param in    Input data block (16 bytes).
 * \param out   Output data block (16 bytes).
 * \param key   Key data (32 bytes).
 * \param o_key Output key (32 bytes).
 */
void aes_encrypt_256( const uint8_t in[N_BLOCK], uint8_t out[N_BLOCK],
                      const uint8_t key[2 * N_BLOCK], uint8_t o_key[2 * N_BLOCK] )
{
    uint8_t s1[N_BLOCK], r, rc = 1;

    if(o_key != key)
    {
        block_copy( o_key, key );
        block_copy( o_key + 16, key + 16 );
    }
    copy_and_key( s1, in, o_key );

    for( r = 1 ; r < 14 ; ++r )
#if defined( AES_USE_VERSION_1 )
    {
        mix_sub_columns(s1);
        if( r & 1 )
            add_round_key( s1, o_key + 16 );
        else
        {
            update_encrypt_key_256( o_key, &rc );
            add_round_key( s1, o_key );
        }
    }
#else
    {
        uint8_t s2[N_BLOCK];
        mix_sub_columns( s2, s1 );
        if( r & 1 )
            copy_and_key( s1, s2, o_key + 16 );
        else
        {
            update_encrypt_key_256( o_key, &rc );
            copy_and_key( s1, s2, o_key );
        }
    }
#endif
    shift_sub_rows( s1 );
    update_encrypt_key_256( o_key, &rc );
    copy_and_key( out, s1, o_key );
}

#endif
#if defined( AES_DEC_256_OTFK )

/* The 'on the fly' decryption key update for 256 bit keys */
static void update_decrypt_key_256( uint8_t k[2 * N_BLOCK], uint8_t *rc )
{
    uint8_t cc;

    for(cc = 28; cc > 16; cc -= 4)
    {
        k[cc + 0] ^= k[cc - 4];
        k[cc + 1] ^= k[cc - 3];
        k[cc + 2] ^= k[cc - 2];
        k[cc + 3] ^= k[cc - 1];
    }
    k[16] ^= s_box(k[12]);
    k[17] ^= s_box(k[13]);
    k[18] ^= s_box(k[14]);
    k[19] ^= s_box(k[15]);

    for(cc = 12; cc > 0; cc -= 4)
    {
        k[cc + 0] ^= k[cc - 4];
        k[cc + 1] ^= k[cc - 3];
        k[cc + 2] ^= k[cc - 2];
        k[cc + 3] ^= k[cc - 1];
    }
    *rc = d2(*rc);
    k[0] ^= s_box(k[29]) ^ *rc;
    k[1] ^= s_box(k[30]);
    k[2] ^= s_box(k[31]);
    k[3] ^= s_box(k[28]);
}

/*!
 * \brief Decrypt a single block of 16 bytes with 'on the fly' 256 bit keying.
 *
 * \param in    Input data block (16 bytes).
 * \param out   Output data block (16 bytes).
 * \param key   Key data (32 bytes).
 * \param o_key Output key (32 bytes).
 */
void aes_decrypt_256( const uint8_t in[N_BLOCK], uint8_t out[N_BLOCK],
                      const uint8_t key[2 * N_BLOCK], uint8_t o_key[2 * N_BLOCK] )
{
    uint8_t s1[N_BLOCK], r, rc = 0x80;

    if(o_key != key)
    {
        block_copy( o_key, key );
        block_copy( o_key + 16, key + 16 );
    }
    copy_and_key( s1, in, o_key );
    inv_shift_sub_rows( s1 );

    for( r = 14 ; --r ; )
#if defined( AES_USE_VERSION_1 )
    {
        if( ( r & 1 ) )
        {
            update_decrypt_key_256( o_key, &rc );
            add_round_key( s1, o_key + 16 );
        }
        else
            add_round_key( s1, o_key );
        inv_mix_sub_columns( s1 );
    }
#else
    {
        uint8_t s2[N_BLOCK];
        if( ( r & 1 ) )
        {
            update_decrypt_key_256( o_key, &rc );
            copy_and_key( s2, s1, o_key + 16 );
        }
        else
            copy_and_key( s2, s1, o_key );
        inv_mix_sub_columns( s1, s2 );
    }
#endif
    copy_and_key( out, s1, o_key );
}
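
/*
 * As in the 128 bit case, the key passed to aes_decrypt_256() is the final
 * round key pair that aes_encrypt_256() leaves behind in o_key, not the
 * original 256 bit cipher key.
 */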
#endif