/* e_aes.c */
/* ====================================================================
 * Copyright (c) 2001-2011 The OpenSSL Project.  All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * 3. All advertising materials mentioning features or use of this
 *    software must display the following acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
 *
 * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
 *    endorse or promote products derived from this software without
 *    prior written permission. For written permission, please contact
 *    openssl-core@openssl.org.
 *
 * 5. Products derived from this software may not be called "OpenSSL"
 *    nor may "OpenSSL" appear in their names without prior written
 *    permission of the OpenSSL Project.
 *
 * 6. Redistributions of any form whatsoever must retain the following
 *    acknowledgment:
 *    "This product includes software developed by the OpenSSL Project
 *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
 *
 * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
 * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR
 * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
 * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
 * OF THE POSSIBILITY OF SUCH DAMAGE.
 * ====================================================================
 *
 */
#include <openssl/opensslconf.h>
#ifndef OPENSSL_NO_AES
#include <openssl/crypto.h>
# include <openssl/evp.h>
# include <openssl/err.h>
# include <string.h>
# include <assert.h>
# include <openssl/aes.h>
# include "evp_locl.h"
# ifndef OPENSSL_FIPS
#  include "modes_lcl.h"
#  include <openssl/rand.h>

typedef struct {
    AES_KEY ks;
    block128_f block;
    union {
        cbc128_f cbc;
        ctr128_f ctr;
    } stream;
} EVP_AES_KEY;

typedef struct {
    AES_KEY ks;                 /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    GCM128_CONTEXT gcm;
    unsigned char *iv;          /* Temporary IV store */
    int ivlen;                  /* IV length */
    int taglen;
    int iv_gen;                 /* It is OK to generate IVs */
    int tls_aad_len;            /* TLS AAD length */
    ctr128_f ctr;
} EVP_AES_GCM_CTX;

typedef struct {
    AES_KEY ks1, ks2;           /* AES key schedules to use */
    XTS128_CONTEXT xts;
    void (*stream) (const unsigned char *in,
                    unsigned char *out, size_t length,
                    const AES_KEY *key1, const AES_KEY *key2,
                    const unsigned char iv[16]);
} EVP_AES_XTS_CTX;

typedef struct {
    AES_KEY ks;                 /* AES key schedule to use */
    int key_set;                /* Set if key initialised */
    int iv_set;                 /* Set if an iv is set */
    int tag_set;                /* Set if tag is valid */
    int len_set;                /* Set if message length set */
    int L, M;                   /* L and M parameters from RFC3610 */
    CCM128_CONTEXT ccm;
    ccm128_f str;
} EVP_AES_CCM_CTX;

# define MAXBITCHUNK    ((size_t)1<<(sizeof(size_t)*8-4))

# ifdef VPAES_ASM
int vpaes_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int vpaes_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void vpaes_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void vpaes_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void vpaes_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);
# endif
# ifdef BSAES_ASM
void bsaes_cbc_encrypt(const unsigned char *in, unsigned char *out,
                       size_t length, const AES_KEY *key,
                       unsigned char ivec[16], int enc);
void bsaes_ctr32_encrypt_blocks(const unsigned char *in, unsigned char *out,
                                size_t len, const AES_KEY *key,
                                const unsigned char ivec[16]);
void bsaes_xts_encrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
void bsaes_xts_decrypt(const unsigned char *inp, unsigned char *out,
                       size_t len, const AES_KEY *key1,
                       const AES_KEY *key2, const unsigned char iv[16]);
# endif
# ifdef AES_CTR_ASM
void AES_ctr32_encrypt(const unsigned char *in, unsigned char *out,
                       size_t blocks, const AES_KEY *key,
                       const unsigned char ivec[AES_BLOCK_SIZE]);
# endif
# ifdef AES_XTS_ASM
void AES_xts_encrypt(const char *inp, char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
void AES_xts_decrypt(const char *inp, char *out, size_t len,
                     const AES_KEY *key1, const AES_KEY *key2,
                     const unsigned char iv[16]);
# endif

# if defined(AES_ASM) && !defined(I386_ONLY) && (  \
        ((defined(__i386) || defined(__i386__) || \
          defined(_M_IX86)) && defined(OPENSSL_IA32_SSE2)) || \
        defined(__x86_64) || defined(__x86_64__) || \
        defined(_M_AMD64) || defined(_M_X64) || \
        defined(__INTEL__) )

extern unsigned int OPENSSL_ia32cap_P[2];

#  ifdef VPAES_ASM
#   define VPAES_CAPABLE  (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  endif
#  ifdef BSAES_ASM
#   define BSAES_CAPABLE  (OPENSSL_ia32cap_P[1]&(1<<(41-32)))
#  endif
/*
 * AES-NI section
 */
#  define AESNI_CAPABLE   (OPENSSL_ia32cap_P[1]&(1<<(57-32)))

int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);
int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
                          AES_KEY *key);

void aesni_encrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);
void aesni_decrypt(const unsigned char *in, unsigned char *out,
                   const AES_KEY *key);

void aesni_ecb_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length, const AES_KEY *key, int enc);
void aesni_cbc_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key, unsigned char *ivec, int enc);

void aesni_ctr32_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key, const unsigned char *ivec);

void aesni_xts_encrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_xts_decrypt(const unsigned char *in,
                       unsigned char *out,
                       size_t length,
                       const AES_KEY *key1, const AES_KEY *key2,
                       const unsigned char iv[16]);

void aesni_ccm64_encrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

void aesni_ccm64_decrypt_blocks(const unsigned char *in,
                                unsigned char *out,
                                size_t blocks,
                                const void *key,
                                const unsigned char ivec[16],
                                unsigned char cmac[16]);

static int aesni_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                          const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc) {
        ret = aesni_set_decrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        dat->block = (block128_f) aesni_decrypt;
        dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
            (cbc128_f) aesni_cbc_encrypt : NULL;
    } else {
        ret = aesni_set_encrypt_key(key, ctx->key_len * 8, ctx->cipher_data);
        dat->block = (block128_f) aesni_encrypt;
        if (mode == EVP_CIPH_CBC_MODE)
            dat->stream.cbc = (cbc128_f) aesni_cbc_encrypt;
        else if (mode == EVP_CIPH_CTR_MODE)
            dat->stream.ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        else
            dat->stream.cbc = NULL;
    }

    if (ret < 0) {
        EVPerr(EVP_F_AESNI_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aesni_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    aesni_cbc_encrypt(in, out, len, ctx->cipher_data, ctx->iv, ctx->encrypt);

    return 1;
}

static int aesni_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len)
{
    size_t bl = ctx->cipher->block_size;

    if (len < bl)
        return 1;

    aesni_ecb_encrypt(in, out, len, ctx->cipher_data, ctx->encrypt);

    return 1;
}

# define aesni_ofb_cipher aes_ofb_cipher
static int aesni_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb_cipher aes_cfb_cipher
static int aesni_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define aesni_cfb8_cipher aes_cfb8_cipher
static int aesni_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_cfb1_cipher aes_cfb1_cipher
static int aesni_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                             const unsigned char *in, size_t len);

# define aesni_ctr_cipher aes_ctr_cipher
static int aesni_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f) aesni_encrypt);
        gctx->ctr = (ctr128_f) aesni_ctr32_encrypt_blocks;
        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

# define aesni_gcm_cipher aes_gcm_cipher
static int aesni_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;

    if (!iv && !key)
        return 1;

    if (key) {
        /* key_len is two AES keys */
        if (enc) {
            aesni_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1);
            xctx->xts.block1 = (block128_f) aesni_encrypt;
            xctx->stream = aesni_xts_encrypt;
        } else {
            aesni_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1);
            xctx->xts.block1 = (block128_f) aesni_decrypt;
            xctx->stream = aesni_xts_decrypt;
        }

        aesni_set_encrypt_key(key + ctx->key_len / 2,
                              ctx->key_len * 4, &xctx->ks2);
        xctx->xts.block2 = (block128_f) aesni_encrypt;

        xctx->xts.key1 = &xctx->ks1;
    }

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

# define aesni_xts_cipher aes_xts_cipher
static int aesni_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

static int aesni_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                              const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;

    if (!iv && !key)
        return 1;
    if (key) {
        aesni_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
        CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                           &cctx->ks, (block128_f) aesni_encrypt);
        cctx->str = enc ? (ccm128_f) aesni_ccm64_encrypt_blocks :
            (ccm128_f) aesni_ccm64_decrypt_blocks;
        cctx->key_set = 1;
    }
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

# define aesni_ccm_cipher aes_ccm_cipher
static int aesni_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                            const unsigned char *in, size_t len);

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_init_key,                 \
        aesni_##mode##_cipher,          \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,     \
        keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aesni_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aesni_##mode##_init_key,        \
        aesni_##mode##_cipher,          \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return AESNI_CAPABLE?&aesni_##keylen##_##mode:&aes_##keylen##_##mode; }

# else

# define BLOCK_CIPHER_generic(nid,keylen,blocksize,ivlen,nmode,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##nmode,blocksize,keylen/8,ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_init_key,                   \
        aes_##mode##_cipher,            \
        NULL,                           \
        sizeof(EVP_AES_KEY),            \
        NULL,NULL,NULL,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }

# define BLOCK_CIPHER_custom(nid,keylen,blocksize,ivlen,mode,MODE,flags) \
static const EVP_CIPHER aes_##keylen##_##mode = { \
        nid##_##keylen##_##mode,blocksize, \
        (EVP_CIPH_##MODE##_MODE==EVP_CIPH_XTS_MODE?2:1)*keylen/8, ivlen, \
        flags|EVP_CIPH_##MODE##_MODE,   \
        aes_##mode##_init_key,          \
        aes_##mode##_cipher,            \
        aes_##mode##_cleanup,           \
        sizeof(EVP_AES_##MODE##_CTX),   \
        NULL,NULL,aes_##mode##_ctrl,NULL }; \
const EVP_CIPHER *EVP_aes_##keylen##_##mode(void) \
{ return &aes_##keylen##_##mode; }
# endif

# define BLOCK_CIPHER_generic_pack(nid,keylen,flags) \
        BLOCK_CIPHER_generic(nid,keylen,16,16,cbc,cbc,CBC,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)    \
        BLOCK_CIPHER_generic(nid,keylen,16,0,ecb,ecb,ECB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)     \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ofb128,ofb,OFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)  \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb128,cfb,CFB,flags|EVP_CIPH_FLAG_DEFAULT_ASN1)  \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb1,cfb1,CFB,flags)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,cfb8,cfb8,CFB,flags)      \
        BLOCK_CIPHER_generic(nid,keylen,1,16,ctr,ctr,CTR,flags)
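
/*
 * Expansion note: each BLOCK_CIPHER_generic_pack(NID_aes, <keylen>, ...)
 * invocation further down expands BLOCK_CIPHER_generic once per mode and so
 * emits the public accessors EVP_aes_<keylen>_cbc(), _ecb(), _ofb(), _cfb(),
 * _cfb1(), _cfb8() and _ctr(). In the AES-NI build each accessor returns the
 * aesni_* cipher table when AESNI_CAPABLE is set and the generic aes_* table
 * otherwise; in the non-AES-NI build it always returns the aes_* table.
 */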

static int aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                        const unsigned char *iv, int enc)
{
    int ret, mode;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    mode = ctx->cipher->flags & EVP_CIPH_MODE;
    if ((mode == EVP_CIPH_ECB_MODE || mode == EVP_CIPH_CBC_MODE)
        && !enc)
# ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CBC_MODE) {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = (cbc128_f) bsaes_cbc_encrypt;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_decrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) vpaes_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            ret = AES_set_decrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) AES_decrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
        } else
# ifdef BSAES_CAPABLE
        if (BSAES_CAPABLE && mode == EVP_CIPH_CTR_MODE) {
            ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) AES_encrypt;
            dat->stream.ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
        } else
# endif
# ifdef VPAES_CAPABLE
        if (VPAES_CAPABLE) {
            ret = vpaes_set_encrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) vpaes_encrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) vpaes_cbc_encrypt : NULL;
        } else
# endif
        {
            ret = AES_set_encrypt_key(key, ctx->key_len * 8, &dat->ks);
            dat->block = (block128_f) AES_encrypt;
            dat->stream.cbc = mode == EVP_CIPH_CBC_MODE ?
                (cbc128_f) AES_cbc_encrypt : NULL;
# ifdef AES_CTR_ASM
            if (mode == EVP_CIPH_CTR_MODE)
                dat->stream.ctr = (ctr128_f) AES_ctr32_encrypt;
# endif
        }

    if (ret < 0) {
        EVPerr(EVP_F_AES_INIT_KEY, EVP_R_AES_KEY_SETUP_FAILED);
        return 0;
    }

    return 1;
}

static int aes_cbc_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (dat->stream.cbc)
        (*dat->stream.cbc) (in, out, len, &dat->ks, ctx->iv, ctx->encrypt);
    else if (ctx->encrypt)
        CRYPTO_cbc128_encrypt(in, out, len, &dat->ks, ctx->iv, dat->block);
    else
        CRYPTO_cbc128_decrypt(in, out, len, &dat->ks, ctx->iv, dat->block);

    return 1;
}

static int aes_ecb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    size_t bl = ctx->cipher->block_size;
    size_t i;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (len < bl)
        return 1;

    for (i = 0, len -= bl; i <= len; i += bl)
        (*dat->block) (in + i, out + i, &dat->ks);

    return 1;
}

static int aes_ofb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_ofb128_encrypt(in, out, len, &dat->ks,
                          ctx->iv, &ctx->num, dat->block);
    return 1;
}

static int aes_cfb_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_cfb128_encrypt(in, out, len, &dat->ks,
                          ctx->iv, &ctx->num, ctx->encrypt, dat->block);
    return 1;
}

static int aes_cfb8_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    CRYPTO_cfb128_8_encrypt(in, out, len, &dat->ks,
                            ctx->iv, &ctx->num, ctx->encrypt, dat->block);
    return 1;
}

static int aes_cfb1_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                           const unsigned char *in, size_t len)
{
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (ctx->flags & EVP_CIPH_FLAG_LENGTH_BITS) {
        CRYPTO_cfb128_1_encrypt(in, out, len, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);
        return 1;
    }

    while (len >= MAXBITCHUNK) {
        CRYPTO_cfb128_1_encrypt(in, out, MAXBITCHUNK * 8, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);
        len -= MAXBITCHUNK;
        /* advance past the chunk just processed */
        in += MAXBITCHUNK;
        out += MAXBITCHUNK;
    }
    if (len)
        CRYPTO_cfb128_1_encrypt(in, out, len * 8, &dat->ks,
                                ctx->iv, &ctx->num, ctx->encrypt, dat->block);

    return 1;
}

static int aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    unsigned int num = ctx->num;
    EVP_AES_KEY *dat = (EVP_AES_KEY *) ctx->cipher_data;

    if (dat->stream.ctr)
        CRYPTO_ctr128_encrypt_ctr32(in, out, len, &dat->ks,
                                    ctx->iv, ctx->buf, &num, dat->stream.ctr);
    else
        CRYPTO_ctr128_encrypt(in, out, len, &dat->ks,
                              ctx->iv, ctx->buf, &num, dat->block);
    ctx->num = (size_t)num;
    return 1;
}

BLOCK_CIPHER_generic_pack(NID_aes, 128, EVP_CIPH_FLAG_FIPS)
BLOCK_CIPHER_generic_pack(NID_aes, 192, EVP_CIPH_FLAG_FIPS)
BLOCK_CIPHER_generic_pack(NID_aes, 256, EVP_CIPH_FLAG_FIPS)

static int aes_gcm_cleanup(EVP_CIPHER_CTX *c)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;

    OPENSSL_cleanse(&gctx->gcm, sizeof(gctx->gcm));
    if (gctx->iv != c->iv)
        OPENSSL_free(gctx->iv);
    return 1;
}

/* increment counter (64-bit int) by 1 */
static void ctr64_inc(unsigned char *counter)
{
    int n = 8;
    unsigned char c;

    do {
        --n;
        c = counter[n];
        ++c;
        counter[n] = c;
        if (c)
            return;
    } while (n);
}
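
/*
 * ctr64_inc() is used by the EVP_CTRL_GCM_IV_GEN handler below to advance
 * the 64-bit invocation field held in the last 8 bytes of the IV between
 * records, so each generated IV is distinct for a fixed key.
 */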

static int aes_gcm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_GCM_CTX *gctx = c->cipher_data;

    switch (type) {
    case EVP_CTRL_INIT:
        gctx->key_set = 0;
        gctx->iv_set = 0;
        gctx->ivlen = c->cipher->iv_len;
        gctx->iv = c->iv;
        gctx->taglen = -1;
        gctx->iv_gen = 0;
        gctx->tls_aad_len = -1;
        return 1;

    case EVP_CTRL_GCM_SET_IVLEN:
        if (arg <= 0)
            return 0;
# ifdef OPENSSL_FIPS
        if (FIPS_module_mode() && !(c->flags & EVP_CIPH_FLAG_NON_FIPS_ALLOW)
            && arg < 12)
            return 0;
# endif
        /* Allocate memory for IV if needed */
        if ((arg > EVP_MAX_IV_LENGTH) && (arg > gctx->ivlen)) {
            if (gctx->iv != c->iv)
                OPENSSL_free(gctx->iv);
            gctx->iv = OPENSSL_malloc(arg);
            if (!gctx->iv)
                return 0;
        }
        gctx->ivlen = arg;
        return 1;

    case EVP_CTRL_GCM_SET_TAG:
        if (arg <= 0 || arg > 16 || c->encrypt)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->taglen = arg;
        return 1;

    case EVP_CTRL_GCM_GET_TAG:
        if (arg <= 0 || arg > 16 || !c->encrypt || gctx->taglen < 0)
            return 0;
        memcpy(ptr, c->buf, arg);
        return 1;

    case EVP_CTRL_GCM_SET_IV_FIXED:
        /* Special case: -1 length restores whole IV */
        if (arg == -1) {
            memcpy(gctx->iv, ptr, gctx->ivlen);
            gctx->iv_gen = 1;
            return 1;
        }
        /*
         * Fixed field must be at least 4 bytes and invocation field at least
         * 8.
         */
        if ((arg < 4) || (gctx->ivlen - arg) < 8)
            return 0;
        if (arg)
            memcpy(gctx->iv, ptr, arg);
        if (c->encrypt && RAND_bytes(gctx->iv + arg, gctx->ivlen - arg) <= 0)
            return 0;
        gctx->iv_gen = 1;
        return 1;

    case EVP_CTRL_GCM_IV_GEN:
        if (gctx->iv_gen == 0 || gctx->key_set == 0)
            return 0;
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        if (arg <= 0 || arg > gctx->ivlen)
            arg = gctx->ivlen;
        memcpy(ptr, gctx->iv + gctx->ivlen - arg, arg);
        /*
         * Invocation field will be at least 8 bytes in size and so no need
         * to check wrap around or increment more than last 8 bytes.
         */
        ctr64_inc(gctx->iv + gctx->ivlen - 8);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_GCM_SET_IV_INV:
        if (gctx->iv_gen == 0 || gctx->key_set == 0 || c->encrypt)
            return 0;
        memcpy(gctx->iv + gctx->ivlen - arg, ptr, arg);
        CRYPTO_gcm128_setiv(&gctx->gcm, gctx->iv, gctx->ivlen);
        gctx->iv_set = 1;
        return 1;

    case EVP_CTRL_AEAD_TLS1_AAD:
        /* Save the AAD for later use */
        if (arg != EVP_AEAD_TLS1_AAD_LEN)
            return 0;
        memcpy(c->buf, ptr, arg);
        gctx->tls_aad_len = arg;
        {
            unsigned int len = c->buf[arg - 2] << 8 | c->buf[arg - 1];
            /* Correct length for explicit IV */
            len -= EVP_GCM_TLS_EXPLICIT_IV_LEN;
            /* If decrypting correct for tag too */
            if (!c->encrypt)
                len -= EVP_GCM_TLS_TAG_LEN;
            c->buf[arg - 2] = len >> 8;
            c->buf[arg - 1] = len & 0xff;
        }
        /* Extra padding: tag appended to record */
        return EVP_GCM_TLS_TAG_LEN;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_GCM_CTX *gctx_out = out->cipher_data;
            if (gctx->gcm.key) {
                if (gctx->gcm.key != &gctx->ks)
                    return 0;
                gctx_out->gcm.key = &gctx_out->ks;
            }
            if (gctx->iv == c->iv)
                gctx_out->iv = out->iv;
            else {
                gctx_out->iv = OPENSSL_malloc(gctx->ivlen);
                if (!gctx_out->iv)
                    return 0;
                memcpy(gctx_out->iv, gctx->iv, gctx->ivlen);
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aes_gcm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;

    if (!iv && !key)
        return 1;
    if (key) {
        do {
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE) {
                AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) AES_encrypt);
                gctx->ctr = (ctr128_f) bsaes_ctr32_encrypt_blocks;
                break;
            } else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
                CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                                   (block128_f) vpaes_encrypt);
                gctx->ctr = NULL;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            AES_set_encrypt_key(key, ctx->key_len * 8, &gctx->ks);
            CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks,
                               (block128_f) AES_encrypt);
# ifdef AES_CTR_ASM
            gctx->ctr = (ctr128_f) AES_ctr32_encrypt;
# else
            gctx->ctr = NULL;
# endif
        } while (0);

        /*
         * If we have an iv can set it directly, otherwise use saved IV.
         */
        if (iv == NULL && gctx->iv_set)
            iv = gctx->iv;
        if (iv) {
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
            gctx->iv_set = 1;
        }
        gctx->key_set = 1;
    } else {
        /* If key set use IV, otherwise copy */
        if (gctx->key_set)
            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
        else
            memcpy(gctx->iv, iv, gctx->ivlen);
        gctx->iv_set = 1;
        gctx->iv_gen = 0;
    }
    return 1;
}

/*
 * Handle TLS GCM packet format. This consists of the last portion of the IV
 * followed by the payload and finally the tag. On encrypt generate IV,
 * encrypt payload and write the tag. On verify retrieve IV, decrypt payload
 * and verify tag.
 */

static int aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                              const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    int rv = -1;
    /* Encrypt/decrypt must be performed in place */
    if (out != in
        || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
        return -1;
    /*
     * Set IV from start of buffer or generate IV and write to start of
     * buffer.
     */
    if (EVP_CIPHER_CTX_ctrl(ctx, ctx->encrypt ?
                            EVP_CTRL_GCM_IV_GEN : EVP_CTRL_GCM_SET_IV_INV,
                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
        goto err;
    /* Use saved AAD */
    if (CRYPTO_gcm128_aad(&gctx->gcm, ctx->buf, gctx->tls_aad_len))
        goto err;
    /* Fix buffer and length to point to payload */
    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    if (ctx->encrypt) {
        /* Encrypt payload */
        if (gctx->ctr) {
            if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                            in, out, len, gctx->ctr))
                goto err;
        } else {
            if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                goto err;
        }
        out += len;
        /* Finally write tag */
        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
    } else {
        /* Decrypt */
        if (gctx->ctr) {
            if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                            in, out, len, gctx->ctr))
                goto err;
        } else {
            if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                goto err;
        }
        /* Retrieve tag */
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, EVP_GCM_TLS_TAG_LEN);
        /* If tag mismatch wipe buffer */
        if (CRYPTO_memcmp(ctx->buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
            OPENSSL_cleanse(out, len);
            goto err;
        }
        rv = len;
    }

 err:
    gctx->iv_set = 0;
    gctx->tls_aad_len = -1;
    return rv;
}

static int aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_GCM_CTX *gctx = ctx->cipher_data;
    /* If not set up, return error */
    if (!gctx->key_set)
        return -1;

    if (gctx->tls_aad_len >= 0)
        return aes_gcm_tls_cipher(ctx, out, in, len);

    if (!gctx->iv_set)
        return -1;
    if (in) {
        if (out == NULL) {
            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
                return -1;
        } else if (ctx->encrypt) {
            if (gctx->ctr) {
                if (CRYPTO_gcm128_encrypt_ctr32(&gctx->gcm,
                                                in, out, len, gctx->ctr))
                    return -1;
            } else {
                if (CRYPTO_gcm128_encrypt(&gctx->gcm, in, out, len))
                    return -1;
            }
        } else {
            if (gctx->ctr) {
                if (CRYPTO_gcm128_decrypt_ctr32(&gctx->gcm,
                                                in, out, len, gctx->ctr))
                    return -1;
            } else {
                if (CRYPTO_gcm128_decrypt(&gctx->gcm, in, out, len))
                    return -1;
            }
        }
        return len;
    } else {
        if (!ctx->encrypt) {
            if (gctx->taglen < 0)
                return -1;
            if (CRYPTO_gcm128_finish(&gctx->gcm, ctx->buf, gctx->taglen) != 0)
                return -1;
            gctx->iv_set = 0;
            return 0;
        }
        CRYPTO_gcm128_tag(&gctx->gcm, ctx->buf, 16);
        gctx->taglen = 16;
        /* Don't reuse the IV */
        gctx->iv_set = 0;
        return 0;
    }
}

# define CUSTOM_FLAGS    (EVP_CIPH_FLAG_DEFAULT_ASN1 \
                | EVP_CIPH_CUSTOM_IV | EVP_CIPH_FLAG_CUSTOM_CIPHER \
                | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, gcm, GCM,
                    EVP_CIPH_FLAG_FIPS | EVP_CIPH_FLAG_AEAD_CIPHER |
                    CUSTOM_FLAGS)
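
/*
 * A minimal usage sketch of the GCM path defined above, driving
 * aes_gcm_ctrl()/aes_gcm_cipher() through the public EVP calls. The buffer
 * names (key, iv, aad, pt, ct, tag) and their lengths are placeholders, and
 * error checking is omitted. Guarded by "#if 0" so it has no effect on the
 * build.
 */
#if 0
static void example_gcm_encrypt(const unsigned char *key,  /* 32 bytes */
                                const unsigned char *iv,   /* 12 bytes */
                                const unsigned char *aad, int aadlen,
                                const unsigned char *pt, int ptlen,
                                unsigned char *ct, unsigned char *tag)
{
    EVP_CIPHER_CTX ctx;
    int outl, tmplen;

    EVP_CIPHER_CTX_init(&ctx);
    /* Select the cipher first, then set the IV length before key/IV */
    EVP_EncryptInit_ex(&ctx, EVP_aes_256_gcm(), NULL, NULL, NULL);
    EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_SET_IVLEN, 12, NULL);
    EVP_EncryptInit_ex(&ctx, NULL, NULL, key, iv);
    /* AAD is supplied with a NULL output buffer (the out == NULL path) */
    EVP_EncryptUpdate(&ctx, NULL, &outl, aad, aadlen);
    EVP_EncryptUpdate(&ctx, ct, &outl, pt, ptlen);
    EVP_EncryptFinal_ex(&ctx, ct + outl, &tmplen);
    /* Retrieve the 16-byte tag computed in aes_gcm_cipher() */
    EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_GCM_GET_TAG, 16, tag);
    EVP_CIPHER_CTX_cleanup(&ctx);
}
#endif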

static int aes_xts_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_XTS_CTX *xctx = c->cipher_data;
    if (type == EVP_CTRL_COPY) {
        EVP_CIPHER_CTX *out = ptr;
        EVP_AES_XTS_CTX *xctx_out = out->cipher_data;
        if (xctx->xts.key1) {
            if (xctx->xts.key1 != &xctx->ks1)
                return 0;
            xctx_out->xts.key1 = &xctx_out->ks1;
        }
        if (xctx->xts.key2) {
            if (xctx->xts.key2 != &xctx->ks2)
                return 0;
            xctx_out->xts.key2 = &xctx_out->ks2;
        }
        return 1;
    } else if (type != EVP_CTRL_INIT)
        return -1;
    /* key1 and key2 are used as an indicator both key and IV are set */
    xctx->xts.key1 = NULL;
    xctx->xts.key2 = NULL;
    return 1;
}

static int aes_xts_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;

    if (key)
        do {
# ifdef AES_XTS_ASM
            xctx->stream = enc ? AES_xts_encrypt : AES_xts_decrypt;
# else
            xctx->stream = NULL;
# endif
            /* key_len is two AES keys */
# ifdef BSAES_CAPABLE
            if (BSAES_CAPABLE)
                xctx->stream = enc ? bsaes_xts_encrypt : bsaes_xts_decrypt;
            else
# endif
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                if (enc) {
                    vpaes_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1);
                    xctx->xts.block1 = (block128_f) vpaes_encrypt;
                } else {
                    vpaes_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1);
                    xctx->xts.block1 = (block128_f) vpaes_decrypt;
                }

                vpaes_set_encrypt_key(key + ctx->key_len / 2,
                                      ctx->key_len * 4, &xctx->ks2);
                xctx->xts.block2 = (block128_f) vpaes_encrypt;

                xctx->xts.key1 = &xctx->ks1;
                break;
            } else
# endif
                (void)0;        /* terminate potentially open 'else' */

            if (enc) {
                AES_set_encrypt_key(key, ctx->key_len * 4, &xctx->ks1);
                xctx->xts.block1 = (block128_f) AES_encrypt;
            } else {
                AES_set_decrypt_key(key, ctx->key_len * 4, &xctx->ks1);
                xctx->xts.block1 = (block128_f) AES_decrypt;
            }

            AES_set_encrypt_key(key + ctx->key_len / 2,
                                ctx->key_len * 4, &xctx->ks2);
            xctx->xts.block2 = (block128_f) AES_encrypt;

            xctx->xts.key1 = &xctx->ks1;
        } while (0);

    if (iv) {
        xctx->xts.key2 = &xctx->ks2;
        memcpy(ctx->iv, iv, 16);
    }

    return 1;
}

static int aes_xts_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_XTS_CTX *xctx = ctx->cipher_data;
    if (!xctx->xts.key1 || !xctx->xts.key2)
        return 0;
    if (!out || !in || len < AES_BLOCK_SIZE)
        return 0;
# ifdef OPENSSL_FIPS
    /* Requirement of SP800-38E */
    if (FIPS_module_mode() && !(ctx->flags & EVP_CIPH_FLAG_NON_FIPS_ALLOW) &&
        (len > (1UL << 20) * 16)) {
        EVPerr(EVP_F_AES_XTS_CIPHER, EVP_R_TOO_LARGE);
        return 0;
    }
# endif
    if (xctx->stream)
        (*xctx->stream) (in, out, len,
                         xctx->xts.key1, xctx->xts.key2, ctx->iv);
    else if (CRYPTO_xts128_encrypt(&xctx->xts, ctx->iv, in, out, len,
                                   ctx->encrypt))
        return 0;
    return 1;
}

# define aes_xts_cleanup NULL

# define XTS_FLAGS      (EVP_CIPH_FLAG_DEFAULT_ASN1 | EVP_CIPH_CUSTOM_IV \
                         | EVP_CIPH_ALWAYS_CALL_INIT | EVP_CIPH_CTRL_INIT \
                         | EVP_CIPH_CUSTOM_COPY)

BLOCK_CIPHER_custom(NID_aes, 128, 1, 16, xts, XTS,
                    EVP_CIPH_FLAG_FIPS | XTS_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 16, xts, XTS,
                    EVP_CIPH_FLAG_FIPS | XTS_FLAGS)
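
/*
 * A minimal usage sketch for the XTS ciphers defined above. Per the
 * BLOCK_CIPHER_custom key-length expression, the "key" argument is the
 * concatenation of two AES keys (64 bytes for EVP_aes_256_xts()), and the
 * 16-byte IV is the sector tweak. Buffer names are placeholders and error
 * checking is omitted. Guarded by "#if 0" so it has no effect on the build.
 */
#if 0
static void example_xts_encrypt(const unsigned char *key,   /* 64 bytes */
                                const unsigned char *tweak, /* 16 bytes */
                                const unsigned char *in, int inlen,
                                unsigned char *out)
{
    EVP_CIPHER_CTX ctx;
    int outl, tmplen;

    EVP_CIPHER_CTX_init(&ctx);
    /* XTS has a custom IV, so key and tweak can be passed in one init call */
    EVP_EncryptInit_ex(&ctx, EVP_aes_256_xts(), NULL, key, tweak);
    /* aes_xts_cipher() rejects inputs shorter than AES_BLOCK_SIZE */
    EVP_EncryptUpdate(&ctx, out, &outl, in, inlen);
    EVP_EncryptFinal_ex(&ctx, out + outl, &tmplen);
    EVP_CIPHER_CTX_cleanup(&ctx);
}
#endif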

static int aes_ccm_ctrl(EVP_CIPHER_CTX *c, int type, int arg, void *ptr)
{
    EVP_AES_CCM_CTX *cctx = c->cipher_data;
    switch (type) {
    case EVP_CTRL_INIT:
        cctx->key_set = 0;
        cctx->iv_set = 0;
        cctx->L = 8;
        cctx->M = 12;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_CCM_SET_IVLEN:
        arg = 15 - arg;
        /* fall through: set L from the nonce length */
    case EVP_CTRL_CCM_SET_L:
        if (arg < 2 || arg > 8)
            return 0;
        cctx->L = arg;
        return 1;

    case EVP_CTRL_CCM_SET_TAG:
        if ((arg & 1) || arg < 4 || arg > 16)
            return 0;
        if (c->encrypt && ptr)
            return 0;
        if (ptr) {
            cctx->tag_set = 1;
            memcpy(c->buf, ptr, arg);
        }
        cctx->M = arg;
        return 1;

    case EVP_CTRL_CCM_GET_TAG:
        if (!c->encrypt || !cctx->tag_set)
            return 0;
        if (!CRYPTO_ccm128_tag(&cctx->ccm, ptr, (size_t)arg))
            return 0;
        cctx->tag_set = 0;
        cctx->iv_set = 0;
        cctx->len_set = 0;
        return 1;

    case EVP_CTRL_COPY:
        {
            EVP_CIPHER_CTX *out = ptr;
            EVP_AES_CCM_CTX *cctx_out = out->cipher_data;
            if (cctx->ccm.key) {
                if (cctx->ccm.key != &cctx->ks)
                    return 0;
                cctx_out->ccm.key = &cctx_out->ks;
            }
            return 1;
        }

    default:
        return -1;
    }
}

static int aes_ccm_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                            const unsigned char *iv, int enc)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    if (!iv && !key)
        return 1;
    if (key)
        do {
# ifdef VPAES_CAPABLE
            if (VPAES_CAPABLE) {
                vpaes_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
                CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                                   &cctx->ks, (block128_f) vpaes_encrypt);
                cctx->str = NULL;
                cctx->key_set = 1;
                break;
            }
# endif
            AES_set_encrypt_key(key, ctx->key_len * 8, &cctx->ks);
            CRYPTO_ccm128_init(&cctx->ccm, cctx->M, cctx->L,
                               &cctx->ks, (block128_f) AES_encrypt);
            cctx->str = NULL;
            cctx->key_set = 1;
        } while (0);
    if (iv) {
        memcpy(ctx->iv, iv, 15 - cctx->L);
        cctx->iv_set = 1;
    }
    return 1;
}

static int aes_ccm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                          const unsigned char *in, size_t len)
{
    EVP_AES_CCM_CTX *cctx = ctx->cipher_data;
    CCM128_CONTEXT *ccm = &cctx->ccm;
    /* If not set up, return error */
    if (!cctx->iv_set && !cctx->key_set)
        return -1;
    if (!ctx->encrypt && !cctx->tag_set)
        return -1;
    if (!out) {
        if (!in) {
            if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
                return -1;
            cctx->len_set = 1;
            return len;
        }
        /* If have AAD need message length */
        if (!cctx->len_set && len)
            return -1;
        CRYPTO_ccm128_aad(ccm, in, len);
        return len;
    }
    /* EVP_*Final() doesn't return any data */
    if (!in)
        return 0;
    /* If not set length yet do it */
    if (!cctx->len_set) {
        if (CRYPTO_ccm128_setiv(ccm, ctx->iv, 15 - cctx->L, len))
            return -1;
        cctx->len_set = 1;
    }
    if (ctx->encrypt) {
        if (cctx->str ? CRYPTO_ccm128_encrypt_ccm64(ccm, in, out, len,
                                                    cctx->str) :
            CRYPTO_ccm128_encrypt(ccm, in, out, len))
            return -1;
        cctx->tag_set = 1;
        return len;
    } else {
        int rv = -1;
        if (cctx->str ? !CRYPTO_ccm128_decrypt_ccm64(ccm, in, out, len,
                                                     cctx->str) :
            !CRYPTO_ccm128_decrypt(ccm, in, out, len)) {
            unsigned char tag[16];
            if (CRYPTO_ccm128_tag(ccm, tag, cctx->M)) {
                if (!CRYPTO_memcmp(tag, ctx->buf, cctx->M))
                    rv = len;
            }
        }
        if (rv == -1)
            OPENSSL_cleanse(out, len);
        cctx->iv_set = 0;
        cctx->tag_set = 0;
        cctx->len_set = 0;
        return rv;
    }
}

# define aes_ccm_cleanup NULL

BLOCK_CIPHER_custom(NID_aes, 128, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 192, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
BLOCK_CIPHER_custom(NID_aes, 256, 1, 12, ccm, CCM,
                    EVP_CIPH_FLAG_FIPS | CUSTOM_FLAGS)
# endif
#endif
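
/*
 * A minimal usage sketch of the CCM calling sequence expected by
 * aes_ccm_cipher() above: nonce and tag lengths are configured first, and
 * the total plaintext length must be supplied (in == NULL, out == NULL)
 * before any AAD is passed. Buffer names and lengths are placeholders and
 * error checking is omitted. Guarded by "#if 0" so it has no effect on the
 * build.
 */
#if 0
static void example_ccm_encrypt(const unsigned char *key,   /* 16 bytes */
                                const unsigned char *nonce, /* 7 bytes => L = 8 */
                                const unsigned char *aad, int aadlen,
                                const unsigned char *pt, int ptlen,
                                unsigned char *ct, unsigned char *tag)
{
    EVP_CIPHER_CTX ctx;
    int outl;

    EVP_CIPHER_CTX_init(&ctx);
    EVP_EncryptInit_ex(&ctx, EVP_aes_128_ccm(), NULL, NULL, NULL);
    EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_CCM_SET_IVLEN, 7, NULL);
    EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_CCM_SET_TAG, 12, NULL); /* M = 12 */
    EVP_EncryptInit_ex(&ctx, NULL, NULL, key, nonce);
    /* Total message length first: in == NULL, out == NULL */
    EVP_EncryptUpdate(&ctx, NULL, &outl, NULL, ptlen);
    /* Then the AAD: out == NULL */
    EVP_EncryptUpdate(&ctx, NULL, &outl, aad, aadlen);
    /* Then the payload in a single call */
    EVP_EncryptUpdate(&ctx, ct, &outl, pt, ptlen);
    EVP_CIPHER_CTX_ctrl(&ctx, EVP_CTRL_CCM_GET_TAG, 12, tag);
    EVP_CIPHER_CTX_cleanup(&ctx);
}
#endif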