2 *************************************************************************
4 * 5F., No.36, Taiyuan St., Jhubei City,
8 * (c) Copyright 2002-2007, Ralink Technology, Inc.
10 * This program is free software; you can redistribute it and/or modify *
11 * it under the terms of the GNU General Public License as published by *
12 * the Free Software Foundation; either version 2 of the License, or *
13 * (at your option) any later version. *
15 * This program is distributed in the hope that it will be useful, *
16 * but WITHOUT ANY WARRANTY; without even the implied warranty of *
17 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
18 * GNU General Public License for more details. *
20 * You should have received a copy of the GNU General Public License *
21 * along with this program; if not, write to the *
22 * Free Software Foundation, Inc., *
23 * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. *
25 *************************************************************************
34 -------- ---------- ----------------------------------------------
35 Paul Wu 02-25-02 Initial
38 #include "../rt_config.h"
/* NOTE(review): the three lines below are the surviving members of the
 * aes_context structure; the enclosing "typedef struct { ... } aes_context;"
 * lines were lost in the garbled listing (each remaining line still carries
 * its fused original line number).  Restore from the original driver source
 * before compiling.  erk/drk sizes (64 words) cover up to 14 rounds + 1 of
 * 4-word round keys with slack, per the set-key routine below. */
43 UINT32 erk[64]; /* encryption round keys */
44 UINT32 drk[64]; /* decryption round keys */
45 int nr; /* number of rounds */
49 /*****************************/
50 /******** SBOX Table *********/
51 /*****************************/
53 UCHAR SboxTable[256] =
55 0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5,
56 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76,
57 0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0,
58 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0,
59 0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc,
60 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15,
61 0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a,
62 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75,
63 0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0,
64 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84,
65 0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b,
66 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf,
67 0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85,
68 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8,
69 0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5,
70 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2,
71 0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17,
72 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73,
73 0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88,
74 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb,
75 0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c,
76 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79,
77 0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9,
78 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08,
79 0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6,
80 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a,
81 0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e,
82 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e,
83 0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94,
84 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf,
85 0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68,
86 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16
111 out[i] = a[i] ^ b[i];
118 return SboxTable[(int)a];
/* Derive the next 16-byte AES-128 round key in place from the current one
 * (on-the-fly key schedule used by aes128k128d).
 * NOTE(review): the function signature, local declarations, and closing
 * brace were lost in the garbled listing and are reconstructed here --
 * confirm against the original source. */
void next_key(unsigned char *key, int round)
{
	unsigned char rcon;
	unsigned char sbox_key[4];
	/* round constants; entries 10/11 repeat 0x36 as in the original table */
	unsigned char rcon_table[12] =
	{
		0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80,
		0x1b, 0x36, 0x36, 0x36
	};

	/* RotWord + SubWord of the last key column */
	sbox_key[0] = RTMPCkipSbox(key[13]);
	sbox_key[1] = RTMPCkipSbox(key[14]);
	sbox_key[2] = RTMPCkipSbox(key[15]);
	sbox_key[3] = RTMPCkipSbox(key[12]);

	rcon = rcon_table[round];

	/* first column: w0 ^= SubWord(RotWord(w3)) ^ Rcon */
	xor_32(&key[0], sbox_key, &key[0]);
	key[0] = key[0] ^ rcon;

	/* remaining columns chain off the previous one */
	xor_32(&key[4], &key[0], &key[4]);
	xor_32(&key[8], &key[4], &key[8]);
	xor_32(&key[12], &key[8], &key[12]);
}
154 for (i=0; i< 16; i++)
156 out[i] = RTMPCkipSbox(in[i]);
/************************************/
/* bitwise_xor()                    */
/* A 128 bit, bitwise exclusive or  */
/************************************/
/* out[i] = ina[i] ^ inb[i] for the 16 bytes of an AES block.
 * NOTE(review): loop/braces restored around the surviving body line. */
void bitwise_xor(unsigned char *ina, unsigned char *inb, unsigned char *out)
{
	int i;

	for (i = 0; i < 16; i++)
	{
		out[i] = ina[i] ^ inb[i];
	}
}
/* MixColumns on one 4-byte column, computed byte-wise:
 *   out = (2*in) ^ (3*rotl8(in)) ^ rotl16(in) ^ rotl24(in)
 * where xtime (multiply by 2 in GF(2^8)) is done via shift-left plus a
 * conditional 0x1b reduction (add1b).
 * NOTE(review): declarations, the add1b loop, and the rotl/rotr tails were
 * lost in the garbled listing and are reconstructed from the reference
 * driver source -- confirm before relying on this. */
void mix_column(unsigned char *in, unsigned char *out)
{
	int i;
	unsigned char add1b[4];
	unsigned char add1bf7[4];
	unsigned char rotl[4];
	unsigned char swap_halfs[4];
	unsigned char andf7[4];
	unsigned char rotr[4];
	unsigned char temp[4];
	unsigned char tempb[4];

	/* 0x1b reduction mask for bytes whose top bit is set */
	for (i = 0; i < 4; i++)
	{
		if ((in[i] & 0x80) == 0x80)
			add1b[i] = 0x1b;
		else
			add1b[i] = 0x00;
	}

	swap_halfs[0] = in[2]; /* Swap halfs */
	swap_halfs[1] = in[3];
	swap_halfs[2] = in[0];
	swap_halfs[3] = in[1];

	rotl[0] = in[3]; /* Rotate left 8 bits */
	rotl[1] = in[0];
	rotl[2] = in[1];
	rotl[3] = in[2];

	andf7[0] = in[0] & 0x7f;
	andf7[1] = in[1] & 0x7f;
	andf7[2] = in[2] & 0x7f;
	andf7[3] = in[3] & 0x7f;

	for (i = 3; i > 0; i--) /* logical shift left 1 bit */
	{
		andf7[i] = andf7[i] << 1;
		if ((andf7[i - 1] & 0x80) == 0x80)
		{
			andf7[i] = (andf7[i] | 0x01);
		}
	}
	andf7[0] = andf7[0] << 1;
	andf7[0] = andf7[0] & 0xfe;

	/* add1bf7 = xtime(in) = (in << 1) ^ (0x1b if high bit was set) */
	xor_32(add1b, andf7, add1bf7);

	xor_32(in, add1bf7, rotr);

	temp[0] = rotr[0]; /* Rotate right 8 bits */
	rotr[0] = rotr[1];
	rotr[1] = rotr[2];
	rotr[2] = rotr[3];
	rotr[3] = temp[0];

	xor_32(add1bf7, rotr, temp);
	xor_32(swap_halfs, rotl, tempb);
	xor_32(temp, tempb, out);
}
/************************************************/
/* construct_mic_header1()                      */
/* Builds the first MIC header block from       */
/* the MPDU header                              */
/************************************************/
/* First 16 bytes of CCMP AAD: 2-byte (big-endian) AAD length followed by
 * the masked Frame Control and the A1/A2 addresses (mpdu[4..15]).
 * NOTE(review): the header_length/mpdu parameters and braces were lost in
 * the garbled listing and are reconstructed -- confirm against the
 * original source. */
void construct_mic_header1(
	unsigned char *mic_header1,
	int header_length,
	unsigned char *mpdu)
{
	mic_header1[0] = (unsigned char)((header_length - 2) / 256);
	mic_header1[1] = (unsigned char)((header_length - 2) % 256);
	mic_header1[2] = mpdu[0] & 0xcf; /* Mute CF poll & CF ack bits */
	mic_header1[3] = mpdu[1] & 0xc7; /* Mute retry, more data and pwr mgt bits */
	mic_header1[4] = mpdu[4]; /* A1 */
	mic_header1[5] = mpdu[5];
	mic_header1[6] = mpdu[6];
	mic_header1[7] = mpdu[7];
	mic_header1[8] = mpdu[8];
	mic_header1[9] = mpdu[9];
	mic_header1[10] = mpdu[10]; /* A2 */
	mic_header1[11] = mpdu[11];
	mic_header1[12] = mpdu[12];
	mic_header1[13] = mpdu[13];
	mic_header1[14] = mpdu[14];
	mic_header1[15] = mpdu[15];
}
/************************************************/
/* construct_mic_header2()                      */
/* Builds the last MIC header block from        */
/* the MPDU header                              */
/************************************************/
/* Second 16 bytes of CCMP AAD: A3, masked Sequence Control, and -- when
 * present -- A4 and/or the masked QoS Control field; unused bytes are zero.
 * NOTE(review): parameters/braces and the loop index declaration were lost
 * in the garbled listing and are reconstructed.  The original's bitwise
 * "&" on the boolean test was tightened to "&&" (identical for the 0/1
 * flags used here). */
void construct_mic_header2(
	unsigned char *mic_header2,
	unsigned char *mpdu,
	int a4_exists,
	int qc_exists)
{
	int i;

	for (i = 0; i < 16; i++) mic_header2[i] = 0x00;

	mic_header2[0] = mpdu[16]; /* A3 */
	mic_header2[1] = mpdu[17];
	mic_header2[2] = mpdu[18];
	mic_header2[3] = mpdu[19];
	mic_header2[4] = mpdu[20];
	mic_header2[5] = mpdu[21];

	/* In Sequence Control field, mute sequence number bits (12-bit) */
	mic_header2[6] = mpdu[22] & 0x0f; /* SC */
	mic_header2[7] = 0x00; /* mpdu[23]; */

	if ((!qc_exists) && a4_exists)
	{
		for (i = 0; i < 6; i++) mic_header2[8 + i] = mpdu[24 + i]; /* A4 */
	}

	if (qc_exists && (!a4_exists))
	{
		mic_header2[8] = mpdu[24] & 0x0f; /* mute bits 15 - 4 */
		mic_header2[9] = mpdu[25] & 0x00; /* deliberately forced to 0 */
	}

	if (qc_exists && a4_exists)
	{
		for (i = 0; i < 6; i++) mic_header2[8 + i] = mpdu[24 + i]; /* A4 */

		mic_header2[14] = mpdu[30] & 0x0f;
		mic_header2[15] = mpdu[31] & 0x00; /* deliberately forced to 0 */
	}
}
/************************************************/
/* construct_mic_iv()                           */
/* Builds the MIC IV from header fields and PN  */
/************************************************/
/* CCM B0 block: flags byte 0x59, masked QoS priority (or 0), A2, the
 * 48-bit PN (byte order depends on CONSISTENT_PN_ORDER), and the 16-bit
 * big-endian payload length.
 * NOTE(review): the qc/a4/mpdu parameters, the mic_iv[0]/mic_iv[1]=0 lines,
 * and the "#else"/"#endif" were lost in the garbled listing and are
 * reconstructed.  Two dead assignments to the loop index
 * (i = payload_length / 256; i = payload_length % 256;) in the original
 * were removed -- they had no effect. */
void construct_mic_iv(
	unsigned char *mic_iv,
	int qc_exists,
	int a4_exists,
	unsigned char *mpdu,
	unsigned int payload_length,
	unsigned char *pn_vector)
{
	int i;

	mic_iv[0] = 0x59; /* flags: Adata, M=8, L=2 */

	if (qc_exists && a4_exists)
		mic_iv[1] = mpdu[30] & 0x0f; /* QoS_TC */
	if (qc_exists && !a4_exists)
		mic_iv[1] = mpdu[24] & 0x0f; /* mute bits 7-4 */
	if (!qc_exists)
		mic_iv[1] = 0x00;

	for (i = 2; i < 8; i++)
		mic_iv[i] = mpdu[i + 8]; /* mic_iv[2:7] = A2[0:5] = mpdu[10:15] */
#ifdef CONSISTENT_PN_ORDER
	for (i = 8; i < 14; i++)
		mic_iv[i] = pn_vector[i - 8]; /* mic_iv[8:13] = PN[0:5] */
#else
	for (i = 8; i < 14; i++)
		mic_iv[i] = pn_vector[13 - i]; /* mic_iv[8:13] = PN[5:0] */
#endif
	mic_iv[14] = (unsigned char) (payload_length / 256);
	mic_iv[15] = (unsigned char) (payload_length % 256);
}
/****************************************/
/* aes128k128d()                        */
/* Performs a 128 bit AES encrypt with  */
/* 128 bit data                         */
/****************************************/
/* Encrypts one 16-byte block with AES-128 using the byte-oriented helpers
 * above; round keys are derived on the fly via next_key() from a working
 * copy of `key`, so the caller's key buffer is left untouched.
 * NOTE(review): local declarations, the round-0 branch, and braces were
 * lost in the garbled listing and are reconstructed from the reference
 * driver source. */
void aes128k128d(unsigned char *key, unsigned char *data, unsigned char *ciphertext)
{
	int round;
	int i;
	unsigned char intermediatea[16];
	unsigned char intermediateb[16];
	unsigned char round_key[16];

	for (i = 0; i < 16; i++) round_key[i] = key[i];

	for (round = 0; round < 11; round++)
	{
		if (round == 0)
		{
			/* initial AddRoundKey with the cipher key itself */
			xor_128(round_key, data, ciphertext);
			next_key(round_key, round);
		}
		else if (round == 10)
		{
			/* final round: SubBytes + ShiftRows + AddRoundKey (no MixColumns) */
			byte_sub(ciphertext, intermediatea);
			shift_row(intermediatea, intermediateb);
			xor_128(intermediateb, round_key, ciphertext);
		}
		else /* rounds 1 - 9 */
		{
			byte_sub(ciphertext, intermediatea);
			shift_row(intermediatea, intermediateb);
			mix_column(&intermediateb[0], &intermediatea[0]);
			mix_column(&intermediateb[4], &intermediatea[4]);
			mix_column(&intermediateb[8], &intermediatea[8]);
			mix_column(&intermediateb[12], &intermediatea[12]);
			xor_128(intermediatea, round_key, ciphertext);
			next_key(round_key, round);
		}
	}
}
/* CCM counter (Ai) block: flags byte 0x01 (L=2), masked QoS priority (or 0),
 * A2, the 48-bit PN (order depends on CONSISTENT_PN_ORDER), and the 16-bit
 * big-endian counter value c.
 * NOTE(review): the a4_exists/qc_exists/mpdu/c parameters and braces were
 * lost in the garbled listing and are reconstructed -- confirm the
 * parameter order against the original source. */
void construct_ctr_preload(
	unsigned char *ctr_preload,
	int a4_exists,
	int qc_exists,
	unsigned char *mpdu,
	unsigned char *pn_vector,
	int c)
{
	int i;

	for (i = 0; i < 16; i++) ctr_preload[i] = 0x00;

	ctr_preload[0] = 0x01; /* flag */
	if (qc_exists && a4_exists) ctr_preload[1] = mpdu[30] & 0x0f; /* QoC_Control */
	if (qc_exists && !a4_exists) ctr_preload[1] = mpdu[24] & 0x0f;

	for (i = 2; i < 8; i++)
		ctr_preload[i] = mpdu[i + 8]; /* ctr_preload[2:7] = A2[0:5] = mpdu[10:15] */
#ifdef CONSISTENT_PN_ORDER
	for (i = 8; i < 14; i++)
		ctr_preload[i] = pn_vector[i - 8]; /* ctr_preload[8:13] = PN[0:5] */
#else
	for (i = 8; i < 14; i++)
		ctr_preload[i] = pn_vector[13 - i]; /* ctr_preload[8:13] = PN[5:0] */
#endif
	ctr_preload[14] = (unsigned char) (c / 256); /* Ctr */
	ctr_preload[15] = (unsigned char) (c % 256);
}
/* NOTE(review): this whole function is a garbled listing fragment -- every
 * surviving line still carries its fused original line number, and many
 * lines (local declarations, fc0/fc1/to_ds/HeaderLen setup, braces,
 * return statements, the argument lists of the construct_* calls) were
 * dropped.  It must be restored from the original driver source before it
 * can compile.  The visible flow is software CCMP decryption: parse the
 * Frame Control byte for QoS/A4 presence, read KeyID and the 48-bit PN
 * from the CCMP header, CTR-decrypt the payload in place (shifting it
 * back over the 8-byte CCMP header), extract the trailing 8-byte MIC,
 * then recompute the CBC-MAC over B0 + the two AAD blocks + payload and
 * compare it with the received MIC. */
450 BOOLEAN RTMPSoftDecryptAES(
451 IN PRTMP_ADAPTER pAd,
453 IN ULONG DataByteCnt,
454 IN PCIPHER_KEY pWpaKey)
461 UINT payload_remainder;
474 UCHAR ctr_preload[16];
475 UCHAR chain_buffer[16];
476 UCHAR padded_buffer[16];
478 UCHAR mic_header1[16];
479 UCHAR mic_header2[16];
/* NOTE(review): presumably guarded by a big-endian #ifdef in the original */
484 RTMPFrameEndianChange(pAd, (PUCHAR)pData, DIR_READ, FALSE);
490 fc = *((PUSHORT)pData);
492 frame_type = ((fc0 >> 2) & 0x03);
493 frame_subtype = ((fc0 >> 4) & 0x0f);
495 from_ds = (fc1 & 0x2) >> 1;
498 a4_exists = (from_ds & to_ds);
499 qc_exists = ((frame_subtype == 0x08) || /* Assumed QoS subtypes */
500 (frame_subtype == 0x09) || /* Likely to change. */
501 (frame_subtype == 0x0a) ||
502 (frame_subtype == 0x0b)
509 KeyID = *((PUCHAR)(pData+ HeaderLen + 3));
512 if (pWpaKey[KeyID].KeyLen == 0)
514 DBGPRINT(RT_DEBUG_TRACE, ("RTMPSoftDecryptAES failed!(KeyID[%d] Length can not be 0)\n", KeyID));
/* 48-bit packet number from the CCMP header (PN0,PN1 then PN2..PN5) */
518 PN[0] = *(pData+ HeaderLen);
519 PN[1] = *(pData+ HeaderLen + 1);
520 PN[2] = *(pData+ HeaderLen + 4);
521 PN[3] = *(pData+ HeaderLen + 5);
522 PN[4] = *(pData+ HeaderLen + 6);
523 PN[5] = *(pData+ HeaderLen + 7);
525 payload_len = DataByteCnt - HeaderLen - 8 - 8; // 8 bytes for CCMP header , 8 bytes for MIC
526 payload_remainder = (payload_len) % 16;
527 num_blocks = (payload_len) / 16;
531 // Find start of payload
532 payload_index = HeaderLen + 8; //IV+EIV
/* CTR-mode decrypt of each full 16-byte block, written back 8 bytes
 * earlier so the CCMP header is squeezed out of the frame */
534 for (i=0; i< num_blocks; i++)
536 construct_ctr_preload(ctr_preload,
543 aes128k128d(pWpaKey[KeyID].Key, ctr_preload, aes_out);
545 bitwise_xor(aes_out, pData + payload_index, chain_buffer);
546 NdisMoveMemory(pData + payload_index - 8, chain_buffer, 16);
551 // If there is a short final block, then pad it
552 // encrypt it and copy the unpadded part back
554 if (payload_remainder > 0)
556 construct_ctr_preload(ctr_preload,
563 NdisZeroMemory(padded_buffer, 16);
564 NdisMoveMemory(padded_buffer, pData + payload_index, payload_remainder);
566 aes128k128d(pWpaKey[KeyID].Key, ctr_preload, aes_out);
568 bitwise_xor(aes_out, padded_buffer, chain_buffer);
569 NdisMoveMemory(pData + payload_index - 8, chain_buffer, payload_remainder);
570 payload_index += payload_remainder;
/* decrypt the trailing 8-byte MIC (counter block 0) */
576 construct_ctr_preload(ctr_preload,
582 NdisZeroMemory(padded_buffer, 16);
583 NdisMoveMemory(padded_buffer, pData + payload_index, 8);
585 aes128k128d(pWpaKey[KeyID].Key, ctr_preload, aes_out);
587 bitwise_xor(aes_out, padded_buffer, chain_buffer);
589 NdisMoveMemory(TrailMIC, chain_buffer, 8);
596 //Force the protected frame bit on
597 *(pData + 1) = *(pData + 1) | 0x40;
599 // Find start of payload
600 // Because the CCMP header has been removed
601 payload_index = HeaderLen;
/* recompute the CBC-MAC: IV block, then the two AAD header blocks */
611 construct_mic_header1(
616 construct_mic_header2(
622 aes128k128d(pWpaKey[KeyID].Key, mic_iv, aes_out);
623 bitwise_xor(aes_out, mic_header1, chain_buffer);
624 aes128k128d(pWpaKey[KeyID].Key, chain_buffer, aes_out);
625 bitwise_xor(aes_out, mic_header2, chain_buffer);
626 aes128k128d(pWpaKey[KeyID].Key, chain_buffer, aes_out);
628 // iterate through each 16 byte payload block
629 for (i = 0; i < num_blocks; i++)
631 bitwise_xor(aes_out, pData + payload_index, chain_buffer);
633 aes128k128d(pWpaKey[KeyID].Key, chain_buffer, aes_out);
636 // Add on the final payload block if it needs padding
637 if (payload_remainder > 0)
639 NdisZeroMemory(padded_buffer, 16);
640 NdisMoveMemory(padded_buffer, pData + payload_index, payload_remainder);
642 bitwise_xor(aes_out, padded_buffer, chain_buffer);
643 aes128k128d(pWpaKey[KeyID].Key, chain_buffer, aes_out);
646 // aes_out contains padded mic, discard most significant
647 // 8 bytes to generate 64 bit MIC
648 for (i = 0 ; i < 8; i++) MIC[i] = aes_out[i];
650 if (!NdisEqualMemory(MIC, TrailMIC, 8))
652 DBGPRINT(RT_DEBUG_ERROR, ("RTMPSoftDecryptAES, MIC Error !\n")); //MIC error.
657 RTMPFrameEndianChange(pAd, (PUCHAR)pData, DIR_READ, FALSE);
663 /* ========================= AES En/Decryption ========================== */
665 #define uint8 unsigned char
669 #define uint32 unsigned int
673 static uint32 FSb[256] =
675 0x63, 0x7C, 0x77, 0x7B, 0xF2, 0x6B, 0x6F, 0xC5,
676 0x30, 0x01, 0x67, 0x2B, 0xFE, 0xD7, 0xAB, 0x76,
677 0xCA, 0x82, 0xC9, 0x7D, 0xFA, 0x59, 0x47, 0xF0,
678 0xAD, 0xD4, 0xA2, 0xAF, 0x9C, 0xA4, 0x72, 0xC0,
679 0xB7, 0xFD, 0x93, 0x26, 0x36, 0x3F, 0xF7, 0xCC,
680 0x34, 0xA5, 0xE5, 0xF1, 0x71, 0xD8, 0x31, 0x15,
681 0x04, 0xC7, 0x23, 0xC3, 0x18, 0x96, 0x05, 0x9A,
682 0x07, 0x12, 0x80, 0xE2, 0xEB, 0x27, 0xB2, 0x75,
683 0x09, 0x83, 0x2C, 0x1A, 0x1B, 0x6E, 0x5A, 0xA0,
684 0x52, 0x3B, 0xD6, 0xB3, 0x29, 0xE3, 0x2F, 0x84,
685 0x53, 0xD1, 0x00, 0xED, 0x20, 0xFC, 0xB1, 0x5B,
686 0x6A, 0xCB, 0xBE, 0x39, 0x4A, 0x4C, 0x58, 0xCF,
687 0xD0, 0xEF, 0xAA, 0xFB, 0x43, 0x4D, 0x33, 0x85,
688 0x45, 0xF9, 0x02, 0x7F, 0x50, 0x3C, 0x9F, 0xA8,
689 0x51, 0xA3, 0x40, 0x8F, 0x92, 0x9D, 0x38, 0xF5,
690 0xBC, 0xB6, 0xDA, 0x21, 0x10, 0xFF, 0xF3, 0xD2,
691 0xCD, 0x0C, 0x13, 0xEC, 0x5F, 0x97, 0x44, 0x17,
692 0xC4, 0xA7, 0x7E, 0x3D, 0x64, 0x5D, 0x19, 0x73,
693 0x60, 0x81, 0x4F, 0xDC, 0x22, 0x2A, 0x90, 0x88,
694 0x46, 0xEE, 0xB8, 0x14, 0xDE, 0x5E, 0x0B, 0xDB,
695 0xE0, 0x32, 0x3A, 0x0A, 0x49, 0x06, 0x24, 0x5C,
696 0xC2, 0xD3, 0xAC, 0x62, 0x91, 0x95, 0xE4, 0x79,
697 0xE7, 0xC8, 0x37, 0x6D, 0x8D, 0xD5, 0x4E, 0xA9,
698 0x6C, 0x56, 0xF4, 0xEA, 0x65, 0x7A, 0xAE, 0x08,
699 0xBA, 0x78, 0x25, 0x2E, 0x1C, 0xA6, 0xB4, 0xC6,
700 0xE8, 0xDD, 0x74, 0x1F, 0x4B, 0xBD, 0x8B, 0x8A,
701 0x70, 0x3E, 0xB5, 0x66, 0x48, 0x03, 0xF6, 0x0E,
702 0x61, 0x35, 0x57, 0xB9, 0x86, 0xC1, 0x1D, 0x9E,
703 0xE1, 0xF8, 0x98, 0x11, 0x69, 0xD9, 0x8E, 0x94,
704 0x9B, 0x1E, 0x87, 0xE9, 0xCE, 0x55, 0x28, 0xDF,
705 0x8C, 0xA1, 0x89, 0x0D, 0xBF, 0xE6, 0x42, 0x68,
706 0x41, 0x99, 0x2D, 0x0F, 0xB0, 0x54, 0xBB, 0x16
712 V(C6,63,63,A5), V(F8,7C,7C,84), V(EE,77,77,99), V(F6,7B,7B,8D), \
713 V(FF,F2,F2,0D), V(D6,6B,6B,BD), V(DE,6F,6F,B1), V(91,C5,C5,54), \
714 V(60,30,30,50), V(02,01,01,03), V(CE,67,67,A9), V(56,2B,2B,7D), \
715 V(E7,FE,FE,19), V(B5,D7,D7,62), V(4D,AB,AB,E6), V(EC,76,76,9A), \
716 V(8F,CA,CA,45), V(1F,82,82,9D), V(89,C9,C9,40), V(FA,7D,7D,87), \
717 V(EF,FA,FA,15), V(B2,59,59,EB), V(8E,47,47,C9), V(FB,F0,F0,0B), \
718 V(41,AD,AD,EC), V(B3,D4,D4,67), V(5F,A2,A2,FD), V(45,AF,AF,EA), \
719 V(23,9C,9C,BF), V(53,A4,A4,F7), V(E4,72,72,96), V(9B,C0,C0,5B), \
720 V(75,B7,B7,C2), V(E1,FD,FD,1C), V(3D,93,93,AE), V(4C,26,26,6A), \
721 V(6C,36,36,5A), V(7E,3F,3F,41), V(F5,F7,F7,02), V(83,CC,CC,4F), \
722 V(68,34,34,5C), V(51,A5,A5,F4), V(D1,E5,E5,34), V(F9,F1,F1,08), \
723 V(E2,71,71,93), V(AB,D8,D8,73), V(62,31,31,53), V(2A,15,15,3F), \
724 V(08,04,04,0C), V(95,C7,C7,52), V(46,23,23,65), V(9D,C3,C3,5E), \
725 V(30,18,18,28), V(37,96,96,A1), V(0A,05,05,0F), V(2F,9A,9A,B5), \
726 V(0E,07,07,09), V(24,12,12,36), V(1B,80,80,9B), V(DF,E2,E2,3D), \
727 V(CD,EB,EB,26), V(4E,27,27,69), V(7F,B2,B2,CD), V(EA,75,75,9F), \
728 V(12,09,09,1B), V(1D,83,83,9E), V(58,2C,2C,74), V(34,1A,1A,2E), \
729 V(36,1B,1B,2D), V(DC,6E,6E,B2), V(B4,5A,5A,EE), V(5B,A0,A0,FB), \
730 V(A4,52,52,F6), V(76,3B,3B,4D), V(B7,D6,D6,61), V(7D,B3,B3,CE), \
731 V(52,29,29,7B), V(DD,E3,E3,3E), V(5E,2F,2F,71), V(13,84,84,97), \
732 V(A6,53,53,F5), V(B9,D1,D1,68), V(00,00,00,00), V(C1,ED,ED,2C), \
733 V(40,20,20,60), V(E3,FC,FC,1F), V(79,B1,B1,C8), V(B6,5B,5B,ED), \
734 V(D4,6A,6A,BE), V(8D,CB,CB,46), V(67,BE,BE,D9), V(72,39,39,4B), \
735 V(94,4A,4A,DE), V(98,4C,4C,D4), V(B0,58,58,E8), V(85,CF,CF,4A), \
736 V(BB,D0,D0,6B), V(C5,EF,EF,2A), V(4F,AA,AA,E5), V(ED,FB,FB,16), \
737 V(86,43,43,C5), V(9A,4D,4D,D7), V(66,33,33,55), V(11,85,85,94), \
738 V(8A,45,45,CF), V(E9,F9,F9,10), V(04,02,02,06), V(FE,7F,7F,81), \
739 V(A0,50,50,F0), V(78,3C,3C,44), V(25,9F,9F,BA), V(4B,A8,A8,E3), \
740 V(A2,51,51,F3), V(5D,A3,A3,FE), V(80,40,40,C0), V(05,8F,8F,8A), \
741 V(3F,92,92,AD), V(21,9D,9D,BC), V(70,38,38,48), V(F1,F5,F5,04), \
742 V(63,BC,BC,DF), V(77,B6,B6,C1), V(AF,DA,DA,75), V(42,21,21,63), \
743 V(20,10,10,30), V(E5,FF,FF,1A), V(FD,F3,F3,0E), V(BF,D2,D2,6D), \
744 V(81,CD,CD,4C), V(18,0C,0C,14), V(26,13,13,35), V(C3,EC,EC,2F), \
745 V(BE,5F,5F,E1), V(35,97,97,A2), V(88,44,44,CC), V(2E,17,17,39), \
746 V(93,C4,C4,57), V(55,A7,A7,F2), V(FC,7E,7E,82), V(7A,3D,3D,47), \
747 V(C8,64,64,AC), V(BA,5D,5D,E7), V(32,19,19,2B), V(E6,73,73,95), \
748 V(C0,60,60,A0), V(19,81,81,98), V(9E,4F,4F,D1), V(A3,DC,DC,7F), \
749 V(44,22,22,66), V(54,2A,2A,7E), V(3B,90,90,AB), V(0B,88,88,83), \
750 V(8C,46,46,CA), V(C7,EE,EE,29), V(6B,B8,B8,D3), V(28,14,14,3C), \
751 V(A7,DE,DE,79), V(BC,5E,5E,E2), V(16,0B,0B,1D), V(AD,DB,DB,76), \
752 V(DB,E0,E0,3B), V(64,32,32,56), V(74,3A,3A,4E), V(14,0A,0A,1E), \
753 V(92,49,49,DB), V(0C,06,06,0A), V(48,24,24,6C), V(B8,5C,5C,E4), \
754 V(9F,C2,C2,5D), V(BD,D3,D3,6E), V(43,AC,AC,EF), V(C4,62,62,A6), \
755 V(39,91,91,A8), V(31,95,95,A4), V(D3,E4,E4,37), V(F2,79,79,8B), \
756 V(D5,E7,E7,32), V(8B,C8,C8,43), V(6E,37,37,59), V(DA,6D,6D,B7), \
757 V(01,8D,8D,8C), V(B1,D5,D5,64), V(9C,4E,4E,D2), V(49,A9,A9,E0), \
758 V(D8,6C,6C,B4), V(AC,56,56,FA), V(F3,F4,F4,07), V(CF,EA,EA,25), \
759 V(CA,65,65,AF), V(F4,7A,7A,8E), V(47,AE,AE,E9), V(10,08,08,18), \
760 V(6F,BA,BA,D5), V(F0,78,78,88), V(4A,25,25,6F), V(5C,2E,2E,72), \
761 V(38,1C,1C,24), V(57,A6,A6,F1), V(73,B4,B4,C7), V(97,C6,C6,51), \
762 V(CB,E8,E8,23), V(A1,DD,DD,7C), V(E8,74,74,9C), V(3E,1F,1F,21), \
763 V(96,4B,4B,DD), V(61,BD,BD,DC), V(0D,8B,8B,86), V(0F,8A,8A,85), \
764 V(E0,70,70,90), V(7C,3E,3E,42), V(71,B5,B5,C4), V(CC,66,66,AA), \
765 V(90,48,48,D8), V(06,03,03,05), V(F7,F6,F6,01), V(1C,0E,0E,12), \
766 V(C2,61,61,A3), V(6A,35,35,5F), V(AE,57,57,F9), V(69,B9,B9,D0), \
767 V(17,86,86,91), V(99,C1,C1,58), V(3A,1D,1D,27), V(27,9E,9E,B9), \
768 V(D9,E1,E1,38), V(EB,F8,F8,13), V(2B,98,98,B3), V(22,11,11,33), \
769 V(D2,69,69,BB), V(A9,D9,D9,70), V(07,8E,8E,89), V(33,94,94,A7), \
770 V(2D,9B,9B,B6), V(3C,1E,1E,22), V(15,87,87,92), V(C9,E9,E9,20), \
771 V(87,CE,CE,49), V(AA,55,55,FF), V(50,28,28,78), V(A5,DF,DF,7A), \
772 V(03,8C,8C,8F), V(59,A1,A1,F8), V(09,89,89,80), V(1A,0D,0D,17), \
773 V(65,BF,BF,DA), V(D7,E6,E6,31), V(84,42,42,C6), V(D0,68,68,B8), \
774 V(82,41,41,C3), V(29,99,99,B0), V(5A,2D,2D,77), V(1E,0F,0F,11), \
775 V(7B,B0,B0,CB), V(A8,54,54,FC), V(6D,BB,BB,D6), V(2C,16,16,3A)
777 #define V(a,b,c,d) 0x##a##b##c##d
778 static uint32 FT0[256] = { FT };
781 #define V(a,b,c,d) 0x##d##a##b##c
782 static uint32 FT1[256] = { FT };
785 #define V(a,b,c,d) 0x##c##d##a##b
786 static uint32 FT2[256] = { FT };
789 #define V(a,b,c,d) 0x##b##c##d##a
790 static uint32 FT3[256] = { FT };
797 static uint32 RSb[256] =
799 0x52, 0x09, 0x6A, 0xD5, 0x30, 0x36, 0xA5, 0x38,
800 0xBF, 0x40, 0xA3, 0x9E, 0x81, 0xF3, 0xD7, 0xFB,
801 0x7C, 0xE3, 0x39, 0x82, 0x9B, 0x2F, 0xFF, 0x87,
802 0x34, 0x8E, 0x43, 0x44, 0xC4, 0xDE, 0xE9, 0xCB,
803 0x54, 0x7B, 0x94, 0x32, 0xA6, 0xC2, 0x23, 0x3D,
804 0xEE, 0x4C, 0x95, 0x0B, 0x42, 0xFA, 0xC3, 0x4E,
805 0x08, 0x2E, 0xA1, 0x66, 0x28, 0xD9, 0x24, 0xB2,
806 0x76, 0x5B, 0xA2, 0x49, 0x6D, 0x8B, 0xD1, 0x25,
807 0x72, 0xF8, 0xF6, 0x64, 0x86, 0x68, 0x98, 0x16,
808 0xD4, 0xA4, 0x5C, 0xCC, 0x5D, 0x65, 0xB6, 0x92,
809 0x6C, 0x70, 0x48, 0x50, 0xFD, 0xED, 0xB9, 0xDA,
810 0x5E, 0x15, 0x46, 0x57, 0xA7, 0x8D, 0x9D, 0x84,
811 0x90, 0xD8, 0xAB, 0x00, 0x8C, 0xBC, 0xD3, 0x0A,
812 0xF7, 0xE4, 0x58, 0x05, 0xB8, 0xB3, 0x45, 0x06,
813 0xD0, 0x2C, 0x1E, 0x8F, 0xCA, 0x3F, 0x0F, 0x02,
814 0xC1, 0xAF, 0xBD, 0x03, 0x01, 0x13, 0x8A, 0x6B,
815 0x3A, 0x91, 0x11, 0x41, 0x4F, 0x67, 0xDC, 0xEA,
816 0x97, 0xF2, 0xCF, 0xCE, 0xF0, 0xB4, 0xE6, 0x73,
817 0x96, 0xAC, 0x74, 0x22, 0xE7, 0xAD, 0x35, 0x85,
818 0xE2, 0xF9, 0x37, 0xE8, 0x1C, 0x75, 0xDF, 0x6E,
819 0x47, 0xF1, 0x1A, 0x71, 0x1D, 0x29, 0xC5, 0x89,
820 0x6F, 0xB7, 0x62, 0x0E, 0xAA, 0x18, 0xBE, 0x1B,
821 0xFC, 0x56, 0x3E, 0x4B, 0xC6, 0xD2, 0x79, 0x20,
822 0x9A, 0xDB, 0xC0, 0xFE, 0x78, 0xCD, 0x5A, 0xF4,
823 0x1F, 0xDD, 0xA8, 0x33, 0x88, 0x07, 0xC7, 0x31,
824 0xB1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xEC, 0x5F,
825 0x60, 0x51, 0x7F, 0xA9, 0x19, 0xB5, 0x4A, 0x0D,
826 0x2D, 0xE5, 0x7A, 0x9F, 0x93, 0xC9, 0x9C, 0xEF,
827 0xA0, 0xE0, 0x3B, 0x4D, 0xAE, 0x2A, 0xF5, 0xB0,
828 0xC8, 0xEB, 0xBB, 0x3C, 0x83, 0x53, 0x99, 0x61,
829 0x17, 0x2B, 0x04, 0x7E, 0xBA, 0x77, 0xD6, 0x26,
830 0xE1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0C, 0x7D
837 V(51,F4,A7,50), V(7E,41,65,53), V(1A,17,A4,C3), V(3A,27,5E,96), \
838 V(3B,AB,6B,CB), V(1F,9D,45,F1), V(AC,FA,58,AB), V(4B,E3,03,93), \
839 V(20,30,FA,55), V(AD,76,6D,F6), V(88,CC,76,91), V(F5,02,4C,25), \
840 V(4F,E5,D7,FC), V(C5,2A,CB,D7), V(26,35,44,80), V(B5,62,A3,8F), \
841 V(DE,B1,5A,49), V(25,BA,1B,67), V(45,EA,0E,98), V(5D,FE,C0,E1), \
842 V(C3,2F,75,02), V(81,4C,F0,12), V(8D,46,97,A3), V(6B,D3,F9,C6), \
843 V(03,8F,5F,E7), V(15,92,9C,95), V(BF,6D,7A,EB), V(95,52,59,DA), \
844 V(D4,BE,83,2D), V(58,74,21,D3), V(49,E0,69,29), V(8E,C9,C8,44), \
845 V(75,C2,89,6A), V(F4,8E,79,78), V(99,58,3E,6B), V(27,B9,71,DD), \
846 V(BE,E1,4F,B6), V(F0,88,AD,17), V(C9,20,AC,66), V(7D,CE,3A,B4), \
847 V(63,DF,4A,18), V(E5,1A,31,82), V(97,51,33,60), V(62,53,7F,45), \
848 V(B1,64,77,E0), V(BB,6B,AE,84), V(FE,81,A0,1C), V(F9,08,2B,94), \
849 V(70,48,68,58), V(8F,45,FD,19), V(94,DE,6C,87), V(52,7B,F8,B7), \
850 V(AB,73,D3,23), V(72,4B,02,E2), V(E3,1F,8F,57), V(66,55,AB,2A), \
851 V(B2,EB,28,07), V(2F,B5,C2,03), V(86,C5,7B,9A), V(D3,37,08,A5), \
852 V(30,28,87,F2), V(23,BF,A5,B2), V(02,03,6A,BA), V(ED,16,82,5C), \
853 V(8A,CF,1C,2B), V(A7,79,B4,92), V(F3,07,F2,F0), V(4E,69,E2,A1), \
854 V(65,DA,F4,CD), V(06,05,BE,D5), V(D1,34,62,1F), V(C4,A6,FE,8A), \
855 V(34,2E,53,9D), V(A2,F3,55,A0), V(05,8A,E1,32), V(A4,F6,EB,75), \
856 V(0B,83,EC,39), V(40,60,EF,AA), V(5E,71,9F,06), V(BD,6E,10,51), \
857 V(3E,21,8A,F9), V(96,DD,06,3D), V(DD,3E,05,AE), V(4D,E6,BD,46), \
858 V(91,54,8D,B5), V(71,C4,5D,05), V(04,06,D4,6F), V(60,50,15,FF), \
859 V(19,98,FB,24), V(D6,BD,E9,97), V(89,40,43,CC), V(67,D9,9E,77), \
860 V(B0,E8,42,BD), V(07,89,8B,88), V(E7,19,5B,38), V(79,C8,EE,DB), \
861 V(A1,7C,0A,47), V(7C,42,0F,E9), V(F8,84,1E,C9), V(00,00,00,00), \
862 V(09,80,86,83), V(32,2B,ED,48), V(1E,11,70,AC), V(6C,5A,72,4E), \
863 V(FD,0E,FF,FB), V(0F,85,38,56), V(3D,AE,D5,1E), V(36,2D,39,27), \
864 V(0A,0F,D9,64), V(68,5C,A6,21), V(9B,5B,54,D1), V(24,36,2E,3A), \
865 V(0C,0A,67,B1), V(93,57,E7,0F), V(B4,EE,96,D2), V(1B,9B,91,9E), \
866 V(80,C0,C5,4F), V(61,DC,20,A2), V(5A,77,4B,69), V(1C,12,1A,16), \
867 V(E2,93,BA,0A), V(C0,A0,2A,E5), V(3C,22,E0,43), V(12,1B,17,1D), \
868 V(0E,09,0D,0B), V(F2,8B,C7,AD), V(2D,B6,A8,B9), V(14,1E,A9,C8), \
869 V(57,F1,19,85), V(AF,75,07,4C), V(EE,99,DD,BB), V(A3,7F,60,FD), \
870 V(F7,01,26,9F), V(5C,72,F5,BC), V(44,66,3B,C5), V(5B,FB,7E,34), \
871 V(8B,43,29,76), V(CB,23,C6,DC), V(B6,ED,FC,68), V(B8,E4,F1,63), \
872 V(D7,31,DC,CA), V(42,63,85,10), V(13,97,22,40), V(84,C6,11,20), \
873 V(85,4A,24,7D), V(D2,BB,3D,F8), V(AE,F9,32,11), V(C7,29,A1,6D), \
874 V(1D,9E,2F,4B), V(DC,B2,30,F3), V(0D,86,52,EC), V(77,C1,E3,D0), \
875 V(2B,B3,16,6C), V(A9,70,B9,99), V(11,94,48,FA), V(47,E9,64,22), \
876 V(A8,FC,8C,C4), V(A0,F0,3F,1A), V(56,7D,2C,D8), V(22,33,90,EF), \
877 V(87,49,4E,C7), V(D9,38,D1,C1), V(8C,CA,A2,FE), V(98,D4,0B,36), \
878 V(A6,F5,81,CF), V(A5,7A,DE,28), V(DA,B7,8E,26), V(3F,AD,BF,A4), \
879 V(2C,3A,9D,E4), V(50,78,92,0D), V(6A,5F,CC,9B), V(54,7E,46,62), \
880 V(F6,8D,13,C2), V(90,D8,B8,E8), V(2E,39,F7,5E), V(82,C3,AF,F5), \
881 V(9F,5D,80,BE), V(69,D0,93,7C), V(6F,D5,2D,A9), V(CF,25,12,B3), \
882 V(C8,AC,99,3B), V(10,18,7D,A7), V(E8,9C,63,6E), V(DB,3B,BB,7B), \
883 V(CD,26,78,09), V(6E,59,18,F4), V(EC,9A,B7,01), V(83,4F,9A,A8), \
884 V(E6,95,6E,65), V(AA,FF,E6,7E), V(21,BC,CF,08), V(EF,15,E8,E6), \
885 V(BA,E7,9B,D9), V(4A,6F,36,CE), V(EA,9F,09,D4), V(29,B0,7C,D6), \
886 V(31,A4,B2,AF), V(2A,3F,23,31), V(C6,A5,94,30), V(35,A2,66,C0), \
887 V(74,4E,BC,37), V(FC,82,CA,A6), V(E0,90,D0,B0), V(33,A7,D8,15), \
888 V(F1,04,98,4A), V(41,EC,DA,F7), V(7F,CD,50,0E), V(17,91,F6,2F), \
889 V(76,4D,D6,8D), V(43,EF,B0,4D), V(CC,AA,4D,54), V(E4,96,04,DF), \
890 V(9E,D1,B5,E3), V(4C,6A,88,1B), V(C1,2C,1F,B8), V(46,65,51,7F), \
891 V(9D,5E,EA,04), V(01,8C,35,5D), V(FA,87,74,73), V(FB,0B,41,2E), \
892 V(B3,67,1D,5A), V(92,DB,D2,52), V(E9,10,56,33), V(6D,D6,47,13), \
893 V(9A,D7,61,8C), V(37,A1,0C,7A), V(59,F8,14,8E), V(EB,13,3C,89), \
894 V(CE,A9,27,EE), V(B7,61,C9,35), V(E1,1C,E5,ED), V(7A,47,B1,3C), \
895 V(9C,D2,DF,59), V(55,F2,73,3F), V(18,14,CE,79), V(73,C7,37,BF), \
896 V(53,F7,CD,EA), V(5F,FD,AA,5B), V(DF,3D,6F,14), V(78,44,DB,86), \
897 V(CA,AF,F3,81), V(B9,68,C4,3E), V(38,24,34,2C), V(C2,A3,40,5F), \
898 V(16,1D,C3,72), V(BC,E2,25,0C), V(28,3C,49,8B), V(FF,0D,95,41), \
899 V(39,A8,01,71), V(08,0C,B3,DE), V(D8,B4,E4,9C), V(64,56,C1,90), \
900 V(7B,CB,84,61), V(D5,32,B6,70), V(48,6C,5C,74), V(D0,B8,57,42)
902 #define V(a,b,c,d) 0x##a##b##c##d
903 static uint32 RT0[256] = { RT };
906 #define V(a,b,c,d) 0x##d##a##b##c
907 static uint32 RT1[256] = { RT };
910 #define V(a,b,c,d) 0x##c##d##a##b
911 static uint32 RT2[256] = { RT };
914 #define V(a,b,c,d) 0x##b##c##d##a
915 static uint32 RT3[256] = { RT };
920 /* round constants */
922 static uint32 RCON[10] =
924 0x01000000, 0x02000000, 0x04000000, 0x08000000,
925 0x10000000, 0x20000000, 0x40000000, 0x80000000,
926 0x1B000000, 0x36000000
929 /* key schedule tables */
931 static int KT_init = 1;
933 static uint32 KT0[256];
934 static uint32 KT1[256];
935 static uint32 KT2[256];
936 static uint32 KT3[256];
/* platform-independent 32-bit integer manipulation macros (big-endian
 * byte order, as AES is specified).
 * NOTE(review): the brace lines of both macro bodies were lost in the
 * garbled listing and restored here. */

#define GET_UINT32(n,b,i)                       \
{                                               \
	(n) = ( (uint32) (b)[(i)    ] << 24 )       \
	    | ( (uint32) (b)[(i) + 1] << 16 )       \
	    | ( (uint32) (b)[(i) + 2] <<  8 )       \
	    | ( (uint32) (b)[(i) + 3]       );      \
}

#define PUT_UINT32(n,b,i)                       \
{                                               \
	(b)[(i)    ] = (uint8) ( (n) >> 24 );       \
	(b)[(i) + 1] = (uint8) ( (n) >> 16 );       \
	(b)[(i) + 2] = (uint8) ( (n) >>  8 );       \
	(b)[(i) + 3] = (uint8) ( (n)       );       \
}
957 int rt_aes_set_key( aes_context *ctx, uint8 *key, int nbits )
964 case 128: ctx->nr = 10; break;
965 case 192: ctx->nr = 12; break;
966 case 256: ctx->nr = 14; break;
967 default : return( 1 );
970 RK = (uint32 *) ctx->erk;
972 for( i = 0; i < (nbits >> 5); i++ )
974 GET_UINT32( RK[i], key, i * 4 );
977 /* setup encryption round keys */
983 for( i = 0; i < 10; i++, RK += 4 )
985 RK[4] = RK[0] ^ RCON[i] ^
986 ( FSb[ (uint8) ( RK[3] >> 16 ) ] << 24 ) ^
987 ( FSb[ (uint8) ( RK[3] >> 8 ) ] << 16 ) ^
988 ( FSb[ (uint8) ( RK[3] ) ] << 8 ) ^
989 ( FSb[ (uint8) ( RK[3] >> 24 ) ] );
991 RK[5] = RK[1] ^ RK[4];
992 RK[6] = RK[2] ^ RK[5];
993 RK[7] = RK[3] ^ RK[6];
999 for( i = 0; i < 8; i++, RK += 6 )
1001 RK[6] = RK[0] ^ RCON[i] ^
1002 ( FSb[ (uint8) ( RK[5] >> 16 ) ] << 24 ) ^
1003 ( FSb[ (uint8) ( RK[5] >> 8 ) ] << 16 ) ^
1004 ( FSb[ (uint8) ( RK[5] ) ] << 8 ) ^
1005 ( FSb[ (uint8) ( RK[5] >> 24 ) ] );
1007 RK[7] = RK[1] ^ RK[6];
1008 RK[8] = RK[2] ^ RK[7];
1009 RK[9] = RK[3] ^ RK[8];
1010 RK[10] = RK[4] ^ RK[9];
1011 RK[11] = RK[5] ^ RK[10];
1017 for( i = 0; i < 7; i++, RK += 8 )
1019 RK[8] = RK[0] ^ RCON[i] ^
1020 ( FSb[ (uint8) ( RK[7] >> 16 ) ] << 24 ) ^
1021 ( FSb[ (uint8) ( RK[7] >> 8 ) ] << 16 ) ^
1022 ( FSb[ (uint8) ( RK[7] ) ] << 8 ) ^
1023 ( FSb[ (uint8) ( RK[7] >> 24 ) ] );
1025 RK[9] = RK[1] ^ RK[8];
1026 RK[10] = RK[2] ^ RK[9];
1027 RK[11] = RK[3] ^ RK[10];
1030 ( FSb[ (uint8) ( RK[11] >> 24 ) ] << 24 ) ^
1031 ( FSb[ (uint8) ( RK[11] >> 16 ) ] << 16 ) ^
1032 ( FSb[ (uint8) ( RK[11] >> 8 ) ] << 8 ) ^
1033 ( FSb[ (uint8) ( RK[11] ) ] );
1035 RK[13] = RK[5] ^ RK[12];
1036 RK[14] = RK[6] ^ RK[13];
1037 RK[15] = RK[7] ^ RK[14];
1042 /* setup decryption round keys */
1046 for( i = 0; i < 256; i++ )
1048 KT0[i] = RT0[ FSb[i] ];
1049 KT1[i] = RT1[ FSb[i] ];
1050 KT2[i] = RT2[ FSb[i] ];
1051 KT3[i] = RT3[ FSb[i] ];
1057 SK = (uint32 *) ctx->drk;
1064 for( i = 1; i < ctx->nr; i++ )
1068 *SK++ = KT0[ (uint8) ( *RK >> 24 ) ] ^
1069 KT1[ (uint8) ( *RK >> 16 ) ] ^
1070 KT2[ (uint8) ( *RK >> 8 ) ] ^
1071 KT3[ (uint8) ( *RK ) ]; RK++;
1073 *SK++ = KT0[ (uint8) ( *RK >> 24 ) ] ^
1074 KT1[ (uint8) ( *RK >> 16 ) ] ^
1075 KT2[ (uint8) ( *RK >> 8 ) ] ^
1076 KT3[ (uint8) ( *RK ) ]; RK++;
1078 *SK++ = KT0[ (uint8) ( *RK >> 24 ) ] ^
1079 KT1[ (uint8) ( *RK >> 16 ) ] ^
1080 KT2[ (uint8) ( *RK >> 8 ) ] ^
1081 KT3[ (uint8) ( *RK ) ]; RK++;
1083 *SK++ = KT0[ (uint8) ( *RK >> 24 ) ] ^
1084 KT1[ (uint8) ( *RK >> 16 ) ] ^
1085 KT2[ (uint8) ( *RK >> 8 ) ] ^
1086 KT3[ (uint8) ( *RK ) ]; RK++;
1099 /* AES 128-bit block encryption routine */
1101 void rt_aes_encrypt(aes_context *ctx, uint8 input[16], uint8 output[16] )
1103 uint32 *RK, X0, X1, X2, X3, Y0, Y1, Y2, Y3;
1105 RK = (uint32 *) ctx->erk;
1106 GET_UINT32( X0, input, 0 ); X0 ^= RK[0];
1107 GET_UINT32( X1, input, 4 ); X1 ^= RK[1];
1108 GET_UINT32( X2, input, 8 ); X2 ^= RK[2];
1109 GET_UINT32( X3, input, 12 ); X3 ^= RK[3];
1111 #define AES_FROUND(X0,X1,X2,X3,Y0,Y1,Y2,Y3) \
1115 X0 = RK[0] ^ FT0[ (uint8) ( Y0 >> 24 ) ] ^ \
1116 FT1[ (uint8) ( Y1 >> 16 ) ] ^ \
1117 FT2[ (uint8) ( Y2 >> 8 ) ] ^ \
1118 FT3[ (uint8) ( Y3 ) ]; \
1120 X1 = RK[1] ^ FT0[ (uint8) ( Y1 >> 24 ) ] ^ \
1121 FT1[ (uint8) ( Y2 >> 16 ) ] ^ \
1122 FT2[ (uint8) ( Y3 >> 8 ) ] ^ \
1123 FT3[ (uint8) ( Y0 ) ]; \
1125 X2 = RK[2] ^ FT0[ (uint8) ( Y2 >> 24 ) ] ^ \
1126 FT1[ (uint8) ( Y3 >> 16 ) ] ^ \
1127 FT2[ (uint8) ( Y0 >> 8 ) ] ^ \
1128 FT3[ (uint8) ( Y1 ) ]; \
1130 X3 = RK[3] ^ FT0[ (uint8) ( Y3 >> 24 ) ] ^ \
1131 FT1[ (uint8) ( Y0 >> 16 ) ] ^ \
1132 FT2[ (uint8) ( Y1 >> 8 ) ] ^ \
1133 FT3[ (uint8) ( Y2 ) ]; \
1136 AES_FROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 1 */
1137 AES_FROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 2 */
1138 AES_FROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 3 */
1139 AES_FROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 4 */
1140 AES_FROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 5 */
1141 AES_FROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 6 */
1142 AES_FROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 7 */
1143 AES_FROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 8 */
1144 AES_FROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 9 */
1148 AES_FROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 10 */
1149 AES_FROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 11 */
1154 AES_FROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 12 */
1155 AES_FROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 13 */
1162 X0 = RK[0] ^ ( FSb[ (uint8) ( Y0 >> 24 ) ] << 24 ) ^
1163 ( FSb[ (uint8) ( Y1 >> 16 ) ] << 16 ) ^
1164 ( FSb[ (uint8) ( Y2 >> 8 ) ] << 8 ) ^
1165 ( FSb[ (uint8) ( Y3 ) ] );
1167 X1 = RK[1] ^ ( FSb[ (uint8) ( Y1 >> 24 ) ] << 24 ) ^
1168 ( FSb[ (uint8) ( Y2 >> 16 ) ] << 16 ) ^
1169 ( FSb[ (uint8) ( Y3 >> 8 ) ] << 8 ) ^
1170 ( FSb[ (uint8) ( Y0 ) ] );
1172 X2 = RK[2] ^ ( FSb[ (uint8) ( Y2 >> 24 ) ] << 24 ) ^
1173 ( FSb[ (uint8) ( Y3 >> 16 ) ] << 16 ) ^
1174 ( FSb[ (uint8) ( Y0 >> 8 ) ] << 8 ) ^
1175 ( FSb[ (uint8) ( Y1 ) ] );
1177 X3 = RK[3] ^ ( FSb[ (uint8) ( Y3 >> 24 ) ] << 24 ) ^
1178 ( FSb[ (uint8) ( Y0 >> 16 ) ] << 16 ) ^
1179 ( FSb[ (uint8) ( Y1 >> 8 ) ] << 8 ) ^
1180 ( FSb[ (uint8) ( Y2 ) ] );
1182 PUT_UINT32( X0, output, 0 );
1183 PUT_UINT32( X1, output, 4 );
1184 PUT_UINT32( X2, output, 8 );
1185 PUT_UINT32( X3, output, 12 );
1188 /* AES 128-bit block decryption routine */
1190 void rt_aes_decrypt( aes_context *ctx, uint8 input[16], uint8 output[16] )
1192 uint32 *RK, X0, X1, X2, X3, Y0, Y1, Y2, Y3;
1194 RK = (uint32 *) ctx->drk;
1196 GET_UINT32( X0, input, 0 ); X0 ^= RK[0];
1197 GET_UINT32( X1, input, 4 ); X1 ^= RK[1];
1198 GET_UINT32( X2, input, 8 ); X2 ^= RK[2];
1199 GET_UINT32( X3, input, 12 ); X3 ^= RK[3];
1201 #define AES_RROUND(X0,X1,X2,X3,Y0,Y1,Y2,Y3) \
1205 X0 = RK[0] ^ RT0[ (uint8) ( Y0 >> 24 ) ] ^ \
1206 RT1[ (uint8) ( Y3 >> 16 ) ] ^ \
1207 RT2[ (uint8) ( Y2 >> 8 ) ] ^ \
1208 RT3[ (uint8) ( Y1 ) ]; \
1210 X1 = RK[1] ^ RT0[ (uint8) ( Y1 >> 24 ) ] ^ \
1211 RT1[ (uint8) ( Y0 >> 16 ) ] ^ \
1212 RT2[ (uint8) ( Y3 >> 8 ) ] ^ \
1213 RT3[ (uint8) ( Y2 ) ]; \
1215 X2 = RK[2] ^ RT0[ (uint8) ( Y2 >> 24 ) ] ^ \
1216 RT1[ (uint8) ( Y1 >> 16 ) ] ^ \
1217 RT2[ (uint8) ( Y0 >> 8 ) ] ^ \
1218 RT3[ (uint8) ( Y3 ) ]; \
1220 X3 = RK[3] ^ RT0[ (uint8) ( Y3 >> 24 ) ] ^ \
1221 RT1[ (uint8) ( Y2 >> 16 ) ] ^ \
1222 RT2[ (uint8) ( Y1 >> 8 ) ] ^ \
1223 RT3[ (uint8) ( Y0 ) ]; \
1226 AES_RROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 1 */
1227 AES_RROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 2 */
1228 AES_RROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 3 */
1229 AES_RROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 4 */
1230 AES_RROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 5 */
1231 AES_RROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 6 */
1232 AES_RROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 7 */
1233 AES_RROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 8 */
1234 AES_RROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 9 */
1238 AES_RROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 10 */
1239 AES_RROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 11 */
1244 AES_RROUND( X0, X1, X2, X3, Y0, Y1, Y2, Y3 ); /* round 12 */
1245 AES_RROUND( Y0, Y1, Y2, Y3, X0, X1, X2, X3 ); /* round 13 */
1252 X0 = RK[0] ^ ( RSb[ (uint8) ( Y0 >> 24 ) ] << 24 ) ^
1253 ( RSb[ (uint8) ( Y3 >> 16 ) ] << 16 ) ^
1254 ( RSb[ (uint8) ( Y2 >> 8 ) ] << 8 ) ^
1255 ( RSb[ (uint8) ( Y1 ) ] );
1257 X1 = RK[1] ^ ( RSb[ (uint8) ( Y1 >> 24 ) ] << 24 ) ^
1258 ( RSb[ (uint8) ( Y0 >> 16 ) ] << 16 ) ^
1259 ( RSb[ (uint8) ( Y3 >> 8 ) ] << 8 ) ^
1260 ( RSb[ (uint8) ( Y2 ) ] );
1262 X2 = RK[2] ^ ( RSb[ (uint8) ( Y2 >> 24 ) ] << 24 ) ^
1263 ( RSb[ (uint8) ( Y1 >> 16 ) ] << 16 ) ^
1264 ( RSb[ (uint8) ( Y0 >> 8 ) ] << 8 ) ^
1265 ( RSb[ (uint8) ( Y3 ) ] );
1267 X3 = RK[3] ^ ( RSb[ (uint8) ( Y3 >> 24 ) ] << 24 ) ^
1268 ( RSb[ (uint8) ( Y2 >> 16 ) ] << 16 ) ^
1269 ( RSb[ (uint8) ( Y1 >> 8 ) ] << 8 ) ^
1270 ( RSb[ (uint8) ( Y0 ) ] );
1272 PUT_UINT32( X0, output, 0 );
1273 PUT_UINT32( X1, output, 4 );
1274 PUT_UINT32( X2, output, 8 );
1275 PUT_UINT32( X3, output, 12 );
1279 ==========================================================================
1281 ENCRYPT AES GTK before sending in EAPOL frame.
1282 AES GTK length = 128 bit, so fix blocks for aes-key-wrap as 2 in this function.
1283 This function references to RFC 3394 for aes key wrap algorithm.
1285 ==========================================================================
1287 VOID AES_GTK_KEY_WRAP(
1289 IN UCHAR *plaintext,
1291 OUT UCHAR *ciphertext)
1293 UCHAR A[8], BIN[16], BOUT[16];
1295 INT num_blocks = p_len/8; // unit:64bits
1300 rt_aes_set_key(&aesctx, key, 128);
1303 for (i = 0; i < 8; i++)
1307 for (i = 0; i < num_blocks; i++)
1309 for (j = 0 ; j < 8; j++)
1310 R[8 * (i + 1) + j] = plaintext[8 * i + j];
1314 for (j = 0; j < 6; j++)
1316 for(i = 1; i <= num_blocks; i++)
1319 NdisMoveMemory(BIN, A, 8);
1320 NdisMoveMemory(&BIN[8], &R[8 * i], 8);
1321 rt_aes_encrypt(&aesctx, BIN, BOUT);
1323 NdisMoveMemory(A, &BOUT[0], 8);
1324 xor = num_blocks * j + i;
1325 A[7] = BOUT[7] ^ xor;
1326 NdisMoveMemory(&R[8 * i], &BOUT[8], 8);
1330 // Output ciphertext
1331 NdisMoveMemory(ciphertext, A, 8);
1333 for (i = 1; i <= num_blocks; i++)
1335 for (j = 0 ; j < 8; j++)
1336 ciphertext[8 * i + j] = R[8 * i + j];
1341 ========================================================================
1343 Routine Description:
1344 Misc function to decrypt AES body
1351 This function references to RFC 3394 for aes key unwrap algorithm.
1353 ========================================================================
1355 VOID AES_GTK_KEY_UNWRAP(
1357 OUT UCHAR *plaintext,
1359 IN UCHAR *ciphertext)
1362 UCHAR A[8], BIN[16], BOUT[16];
1367 INT num_blocks = c_len/8; // unit:64bits
1370 os_alloc_mem(NULL, (PUCHAR *)&R, 512);
1374 DBGPRINT(RT_DEBUG_ERROR, ("!!!AES_GTK_KEY_UNWRAP: no memory!!!\n"));
1379 NdisMoveMemory(A, ciphertext, 8);
1381 for(i = 0; i < (c_len-8); i++)
1383 R[ i] = ciphertext[i + 8];
1386 rt_aes_set_key(&aesctx, key, 128);
1388 for(j = 5; j >= 0; j--)
1390 for(i = (num_blocks-1); i > 0; i--)
1392 xor = (num_blocks -1 )* j + i;
1393 NdisMoveMemory(BIN, A, 8);
1394 BIN[7] = A[7] ^ xor;
1395 NdisMoveMemory(&BIN[8], &R[(i-1)*8], 8);
1396 rt_aes_decrypt(&aesctx, BIN, BOUT);
1397 NdisMoveMemory(A, &BOUT[0], 8);
1398 NdisMoveMemory(&R[(i-1)*8], &BOUT[8], 8);
1403 for(i = 0; i < c_len; i++)
1405 plaintext[i] = R[i];
1409 os_free_mem(NULL, R);
1413 /* ======= The related function of AES-128-CMAC ======= */
1414 VOID leftshift_onebit(
1421 for (i=15; i>=0; i--)
1423 output[i] = input[i] << 1;
1424 output[i] |= overflow;
1425 overflow = (input[i] & 0x80) ? 1 : 0;
1436 for (j=0; j<16; j++)
1450 * The Subkey Generation Algorithm
1452 VOID generate_subkey(
1458 UCHAR aes_128_key[16];
1459 UCHAR const_Zero[16];
1461 UCHAR const_Rb[16] = {0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
1462 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x87};
1464 // initial the key material
1465 memset(const_Zero, 0, 16);
1466 memset(aes_128_key, 0, 16);
1468 // AES-128 with key is applied to an all-zero input block
1469 rt_aes_set_key(&aesctx, key, 128);
1470 rt_aes_encrypt(&aesctx, const_Zero, aes_128_key);
1472 // derive K1(128-bit first subkey) and K2(128-bit second subkey), refer to rfc-4493 ch 2.3
1473 if ((aes_128_key[0] & 0x80) == 0)
1475 leftshift_onebit(aes_128_key, K1);
1479 leftshift_onebit(aes_128_key, tmp);
1480 xor_128(tmp, const_Rb, K1);
1483 if ((K1[0] & 0x80) == 0)
1485 leftshift_onebit(K1, K2);
1489 leftshift_onebit(K1, tmp);
1490 xor_128(tmp, const_Rb, K2);
1496 * AES-CMAC Algorithm. (refer to rfc-4493 and SP800-38B)
1498 * Input : key (128-bit key)
1499 * input (message to be authenticated)
1500 * len (length of the message in octets)
1502 * output: mac (message authentication code)
1510 UCHAR X[16], Y[16], M_last[16], padded[16];
1511 UCHAR K1[16], K2[16];
1515 generate_subkey(key, K1, K2);
1517 n = (len+15) / 16; // n is number of rounds
1527 flag = 1; // indicate that last block is a complete block
1529 flag = 0; // indicate that last block is not a complete block
1534 xor_128(&input[16*(n-1)], K1, M_last);
1538 do_padding(&input[16*(n-1)], padded, len%16);
1539 xor_128(padded, K2, M_last);
1543 for (i=0; i<n-1; i++)
1545 xor_128(X, &input[16*i], Y);
1546 rt_aes_set_key(&aesctx, key, 128);
1547 rt_aes_encrypt(&aesctx, Y, X);
1550 xor_128(X, M_last, Y);
1551 rt_aes_set_key(&aesctx, key, 128);
1552 rt_aes_encrypt(&aesctx, Y, X);
1554 for (i=0; i<16; i++)
/* ======= End of the related functions of AES-128-CMAC ======= */