/*
 * Copyright (c) 2009 David McCullough <david.mccullough@securecomputing.com>
 *
 * Copyright (c) 2003-2007 Cavium Networks (support@cavium.com). All rights
 * reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are met:
 * 1. Redistributions of source code must retain the above copyright notice,
 *    this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright notice,
 *    this list of conditions and the following disclaimer in the documentation
 *    and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    This product includes software developed by Cavium Networks
 * 4. Cavium Networks' name may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * This Software, including technical data, may be subject to U.S. export
 * control laws, including the U.S. Export Administration Act and its
 * associated regulations, and may be subject to export or import regulations
 * in other countries. You warrant that You will comply strictly in all
 * respects with all such regulations and acknowledge that you have the
 * responsibility to obtain licenses to export, re-export or import the
 * Software.
 *
 * TO THE MAXIMUM EXTENT PERMITTED BY LAW, THE SOFTWARE IS PROVIDED "AS IS" AND
 * WITH ALL FAULTS AND CAVIUM MAKES NO PROMISES, REPRESENTATIONS OR WARRANTIES,
 * EITHER EXPRESS, IMPLIED, STATUTORY, OR OTHERWISE, WITH RESPECT TO THE
 * SOFTWARE, INCLUDING ITS CONDITION, ITS CONFORMITY TO ANY REPRESENTATION OR
 * DESCRIPTION, OR THE EXISTENCE OF ANY LATENT OR PATENT DEFECTS, AND CAVIUM
 * SPECIFICALLY DISCLAIMS ALL IMPLIED (IF ANY) WARRANTIES OF TITLE,
 * MERCHANTABILITY, NONINFRINGEMENT, FITNESS FOR A PARTICULAR PURPOSE, LACK OF
 * VIRUSES, ACCURACY OR COMPLETENESS, QUIET ENJOYMENT, QUIET POSSESSION OR
 * CORRESPONDENCE TO DESCRIPTION.  THE ENTIRE RISK ARISING OUT OF USE OR
 * PERFORMANCE OF THE SOFTWARE LIES WITH YOU.
 */

/****************************************************************************/

#include <linux/scatterlist.h>
#include <asm/octeon/octeon.h>
#include "octeon-asm.h"

/****************************************************************************/

extern unsigned long octeon_crypto_enable(struct octeon_cop2_state *);
extern void octeon_crypto_disable(struct octeon_cop2_state *, unsigned long);

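/*
 * Scatterlist walking helpers (intent reconstructed from their use below):
 * SG_INIT points the cursor 'p' at the start of the first scatterlist
 * segment and records how many bytes remain in it; SG_CONSUME advances the
 * cursor by one element of *p and, when the current segment is exhausted,
 * moves on to the next one.  struct octo_sess and dprintk() are assumed to
 * be provided by the surrounding OCF driver headers.
 */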
#define SG_INIT(s, p, i, l) \
	{ \
		(i) = 0; \
		(l) = (s)[0].length; \
		(p) = (typeof(p)) sg_virt((s)); \
		CVMX_PREFETCH0((p)); \
	}

#define SG_CONSUME(s, p, i, l) \
	{ \
		(p)++; \
		(l) -= sizeof(*(p)); \
		if ((l) < 0) { \
			dprintk("%s, %d: l = %d\n", __FILE__, __LINE__, l); \
		} else if ((l) == 0) { \
			(s)++; \
			(l) = (s)[0].length; \
			(p) = (typeof(p)) sg_virt(s); \
			CVMX_PREFETCH0((p)); \
		} \
	}

#define ESP_HEADER_LENGTH	8
#define DES_CBC_IV_LENGTH	8
#define AES_CBC_IV_LENGTH	16
#define ESP_HMAC_LEN		12

/****************************************************************************/
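
/*
 * The Octeon hash engine consumes one 64-byte block at a time through eight
 * 64-bit words: the first seven are staged with CVMX_MT_HSH_DAT(word, 0..6),
 * and writing the eighth via CVMX_MT_HSH_STARTSHA()/STARTMD5() triggers the
 * transform.  The CVM_LOAD*_UNIT macros below track the fill position in
 * 'next' so callers can stream one (or two) 64-bit words at a time without
 * worrying about block boundaries.
 */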
#define CVM_LOAD_SHA_UNIT(dat, next)  { \
	if (next == 0) { \
		next = 1; \
		CVMX_MT_HSH_DAT (dat, 0); \
	} else if (next == 1) { \
		next = 2; \
		CVMX_MT_HSH_DAT (dat, 1); \
	} else if (next == 2) { \
		next = 3; \
		CVMX_MT_HSH_DAT (dat, 2); \
	} else if (next == 3) { \
		next = 4; \
		CVMX_MT_HSH_DAT (dat, 3); \
	} else if (next == 4) { \
		next = 5; \
		CVMX_MT_HSH_DAT (dat, 4); \
	} else if (next == 5) { \
		next = 6; \
		CVMX_MT_HSH_DAT (dat, 5); \
	} else if (next == 6) { \
		next = 7; \
		CVMX_MT_HSH_DAT (dat, 6); \
	} else { \
		CVMX_MT_HSH_STARTSHA (dat); \
		next = 0; \
	} \
}

#define CVM_LOAD2_SHA_UNIT(dat1, dat2, next)  { \
	if (next == 0) { \
		CVMX_MT_HSH_DAT (dat1, 0); \
		CVMX_MT_HSH_DAT (dat2, 1); \
		next = 2; \
	} else if (next == 1) { \
		CVMX_MT_HSH_DAT (dat1, 1); \
		CVMX_MT_HSH_DAT (dat2, 2); \
		next = 3; \
	} else if (next == 2) { \
		CVMX_MT_HSH_DAT (dat1, 2); \
		CVMX_MT_HSH_DAT (dat2, 3); \
		next = 4; \
	} else if (next == 3) { \
		CVMX_MT_HSH_DAT (dat1, 3); \
		CVMX_MT_HSH_DAT (dat2, 4); \
		next = 5; \
	} else if (next == 4) { \
		CVMX_MT_HSH_DAT (dat1, 4); \
		CVMX_MT_HSH_DAT (dat2, 5); \
		next = 6; \
	} else if (next == 5) { \
		CVMX_MT_HSH_DAT (dat1, 5); \
		CVMX_MT_HSH_DAT (dat2, 6); \
		next = 7; \
	} else if (next == 6) { \
		CVMX_MT_HSH_DAT (dat1, 6); \
		CVMX_MT_HSH_STARTSHA (dat2); \
		next = 0; \
	} else { \
		CVMX_MT_HSH_STARTSHA (dat1); \
		CVMX_MT_HSH_DAT (dat2, 0); \
		next = 1; \
	} \
}

/****************************************************************************/

#define CVM_LOAD_MD5_UNIT(dat, next)  { \
	if (next == 0) { \
		next = 1; \
		CVMX_MT_HSH_DAT (dat, 0); \
	} else if (next == 1) { \
		next = 2; \
		CVMX_MT_HSH_DAT (dat, 1); \
	} else if (next == 2) { \
		next = 3; \
		CVMX_MT_HSH_DAT (dat, 2); \
	} else if (next == 3) { \
		next = 4; \
		CVMX_MT_HSH_DAT (dat, 3); \
	} else if (next == 4) { \
		next = 5; \
		CVMX_MT_HSH_DAT (dat, 4); \
	} else if (next == 5) { \
		next = 6; \
		CVMX_MT_HSH_DAT (dat, 5); \
	} else if (next == 6) { \
		next = 7; \
		CVMX_MT_HSH_DAT (dat, 6); \
	} else { \
		CVMX_MT_HSH_STARTMD5 (dat); \
		next = 0; \
	} \
}

#define CVM_LOAD2_MD5_UNIT(dat1, dat2, next)  { \
	if (next == 0) { \
		CVMX_MT_HSH_DAT (dat1, 0); \
		CVMX_MT_HSH_DAT (dat2, 1); \
		next = 2; \
	} else if (next == 1) { \
		CVMX_MT_HSH_DAT (dat1, 1); \
		CVMX_MT_HSH_DAT (dat2, 2); \
		next = 3; \
	} else if (next == 2) { \
		CVMX_MT_HSH_DAT (dat1, 2); \
		CVMX_MT_HSH_DAT (dat2, 3); \
		next = 4; \
	} else if (next == 3) { \
		CVMX_MT_HSH_DAT (dat1, 3); \
		CVMX_MT_HSH_DAT (dat2, 4); \
		next = 5; \
	} else if (next == 4) { \
		CVMX_MT_HSH_DAT (dat1, 4); \
		CVMX_MT_HSH_DAT (dat2, 5); \
		next = 6; \
	} else if (next == 5) { \
		CVMX_MT_HSH_DAT (dat1, 5); \
		CVMX_MT_HSH_DAT (dat2, 6); \
		next = 7; \
	} else if (next == 6) { \
		CVMX_MT_HSH_DAT (dat1, 6); \
		CVMX_MT_HSH_STARTMD5 (dat2); \
		next = 0; \
	} else { \
		CVMX_MT_HSH_STARTMD5 (dat1); \
		CVMX_MT_HSH_DAT (dat2, 0); \
		next = 1; \
	} \
}

/****************************************************************************/

/* Byte-swap a 64-bit word (the function name here is reconstructed). */
static inline uint64_t
swap64(uint64_t a)
{
	return ((((a >> 56) & 0xfful) << 0) |
		(((a >> 48) & 0xfful) << 8) |
		(((a >> 40) & 0xfful) << 16) |
		(((a >> 32) & 0xfful) << 24) |
		(((a >> 24) & 0xfful) << 32) |
		(((a >> 16) & 0xfful) << 40) |
		(((a >> 8) & 0xfful) << 48) | (((a >> 0) & 0xfful) << 56));
}

/****************************************************************************/
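
/*
 * octo_calc_hash() precomputes the HMAC inner and outer partial digests:
 * the 16- or 20-byte key is zero-padded to a 64-byte block, XORed with the
 * ipad (0x36) and opad (0x5c) patterns, and one block of each is pushed
 * through the hash unit.  The intermediate IVs are returned in 'inner' and
 * 'outer'; the per-packet routines below start from these states
 * (presumably saved by session setup in od->octo_hminner/od->octo_hmouter)
 * instead of rehashing the key for every packet.  'auth' selects SHA1
 * (non-zero, three IV words) or MD5 (zero, two IV words).
 */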
static void
octo_calc_hash(__u8 auth, unsigned char *key, uint64_t *inner, uint64_t *outer)
{
	uint8_t hash_key[64];
	uint64_t *key1;
	register uint64_t xor1 = 0x3636363636363636ULL;
	register uint64_t xor2 = 0x5c5c5c5c5c5c5c5cULL;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	memset(hash_key, 0, sizeof(hash_key));
	memcpy(hash_key, (uint8_t *) key, (auth ? 20 : 16));
	key1 = (uint64_t *) hash_key;
	flags = octeon_crypto_enable(&state);
	if (auth) {
		CVMX_MT_HSH_IV(0x67452301EFCDAB89ULL, 0);
		CVMX_MT_HSH_IV(0x98BADCFE10325476ULL, 1);
		CVMX_MT_HSH_IV(0xC3D2E1F000000000ULL, 2);
	} else {
		CVMX_MT_HSH_IV(0x0123456789ABCDEFULL, 0);
		CVMX_MT_HSH_IV(0xFEDCBA9876543210ULL, 1);
	}

	CVMX_MT_HSH_DAT((*key1 ^ xor1), 0);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor1), 1);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor1), 2);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor1), 3);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor1), 4);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor1), 5);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor1), 6);
	key1++;
	if (auth)
		CVMX_MT_HSH_STARTSHA((*key1 ^ xor1));
	else
		CVMX_MT_HSH_STARTMD5((*key1 ^ xor1));

	CVMX_MF_HSH_IV(inner[0], 0);
	CVMX_MF_HSH_IV(inner[1], 1);
	if (auth)
		CVMX_MF_HSH_IV(((uint64_t *) inner)[2], 2);

	memset(hash_key, 0, sizeof(hash_key));
	memcpy(hash_key, (uint8_t *) key, (auth ? 20 : 16));
	key1 = (uint64_t *) hash_key;
	if (auth) {
		CVMX_MT_HSH_IV(0x67452301EFCDAB89ULL, 0);
		CVMX_MT_HSH_IV(0x98BADCFE10325476ULL, 1);
		CVMX_MT_HSH_IV(0xC3D2E1F000000000ULL, 2);
	} else {
		CVMX_MT_HSH_IV(0x0123456789ABCDEFULL, 0);
		CVMX_MT_HSH_IV(0xFEDCBA9876543210ULL, 1);
	}

	CVMX_MT_HSH_DAT((*key1 ^ xor2), 0);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor2), 1);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor2), 2);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor2), 3);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor2), 4);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor2), 5);
	key1++;
	CVMX_MT_HSH_DAT((*key1 ^ xor2), 6);
	key1++;
	if (auth)
		CVMX_MT_HSH_STARTSHA((*key1 ^ xor2));
	else
		CVMX_MT_HSH_STARTMD5((*key1 ^ xor2));

	CVMX_MF_HSH_IV(outer[0], 0);
	CVMX_MF_HSH_IV(outer[1], 1);
	if (auth)
		CVMX_MF_HSH_IV(outer[2], 2);

	octeon_crypto_disable(&state, flags);
}

/****************************************************************************/
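
/*
 * Plain DES/3DES-CBC encrypt/decrypt: the payload is walked 64 bits at a
 * time and each word is run through the 3DES unit in place.  An 8-byte key
 * is replicated into all three key registers, which makes single DES a
 * degenerate case of 3DES (EDE with K1 == K2 == K3).
 */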
static int
octo_des_cbc_encrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	uint64_t *data;
	int data_i, data_l;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data, data_i, data_l);

	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	if (od->octo_encklen == 24) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
	} else if (od->octo_encklen == 8) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}

	CVMX_MT_3DES_IV(* (uint64_t *) ivp);

	while (crypt_off > 0) {
		SG_CONSUME(sg, data, data_i, data_l);
		crypt_off -= 8;
	}

	while (crypt_len > 0) {
		CVMX_MT_3DES_ENC_CBC(*data);
		CVMX_MF_3DES_RESULT(*data);
		SG_CONSUME(sg, data, data_i, data_l);
		crypt_len -= 8;
	}

	octeon_crypto_disable(&state, flags);
	return 0;
}

static int
octo_des_cbc_decrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	uint64_t *data;
	int data_i, data_l;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data, data_i, data_l);

	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	if (od->octo_encklen == 24) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
	} else if (od->octo_encklen == 8) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}

	CVMX_MT_3DES_IV(* (uint64_t *) ivp);

	while (crypt_off > 0) {
		SG_CONSUME(sg, data, data_i, data_l);
		crypt_off -= 8;
	}

	while (crypt_len > 0) {
		CVMX_MT_3DES_DEC_CBC(*data);
		CVMX_MF_3DES_RESULT(*data);
		SG_CONSUME(sg, data, data_i, data_l);
		crypt_len -= 8;
	}

	octeon_crypto_disable(&state, flags);
	return 0;
}

/****************************************************************************/
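
/*
 * Plain AES-CBC encrypt/decrypt: AES works on 16-byte blocks, so two
 * consecutive 64-bit words are fed to CBC0/CBC1 and both result words are
 * read back before advancing.  The key length register takes the key size
 * in 64-bit words minus one (hence octo_encklen / 8 - 1).
 */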
static int
octo_aes_cbc_encrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	uint64_t *data, *pdata;
	int data_i, data_l;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data, data_i, data_l);

	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

	if (od->octo_encklen == 16) {
		CVMX_MT_AES_KEY(0x0, 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 24) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 32) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}
	CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

	CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
	CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

	while (crypt_off > 0) {
		SG_CONSUME(sg, data, data_i, data_l);
		crypt_off -= 8;
	}

	while (crypt_len > 0) {
		pdata = data;
		CVMX_MT_AES_ENC_CBC0(*data);
		SG_CONSUME(sg, data, data_i, data_l);
		CVMX_MT_AES_ENC_CBC1(*data);
		CVMX_MF_AES_RESULT(*pdata, 0);
		CVMX_MF_AES_RESULT(*data, 1);
		SG_CONSUME(sg, data, data_i, data_l);
		crypt_len -= 16;
	}

	octeon_crypto_disable(&state, flags);
	return 0;
}

static int
octo_aes_cbc_decrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	uint64_t *data, *pdata;
	int data_i, data_l;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x7) || (crypt_off + crypt_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data, data_i, data_l);

	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

	if (od->octo_encklen == 16) {
		CVMX_MT_AES_KEY(0x0, 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 24) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 32) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}
	CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

	CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
	CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

	while (crypt_off > 0) {
		SG_CONSUME(sg, data, data_i, data_l);
		crypt_off -= 8;
	}

	while (crypt_len > 0) {
		pdata = data;
		CVMX_MT_AES_DEC_CBC0(*data);
		SG_CONSUME(sg, data, data_i, data_l);
		CVMX_MT_AES_DEC_CBC1(*data);
		CVMX_MF_AES_RESULT(*pdata, 0);
		CVMX_MF_AES_RESULT(*data, 1);
		SG_CONSUME(sg, data, data_i, data_l);
		crypt_len -= 16;
	}

	octeon_crypto_disable(&state, flags);
	return 0;
}

/****************************************************************************/
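
/*
 * Authentication-only HMAC-MD5: the region [auth_off, auth_off + auth_len)
 * is streamed into the hash unit starting from the precomputed inner state,
 * then padded with 0x80, zero-filled and terminated with the bit length
 * ((alen + 64) << 3 -- the extra 64 accounts for the key block).  MD5 wants
 * that length little-endian, hence the CVMX_ES64 byte swap.  The outer hash
 * is then run over the 16-byte inner digest, and the first 96 bits of the
 * result are written back at icv_off (ESP_HMAC_LEN == 12).
 */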
static int
octo_null_md5_encrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	uint64_t *data;
	uint64_t tmp1, tmp2;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 ||
			(auth_off & 0x7) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data, data_i, data_l);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

	while (auth_off > 0) {
		SG_CONSUME(sg, data, data_i, data_l);
		auth_off -= 8;
	}

	while (auth_len > 0) {
		CVM_LOAD_MD5_UNIT(*data, next);
		auth_len -= 8;
		SG_CONSUME(sg, data, data_i, data_l);
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_MD5_UNIT(tmp, next);
	} else {
		CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVMX_ES64(tmp1, ((alen + 64) << 3));
	CVM_LOAD_MD5_UNIT(tmp1, next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_ES64(tmp1, ((64 + 16) << 3));
	CVMX_MT_HSH_STARTMD5(tmp1);

	SG_INIT(sg, data, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data, data_i, data_l);
		icv_off -= 8;
	}
	CVMX_MF_HSH_IV(*data, 0);
	SG_CONSUME(sg, data, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*(uint32_t *)data = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

/****************************************************************************/
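
/*
 * Authentication-only HMAC-SHA1: same flow as the MD5 variant above, but
 * with a three-word hash state, a big-endian length word (so no byte swap)
 * and a 20-byte inner digest folded into the outer block, again truncated
 * to a 96-bit ICV on output.
 */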
static int
octo_null_sha1_encrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	uint64_t *data;
	uint64_t tmp1, tmp2, tmp3;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 ||
			(auth_off & 0x7) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data, data_i, data_l);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
	CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

	while (auth_off > 0) {
		SG_CONSUME(sg, data, data_i, data_l);
		auth_off -= 8;
	}

	while (auth_len > 0) {
		CVM_LOAD_SHA_UNIT(*data, next);
		auth_len -= 8;
		SG_CONSUME(sg, data, data_i, data_l);
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_MD5_UNIT(tmp, next);
	} else {
		CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);
	CVMX_MF_HSH_IV(tmp3, 2);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
	CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	tmp3 |= 0x0000000080000000;
	CVMX_MT_HSH_DAT(tmp3, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

	SG_INIT(sg, data, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data, data_i, data_l);
		icv_off -= 8;
	}
	CVMX_MF_HSH_IV(*data, 0);
	SG_CONSUME(sg, data, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*(uint32_t *)data = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

/****************************************************************************/
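
/*
 * Combined 3DES-CBC + HMAC-MD5 (ESP-style) in a single pass: the packet is
 * walked 8 bytes at a time through a small bounce buffer ('mydata') using
 * 32-bit loads/stores, because the cipher and authentication regions are
 * only guaranteed 4-byte alignment relative to each other.  On encrypt each
 * dword is ciphered first and the ciphertext is hashed; on decrypt the
 * ciphertext is hashed first and then deciphered.
 */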
static int
octo_des_cbc_md5_encrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	union {
		uint32_t data32[2];
		uint64_t data64[1];
	} mydata;
	uint64_t *data = &mydata.data64[0];
	uint32_t *data32;
	uint64_t tmp1, tmp2;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
			(crypt_len & 0x7) ||
			(auth_len & 0x7) ||
			(auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data32, data_i, data_l);

	CVMX_PREFETCH0(ivp);
	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	if (od->octo_encklen == 24) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
	} else if (od->octo_encklen == 8) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}

	CVMX_MT_3DES_IV(* (uint64_t *) ivp);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

	while (crypt_off > 0 && auth_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		crypt_off -= 4;
		auth_off -= 4;
	}

	while (crypt_len > 0 || auth_len > 0) {
		uint32_t *first = data32;
		mydata.data32[0] = *first;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata.data32[1] = *data32;
		if (crypt_off <= 0) {
			if (crypt_len > 0) {
				CVMX_MT_3DES_ENC_CBC(*data);
				CVMX_MF_3DES_RESULT(*data);
				crypt_len -= 8;
			}
		} else
			crypt_off -= 8;
		if (auth_off <= 0) {
			if (auth_len > 0) {
				CVM_LOAD_MD5_UNIT(*data, next);
				auth_len -= 8;
			}
		} else
			auth_off -= 8;
		*first = mydata.data32[0];
		*data32 = mydata.data32[1];
		SG_CONSUME(sg, data32, data_i, data_l);
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_MD5_UNIT(tmp, next);
	} else {
		CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVMX_ES64(tmp1, ((alen + 64) << 3));
	CVM_LOAD_MD5_UNIT(tmp1, next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_ES64(tmp1, ((64 + 16) << 3));
	CVMX_MT_HSH_STARTMD5(tmp1);

	SG_INIT(sg, data32, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		icv_off -= 4;
	}
	CVMX_MF_HSH_IV(tmp1, 0);
	*data32 = (uint32_t) (tmp1 >> 32);
	SG_CONSUME(sg, data32, data_i, data_l);
	*data32 = (uint32_t) tmp1;
	SG_CONSUME(sg, data32, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*data32 = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

static int
octo_des_cbc_md5_decrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	union {
		uint32_t data32[2];
		uint64_t data64[1];
	} mydata;
	uint64_t *data = &mydata.data64[0];
	uint32_t *data32;
	uint64_t tmp1, tmp2;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
			(crypt_len & 0x7) ||
			(auth_len & 0x7) ||
			(auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data32, data_i, data_l);

	CVMX_PREFETCH0(ivp);
	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	if (od->octo_encklen == 24) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
	} else if (od->octo_encklen == 8) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}

	CVMX_MT_3DES_IV(* (uint64_t *) ivp);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

	while (crypt_off > 0 && auth_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		crypt_off -= 4;
		auth_off -= 4;
	}

	while (crypt_len > 0 || auth_len > 0) {
		uint32_t *first = data32;
		mydata.data32[0] = *first;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata.data32[1] = *data32;
		if (auth_off <= 0) {
			if (auth_len > 0) {
				CVM_LOAD_MD5_UNIT(*data, next);
				auth_len -= 8;
			}
		} else
			auth_off -= 8;
		if (crypt_off <= 0) {
			if (crypt_len > 0) {
				CVMX_MT_3DES_DEC_CBC(*data);
				CVMX_MF_3DES_RESULT(*data);
				crypt_len -= 8;
			}
		} else
			crypt_off -= 8;
		*first = mydata.data32[0];
		*data32 = mydata.data32[1];
		SG_CONSUME(sg, data32, data_i, data_l);
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_MD5_UNIT(tmp, next);
	} else {
		CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVMX_ES64(tmp1, ((alen + 64) << 3));
	CVM_LOAD_MD5_UNIT(tmp1, next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_ES64(tmp1, ((64 + 16) << 3));
	CVMX_MT_HSH_STARTMD5(tmp1);

	SG_INIT(sg, data32, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		icv_off -= 4;
	}
	CVMX_MF_HSH_IV(tmp1, 0);
	*data32 = (uint32_t) (tmp1 >> 32);
	SG_CONSUME(sg, data32, data_i, data_l);
	*data32 = (uint32_t) tmp1;
	SG_CONSUME(sg, data32, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*data32 = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

/****************************************************************************/
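
/*
 * Combined 3DES-CBC + HMAC-SHA1: identical single-pass structure to the
 * MD5 variants above, using the three-word SHA1 state and big-endian
 * length words for the inner and outer hashes.
 */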
static int
octo_des_cbc_sha1_encrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	union {
		uint32_t data32[2];
		uint64_t data64[1];
	} mydata;
	uint64_t *data = &mydata.data64[0];
	uint32_t *data32;
	uint64_t tmp1, tmp2, tmp3;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
			(crypt_len & 0x7) ||
			(auth_len & 0x7) ||
			(auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data32, data_i, data_l);

	CVMX_PREFETCH0(ivp);
	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	if (od->octo_encklen == 24) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
	} else if (od->octo_encklen == 8) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}

	CVMX_MT_3DES_IV(* (uint64_t *) ivp);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
	CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

	while (crypt_off > 0 && auth_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		crypt_off -= 4;
		auth_off -= 4;
	}

	while (crypt_len > 0 || auth_len > 0) {
		uint32_t *first = data32;
		mydata.data32[0] = *first;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata.data32[1] = *data32;
		if (crypt_off <= 0) {
			if (crypt_len > 0) {
				CVMX_MT_3DES_ENC_CBC(*data);
				CVMX_MF_3DES_RESULT(*data);
				crypt_len -= 8;
			}
		} else
			crypt_off -= 8;
		if (auth_off <= 0) {
			if (auth_len > 0) {
				CVM_LOAD_SHA_UNIT(*data, next);
				auth_len -= 8;
			}
		} else
			auth_off -= 8;
		*first = mydata.data32[0];
		*data32 = mydata.data32[1];
		SG_CONSUME(sg, data32, data_i, data_l);
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_SHA_UNIT(tmp, next);
	} else {
		CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);
	CVMX_MF_HSH_IV(tmp3, 2);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
	CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	tmp3 |= 0x0000000080000000;
	CVMX_MT_HSH_DAT(tmp3, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

	SG_INIT(sg, data32, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		icv_off -= 4;
	}
	CVMX_MF_HSH_IV(tmp1, 0);
	*data32 = (uint32_t) (tmp1 >> 32);
	SG_CONSUME(sg, data32, data_i, data_l);
	*data32 = (uint32_t) tmp1;
	SG_CONSUME(sg, data32, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*data32 = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

static int
octo_des_cbc_sha1_decrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	union {
		uint32_t data32[2];
		uint64_t data64[1];
	} mydata;
	uint64_t *data = &mydata.data64[0];
	uint32_t *data32;
	uint64_t tmp1, tmp2, tmp3;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
			(crypt_len & 0x7) ||
			(auth_len & 0x7) ||
			(auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data32, data_i, data_l);

	CVMX_PREFETCH0(ivp);
	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	if (od->octo_encklen == 24) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[1], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
	} else if (od->octo_encklen == 8) {
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 1);
		CVMX_MT_3DES_KEY(((uint64_t *) od->octo_enckey)[0], 2);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}

	CVMX_MT_3DES_IV(* (uint64_t *) ivp);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
	CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

	while (crypt_off > 0 && auth_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		crypt_off -= 4;
		auth_off -= 4;
	}

	while (crypt_len > 0 || auth_len > 0) {
		uint32_t *first = data32;
		mydata.data32[0] = *first;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata.data32[1] = *data32;
		if (auth_off <= 0) {
			if (auth_len > 0) {
				CVM_LOAD_SHA_UNIT(*data, next);
				auth_len -= 8;
			}
		} else
			auth_off -= 8;
		if (crypt_off <= 0) {
			if (crypt_len > 0) {
				CVMX_MT_3DES_DEC_CBC(*data);
				CVMX_MF_3DES_RESULT(*data);
				crypt_len -= 8;
			}
		} else
			crypt_off -= 8;
		*first = mydata.data32[0];
		*data32 = mydata.data32[1];
		SG_CONSUME(sg, data32, data_i, data_l);
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_SHA_UNIT(tmp, next);
	} else {
		CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);
	CVMX_MF_HSH_IV(tmp3, 2);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
	CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	tmp3 |= 0x0000000080000000;
	CVMX_MT_HSH_DAT(tmp3, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

	SG_INIT(sg, data32, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		icv_off -= 4;
	}
	CVMX_MF_HSH_IV(tmp1, 0);
	*data32 = (uint32_t) (tmp1 >> 32);
	SG_CONSUME(sg, data32, data_i, data_l);
	*data32 = (uint32_t) tmp1;
	SG_CONSUME(sg, data32, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*data32 = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

/****************************************************************************/
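
/*
 * Combined AES-CBC + HMAC-MD5: since AES needs a full 16-byte block, each
 * iteration gathers four 32-bit words into two bounce buffers (mydata[0]
 * and mydata[1]), remembers the three earlier source pointers in pdata32[]
 * so the transformed block can be scattered back, and feeds both 64-bit
 * halves to the hash unit.  Leading and trailing regions that are hashed
 * but not ciphered are handled by the smaller loops around the main one.
 */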
static int
octo_aes_cbc_md5_encrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	union {
		uint32_t data32[2];
		uint64_t data64[1];
	} mydata[2];
	uint64_t *pdata = &mydata[0].data64[0];
	uint64_t *data = &mydata[1].data64[0];
	uint32_t *data32;
	uint64_t tmp1, tmp2;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
			(crypt_len & 0x7) ||
			(auth_len & 0x7) ||
			(auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data32, data_i, data_l);

	CVMX_PREFETCH0(ivp);
	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

	if (od->octo_encklen == 16) {
		CVMX_MT_AES_KEY(0x0, 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 24) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 32) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}
	CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

	CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
	CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

	while (crypt_off > 0 && auth_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		crypt_off -= 4;
		auth_off -= 4;
	}

	/* align auth and crypt */
	while (crypt_off > 0 && auth_len > 0) {
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		CVM_LOAD_MD5_UNIT(*pdata, next);
		crypt_off -= 8;
		auth_len -= 8;
	}

	while (crypt_len > 0) {
		uint32_t *pdata32[3];

		pdata32[0] = data32;
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		pdata32[1] = data32;
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		pdata32[2] = data32;
		mydata[1].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[1].data32[1] = *data32;

		CVMX_MT_AES_ENC_CBC0(*pdata);
		CVMX_MT_AES_ENC_CBC1(*data);
		CVMX_MF_AES_RESULT(*pdata, 0);
		CVMX_MF_AES_RESULT(*data, 1);
		crypt_len -= 16;

		if (auth_len > 0) {
			CVM_LOAD_MD5_UNIT(*pdata, next);
			auth_len -= 8;
		}
		if (auth_len > 0) {
			CVM_LOAD_MD5_UNIT(*data, next);
			auth_len -= 8;
		}

		*pdata32[0] = mydata[0].data32[0];
		*pdata32[1] = mydata[0].data32[1];
		*pdata32[2] = mydata[1].data32[0];
		*data32 = mydata[1].data32[1];

		SG_CONSUME(sg, data32, data_i, data_l);
	}

	/* finish any left over hashing */
	while (auth_len > 0) {
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		CVM_LOAD_MD5_UNIT(*pdata, next);
		auth_len -= 8;
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_MD5_UNIT(tmp, next);
	} else {
		CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVMX_ES64(tmp1, ((alen + 64) << 3));
	CVM_LOAD_MD5_UNIT(tmp1, next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_ES64(tmp1, ((64 + 16) << 3));
	CVMX_MT_HSH_STARTMD5(tmp1);

	SG_INIT(sg, data32, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		icv_off -= 4;
	}
	CVMX_MF_HSH_IV(tmp1, 0);
	*data32 = (uint32_t) (tmp1 >> 32);
	SG_CONSUME(sg, data32, data_i, data_l);
	*data32 = (uint32_t) tmp1;
	SG_CONSUME(sg, data32, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*data32 = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

static int
octo_aes_cbc_md5_decrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	union {
		uint32_t data32[2];
		uint64_t data64[1];
	} mydata[2];
	uint64_t *pdata = &mydata[0].data64[0];
	uint64_t *data = &mydata[1].data64[0];
	uint32_t *data32;
	uint64_t tmp1, tmp2;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s()\n", __FUNCTION__);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
			(crypt_len & 0x7) ||
			(auth_len & 0x7) ||
			(auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data32, data_i, data_l);

	CVMX_PREFETCH0(ivp);
	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

	if (od->octo_encklen == 16) {
		CVMX_MT_AES_KEY(0x0, 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 24) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 32) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}
	CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

	CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
	CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);

	while (crypt_off > 0 && auth_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		crypt_off -= 4;
		auth_off -= 4;
	}

	/* align auth and crypt */
	while (crypt_off > 0 && auth_len > 0) {
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		CVM_LOAD_MD5_UNIT(*pdata, next);
		crypt_off -= 8;
		auth_len -= 8;
	}

	while (crypt_len > 0) {
		uint32_t *pdata32[3];

		pdata32[0] = data32;
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		pdata32[1] = data32;
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		pdata32[2] = data32;
		mydata[1].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[1].data32[1] = *data32;

		if (auth_len > 0) {
			CVM_LOAD_MD5_UNIT(*pdata, next);
			auth_len -= 8;
		}
		if (auth_len > 0) {
			CVM_LOAD_MD5_UNIT(*data, next);
			auth_len -= 8;
		}

		CVMX_MT_AES_DEC_CBC0(*pdata);
		CVMX_MT_AES_DEC_CBC1(*data);
		CVMX_MF_AES_RESULT(*pdata, 0);
		CVMX_MF_AES_RESULT(*data, 1);
		crypt_len -= 16;

		*pdata32[0] = mydata[0].data32[0];
		*pdata32[1] = mydata[0].data32[1];
		*pdata32[2] = mydata[1].data32[0];
		*data32 = mydata[1].data32[1];

		SG_CONSUME(sg, data32, data_i, data_l);
	}

	/* finish left over hash if any */
	while (auth_len > 0) {
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		CVM_LOAD_MD5_UNIT(*pdata, next);
		auth_len -= 8;
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_MD5_UNIT(tmp, next);
	} else {
		CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_MD5_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVMX_ES64(tmp1, ((alen + 64) << 3));
	CVM_LOAD_MD5_UNIT(tmp1, next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	CVMX_MT_HSH_DAT(0x8000000000000000ULL, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_ES64(tmp1, ((64 + 16) << 3));
	CVMX_MT_HSH_STARTMD5(tmp1);

	SG_INIT(sg, data32, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		icv_off -= 4;
	}
	CVMX_MF_HSH_IV(tmp1, 0);
	*data32 = (uint32_t) (tmp1 >> 32);
	SG_CONSUME(sg, data32, data_i, data_l);
	*data32 = (uint32_t) tmp1;
	SG_CONSUME(sg, data32, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*data32 = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

/****************************************************************************/
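
/*
 * Combined AES-CBC + HMAC-SHA1: same gather/scatter structure as the
 * AES/MD5 routines above, using the SHA1 unit and big-endian length words
 * for the inner and outer hashes.
 */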
static int
octo_aes_cbc_sha1_encrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	union {
		uint32_t data32[2];
		uint64_t data64[1];
	} mydata[2];
	uint64_t *pdata = &mydata[0].data64[0];
	uint64_t *data = &mydata[1].data64[0];
	uint32_t *data32;
	uint64_t tmp1, tmp2, tmp3;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s(a_off=%d a_len=%d c_off=%d c_len=%d icv_off=%d)\n",
			__FUNCTION__, auth_off, auth_len, crypt_off, crypt_len, icv_off);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
			(crypt_len & 0x7) ||
			(auth_len & 0x7) ||
			(auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data32, data_i, data_l);

	CVMX_PREFETCH0(ivp);
	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

	if (od->octo_encklen == 16) {
		CVMX_MT_AES_KEY(0x0, 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 24) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 32) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}
	CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

	CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
	CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
	CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

	while (crypt_off > 0 && auth_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		crypt_off -= 4;
		auth_off -= 4;
	}

	/* align auth and crypt */
	while (crypt_off > 0 && auth_len > 0) {
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		CVM_LOAD_SHA_UNIT(*pdata, next);
		crypt_off -= 8;
		auth_len -= 8;
	}

	while (crypt_len > 0) {
		uint32_t *pdata32[3];

		pdata32[0] = data32;
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		pdata32[1] = data32;
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		pdata32[2] = data32;
		mydata[1].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[1].data32[1] = *data32;

		CVMX_MT_AES_ENC_CBC0(*pdata);
		CVMX_MT_AES_ENC_CBC1(*data);
		CVMX_MF_AES_RESULT(*pdata, 0);
		CVMX_MF_AES_RESULT(*data, 1);
		crypt_len -= 16;

		if (auth_len > 0) {
			CVM_LOAD_SHA_UNIT(*pdata, next);
			auth_len -= 8;
		}
		if (auth_len > 0) {
			CVM_LOAD_SHA_UNIT(*data, next);
			auth_len -= 8;
		}

		*pdata32[0] = mydata[0].data32[0];
		*pdata32[1] = mydata[0].data32[1];
		*pdata32[2] = mydata[1].data32[0];
		*data32 = mydata[1].data32[1];

		SG_CONSUME(sg, data32, data_i, data_l);
	}

	/* finish any leftover hashing */
	while (auth_len > 0) {
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		CVM_LOAD_SHA_UNIT(*pdata, next);
		auth_len -= 8;
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_SHA_UNIT(tmp, next);
	} else {
		CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);
	CVMX_MF_HSH_IV(tmp3, 2);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
	CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	tmp3 |= 0x0000000080000000;
	CVMX_MT_HSH_DAT(tmp3, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_MD5_UNIT(tmp, next);
	} else {
		CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

	SG_INIT(sg, data32, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		icv_off -= 4;
	}
	CVMX_MF_HSH_IV(tmp1, 0);
	*data32 = (uint32_t) (tmp1 >> 32);
	SG_CONSUME(sg, data32, data_i, data_l);
	*data32 = (uint32_t) tmp1;
	SG_CONSUME(sg, data32, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*data32 = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

static int
octo_aes_cbc_sha1_decrypt(
	struct octo_sess *od,
	struct scatterlist *sg, int sg_len,
	int auth_off, int auth_len,
	int crypt_off, int crypt_len,
	int icv_off, uint8_t *ivp)
{
	register int next = 0;
	union {
		uint32_t data32[2];
		uint64_t data64[1];
	} mydata[2];
	uint64_t *pdata = &mydata[0].data64[0];
	uint64_t *data = &mydata[1].data64[0];
	uint32_t *data32;
	uint64_t tmp1, tmp2, tmp3;
	int data_i, data_l, alen = auth_len;
	struct octeon_cop2_state state;
	unsigned long flags;

	dprintk("%s(a_off=%d a_len=%d c_off=%d c_len=%d icv_off=%d)\n",
			__FUNCTION__, auth_off, auth_len, crypt_off, crypt_len, icv_off);

	if (unlikely(od == NULL || sg == NULL || sg_len == 0 || ivp == NULL ||
			(crypt_off & 0x3) || (crypt_off + crypt_len > sg_len) ||
			(crypt_len & 0x7) ||
			(auth_len & 0x7) ||
			(auth_off & 0x3) || (auth_off + auth_len > sg_len))) {
		dprintk("%s: Bad parameters od=%p sg=%p sg_len=%d "
				"auth_off=%d auth_len=%d crypt_off=%d crypt_len=%d "
				"icv_off=%d ivp=%p\n", __FUNCTION__, od, sg, sg_len,
				auth_off, auth_len, crypt_off, crypt_len, icv_off, ivp);
		return -EINVAL;
	}

	SG_INIT(sg, data32, data_i, data_l);

	CVMX_PREFETCH0(ivp);
	CVMX_PREFETCH0(od->octo_enckey);

	flags = octeon_crypto_enable(&state);

	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[0], 0);
	CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[1], 1);

	if (od->octo_encklen == 16) {
		CVMX_MT_AES_KEY(0x0, 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 24) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(0x0, 3);
	} else if (od->octo_encklen == 32) {
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[2], 2);
		CVMX_MT_AES_KEY(((uint64_t *) od->octo_enckey)[3], 3);
	} else {
		octeon_crypto_disable(&state, flags);
		dprintk("%s: Bad key length %d\n", __FUNCTION__, od->octo_encklen);
		return -EINVAL;
	}
	CVMX_MT_AES_KEYLENGTH(od->octo_encklen / 8 - 1);

	CVMX_MT_AES_IV(((uint64_t *) ivp)[0], 0);
	CVMX_MT_AES_IV(((uint64_t *) ivp)[1], 1);

	CVMX_MT_HSH_IV(od->octo_hminner[0], 0);
	CVMX_MT_HSH_IV(od->octo_hminner[1], 1);
	CVMX_MT_HSH_IV(od->octo_hminner[2], 2);

	while (crypt_off > 0 && auth_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		crypt_off -= 4;
		auth_off -= 4;
	}

	/* align auth and crypt */
	while (crypt_off > 0 && auth_len > 0) {
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		CVM_LOAD_SHA_UNIT(*pdata, next);
		crypt_off -= 8;
		auth_len -= 8;
	}

	while (crypt_len > 0) {
		uint32_t *pdata32[3];

		pdata32[0] = data32;
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		pdata32[1] = data32;
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		pdata32[2] = data32;
		mydata[1].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[1].data32[1] = *data32;

		if (auth_len > 0) {
			CVM_LOAD_SHA_UNIT(*pdata, next);
			auth_len -= 8;
		}
		if (auth_len > 0) {
			CVM_LOAD_SHA_UNIT(*data, next);
			auth_len -= 8;
		}

		CVMX_MT_AES_DEC_CBC0(*pdata);
		CVMX_MT_AES_DEC_CBC1(*data);
		CVMX_MF_AES_RESULT(*pdata, 0);
		CVMX_MF_AES_RESULT(*data, 1);
		crypt_len -= 16;

		*pdata32[0] = mydata[0].data32[0];
		*pdata32[1] = mydata[0].data32[1];
		*pdata32[2] = mydata[1].data32[0];
		*data32 = mydata[1].data32[1];

		SG_CONSUME(sg, data32, data_i, data_l);
	}

	/* finish any leftover hashing */
	while (auth_len > 0) {
		mydata[0].data32[0] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		mydata[0].data32[1] = *data32;
		SG_CONSUME(sg, data32, data_i, data_l);
		CVM_LOAD_SHA_UNIT(*pdata, next);
		auth_len -= 8;
	}

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_SHA_UNIT(tmp, next);
	} else {
		CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_SHA_UNIT(0x8000000000000000ULL, next);
#endif

	/* Finish Inner hash */
	while (next != 7) {
		CVM_LOAD_SHA_UNIT(((uint64_t) 0x0ULL), next);
	}
	CVM_LOAD_SHA_UNIT((uint64_t) ((alen + 64) << 3), next);

	/* Get the inner hash of HMAC */
	CVMX_MF_HSH_IV(tmp1, 0);
	CVMX_MF_HSH_IV(tmp2, 1);
	CVMX_MF_HSH_IV(tmp3, 2);

	/* Initialize hash unit */
	CVMX_MT_HSH_IV(od->octo_hmouter[0], 0);
	CVMX_MT_HSH_IV(od->octo_hmouter[1], 1);
	CVMX_MT_HSH_IV(od->octo_hmouter[2], 2);

	CVMX_MT_HSH_DAT(tmp1, 0);
	CVMX_MT_HSH_DAT(tmp2, 1);
	tmp3 |= 0x0000000080000000;
	CVMX_MT_HSH_DAT(tmp3, 2);
	CVMX_MT_HSH_DATZ(3);
	CVMX_MT_HSH_DATZ(4);
	CVMX_MT_HSH_DATZ(5);
	CVMX_MT_HSH_DATZ(6);
	CVMX_MT_HSH_STARTSHA((uint64_t) ((64 + 20) << 3));

	/* finish the hash */
	CVMX_PREFETCH0(od->octo_hmouter);
#if 0
	if (unlikely(inplen)) {
		uint64_t tmp = 0;
		uint8_t *p = (uint8_t *) & tmp;
		p[inplen] = 0x80;
		do {
			inplen--;
			p[inplen] = ((uint8_t *) data)[inplen];
		} while (inplen);
		CVM_LOAD_MD5_UNIT(tmp, next);
	} else {
		CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
	}
#else
	CVM_LOAD_MD5_UNIT(0x8000000000000000ULL, next);
#endif

	SG_INIT(sg, data32, data_i, data_l);
	while (icv_off > 0) {
		SG_CONSUME(sg, data32, data_i, data_l);
		icv_off -= 4;
	}
	CVMX_MF_HSH_IV(tmp1, 0);
	*data32 = (uint32_t) (tmp1 >> 32);
	SG_CONSUME(sg, data32, data_i, data_l);
	*data32 = (uint32_t) tmp1;
	SG_CONSUME(sg, data32, data_i, data_l);
	CVMX_MF_HSH_IV(tmp1, 1);
	*data32 = (uint32_t) (tmp1 >> 32);

	octeon_crypto_disable(&state, flags);
	return 0;
}

/****************************************************************************/