/*-
 * Copyright (c) 2003-2012 Broadcom Corporation
 * All Rights Reserved
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in
 *    the documentation and/or other materials provided with the
 *    distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY BROADCOM ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL BROADCOM OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
 * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
 * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
 * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
 * IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */

#ifndef _NLM_HAL_CRYPTO_H_
#define _NLM_HAL_CRYPTO_H_

#define	SAE_CFG_REG		0x00
#define SAE_ENG_SEL_0		0x01
#define SAE_ENG_SEL_1		0x02
#define SAE_ENG_SEL_2		0x03
#define SAE_ENG_SEL_3		0x04
#define SAE_ENG_SEL_4		0x05
#define SAE_ENG_SEL_5		0x06
#define SAE_ENG_SEL_6		0x07
#define SAE_ENG_SEL_7		0x08

#define	RSA_CFG_REG		0x00
#define RSA_ENG_SEL_0		0x01
#define RSA_ENG_SEL_1		0x02
#define RSA_ENG_SEL_2		0x03

#define nlm_read_sec_reg(b, r)		nlm_read_reg(b, r)
#define nlm_write_sec_reg(b, r, v)	nlm_write_reg(b, r, v)
#define nlm_get_sec_pcibase(node)	nlm_pcicfg_base(XLP_IO_SEC_OFFSET(node))
#define nlm_get_sec_regbase(node)        \
                        (nlm_get_sec_pcibase(node) + XLP_IO_PCI_HDRSZ)

#define nlm_read_rsa_reg(b, r)		nlm_read_reg(b, r)
#define nlm_write_rsa_reg(b, r, v)	nlm_write_reg(b, r, v)
#define nlm_get_rsa_pcibase(node)	nlm_pcicfg_base(XLP_IO_RSA_OFFSET(node))
#define nlm_get_rsa_regbase(node)        \
                        (nlm_get_rsa_pcibase(node) + XLP_IO_PCI_HDRSZ)

#define nlm_pcibase_sec(node)     nlm_pcicfg_base(XLP_IO_SEC_OFFSET(node))
#define nlm_qidstart_sec(node)    nlm_qidstart_kseg(nlm_pcibase_sec(node))
#define nlm_qnum_sec(node)        nlm_qnum_kseg(nlm_pcibase_sec(node))

/*
 * Kernel buffers for crypto requests are allocated with malloc, so each
 * source/destination segment is limited to one page (4K by default).
 */
#define NLM_CRYPTO_MAX_SEG_LEN	PAGE_SIZE

#define MAX_KEY_LEN_IN_DW		20

#define left_shift64(x, bitshift, numofbits)			\
    ((uint64_t)(x) << (bitshift))

#define left_shift64_mask(x, bitshift, numofbits)			\
    (((uint64_t)(x) & ((1ULL << (numofbits)) - 1)) << (bitshift))
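
/*
 * Note: left_shift64() ignores 'numofbits' (the caller must pass a value
 * that already fits the field), while left_shift64_mask() truncates the
 * value to 'numofbits' before shifting.  Illustrative sketch only, not
 * part of the hardware interface:
 *
 *	left_shift64(0x5, 40, 6)       == 0x5ULL << 40
 *	left_shift64_mask(0x1ff, 0, 8) == 0xffULL    (high bit masked off)
 */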

/**
* @brief cipher algorithms
* @ingroup crypto
*/
enum nlm_cipher_algo {
	NLM_CIPHER_BYPASS = 0,
	NLM_CIPHER_DES = 1,
	NLM_CIPHER_3DES = 2,
	NLM_CIPHER_AES128 = 3,
	NLM_CIPHER_AES192 = 4,
	NLM_CIPHER_AES256 = 5,
	NLM_CIPHER_ARC4 = 6,
	NLM_CIPHER_KASUMI_F8 = 7,
	NLM_CIPHER_SNOW3G_F8 = 8,
	NLM_CIPHER_CAMELLIA128 = 9,
	NLM_CIPHER_CAMELLIA192 = 0xA,
	NLM_CIPHER_CAMELLIA256 = 0xB,
	NLM_CIPHER_MAX = 0xC,
};

/**
* @brief cipher modes
* @ingroup crypto
*/
enum nlm_cipher_mode {
	NLM_CIPHER_MODE_ECB = 0,
	NLM_CIPHER_MODE_CBC = 1,
	NLM_CIPHER_MODE_CFB = 2,
	NLM_CIPHER_MODE_OFB = 3,
	NLM_CIPHER_MODE_CTR = 4,
	NLM_CIPHER_MODE_AES_F8 = 5,
	NLM_CIPHER_MODE_GCM = 6,
	NLM_CIPHER_MODE_CCM = 7,
	NLM_CIPHER_MODE_UNDEFINED1 = 8,
	NLM_CIPHER_MODE_UNDEFINED2 = 9,
	NLM_CIPHER_MODE_LRW = 0xA,
	NLM_CIPHER_MODE_XTS = 0xB,
	NLM_CIPHER_MODE_MAX = 0xC,
};

/**
* @brief hash algorithms
* @ingroup crypto
*/
enum nlm_hash_algo {
	NLM_HASH_BYPASS = 0,
	NLM_HASH_MD5 = 1,
	NLM_HASH_SHA = 2,
	NLM_HASH_UNDEFINED = 3,
	NLM_HASH_AES128 = 4,
	NLM_HASH_AES192 = 5,
	NLM_HASH_AES256 = 6,
	NLM_HASH_KASUMI_F9 = 7,
	NLM_HASH_SNOW3G_F9 = 8,
	NLM_HASH_CAMELLIA128 = 9,
	NLM_HASH_CAMELLIA192 = 0xA,
	NLM_HASH_CAMELLIA256 = 0xB,
	NLM_HASH_GHASH = 0xC,
	NLM_HASH_MAX = 0xD
};

/**
* @brief hash modes
* @ingroup crypto
*/
enum nlm_hash_mode {
	NLM_HASH_MODE_SHA1 = 0,	/* Only SHA */
	NLM_HASH_MODE_SHA224 = 1,	/* Only SHA */
	NLM_HASH_MODE_SHA256 = 2,	/* Only SHA */
	NLM_HASH_MODE_SHA384 = 3,	/* Only SHA */
	NLM_HASH_MODE_SHA512 = 4,	/* Only SHA */
	NLM_HASH_MODE_CMAC = 5,	/* AES and Camellia */
	NLM_HASH_MODE_XCBC = 6,	/* AES and Camellia */
	NLM_HASH_MODE_CBC_MAC = 7,	/* AES and Camellia */
	NLM_HASH_MODE_CCM = 8,	/* AES */
	NLM_HASH_MODE_GCM = 9,	/* AES */
	NLM_HASH_MODE_MAX = 0xA,
};

/**
* @brief crypto control descriptor, should be cache aligned
* @ingroup crypto
*/
struct nlm_crypto_pkt_ctrl {
	uint64_t desc0;
	/* combination of cipher and hash keys */
	uint64_t key[MAX_KEY_LEN_IN_DW];
	uint32_t cipherkeylen;
	uint32_t hashkeylen;
	uint32_t taglen;
};

/**
* @brief crypto packet descriptor, should be cache aligned
* @ingroup crypto
*/
struct nlm_crypto_pkt_param {
	uint64_t desc0;
	uint64_t desc1;
	uint64_t desc2;
	uint64_t desc3;
	uint64_t segment[1][2];
};
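
/*
 * Note: 'segment' is declared with a single row, but the caller must
 * allocate enough contiguous (cache-aligned) memory for one row per
 * (source, destination) descriptor pair that the request needs.
 * A minimal sizing sketch, assuming 'nseg' rows; the malloc type
 * M_NLMSEC is a placeholder for the caller's own:
 *
 *	size_t sz = sizeof(struct nlm_crypto_pkt_param) +
 *	    (nseg - 1) * 2 * sizeof(uint64_t);
 *	struct nlm_crypto_pkt_param *param =
 *	    malloc(sz, M_NLMSEC, M_NOWAIT | M_ZERO);
 */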

static __inline__ uint64_t
nlm_crypto_form_rsa_ecc_fmn_entry0(unsigned int l3alloc, unsigned int type,
    unsigned int func, uint64_t srcaddr)
{
	return (left_shift64(l3alloc, 61, 1) |
	    left_shift64(type, 46, 7) |
	    left_shift64(func, 40, 6) |
	    left_shift64(srcaddr, 0, 40));
}

static __inline__ uint64_t
nlm_crypto_form_rsa_ecc_fmn_entry1(unsigned int dstclobber,
    unsigned int l3alloc, unsigned int fbvc, uint64_t dstaddr)
{
	return (left_shift64(dstclobber, 62, 1) |
	    left_shift64(l3alloc, 61, 1) |
	    left_shift64(fbvc, 40, 12) |
	    left_shift64(dstaddr, 0, 40));
}

/**
* @brief Generate crypto control descriptor
* @ingroup crypto
* hmac : 1 for hash with hmac
* hashalg : see nlm_hash_algo enums
* hashmode : see nlm_hash_mode enums
* cipheralg : see nlm_cipher_algo enums
* ciphermode : see nlm_cipher_mode enums
* arc4_cipherkeylen : length of arc4 cipher key, 0 is interpreted as 32
* arc4_keyinit : 1 to (re)initialize the arc4 key state
* cfbmask : cipher feedback width,
*           0 (1 bit), 1 (2 bits), 2 (4 bits), 3 (8 bits), 4 (16 bits),
*           5 (32 bits), 6 (64 bits), 7 (128 bits)
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_ctrl_desc(unsigned int hmac, unsigned int hashalg,
    unsigned int hashmode, unsigned int cipheralg, unsigned int ciphermode,
    unsigned int arc4_cipherkeylen, unsigned int arc4_keyinit,
    unsigned int cfbmask)
{
	return (left_shift64(hmac, 61, 1) |
	    left_shift64(hashalg, 52, 8) |
	    left_shift64(hashmode, 43, 8) |
	    left_shift64(cipheralg, 34, 8) |
	    left_shift64(ciphermode, 25, 8) |
	    left_shift64(arc4_cipherkeylen, 18, 5) |
	    left_shift64(arc4_keyinit, 17, 1) |
	    left_shift64(cfbmask, 0, 3));
}
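
/*
 * Illustrative sketch (not part of the driver API): forming a control
 * descriptor for AES-128-CBC encryption with HMAC-SHA1 authentication.
 * The arc4 fields and cfbmask are left at 0 for non-ARC4/non-CFB use.
 *
 *	uint64_t desc0 = nlm_crypto_form_pkt_ctrl_desc(1, NLM_HASH_SHA,
 *	    NLM_HASH_MODE_SHA1, NLM_CIPHER_AES128, NLM_CIPHER_MODE_CBC,
 *	    0, 0, 0);
 */
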
/**
* @brief Generate crypto packet descriptor 0
* @ingroup crypto
* tls : 1 (tls enabled) 0 (tls disabled)
* hash_source : 1 (encrypted data is sent to the auth engine)
*               0 (plain data is sent to the auth engine)
* hashout_l3alloc : 1 (auth output is transited through l3 cache)
* encrypt : 1 (for encrypt) 0 (for decrypt)
* ivlen : iv length in bytes
* hashdst_addr : hash out physical address, byte aligned
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc0(unsigned int tls, unsigned int hash_source,
    unsigned int hashout_l3alloc, unsigned int encrypt, unsigned int ivlen,
    uint64_t hashdst_addr)
{
	return (left_shift64(tls, 63, 1) |
	    left_shift64(hash_source, 62, 1) |
	    left_shift64(hashout_l3alloc, 60, 1) |
	    left_shift64(encrypt, 59, 1) |
	    left_shift64_mask((ivlen - 1), 41, 16) |
	    left_shift64(hashdst_addr, 0, 40));
}

/**
* @brief Generate crypto packet descriptor 1
* @ingroup crypto
* cipherlen : cipher length in bytes
* hashlen : hash length in bytes
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc1(unsigned int cipherlen, unsigned int hashlen)
{
	return (left_shift64_mask((cipherlen - 1), 32, 32) |
	    left_shift64_mask((hashlen - 1), 0, 32));
}

/**
* @brief Generate crypto packet descriptor 2
* @ingroup crypto
* ivoff : iv offset, offset from start of src data addr
* cipherbit_cnt : number of valid bits in the last input byte to the cipher,
*                0 (8 bits), 1 (1 bit) .. 7 (7 bits)
* cipheroff : cipher offset, offset from start of src data addr
* hashbit_cnt : number of valid bits in the last input byte to the auth,
*              0 (8 bits), 1 (1 bit) .. 7 (7 bits)
* hashclobber : 1 (hash output will be written as multiples of cachelines, no
*              read modify write)
* hashoff : hash offset, offset from start of src data addr
*/

static __inline__ uint64_t
nlm_crypto_form_pkt_desc2(unsigned int ivoff, unsigned int cipherbit_cnt,
    unsigned int cipheroff, unsigned int hashbit_cnt, unsigned int hashclobber,
    unsigned int hashoff)
{
	return (left_shift64(ivoff, 45, 16) |
	    left_shift64(cipherbit_cnt, 42, 3) |
	    left_shift64(cipheroff, 22, 16) |
	    left_shift64(hashbit_cnt, 19, 3) |
	    left_shift64(hashclobber, 18, 1) |
	    left_shift64(hashoff, 0, 16));
}

/**
* @brief Generate crypto packet descriptor 3
* @ingroup crypto
* designer_vc : designer freeback fmn destination id
* taglen : length in bits of the tag generated by the auth engine
*          md5 (128 bits), sha1 (160), sha224 (224), sha384 (384),
*          sha512 (512), Kasumi (32), snow3g (32), gcm (128)
* hmacpad : 1 if hmac padding is already done
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc3(unsigned int designer_vc, unsigned int taglen,
    unsigned int arc4_state_save_l3, unsigned int arc4_save_state,
    unsigned int hmacpad)
{
	return (left_shift64(designer_vc, 48, 16) |
	    left_shift64(taglen, 11, 16) |
	    left_shift64(arc4_state_save_l3, 8, 1) |
	    left_shift64(arc4_save_state, 6, 1) |
	    left_shift64(hmacpad, 5, 1));
}

/**
* @brief Generate crypto packet descriptor 4
* @ingroup crypto
* srcfraglen : length of the source fragment (header + data + tail) in bytes
* srcfragaddr : physical address of the source fragment
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc4(unsigned int srcfraglen,
    uint64_t srcfragaddr)
{
	return (left_shift64_mask((srcfraglen - 1), 48, 16) |
	    left_shift64(srcfragaddr, 0, 40));
}

/**
* @brief Generate crypto packet descriptor 5
* @ingroup crypto
* dstfraglen : length of the dst fragment (header + data + tail) in bytes
* cipherout_l3alloc : 1 (cipher output is transited through l3 cache)
* cipherclobber : 1 (cipher output will be written as multiples of cachelines,
*                 no read modify write)
* cipherdst_addr : physical address of the cipher destination buffer
*/
static __inline__ uint64_t
nlm_crypto_form_pkt_desc5(unsigned int dstfraglen,
    unsigned int cipherout_l3alloc, unsigned int cipherclobber,
    uint64_t cipherdst_addr)
{
	return (left_shift64_mask((dstfraglen - 1), 48, 16) |
	    left_shift64(cipherout_l3alloc, 46, 1) |
	    left_shift64(cipherclobber, 41, 1) |
	    left_shift64(cipherdst_addr, 0, 40));
}

/**
  * @brief Generate crypto packet fmn message entry 0
  * @ingroup crypto
  * freeback_vc : freeback response destination address
  * designer_fblen : designer freeback length, 1 - 4
  * designerdesc_valid : designer desc valid or not
  * cipher_keylen : cipher key length in bytes
  * cntldesc_addr : physical address of the control descriptor
  */
static __inline__ uint64_t
nlm_crypto_form_pkt_fmn_entry0(unsigned int freeback_vc,
    unsigned int designer_fblen, unsigned int designerdesc_valid,
    unsigned int cipher_keylen, uint64_t cntldesc_addr)
{
	return (left_shift64(freeback_vc, 48, 16) |
	    left_shift64_mask(designer_fblen - 1, 46, 2) |
	    left_shift64(designerdesc_valid, 45, 1) |
	    left_shift64_mask(((cipher_keylen + 7) >> 3), 40, 5) |
	    left_shift64(cntldesc_addr >> 6, 0, 34));
}

/**
  * @brief Generate crypto packet fmn message entry 1
  * @ingroup crypto
  * arc4load_state : 1 if load state required, 0 otherwise
  * hash_keylen : hash key length in bytes
  * pktdesc_size : packet descriptor size in bytes
  * pktdesc_addr : physical address of the packet descriptor
  */
static __inline__ uint64_t
nlm_crypto_form_pkt_fmn_entry1(unsigned int arc4load_state,
    unsigned int hash_keylen, unsigned int pktdesc_size,
    uint64_t pktdesc_addr)
{
	return (left_shift64(arc4load_state, 63, 1) |
	    left_shift64_mask(((hash_keylen + 7) >> 3), 56, 5) |
	    left_shift64_mask(((pktdesc_size >> 4) - 1), 43, 12) |
	    left_shift64(pktdesc_addr >> 6, 0, 34));
}
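
/*
 * Illustrative sketch (not part of the driver API): the two FMN message
 * words describing one crypto request.  'ctrl' and 'param' are the
 * cache-aligned descriptors defined above; 'fbvc' and 'pktdesc_size' are
 * placeholders for the caller's free-back VC and the size (in bytes) of
 * the packet descriptor it allocated.
 *
 *	uint64_t msg0, msg1;
 *
 *	msg0 = nlm_crypto_form_pkt_fmn_entry0(fbvc, 1, 0,
 *	    ctrl->cipherkeylen, vtophys(ctrl));
 *	msg1 = nlm_crypto_form_pkt_fmn_entry1(0, ctrl->hashkeylen,
 *	    pktdesc_size, vtophys(param));
 */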

static __inline__ int
nlm_crypto_get_hklen_taglen(enum nlm_hash_algo hashalg,
    enum nlm_hash_mode hashmode, unsigned int *taglen, unsigned int *hklen)
{
	if (hashalg == NLM_HASH_MD5) {
		*taglen = 128;
		*hklen  = 64;
	} else if (hashalg == NLM_HASH_SHA) {
		switch (hashmode) {
		case NLM_HASH_MODE_SHA1:
			*taglen = 160;
			*hklen  = 64;
			break;
		case NLM_HASH_MODE_SHA224:
			*taglen = 224;
			*hklen  = 64;
			break;
		case NLM_HASH_MODE_SHA256:
			*taglen = 256;
			*hklen  = 64;
			break;
		case NLM_HASH_MODE_SHA384:
			*taglen = 384;
			*hklen  = 128;
			break;
		case NLM_HASH_MODE_SHA512:
			*taglen = 512;
			*hklen  = 128;
			break;
		default:
			printf("Error : invalid shaid (%s)\n", __func__);
			return (-1);
		}
	} else if (hashalg == NLM_HASH_KASUMI_F9) {
		*taglen = 32;
		*hklen  = 0;
	} else if (hashalg == NLM_HASH_SNOW3G_F9) {
		*taglen = 32;
		*hklen  = 0;
	} else if (hashmode == NLM_HASH_MODE_XCBC) {
		*taglen = 128;
		*hklen  = 0;
	} else if (hashmode == NLM_HASH_MODE_GCM) {
		*taglen = 128;
		*hklen  = 0;
	} else if (hashalg == NLM_HASH_BYPASS) {
		*taglen = 0;
		*hklen  = 0;
	} else {
		printf("Error: hash alg/mode not found\n");
		return (-1);
	}

	/* TODO : Add remaining cases */
	return (0);
}

/**
* @brief Fill the crypto control structure
* @ingroup crypto
* hmac : 1 for hash with hmac
* hashalg : see above, nlm_hash_algo enums
* hashmode : see above, nlm_hash_mode enums
* cipheralg : see above, nlm_cipher_algo enums
* ciphermode : see above, nlm_cipher_mode enums
*
*/
static __inline__ int
nlm_crypto_fill_pkt_ctrl(struct nlm_crypto_pkt_ctrl *ctrl, unsigned int hmac,
    enum nlm_hash_algo hashalg, enum nlm_hash_mode hashmode,
    enum nlm_cipher_algo cipheralg, enum nlm_cipher_mode ciphermode,
    unsigned char *cipherkey, unsigned int cipherkeylen,
    unsigned char *hashkey, unsigned int hashkeylen)
{
	unsigned int taglen = 0, hklen = 0;

	ctrl->desc0 = nlm_crypto_form_pkt_ctrl_desc(hmac, hashalg, hashmode,
	    cipheralg, ciphermode, 0, 0, 0);
	memset(ctrl->key, 0, sizeof(ctrl->key));
	if (cipherkey)
		memcpy(ctrl->key, cipherkey, cipherkeylen);
	if (hashkey)
		memcpy((unsigned char *)&ctrl->key[(cipherkeylen + 7) / 8],
			    hashkey, hashkeylen);
	if (nlm_crypto_get_hklen_taglen(hashalg, hashmode, &taglen, &hklen)
	    < 0)
		return (-1);

	ctrl->cipherkeylen = cipherkeylen;
	ctrl->hashkeylen = hklen;
	ctrl->taglen = taglen;

	/* TODO : add the invalid checks and return error */
	return (0);
}
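
/*
 * Illustrative sketch (not part of the driver API): setting up a control
 * structure for AES-128-CBC with HMAC-SHA1.  'aes_key' (16 bytes) and
 * 'hmac_key' (64 bytes) are placeholders for the caller's key material.
 *
 *	if (nlm_crypto_fill_pkt_ctrl(ctrl, 1, NLM_HASH_SHA,
 *	    NLM_HASH_MODE_SHA1, NLM_CIPHER_AES128, NLM_CIPHER_MODE_CBC,
 *	    aes_key, 16, hmac_key, 64) != 0)
 *		return (EINVAL);
 */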

/**
* @brief Top level function to generate pkt descs 0 to 3 for a cipher + auth
* operation
* @ingroup crypto
* ctrl : pointer to control structure
* param : pointer to the param structure
* encrypt : 1 (for encrypt) 0 (for decrypt)
* hash_source : 1 (encrypted data is sent to the auth engine) 0 (plain data is
*		sent to the auth engine)
* ivoff : iv offset from start of data
* ivlen : iv length in bytes
* hashoff : hash offset from start of data
* hashlen : hash length in bytes
* hmacpad : hmac padding required or not, 1 if already padded
* cipheroff : cipher offset from start of data
* cipherlen : cipher length in bytes
* hashdst_addr : hash destination address (kernel virtual, converted to
*		physical internally)
*/
static __inline__ void
nlm_crypto_fill_cipher_auth_pkt_param(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int encrypt,
    unsigned int hash_source, unsigned int ivoff, unsigned int ivlen,
    unsigned int hashoff, unsigned int hashlen, unsigned int hmacpad,
    unsigned int cipheroff, unsigned int cipherlen, unsigned char *hashdst_addr)
{
	param->desc0 = nlm_crypto_form_pkt_desc0(0, hash_source, 1, encrypt,
			   ivlen, vtophys(hashdst_addr));
	param->desc1 = nlm_crypto_form_pkt_desc1(cipherlen, hashlen);
	param->desc2 = nlm_crypto_form_pkt_desc2(ivoff, 0, cipheroff, 0, 0,
			   hashoff);
	param->desc3 = nlm_crypto_form_pkt_desc3(0, ctrl->taglen, 0, 0,
			   hmacpad);
}

/**
* @brief Top level function to generate pkt descs 0 to 3 for a cipher-only
* operation
* @ingroup crypto
* ctrl : pointer to control structure
* param : pointer to the param structure
* encrypt : 1 (for encrypt) 0 (for decrypt)
* ivoff : iv offset from start of data
* ivlen : iv length in bytes
* cipheroff : cipher offset from start of data
* cipherlen : cipher length in bytes
*/
static __inline__ void
nlm_crypto_fill_cipher_pkt_param(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int encrypt,
    unsigned int ivoff, unsigned int ivlen, unsigned int cipheroff,
    unsigned int cipherlen)
{
	param->desc0 = nlm_crypto_form_pkt_desc0(0, 0, 0, encrypt, ivlen, 0ULL);
	param->desc1 = nlm_crypto_form_pkt_desc1(cipherlen, 1);
	param->desc2 = nlm_crypto_form_pkt_desc2(ivoff, 0, cipheroff, 0, 0, 0);
	param->desc3 = nlm_crypto_form_pkt_desc3(0, ctrl->taglen, 0, 0, 0);
}

/**
* @brief Top level function to generate pkt descs 0 to 3 for an auth-only
* operation
* @ingroup crypto
* ctrl : pointer to control structure
* param : pointer to the param structure
* hashoff : hash offset from start of data
* hashlen : hash length in bytes
* hmacpad : hmac padding required or not, 1 if already padded
* hashdst_addr : hash destination address (kernel virtual, converted to
*		physical internally)
*/
static __inline__ void
nlm_crypto_fill_auth_pkt_param(struct nlm_crypto_pkt_ctrl *ctrl,
    struct nlm_crypto_pkt_param *param, unsigned int hashoff,
    unsigned int hashlen, unsigned int hmacpad, unsigned char *hashdst_addr)
{
	param->desc0 = nlm_crypto_form_pkt_desc0(0, 0, 1, 0, 1,
			   vtophys(hashdst_addr));
	param->desc1 = nlm_crypto_form_pkt_desc1(1, hashlen);
	param->desc2 = nlm_crypto_form_pkt_desc2(0, 0, 0, 0, 0, hashoff);
	param->desc3 = nlm_crypto_form_pkt_desc3(0, ctrl->taglen, 0, 0,
			   hmacpad);
}

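/**
* @brief Fill source segment descriptors (desc4 entries) for a buffer,
* splitting it into NLM_CRYPTO_MAX_SEG_LEN sized pieces
* @ingroup crypto
* param : pointer to the param structure
* seg : index of the first free segment row
* input : kernel virtual address of the source data
* inlen : source data length in bytes
* returns the index of the next free segment row
*/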
static __inline__ unsigned int
nlm_crypto_fill_src_seg(struct nlm_crypto_pkt_param *param, int seg,
    unsigned char *input, unsigned int inlen)
{
	unsigned int off = 0, len = 0;
	unsigned int remlen = inlen;

	while (remlen > 0) {
		len = remlen > NLM_CRYPTO_MAX_SEG_LEN ?
		    NLM_CRYPTO_MAX_SEG_LEN : remlen;
		param->segment[seg][0] = nlm_crypto_form_pkt_desc4(len,
		    vtophys(input + off));
		remlen -= len;
		off += len;
		seg++;
	}
	return (seg);
}

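/**
* @brief Fill destination segment descriptors (desc5 entries) for a buffer,
* splitting it into NLM_CRYPTO_MAX_SEG_LEN sized pieces
* @ingroup crypto
* param : pointer to the param structure
* seg : index of the first free segment row
* output : kernel virtual address of the destination buffer
* outlen : destination buffer length in bytes
* returns the index of the next free segment row
*/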
static __inline__ unsigned int
nlm_crypto_fill_dst_seg(struct nlm_crypto_pkt_param *param,
		int seg, unsigned char *output, unsigned int outlen)
{
	unsigned int off = 0, len = 0;
	unsigned int remlen = outlen;

	while (remlen > 0) {
		len = remlen > NLM_CRYPTO_MAX_SEG_LEN ?
		    NLM_CRYPTO_MAX_SEG_LEN : remlen;
		param->segment[seg][1] = nlm_crypto_form_pkt_desc5(len, 1, 0,
		    vtophys(output + off));
		remlen -= len;
		off += len;
		seg++;
	}
	return (seg);
}
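
/*
 * Illustrative sketch (not part of the driver API): filling the segment
 * lists of 'param' for input buffer 'in' and output buffer 'out', each
 * 'len' bytes.  The row count gives one possible sizing for the packet
 * descriptor passed to nlm_crypto_form_pkt_fmn_entry1() (four descriptor
 * words plus one (src, dst) pair per row).
 *
 *	unsigned int nsrc, ndst, nrows, pktdesc_size;
 *
 *	nsrc = nlm_crypto_fill_src_seg(param, 0, in, len);
 *	ndst = nlm_crypto_fill_dst_seg(param, 0, out, len);
 *	nrows = (nsrc > ndst) ? nsrc : ndst;
 *	pktdesc_size = sizeof(uint64_t) * (4 + 2 * nrows);
 */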

#endif /* _NLM_HAL_CRYPTO_H_ */