/* SPDX-License-Identifier: BSD-3-Clause */
/* Copyright(c) 2007-2022 Intel Corporation */

/**
 ***************************************************************************
 * @file lac_sym_qat_cipher.c      QAT-related support functions for Cipher
 *
 * @ingroup LacSymQat_Cipher
 *
 * @description Functions to support the QAT related operations for Cipher
 ***************************************************************************/

/*
*******************************************************************************
* Include public/global header files
*******************************************************************************
*/

#include "cpa.h"
#include "icp_accel_devices.h"
#include "icp_adf_debug.h"
#include "lac_sym_qat.h"
#include "lac_sym_qat_cipher.h"
#include "lac_mem.h"
#include "lac_common.h"
#include "cpa_cy_sym.h"
#include "lac_sym_cipher_defs.h"
#include "icp_qat_hw.h"
#include "icp_qat_fw_la.h"
#include "sal_hw_gen.h"

#define LAC_UNUSED_POS_MASK 0x3

/*****************************************************************************
 *  Internal data
 *****************************************************************************/

typedef enum _icp_qat_hw_key_depend {
	IS_KEY_DEP_NO = 0,
	IS_KEY_DEP_YES,
} icp_qat_hw_key_depend;

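/* Key-length lookup tables: each table is indexed directly by the cipher
 * key length in bytes and yields the QAT hardware cipher algorithm to use
 * for that length; entries for unsupported lengths are zero.
 */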
/* LAC_CIPHER_IS_XTS_MODE */
static const uint8_t key_size_xts[] = {
	[ICP_QAT_HW_AES_128_XTS_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES128,
	[ICP_QAT_HW_AES_256_XTS_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES256
};
/* LAC_CIPHER_IS_AES */
static const uint8_t key_size_aes[] = {
	[ICP_QAT_HW_AES_128_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES128,
	[ICP_QAT_HW_AES_192_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES192,
	[ICP_QAT_HW_AES_256_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES256
};
/* LAC_CIPHER_IS_AES_F8 */
static const uint8_t key_size_f8[] = {
	[ICP_QAT_HW_AES_128_F8_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES128,
	[ICP_QAT_HW_AES_192_F8_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES192,
	[ICP_QAT_HW_AES_256_F8_KEY_SZ] = ICP_QAT_HW_CIPHER_ALGO_AES256
};

typedef struct _icp_qat_hw_cipher_info {
	icp_qat_hw_cipher_algo_t algorithm;
	icp_qat_hw_cipher_mode_t mode;
	icp_qat_hw_cipher_convert_t key_convert[2];
	icp_qat_hw_cipher_dir_t dir[2];
	icp_qat_hw_key_depend isKeyLenDepend;
	const uint8_t *pAlgByKeySize;
} icp_qat_hw_cipher_info;

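/* Per-algorithm QAT hardware cipher configuration. The table is indexed by
 * (CpaCySymCipherAlgorithm - 1); the key_convert[] and dir[] pairs are
 * indexed by cipher direction (encrypt = 0, decrypt = 1). When
 * isKeyLenDepend is set, the algorithm is further refined by key length via
 * pAlgByKeySize.
 */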
static const icp_qat_hw_cipher_info icp_qat_alg_info[] = {
	/* CPA_CY_SYM_CIPHER_NULL */
	{
	    ICP_QAT_HW_CIPHER_ALGO_NULL,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_ARC4 */
	{
	    ICP_QAT_HW_CIPHER_ALGO_ARC4,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_AES_ECB */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    /* The AES decrypt key needs to be reversed. Instead of reversing
	     * the key at session registration, it is reversed on-the-fly by
	     * setting the KEY_CONVERT bit here.
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_AES_CBC */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_CBC_MODE,
	    /* The AES decrypt key needs to be reversed. Instead of reversing
	     * the key at session registration, it is reversed on-the-fly by
	     * setting the KEY_CONVERT bit here.
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_AES_CTR */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    /* CTR mode only uses the forward (encrypt) AES transform, so the
	     * decrypt key does not need to be reversed - no key conversion.
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt
	     * Overriding default values previously set for AES
	     */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_AES_CCM */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    /* CCM is CTR-based, so only the forward (encrypt) AES transform
	     * is used - no key conversion.
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt
	     * Overriding default values previously set for AES
	     */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_AES_GCM */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    /* GCM is CTR-based, so only the forward (encrypt) AES transform
	     * is used - no key conversion.
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt
	     * Overriding default values previously set for AES
	     */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_YES,
	    key_size_aes,
	},
	/* CPA_CY_SYM_CIPHER_DES_ECB */
	{
	    ICP_QAT_HW_CIPHER_ALGO_DES,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_DES_CBC */
	{
	    ICP_QAT_HW_CIPHER_ALGO_DES,
	    ICP_QAT_HW_CIPHER_CBC_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_3DES_ECB */
	{
	    ICP_QAT_HW_CIPHER_ALGO_3DES,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_3DES_CBC */
	{
	    ICP_QAT_HW_CIPHER_ALGO_3DES,
	    ICP_QAT_HW_CIPHER_CBC_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_3DES_CTR */
	{
	    ICP_QAT_HW_CIPHER_ALGO_3DES,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_KASUMI_F8 */
	{
	    ICP_QAT_HW_CIPHER_ALGO_KASUMI,
	    ICP_QAT_HW_CIPHER_F8_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_SNOW3G_UEA2 */
	{
	    /* The KEY_CONVERT bit has to be set for Snow_3G operation */
	    ICP_QAT_HW_CIPHER_ALGO_SNOW_3G_UEA2,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_KEY_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_AES_F8 */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_F8_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    /* Streaming ciphers are a special case. Decrypt = encrypt */
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_YES,
	    key_size_f8,
	},
	/* CPA_CY_SYM_CIPHER_AES_XTS */
	{
	    ICP_QAT_HW_CIPHER_ALGO_AES128,
	    ICP_QAT_HW_CIPHER_XTS_MODE,
	    /* The AES decrypt key needs to be reversed. Instead of reversing
	     * the key at session registration, it is reversed on-the-fly by
	     * setting the KEY_CONVERT bit here.
	     */
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_YES,
	    key_size_xts,
	},
	/* CPA_CY_SYM_CIPHER_ZUC_EEA3 */
	{
	    ICP_QAT_HW_CIPHER_ALGO_ZUC_3G_128_EEA3,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_KEY_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_CHACHA */
	{
	    ICP_QAT_HW_CIPHER_ALGO_CHACHA20_POLY1305,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_SM4_ECB */
	{
	    ICP_QAT_HW_CIPHER_ALGO_SM4,
	    ICP_QAT_HW_CIPHER_ECB_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_SM4_CBC */
	{
	    ICP_QAT_HW_CIPHER_ALGO_SM4,
	    ICP_QAT_HW_CIPHER_CBC_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_KEY_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_DECRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
	/* CPA_CY_SYM_CIPHER_SM4_CTR */
	{
	    ICP_QAT_HW_CIPHER_ALGO_SM4,
	    ICP_QAT_HW_CIPHER_CTR_MODE,
	    { ICP_QAT_HW_CIPHER_NO_CONVERT, ICP_QAT_HW_CIPHER_NO_CONVERT },
	    { ICP_QAT_HW_CIPHER_ENCRYPT, ICP_QAT_HW_CIPHER_ENCRYPT },
	    IS_KEY_DEP_NO,
	    NULL,
	},
};

/*****************************************************************************
 *  Internal functions
 *****************************************************************************/

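/**
 * @ingroup LacSymQat_Cipher
 *
 * @description
 *      Populate the cipher content descriptor control block of the request
 *      footer: the key size (in quadwords), F8 padding, the state (IV) size,
 *      the cipher config offset (in quadwords) and the current/next slice
 *      IDs.
 */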
void
LacSymQat_CipherCtrlBlockWrite(icp_qat_la_bulk_req_ftr_t *pMsg,
			       Cpa32U cipherAlgorithm,
			       Cpa32U targetKeyLenInBytes,
			       Cpa32U sliceType,
			       icp_qat_fw_slice_t nextSlice,
			       Cpa8U cipherCfgOffsetInQuadWord)
{
	icp_qat_fw_cipher_cd_ctrl_hdr_t *cd_ctrl =
	    (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(pMsg->cd_ctrl);

	/* cipher_padding_sz is nonzero for F8 mode only */
	cd_ctrl->cipher_padding_sz = 0;

	/* Special handling of the AES 192 key for the UCS slice.
	   UCS requires it to be 32 bytes - set that as targetKeyLen
	   in this case, and add padding. It makes no sense to force
	   applications to provide such a key length, for a couple of reasons:
	   1. It would not be possible to distinguish between AES 192 and 256
	      based on key length alone
	   2. Only some AES modes use the UCS slice, so the application would
	      have to know which ones */
	if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType &&
	    ICP_QAT_HW_AES_192_KEY_SZ == targetKeyLenInBytes) {
		targetKeyLenInBytes = ICP_QAT_HW_UCS_AES_192_KEY_SZ;
	}

	switch (cipherAlgorithm) {
	/* The base key is not passed down to QAT in the case of ARC4 or NULL */
	case CPA_CY_SYM_CIPHER_ARC4:
	case CPA_CY_SYM_CIPHER_NULL:
		cd_ctrl->cipher_key_sz = 0;
		break;
	case CPA_CY_SYM_CIPHER_KASUMI_F8:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_KASUMI_F8_KEY_SZ);
		cd_ctrl->cipher_padding_sz =
		    ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR;
		break;
	/* For Snow3G UEA2 the content descriptor key size is the
	   key size plus the IV size */
	case CPA_CY_SYM_CIPHER_SNOW3G_UEA2:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_SNOW_3G_UEA2_KEY_SZ +
					   ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);
		break;
	case CPA_CY_SYM_CIPHER_AES_F8:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(targetKeyLenInBytes);
		cd_ctrl->cipher_padding_sz =
		    (2 * ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR);
		break;
	/* For ZUC EEA3 the content descriptor key size is the
	   key size plus the IV size */
	case CPA_CY_SYM_CIPHER_ZUC_EEA3:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(ICP_QAT_HW_ZUC_3G_EEA3_KEY_SZ +
					   ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);
		break;
	default:
		cd_ctrl->cipher_key_sz =
		    LAC_BYTES_TO_QUADWORDS(targetKeyLenInBytes);
	}

	cd_ctrl->cipher_state_sz = LAC_BYTES_TO_QUADWORDS(
	    LacSymQat_CipherIvSizeBytesGet(cipherAlgorithm));

	cd_ctrl->cipher_cfg_offset = cipherCfgOffsetInQuadWord;

	ICP_QAT_FW_COMN_NEXT_ID_SET(cd_ctrl, nextSlice);
	ICP_QAT_FW_COMN_CURR_ID_SET(cd_ctrl, ICP_QAT_FW_SLICE_CIPHER);
}

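/**
 * @ingroup LacSymQat_Cipher
 *
 * @description
 *      Look up the QAT hardware cipher algorithm, mode, direction and key
 *      convert setting for the session's cipher algorithm and direction,
 *      using the icp_qat_alg_info table. For single-pass (SPC) sessions the
 *      mode is overridden to AEAD for GCM, or to CCM on gen4 devices.
 */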
void
LacSymQat_CipherGetCfgData(lac_session_desc_t *pSession,
			   icp_qat_hw_cipher_algo_t *pAlgorithm,
			   icp_qat_hw_cipher_mode_t *pMode,
			   icp_qat_hw_cipher_dir_t *pDir,
			   icp_qat_hw_cipher_convert_t *pKey_convert)
{
	sal_crypto_service_t *pService =
	    (sal_crypto_service_t *)pSession->pInstance;

	CpaCySymCipherAlgorithm cipherAlgorithm = 0;
	icp_qat_hw_cipher_dir_t cipherDirection = 0;

	/* Set defaults */
	*pKey_convert = ICP_QAT_HW_CIPHER_NO_CONVERT;
	*pAlgorithm = ICP_QAT_HW_CIPHER_ALGO_NULL;
	*pMode = ICP_QAT_HW_CIPHER_ECB_MODE;
	*pDir = ICP_QAT_HW_CIPHER_ENCRYPT;

	/* Decrement by one since the API algorithm enum is numbered from 1,
	 * while the lookup table is indexed from 0 */
	cipherAlgorithm = pSession->cipherAlgorithm - 1;
	cipherDirection =
	    pSession->cipherDirection == CPA_CY_SYM_CIPHER_DIRECTION_ENCRYPT ?
		  ICP_QAT_HW_CIPHER_ENCRYPT :
		  ICP_QAT_HW_CIPHER_DECRYPT;

	*pAlgorithm = icp_qat_alg_info[cipherAlgorithm].algorithm;
	*pMode = icp_qat_alg_info[cipherAlgorithm].mode;
	*pDir = icp_qat_alg_info[cipherAlgorithm].dir[cipherDirection];
	*pKey_convert =
	    icp_qat_alg_info[cipherAlgorithm].key_convert[cipherDirection];

	if (IS_KEY_DEP_NO != icp_qat_alg_info[cipherAlgorithm].isKeyLenDepend) {
		*pAlgorithm = icp_qat_alg_info[cipherAlgorithm]
				  .pAlgByKeySize[pSession->cipherKeyLenInBytes];
	}

	/* CCP and single-pass AES_GCM, despite being limited to CTR/AEAD
	 * mode, support both encrypt and decrypt directions - this is because
	 * of the differences in the hash computation/verification paths
	 * between encrypt and decrypt respectively.
	 * By default CCP is set to CTR mode. Set AEAD mode for AES_GCM.
	 */
	if (SPC == pSession->singlePassState) {
		if (LAC_CIPHER_IS_GCM(pSession->cipherAlgorithm))
			*pMode = ICP_QAT_HW_CIPHER_AEAD_MODE;
		else if (isCyGen4x(pService) &&
			 LAC_CIPHER_IS_CCM(pSession->cipherAlgorithm))
			*pMode = ICP_QAT_HW_CIPHER_CCM_MODE;

		if (cipherDirection == ICP_QAT_HW_CIPHER_DECRYPT)
			*pDir = ICP_QAT_HW_CIPHER_DECRYPT;
	}
}

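/**
 * @ingroup LacSymQat_Cipher
 *
 * @description
 *      Build the cipher configuration word(s) into the hardware setup block.
 *      The UCS slice uses a 128-bit configuration register, the legacy
 *      cipher slice a 64-bit one; *pSizeInBytes is set accordingly.
 */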
void
LacSymQat_CipherHwBlockPopulateCfgData(lac_session_desc_t *pSession,
				       const void *pCipherHwBlock,
				       Cpa32U *pSizeInBytes)
{
	icp_qat_hw_cipher_algo_t algorithm = ICP_QAT_HW_CIPHER_ALGO_NULL;
	icp_qat_hw_cipher_mode_t mode = ICP_QAT_HW_CIPHER_ECB_MODE;
	icp_qat_hw_cipher_dir_t dir = ICP_QAT_HW_CIPHER_ENCRYPT;
	icp_qat_hw_cipher_convert_t key_convert;
	icp_qat_hw_cipher_config_t *pCipherConfig =
	    (icp_qat_hw_cipher_config_t *)pCipherHwBlock;
	icp_qat_hw_ucs_cipher_config_t *pUCSCipherConfig =
	    (icp_qat_hw_ucs_cipher_config_t *)pCipherHwBlock;

	Cpa32U val, reserved;
	Cpa32U aed_hash_cmp_length = 0;

	*pSizeInBytes = 0;

	LacSymQat_CipherGetCfgData(
	    pSession, &algorithm, &mode, &dir, &key_convert);

	/* Build the cipher config into the hardware setup block */
	if (SPC == pSession->singlePassState) {
		aed_hash_cmp_length = pSession->hashResultSize;
		reserved = ICP_QAT_HW_CIPHER_CONFIG_BUILD_UPPER(
		    pSession->aadLenInBytes);
	} else {
		reserved = 0;
	}

	val = ICP_QAT_HW_CIPHER_CONFIG_BUILD(
	    mode, algorithm, key_convert, dir, aed_hash_cmp_length);

	/* The UCS slice has a 128-bit configuration register; the legacy
	   cipher slice has a 64-bit configuration register */
	if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == pSession->cipherSliceType) {
		pUCSCipherConfig->val = val;
		pUCSCipherConfig->reserved[0] = reserved;
		pUCSCipherConfig->reserved[1] = 0;
		pUCSCipherConfig->reserved[2] = 0;
		*pSizeInBytes = sizeof(icp_qat_hw_ucs_cipher_config_t);
	} else {
		pCipherConfig->val = val;
		pCipherConfig->reserved = reserved;
		*pSizeInBytes = sizeof(icp_qat_hw_cipher_config_t);
	}
}

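/**
 * @ingroup LacSymQat_Cipher
 *
 * @description
 *      Copy (and where necessary pad, XOR or expand) the cipher key into the
 *      hardware setup block and return the number of bytes written in
 *      *pSizeInBytes. Handles the algorithm-specific layouts: KASUMI/AES F8
 *      key concatenation, the SNOW3G/ZUC trailing IV area and AES-XTS key
 *      derivation for the UCS slice.
 */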
void
LacSymQat_CipherHwBlockPopulateKeySetup(
    lac_session_desc_t *pSessionDesc,
    const CpaCySymCipherSetupData *pCipherSetupData,
    Cpa32U targetKeyLenInBytes,
    Cpa32U sliceType,
    const void *pCipherHwBlock,
    Cpa32U *pSizeInBytes)
{
	Cpa8U *pCipherKey = (Cpa8U *)pCipherHwBlock;
	Cpa32U actualKeyLenInBytes = pCipherSetupData->cipherKeyLenInBytes;

	*pSizeInBytes = 0;

	/* The key is copied into the content descriptor for all cases except
	 * ARC4 and the NULL cipher */
	if (!(LAC_CIPHER_IS_ARC4(pCipherSetupData->cipherAlgorithm) ||
	      LAC_CIPHER_IS_NULL(pCipherSetupData->cipherAlgorithm))) {
		/* Special handling of the AES 192 key for the UCS slice.
		   UCS requires it to be 32 bytes - set that as targetKeyLen
		   in this case, and add padding. It makes no sense to force
		   applications to provide such a key length, for a couple of
		   reasons:
		   1. It would not be possible to distinguish between AES 192
		   and 256 based on key length alone
		   2. Only some AES modes use the UCS slice, so the
		   application would have to know which ones */
		if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType &&
		    ICP_QAT_HW_AES_192_KEY_SZ == targetKeyLenInBytes) {
			targetKeyLenInBytes = ICP_QAT_HW_UCS_AES_192_KEY_SZ;
		}

		/* Set the cipher key field in the cipher block */
		memcpy(pCipherKey,
		       pCipherSetupData->pCipherKey,
		       actualKeyLenInBytes);
		/* Pad the key with 0's if required */
		if (0 < (targetKeyLenInBytes - actualKeyLenInBytes)) {
			LAC_OS_BZERO(pCipherKey + actualKeyLenInBytes,
				     targetKeyLenInBytes - actualKeyLenInBytes);
		}
		*pSizeInBytes += targetKeyLenInBytes;

		switch (pCipherSetupData->cipherAlgorithm) {
			/* For Kasumi in F8 mode the cipher key is concatenated
			 * with the cipher key XOR-ed with the key modifier
			 * (CK||CK^KM) */
		case CPA_CY_SYM_CIPHER_KASUMI_F8: {
			Cpa32U wordIndex = 0;
			Cpa32U *pu32CipherKey =
			    (Cpa32U *)pCipherSetupData->pCipherKey;
			Cpa32U *pTempKey =
			    (Cpa32U *)(pCipherKey + targetKeyLenInBytes);

			/* XOR the key with the KASUMI F8 key modifier, 4 bytes
			 * at a time */
			for (wordIndex = 0; wordIndex <
			     LAC_BYTES_TO_LONGWORDS(targetKeyLenInBytes);
			     wordIndex++) {
				pTempKey[wordIndex] = pu32CipherKey[wordIndex] ^
				    LAC_CIPHER_KASUMI_F8_KEY_MODIFIER_4_BYTES;
			}

			*pSizeInBytes += targetKeyLenInBytes;

			/* also add padding for F8 */
			*pSizeInBytes += LAC_QUADWORDS_TO_BYTES(
			    ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR);
			LAC_OS_BZERO((Cpa8U *)pTempKey + targetKeyLenInBytes,
				     LAC_QUADWORDS_TO_BYTES(
					 ICP_QAT_HW_MODE_F8_NUM_REG_TO_CLEAR));
		} break;
			/* For AES in F8 mode the cipher key is concatenated
			 * with the cipher key XOR-ed with the key mask
			 * (CK||CK^KM) */
		case CPA_CY_SYM_CIPHER_AES_F8: {
			Cpa32U index = 0;
			Cpa8U *pTempKey =
			    pCipherKey + (targetKeyLenInBytes / 2);
			*pSizeInBytes += targetKeyLenInBytes;
			/* XOR the key with the key mask */
			for (index = 0; index < targetKeyLenInBytes; index++) {
				pTempKey[index] =
				    pCipherKey[index] ^ pTempKey[index];
			}
			pTempKey = (pCipherKey + targetKeyLenInBytes);
			/* also add padding for AES F8 */
			*pSizeInBytes += 2 * targetKeyLenInBytes;
			LAC_OS_BZERO(pTempKey, 2 * targetKeyLenInBytes);
		} break;
		case CPA_CY_SYM_CIPHER_SNOW3G_UEA2: {
			/* For Snow3G zero the area after the key for the FW */
			LAC_OS_BZERO(pCipherKey + targetKeyLenInBytes,
				     ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ);

			*pSizeInBytes += ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		} break;
		case CPA_CY_SYM_CIPHER_ZUC_EEA3: {
			/* For ZUC zero the area after the key for the FW */
			LAC_OS_BZERO(pCipherKey + targetKeyLenInBytes,
				     ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ);

			*pSizeInBytes += ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		} break;
		case CPA_CY_SYM_CIPHER_AES_XTS: {
			/* For AES in XTS mode the cipher key is concatenated
			 * with a second cipher key which is used for the tweak
			 * calculation (CK1||CK2). For decryption the cipher
			 * key needs to be converted to the reverse key. */
			if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE == sliceType) {
				Cpa32U key_len =
				    pCipherSetupData->cipherKeyLenInBytes / 2;
				memcpy(pSessionDesc->cipherAesXtsKey1Forward,
				       pCipherSetupData->pCipherKey,
				       key_len);

				qatUtilsAESKeyExpansionForward(
				    pSessionDesc->cipherAesXtsKey1Forward,
				    key_len,
				    (uint32_t *)
					pSessionDesc->cipherAesXtsKey1Reverse);

				memcpy(pSessionDesc->cipherAesXtsKey2,
				       pCipherSetupData->pCipherKey + key_len,
				       key_len);

				if (CPA_CY_SYM_CIPHER_DIRECTION_DECRYPT ==
				    pCipherSetupData->cipherDirection) {
					memcpy(pCipherKey,
					       pSessionDesc
						   ->cipherAesXtsKey1Reverse,
					       key_len);
				} else {
					memcpy(pCipherKey,
					       pSessionDesc
						   ->cipherAesXtsKey1Forward,
					       key_len);
				}
			}
		} break;
		default:
			break;
		}
	}
}

/*****************************************************************************
 *  External functions
 *****************************************************************************/

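/**
 * @ingroup LacSymQat_Cipher
 *
 * @description
 *      Return the cipher block size in bytes for the given algorithm.
 */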
Cpa8U
LacSymQat_CipherBlockSizeBytesGet(CpaCySymCipherAlgorithm cipherAlgorithm)
{
	Cpa8U blockSize = 0;
	switch (cipherAlgorithm) {
	case CPA_CY_SYM_CIPHER_ARC4:
		blockSize = LAC_CIPHER_ARC4_BLOCK_LEN_BYTES;
		break;
	/* Handle AES or AES_F8 */
	case CPA_CY_SYM_CIPHER_AES_ECB:
	case CPA_CY_SYM_CIPHER_AES_CBC:
	case CPA_CY_SYM_CIPHER_AES_CTR:
	case CPA_CY_SYM_CIPHER_AES_CCM:
	case CPA_CY_SYM_CIPHER_AES_GCM:
	case CPA_CY_SYM_CIPHER_AES_XTS:
	case CPA_CY_SYM_CIPHER_AES_F8:
		blockSize = ICP_QAT_HW_AES_BLK_SZ;
		break;
	/* Handle DES */
	case CPA_CY_SYM_CIPHER_DES_ECB:
	case CPA_CY_SYM_CIPHER_DES_CBC:
		blockSize = ICP_QAT_HW_DES_BLK_SZ;
		break;
	/* Handle TRIPLE DES */
	case CPA_CY_SYM_CIPHER_3DES_ECB:
	case CPA_CY_SYM_CIPHER_3DES_CBC:
	case CPA_CY_SYM_CIPHER_3DES_CTR:
		blockSize = ICP_QAT_HW_3DES_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_KASUMI_F8:
		blockSize = ICP_QAT_HW_KASUMI_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SNOW3G_UEA2:
		blockSize = ICP_QAT_HW_SNOW_3G_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_ZUC_EEA3:
		blockSize = ICP_QAT_HW_ZUC_3G_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_NULL:
		blockSize = LAC_CIPHER_NULL_BLOCK_LEN_BYTES;
		break;
	case CPA_CY_SYM_CIPHER_CHACHA:
		blockSize = ICP_QAT_HW_CHACHAPOLY_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SM4_ECB:
	case CPA_CY_SYM_CIPHER_SM4_CBC:
	case CPA_CY_SYM_CIPHER_SM4_CTR:
		blockSize = ICP_QAT_HW_SM4_BLK_SZ;
		break;
	default:
		QAT_UTILS_LOG("Algorithm not supported in Cipher");
	}
	return blockSize;
}

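/**
 * @ingroup LacSymQat_Cipher
 *
 * @description
 *      Return the IV/state size in bytes for the given algorithm. ECB-mode
 *      ciphers and the NULL cipher use no IV and return 0.
 */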
Cpa32U
LacSymQat_CipherIvSizeBytesGet(CpaCySymCipherAlgorithm cipherAlgorithm)
{
	Cpa32U ivSize = 0;
	switch (cipherAlgorithm) {
	case CPA_CY_SYM_CIPHER_ARC4:
		ivSize = LAC_CIPHER_ARC4_STATE_LEN_BYTES;
		break;
	case CPA_CY_SYM_CIPHER_KASUMI_F8:
		ivSize = ICP_QAT_HW_KASUMI_BLK_SZ;
		break;
	case CPA_CY_SYM_CIPHER_SNOW3G_UEA2:
		ivSize = ICP_QAT_HW_SNOW_3G_UEA2_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_ZUC_EEA3:
		ivSize = ICP_QAT_HW_ZUC_3G_EEA3_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_CHACHA:
		ivSize = ICP_QAT_HW_CHACHAPOLY_IV_SZ;
		break;
	case CPA_CY_SYM_CIPHER_AES_ECB:
	case CPA_CY_SYM_CIPHER_DES_ECB:
	case CPA_CY_SYM_CIPHER_3DES_ECB:
	case CPA_CY_SYM_CIPHER_SM4_ECB:
	case CPA_CY_SYM_CIPHER_NULL:
		/* For all ECB-mode ciphers (and NULL) the IV size is 0 */
		break;
	default:
		ivSize = LacSymQat_CipherBlockSizeBytesGet(cipherAlgorithm);
	}
	return ivSize;
}

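/**
 * @ingroup LacSymQat_Cipher
 *
 * @description
 *      Populate the cipher request parameters in a bulk request: the cipher
 *      offset/length and the IV, which is either referenced by physical
 *      address or embedded directly in cipher_IV_array. The embedded form is
 *      always used for AES-XTS on the UCS slice, where the array carries the
 *      encrypted initial tweak.
 */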
inline CpaStatus
LacSymQat_CipherRequestParamsPopulate(lac_session_desc_t *pSessionDesc,
				      icp_qat_fw_la_bulk_req_t *pReq,
				      Cpa32U cipherOffsetInBytes,
				      Cpa32U cipherLenInBytes,
				      Cpa64U ivBufferPhysAddr,
				      Cpa8U *pIvBufferVirt)
{
	icp_qat_fw_la_cipher_req_params_t *pCipherReqParams;
	icp_qat_fw_cipher_cd_ctrl_hdr_t *pCipherCdCtrlHdr;
	icp_qat_fw_serv_specif_flags *pCipherSpecificFlags;
	Cpa32U usedBufSize = 0;
	Cpa32U totalBufSize = 0;

	pCipherReqParams = (icp_qat_fw_la_cipher_req_params_t
				*)((Cpa8U *)&(pReq->serv_specif_rqpars) +
				   ICP_QAT_FW_CIPHER_REQUEST_PARAMETERS_OFFSET);
	pCipherCdCtrlHdr = (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(pReq->cd_ctrl);
	pCipherSpecificFlags = &(pReq->comn_hdr.serv_specif_flags);

	pCipherReqParams->cipher_offset = cipherOffsetInBytes;
	pCipherReqParams->cipher_length = cipherLenInBytes;

	/* Don't copy the buffer into the Msg if
	 * it's too big for the cipher_IV_array
	 * OR if the FW needs to update it
	 * OR if there's no buffer supplied
	 * OR if this is the last partial
	 */
	if ((pCipherCdCtrlHdr->cipher_state_sz >
	     LAC_SYM_QAT_HASH_IV_REQ_MAX_SIZE_QW) ||
	    (ICP_QAT_FW_LA_UPDATE_STATE_GET(*pCipherSpecificFlags) ==
	     ICP_QAT_FW_LA_UPDATE_STATE) ||
	    (pIvBufferVirt == NULL) ||
	    (ICP_QAT_FW_LA_PARTIAL_GET(*pCipherSpecificFlags) ==
	     ICP_QAT_FW_LA_PARTIAL_END)) {
		/* Populate the field with a ptr to the flat buffer */
		pCipherReqParams->u.s.cipher_IV_ptr = ivBufferPhysAddr;
		pCipherReqParams->u.s.resrvd1 = 0;
		/* Set the flag indicating the field format */
		ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
		    *pCipherSpecificFlags, ICP_QAT_FW_CIPH_IV_64BIT_PTR);
	} else {
		/* Populate the field with the contents of the buffer;
		 * zero the field first as the data may be smaller than
		 * the field */

		/* In the case of XTS mode using the UCS slice, always embed
		 * the IV. The IV provided by the user needs to be encrypted
		 * to calculate the initial tweak; use
		 * pCipherReqParams->u.cipher_IV_array as the destination
		 * buffer for the tweak value */
		if (ICP_QAT_FW_LA_USE_UCS_SLICE_TYPE ==
			pSessionDesc->cipherSliceType &&
		    LAC_CIPHER_IS_XTS_MODE(pSessionDesc->cipherAlgorithm)) {
			memset(pCipherReqParams->u.cipher_IV_array,
			       0,
			       LAC_LONGWORDS_TO_BYTES(
				   ICP_QAT_FW_NUM_LONGWORDS_4));
			qatUtilsAESEncrypt(
			    pSessionDesc->cipherAesXtsKey2,
			    pSessionDesc->cipherKeyLenInBytes / 2,
			    pIvBufferVirt,
			    (Cpa8U *)pCipherReqParams->u.cipher_IV_array);
		} else {
			totalBufSize =
			    LAC_LONGWORDS_TO_BYTES(ICP_QAT_FW_NUM_LONGWORDS_4);
			usedBufSize = LAC_QUADWORDS_TO_BYTES(
			    pCipherCdCtrlHdr->cipher_state_sz);
			/* Only initialise the unused buffer if applicable */
			if (usedBufSize < totalBufSize) {
				memset(
				    (&pCipherReqParams->u.cipher_IV_array
					  [usedBufSize & LAC_UNUSED_POS_MASK]),
				    0,
				    totalBufSize - usedBufSize);
			}
			memcpy(pCipherReqParams->u.cipher_IV_array,
			       pIvBufferVirt,
			       usedBufSize);
		}
		/* Set the flag indicating the field format */
		ICP_QAT_FW_LA_CIPH_IV_FLD_FLAG_SET(
		    *pCipherSpecificFlags, ICP_QAT_FW_CIPH_IV_16BYTE_DATA);
	}

	return CPA_STATUS_SUCCESS;
}

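/**
 * @ingroup LacSymQat_Cipher
 *
 * @description
 *      Run the standard RC4 key-scheduling algorithm to derive the 256-byte
 *      permutation state from the key, then append the initial i and j
 *      indices (both zero) as expected by the QAT firmware.
 */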
void
LacSymQat_CipherArc4StateInit(const Cpa8U *pKey,
			      Cpa32U keyLenInBytes,
			      Cpa8U *pArc4CipherState)
{
	Cpa32U i = 0;
	Cpa32U j = 0;
	Cpa32U k = 0;

	for (i = 0; i < LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES; ++i) {
		pArc4CipherState[i] = (Cpa8U)i;
	}

	for (i = 0, k = 0; i < LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES; ++i, ++k) {
		Cpa8U swap = 0;

		if (k >= keyLenInBytes)
			k -= keyLenInBytes;

		j = (j + pArc4CipherState[i] + pKey[k]);
		if (j >= LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES)
			j %= LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES;

		/* Swap state[i] & state[j] */
		swap = pArc4CipherState[i];
		pArc4CipherState[i] = pArc4CipherState[j];
		pArc4CipherState[j] = swap;
	}

	/* Initialise i & j values for QAT */
	pArc4CipherState[LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES] = 0;
	pArc4CipherState[LAC_CIPHER_ARC4_KEY_MATRIX_LEN_BYTES + 1] = 0;
}

/* Update the cipher_key_sz in the request cache prepared and stored
 * in the session */
void
LacSymQat_CipherXTSModeUpdateKeyLen(lac_session_desc_t *pSessionDesc,
				    Cpa32U newKeySizeInBytes)
{
	icp_qat_fw_cipher_cd_ctrl_hdr_t *pCipherControlBlock = NULL;

	pCipherControlBlock = (icp_qat_fw_cipher_cd_ctrl_hdr_t *)&(
	    pSessionDesc->reqCacheFtr.cd_ctrl);

	pCipherControlBlock->cipher_key_sz =
	    LAC_BYTES_TO_QUADWORDS(newKeySizeInBytes);
}