/*	$NetBSD: arcfour.c,v 1.6 2023/06/19 21:41:43 christos Exp $	*/

/*
 * Copyright (c) 2003 - 2006 Kungliga Tekniska Högskolan
 * (Royal Institute of Technology, Stockholm, Sweden).
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 *
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 *
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * 3. Neither the name of the Institute nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE INSTITUTE AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE INSTITUTE OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

#include "gsskrb5_locl.h"

/*
 * Implements draft-brezak-win2k-krb-rc4-hmac-04.txt
 *
 * The arcfour messages have the following formats:
 *
 * MIC token
 * 	TOK_ID[2] = 01 01
 *	SGN_ALG[2] = 11 00
 *	Filler[4]
 *	SND_SEQ[8]
 *	SGN_CKSUM[8]
 *
 * WRAP token
 *	TOK_ID[2] = 02 01
 *	SGN_ALG[2] = 11 00
 *	SEAL_ALG[2]
 *	Filler[2]
 *	SND_SEQ[8]
 *	SGN_CKSUM[8]
 *	Confounder[8]
 */
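
/*
 * Byte offsets into the 32-byte WRAP token, relative to p0 (the first
 * byte after the mechanism header), as used throughout this file:
 *
 *	p0 +  0		TOK_ID
 *	p0 +  2		SGN_ALG
 *	p0 +  4		SEAL_ALG
 *	p0 +  6		Filler
 *	p0 +  8		SND_SEQ (4 byte big-endian sequence number,
 *			4 byte direction indicator: 00.. from the
 *			initiator, ff.. from the acceptor)
 *	p0 + 16		SGN_CKSUM
 *	p0 + 24		Confounder
 *	p0 + 32		(optionally sealed) data
 */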

/*
 * WRAP in DCE style has a fixed-size header: the OID and length
 * wrapped around the WRAP token add up to
 * GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE +
 * GSS_ARCFOUR_WRAP_TOKEN_SIZE bytes (i.e. 45 bytes of overhead in
 * total, remember the 2 bytes from the APPL [0] SEQ).
 */

#define GSS_ARCFOUR_WRAP_TOKEN_SIZE 32
#define GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE 13


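/*
 * Derive the RC4 key ("K6") used to encrypt SND_SEQ or the message
 * body: K5 = HMAC-MD5(key, T) where T is four zero bytes (for the
 * 56-bit export enctype the input is the "fortybits" salt followed by
 * T, and bytes 7..15 of K5 are then overwritten with 0xAB), and
 * K6 = HMAC-MD5(K5, cksum_data).
 */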
static krb5_error_code
arcfour_mic_key(krb5_context context, krb5_keyblock *key,
		const void *cksum_data, size_t cksum_size,
		void *key6_data, size_t key6_size)
{
    krb5_error_code ret;

    Checksum cksum_k5;
    krb5_keyblock key5;
    char k5_data[16];

    Checksum cksum_k6;

    char T[4];

    memset(T, 0, 4);
    cksum_k5.checksum.data = k5_data;
    cksum_k5.checksum.length = sizeof(k5_data);

    if (key->keytype == KRB5_ENCTYPE_ARCFOUR_HMAC_MD5_56) {
	char L40[14] = "fortybits";

	memcpy(L40 + 10, T, sizeof(T));
	ret = krb5_hmac(context, CKSUMTYPE_RSA_MD5,
			L40, 14, 0, key, &cksum_k5);
	memset(&k5_data[7], 0xAB, 9);
    } else {
	ret = krb5_hmac(context, CKSUMTYPE_RSA_MD5,
			T, 4, 0, key, &cksum_k5);
    }
    if (ret)
	return ret;

    key5.keytype = KRB5_ENCTYPE_ARCFOUR_HMAC_MD5;
    key5.keyvalue = cksum_k5.checksum;

    cksum_k6.checksum.data = key6_data;
    cksum_k6.checksum.length = key6_size;

    return krb5_hmac(context, CKSUMTYPE_RSA_MD5,
		     cksum_data, cksum_size, 0, &key5, &cksum_k6);
}


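/*
 * Compute the 8-byte SGN_CKSUM: the 8 token header bytes (v1), the
 * confounder or message (v2) and every DATA/SIGN_ONLY buffer plus the
 * optional padding are concatenated into one contiguous buffer and a
 * keyed Kerberos checksum (HMAC-MD5 for arcfour keys) is computed over
 * it; only the first 8 bytes of the checksum go into the token.
 */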
static krb5_error_code
arcfour_mic_cksum_iov(krb5_context context,
		      krb5_keyblock *key, unsigned usage,
		      u_char *sgn_cksum, size_t sgn_cksum_sz,
		      const u_char *v1, size_t l1,
		      const void *v2, size_t l2,
		      const gss_iov_buffer_desc *iov,
		      int iov_count,
		      const gss_iov_buffer_desc *padding)
{
    Checksum CKSUM;
    u_char *ptr;
    size_t len;
    size_t ofs = 0;
    int i;
    krb5_crypto crypto;
    krb5_error_code ret;

    assert(sgn_cksum_sz == 8);

    len = l1 + l2;

    for (i=0; i < iov_count; i++) {
	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
	case GSS_IOV_BUFFER_TYPE_DATA:
	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
	    break;
	default:
	    continue;
	}

	len += iov[i].buffer.length;
    }

    if (padding) {
	len += padding->buffer.length;
    }

    ptr = malloc(len);
    if (ptr == NULL)
	return ENOMEM;

    memcpy(ptr + ofs, v1, l1);
    ofs += l1;
    memcpy(ptr + ofs, v2, l2);
    ofs += l2;

    for (i=0; i < iov_count; i++) {
	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
	case GSS_IOV_BUFFER_TYPE_DATA:
	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
	    break;
	default:
	    continue;
	}

	memcpy(ptr + ofs,
	       iov[i].buffer.value,
	       iov[i].buffer.length);
	ofs += iov[i].buffer.length;
    }

    if (padding) {
	memcpy(ptr + ofs,
	       padding->buffer.value,
	       padding->buffer.length);
	/* ofs += padding->buffer.length; */
    }

    ret = krb5_crypto_init(context, key, 0, &crypto);
    if (ret) {
	free(ptr);
	return ret;
    }

    ret = krb5_create_checksum(context,
			       crypto,
			       usage,
			       0,
			       ptr, len,
			       &CKSUM);
    memset(ptr, 0, len);
    free(ptr);
    if (ret == 0) {
	memcpy(sgn_cksum, CKSUM.checksum.data, sgn_cksum_sz);
	free_Checksum(&CKSUM);
    }
    krb5_crypto_destroy(context, crypto);

    return ret;
}

static krb5_error_code
arcfour_mic_cksum(krb5_context context,
		  krb5_keyblock *key, unsigned usage,
		  u_char *sgn_cksum, size_t sgn_cksum_sz,
		  const u_char *v1, size_t l1,
		  const void *v2, size_t l2,
		  const void *v3, size_t l3)
{
    gss_iov_buffer_desc iov;

    iov.type = GSS_IOV_BUFFER_TYPE_SIGN_ONLY;
    iov.buffer.value = rk_UNCONST(v3);
    iov.buffer.length = l3;

    return arcfour_mic_cksum_iov(context, key, usage,
				 sgn_cksum, sgn_cksum_sz,
				 v1, l1, v2, l2,
				 &iov, 1, NULL);
}


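/*
 * GetMIC: emit the 8 byte token header (TOK_ID 01 01, SGN_ALG 11 00,
 * 4 bytes of 0xff filler), compute SGN_CKSUM over the header and the
 * message, then derive K6 from the checksum and RC4 encrypt SND_SEQ
 * (sequence number plus direction indicator) in place.
 */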
OM_uint32
_gssapi_get_mic_arcfour(OM_uint32 * minor_status,
			const gsskrb5_ctx context_handle,
			krb5_context context,
			gss_qop_t qop_req,
			const gss_buffer_t message_buffer,
			gss_buffer_t message_token,
			krb5_keyblock *key)
{
    krb5_error_code ret;
    int32_t seq_number;
    size_t len, total_len;
    u_char k6_data[16], *p0, *p;
    EVP_CIPHER_CTX *rc4_key;

    _gsskrb5_encap_length (22, &len, &total_len, GSS_KRB5_MECHANISM);

    message_token->length = total_len;
    message_token->value  = malloc (total_len);
    if (message_token->value == NULL) {
	*minor_status = ENOMEM;
	return GSS_S_FAILURE;
    }

    p0 = _gssapi_make_mech_header(message_token->value,
				  len,
				  GSS_KRB5_MECHANISM);
    p = p0;

    *p++ = 0x01; /* TOK_ID */
    *p++ = 0x01;
    *p++ = 0x11; /* SGN_ALG */
    *p++ = 0x00;
    *p++ = 0xff; /* Filler */
    *p++ = 0xff;
    *p++ = 0xff;
    *p++ = 0xff;

    p = NULL;

    ret = arcfour_mic_cksum(context,
			    key, KRB5_KU_USAGE_SIGN,
			    p0 + 16, 8,  /* SGN_CKSUM */
			    p0, 8, /* TOK_ID, SGN_ALG, Filler */
			    message_buffer->value, message_buffer->length,
			    NULL, 0);
    if (ret) {
	_gsskrb5_release_buffer(minor_status, message_token);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    ret = arcfour_mic_key(context, key,
			  p0 + 16, 8, /* SGN_CKSUM */
			  k6_data, sizeof(k6_data));
    if (ret) {
	_gsskrb5_release_buffer(minor_status, message_token);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    krb5_auth_con_getlocalseqnumber (context,
				     context_handle->auth_context,
				     &seq_number);
    p = p0 + 8; /* SND_SEQ */
    _gsskrb5_encode_be_om_uint32(seq_number, p);

    krb5_auth_con_setlocalseqnumber (context,
				     context_handle->auth_context,
				     ++seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);

    memset (p + 4, (context_handle->more_flags & LOCAL) ? 0 : 0xff, 4);

#if OPENSSL_VERSION_NUMBER < 0x10100000UL
    EVP_CIPHER_CTX rc4_keys;
    rc4_key = &rc4_keys;
    EVP_CIPHER_CTX_init(rc4_key);
#else
    rc4_key = EVP_CIPHER_CTX_new();
#endif
    if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	*minor_status = EINVAL;
	return GSS_S_FAILURE;
    }

    EVP_Cipher(rc4_key, p, p, 8);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
    EVP_CIPHER_CTX_cleanup(rc4_key);
#else
    EVP_CIPHER_CTX_free(rc4_key);
#endif

    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    *minor_status = 0;
    return GSS_S_COMPLETE;
}


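/*
 * VerifyMIC: check the token header, recompute SGN_CKSUM over header
 * and message and compare it with the received checksum, then decrypt
 * SND_SEQ with the checksum-derived K6 and verify the direction
 * indicator and the message ordering.
 */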
OM_uint32
_gssapi_verify_mic_arcfour(OM_uint32 * minor_status,
			   const gsskrb5_ctx context_handle,
			   krb5_context context,
			   const gss_buffer_t message_buffer,
			   const gss_buffer_t token_buffer,
			   gss_qop_t * qop_state,
			   krb5_keyblock *key,
			   const char *type)
{
    krb5_error_code ret;
    uint32_t seq_number;
    OM_uint32 omret;
    u_char SND_SEQ[8], cksum_data[8], *p;
    char k6_data[16];
    int cmp;

    if (qop_state)
	*qop_state = 0;

    p = token_buffer->value;
    omret = _gsskrb5_verify_header (&p,
				       token_buffer->length,
				       type,
				       GSS_KRB5_MECHANISM);
    if (omret)
	return omret;

    if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
	return GSS_S_BAD_SIG;
    p += 2;
    if (memcmp (p, "\xff\xff\xff\xff", 4) != 0)
	return GSS_S_BAD_MIC;
    p += 4;

    ret = arcfour_mic_cksum(context,
			    key, KRB5_KU_USAGE_SIGN,
			    cksum_data, sizeof(cksum_data),
			    p - 8, 8,
			    message_buffer->value, message_buffer->length,
			    NULL, 0);
    if (ret) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    ret = arcfour_mic_key(context, key,
			  cksum_data, sizeof(cksum_data),
			  k6_data, sizeof(k6_data));
    if (ret) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    cmp = ct_memcmp(cksum_data, p + 8, 8);
    if (cmp) {
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif

	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, (void *)k6_data, NULL,
	    0)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}
	EVP_Cipher(rc4_key, SND_SEQ, p, 8);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif

	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    _gsskrb5_decode_be_om_uint32(SND_SEQ, &seq_number);

    if (context_handle->more_flags & LOCAL)
	cmp = (ct_memcmp(&SND_SEQ[4], "\xff\xff\xff\xff", 4) != 0);
    else
	cmp = (ct_memcmp(&SND_SEQ[4], "\x00\x00\x00\x00", 4) != 0);

    memset_s(SND_SEQ, sizeof(SND_SEQ), 0, sizeof(SND_SEQ));
    if (cmp != 0) {
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    omret = _gssapi_msg_order_check(context_handle->order, seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
    if (omret)
	return omret;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}

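/*
 * Wrap: emit the 32 byte WRAP token followed by the (possibly sealed)
 * payload; unless DCE style is negotiated the payload gets one byte of
 * padding (value 1).  SGN_CKSUM covers header, confounder and payload.
 * The confounder and payload are RC4 encrypted with a key derived from
 * Klocal (the context key XOR 0xF0) and the sequence number, while
 * SND_SEQ itself is encrypted with a key derived from SGN_CKSUM.
 */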
OM_uint32
_gssapi_wrap_arcfour(OM_uint32 * minor_status,
		     const gsskrb5_ctx context_handle,
		     krb5_context context,
		     int conf_req_flag,
		     gss_qop_t qop_req,
		     const gss_buffer_t input_message_buffer,
		     int * conf_state,
		     gss_buffer_t output_message_buffer,
		     krb5_keyblock *key)
{
    u_char Klocaldata[16], k6_data[16], *p, *p0;
    size_t len, total_len, datalen;
    krb5_keyblock Klocal;
    krb5_error_code ret;
    int32_t seq_number;

    if (conf_state)
	*conf_state = 0;

    datalen = input_message_buffer->length;

    if (IS_DCE_STYLE(context_handle)) {
	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
	total_len += datalen;
    } else {
	datalen += 1; /* padding */
	len = datalen + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
    }

    output_message_buffer->length = total_len;
    output_message_buffer->value  = malloc (total_len);
    if (output_message_buffer->value == NULL) {
	*minor_status = ENOMEM;
	return GSS_S_FAILURE;
    }

    p0 = _gssapi_make_mech_header(output_message_buffer->value,
				  len,
				  GSS_KRB5_MECHANISM);
    p = p0;

    *p++ = 0x02; /* TOK_ID */
    *p++ = 0x01;
    *p++ = 0x11; /* SGN_ALG */
    *p++ = 0x00;
    if (conf_req_flag) {
	*p++ = 0x10; /* SEAL_ALG */
	*p++ = 0x00;
    } else {
	*p++ = 0xff; /* SEAL_ALG */
	*p++ = 0xff;
    }
    *p++ = 0xff; /* Filler */
    *p++ = 0xff;

    p = NULL;

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    krb5_auth_con_getlocalseqnumber (context,
				     context_handle->auth_context,
				     &seq_number);

    _gsskrb5_encode_be_om_uint32(seq_number, p0 + 8);

    krb5_auth_con_setlocalseqnumber (context,
				     context_handle->auth_context,
				     ++seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);

    memset (p0 + 8 + 4,
	    (context_handle->more_flags & LOCAL) ? 0 : 0xff,
	    4);

    krb5_generate_random_block(p0 + 24, 8); /* fill in Confounder */

    /* p points to data */
    p = p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
    memcpy(p, input_message_buffer->value, input_message_buffer->length);

    if (!IS_DCE_STYLE(context_handle))
	p[input_message_buffer->length] = 1; /* padding */

    ret = arcfour_mic_cksum(context,
			    key, KRB5_KU_USAGE_SEAL,
			    p0 + 16, 8, /* SGN_CKSUM */
			    p0, 8, /* TOK_ID, SGN_ALG, SEAL_ALG, Filler */
			    p0 + 24, 8, /* Confounder */
			    p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE,
			    datalen);
    if (ret) {
	*minor_status = ret;
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	return GSS_S_FAILURE;
    }

    {
	int i;

	Klocal.keytype = key->keytype;
	Klocal.keyvalue.data = Klocaldata;
	Klocal.keyvalue.length = sizeof(Klocaldata);

	for (i = 0; i < 16; i++)
	    Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }
    ret = arcfour_mic_key(context, &Klocal,
			  p0 + 8, 4, /* SND_SEQ */
			  k6_data, sizeof(k6_data));
    memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
    if (ret) {
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }


    if(conf_req_flag) {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif

	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}
	EVP_Cipher(rc4_key, p0 + 24, p0 + 24, 8 + datalen);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif
    }
    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    ret = arcfour_mic_key(context, key,
			  p0 + 16, 8, /* SGN_CKSUM */
			  k6_data, sizeof(k6_data));
    if (ret) {
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif

	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}
	EVP_Cipher(rc4_key, p0 + 8, p0 + 8 /* SND_SEQ */, 8);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif
	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    if (conf_state)
	*conf_state = conf_req_flag;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}

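/*
 * Unwrap: the inverse of _gssapi_wrap_arcfour.  Decrypt SND_SEQ with
 * the key derived from the received SGN_CKSUM, check the direction
 * indicator, decrypt confounder and payload with the Klocal/SND_SEQ
 * derived key if SEAL_ALG says the message is sealed, strip the
 * padding (non-DCE only), recompute and compare the checksum, and
 * finally enforce message ordering.
 */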
OM_uint32 _gssapi_unwrap_arcfour(OM_uint32 *minor_status,
				 const gsskrb5_ctx context_handle,
				 krb5_context context,
				 const gss_buffer_t input_message_buffer,
				 gss_buffer_t output_message_buffer,
				 int *conf_state,
				 gss_qop_t *qop_state,
				 krb5_keyblock *key)
{
    u_char Klocaldata[16];
    krb5_keyblock Klocal;
    krb5_error_code ret;
    uint32_t seq_number;
    size_t datalen;
    OM_uint32 omret;
    u_char k6_data[16], SND_SEQ[8], Confounder[8];
    u_char cksum_data[8];
    u_char *p, *p0;
    int cmp;
    int conf_flag;
    size_t padlen = 0, len;

    if (conf_state)
	*conf_state = 0;
    if (qop_state)
	*qop_state = 0;

    p0 = input_message_buffer->value;

    if (IS_DCE_STYLE(context_handle)) {
	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE +
	    GSS_ARCFOUR_WRAP_TOKEN_DCE_DER_HEADER_SIZE;
	if (input_message_buffer->length < len)
	    return GSS_S_BAD_MECH;
    } else {
	len = input_message_buffer->length;
    }

    omret = _gssapi_verify_mech_header(&p0,
				       len,
				       GSS_KRB5_MECHANISM);
    if (omret)
	return omret;

    /* length of mech header */
    len = (p0 - (u_char *)input_message_buffer->value) +
	GSS_ARCFOUR_WRAP_TOKEN_SIZE;

    if (len > input_message_buffer->length)
	return GSS_S_BAD_MECH;

    /* length of data */
    datalen = input_message_buffer->length - len;

    p = p0;

    if (memcmp(p, "\x02\x01", 2) != 0)
	return GSS_S_BAD_SIG;
    p += 2;
    if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
	return GSS_S_BAD_SIG;
    p += 2;

    if (memcmp (p, "\x10\x00", 2) == 0)
	conf_flag = 1;
    else if (memcmp (p, "\xff\xff", 2) == 0)
	conf_flag = 0;
    else
	return GSS_S_BAD_SIG;

    p += 2;
    if (memcmp (p, "\xff\xff", 2) != 0)
	return GSS_S_BAD_MIC;
    p = NULL;

    ret = arcfour_mic_key(context, key,
			  p0 + 16, 8, /* SGN_CKSUM */
			  k6_data, sizeof(k6_data));
    if (ret) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif

	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}
	EVP_Cipher(rc4_key, SND_SEQ, p0 + 8, 8);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif
	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    _gsskrb5_decode_be_om_uint32(SND_SEQ, &seq_number);

    if (context_handle->more_flags & LOCAL)
	cmp = (ct_memcmp(&SND_SEQ[4], "\xff\xff\xff\xff", 4) != 0);
    else
	cmp = (ct_memcmp(&SND_SEQ[4], "\x00\x00\x00\x00", 4) != 0);

    if (cmp != 0) {
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    {
	int i;

	Klocal.keytype = key->keytype;
	Klocal.keyvalue.data = Klocaldata;
	Klocal.keyvalue.length = sizeof(Klocaldata);

	for (i = 0; i < 16; i++)
	    Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }
    ret = arcfour_mic_key(context, &Klocal,
			  SND_SEQ, 4,
			  k6_data, sizeof(k6_data));
    memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
    if (ret) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    output_message_buffer->value = malloc(datalen);
    if (output_message_buffer->value == NULL) {
	*minor_status = ENOMEM;
	return GSS_S_FAILURE;
    }
    output_message_buffer->length = datalen;

    if(conf_flag) {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif
	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}
	EVP_Cipher(rc4_key, Confounder, p0 + 24, 8);
	EVP_Cipher(rc4_key, output_message_buffer->value, p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE, datalen);
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif
    } else {
	memcpy(Confounder, p0 + 24, 8); /* Confounder */
	memcpy(output_message_buffer->value,
	       p0 + GSS_ARCFOUR_WRAP_TOKEN_SIZE,
	       datalen);
    }
    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    if (!IS_DCE_STYLE(context_handle)) {
	ret = _gssapi_verify_pad(output_message_buffer, datalen, &padlen);
	if (ret) {
	    _gsskrb5_release_buffer(minor_status, output_message_buffer);
	    *minor_status = 0;
	    return ret;
	}
	output_message_buffer->length -= padlen;
    }

    ret = arcfour_mic_cksum(context,
			    key, KRB5_KU_USAGE_SEAL,
			    cksum_data, sizeof(cksum_data),
			    p0, 8,
			    Confounder, sizeof(Confounder),
			    output_message_buffer->value,
			    output_message_buffer->length + padlen);
    if (ret) {
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    cmp = ct_memcmp(cksum_data, p0 + 16, 8); /* SGN_CKSUM */
    if (cmp) {
	_gsskrb5_release_buffer(minor_status, output_message_buffer);
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    HEIMDAL_MUTEX_lock(&context_handle->ctx_id_mutex);
    omret = _gssapi_msg_order_check(context_handle->order, seq_number);
    HEIMDAL_MUTEX_unlock(&context_handle->ctx_id_mutex);
    if (omret)
	return omret;

    if (conf_state)
	*conf_state = conf_flag;

    *minor_status = 0;
    return GSS_S_COMPLETE;
}

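/*
 * Compute how much cleartext fits into a wrap token of at most
 * input_length bytes: subtract the fixed mechanism/token overhead from
 * the requested output size; in the non-DCE case padding and blocksize
 * slack are included and the result is rounded down to the 8-byte
 * blocksize.
 */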
static OM_uint32
max_wrap_length_arcfour(const gsskrb5_ctx ctx,
			krb5_crypto crypto,
			size_t input_length,
			OM_uint32 *max_input_size)
{
    /*
     * If GSS_C_DCE_STYLE is in use we only need to encapsulate the
     * WRAP token.  Since that header is a fixed size, we simply
     * subtract it from the available output length.
     */
    if (IS_DCE_STYLE(ctx)) {
	size_t len, total_len;

	len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);

	if (input_length < len)
	    *max_input_size = 0;
	else
	    *max_input_size = input_length - len;

    } else {
	size_t extrasize = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	size_t blocksize = 8;
	size_t len, total_len;

	len = 8 + input_length + blocksize + extrasize;

	_gsskrb5_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);

	total_len -= input_length; /* token length */
	if (total_len < input_length) {
	    *max_input_size = (input_length - total_len);
	    (*max_input_size) &= (~(OM_uint32)(blocksize - 1));
	} else {
	    *max_input_size = 0;
	}
    }

    return GSS_S_COMPLETE;
}

OM_uint32
_gssapi_wrap_size_arcfour(OM_uint32 *minor_status,
			  const gsskrb5_ctx ctx,
			  krb5_context context,
			  int conf_req_flag,
			  gss_qop_t qop_req,
			  OM_uint32 req_output_size,
			  OM_uint32 *max_input_size,
			  krb5_keyblock *key)
{
    krb5_error_code ret;
    krb5_crypto crypto;

    ret = krb5_crypto_init(context, key, 0, &crypto);
    if (ret != 0) {
	*minor_status = ret;
	return GSS_S_FAILURE;
    }

    ret = max_wrap_length_arcfour(ctx, crypto,
				  req_output_size, max_input_size);
    if (ret != 0) {
	*minor_status = ret;
	krb5_crypto_destroy(context, crypto);
	return GSS_S_FAILURE;
    }

    krb5_crypto_destroy(context, crypto);

    return GSS_S_COMPLETE;
}

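/*
 * Compute the buffer sizes _gssapi_wrap_iov_arcfour will need: the
 * DATA lengths are summed to size the HEADER buffer, the optional
 * PADDING buffer is always one byte, and the TRAILER is always empty
 * for this mechanism.
 */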
OM_uint32
_gssapi_wrap_iov_length_arcfour(OM_uint32 *minor_status,
				gsskrb5_ctx ctx,
				krb5_context context,
				int conf_req_flag,
				gss_qop_t qop_req,
				int *conf_state,
				gss_iov_buffer_desc *iov,
				int iov_count)
{
    OM_uint32 major_status;
    size_t data_len = 0;
    int i;
    gss_iov_buffer_desc *header = NULL;
    gss_iov_buffer_desc *padding = NULL;
    gss_iov_buffer_desc *trailer = NULL;

    *minor_status = 0;

    for (i = 0; i < iov_count; i++) {
	switch(GSS_IOV_BUFFER_TYPE(iov[i].type)) {
	case GSS_IOV_BUFFER_TYPE_EMPTY:
	    break;
	case GSS_IOV_BUFFER_TYPE_DATA:
	    data_len += iov[i].buffer.length;
	    break;
	case GSS_IOV_BUFFER_TYPE_HEADER:
	    if (header != NULL) {
		*minor_status = EINVAL;
		return GSS_S_FAILURE;
	    }
	    header = &iov[i];
	    break;
	case GSS_IOV_BUFFER_TYPE_TRAILER:
	    if (trailer != NULL) {
		*minor_status = EINVAL;
		return GSS_S_FAILURE;
	    }
	    trailer = &iov[i];
	    break;
	case GSS_IOV_BUFFER_TYPE_PADDING:
	    if (padding != NULL) {
		*minor_status = EINVAL;
		return GSS_S_FAILURE;
	    }
	    padding = &iov[i];
	    break;
	case GSS_IOV_BUFFER_TYPE_SIGN_ONLY:
	    break;
	default:
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}
    }

    if (header == NULL) {
        *minor_status = EINVAL;
        return GSS_S_FAILURE;
    }

    major_status = _gk_verify_buffers(minor_status, ctx, header,
				      padding, trailer, FALSE);
    if (major_status != GSS_S_COMPLETE) {
	return major_status;
    }

    if (IS_DCE_STYLE(ctx)) {
	size_t len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	size_t total_len;
	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
	header->buffer.length = total_len;
    } else {
	size_t len;
	size_t total_len;
	if (padding) {
	    data_len += 1; /* padding */
	}
	len = data_len + GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	_gssapi_encap_length(len, &len, &total_len, GSS_KRB5_MECHANISM);
	header->buffer.length = total_len - data_len;
    }

    if (trailer) {
	trailer->buffer.length = 0;
    }

    if (padding) {
	padding->buffer.length = 1;
    }

    return GSS_S_COMPLETE;
}

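/*
 * IOV variant of wrap: same token layout as _gssapi_wrap_arcfour, but
 * the payload stays in the caller's DATA buffers and is sealed in
 * place; SIGN_ONLY buffers are covered by SGN_CKSUM but never
 * encrypted, and the optional PADDING buffer carries the single
 * padding byte.
 */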
OM_uint32
_gssapi_wrap_iov_arcfour(OM_uint32 *minor_status,
			 gsskrb5_ctx ctx,
			 krb5_context context,
			 int conf_req_flag,
			 int *conf_state,
			 gss_iov_buffer_desc *iov,
			 int iov_count,
			 krb5_keyblock *key)
{
    OM_uint32 major_status, junk;
    gss_iov_buffer_desc *header, *padding, *trailer;
    krb5_error_code kret;
    int32_t seq_number;
    u_char Klocaldata[16], k6_data[16], *p, *p0;
    size_t make_len = 0;
    size_t header_len = 0;
    size_t data_len = 0;
    krb5_keyblock Klocal;
    int i;

    header = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_HEADER);
    padding = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_PADDING);
    trailer = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_TRAILER);

    major_status = _gk_verify_buffers(minor_status, ctx, header,
				      padding, trailer, FALSE);
    if (major_status != GSS_S_COMPLETE) {
	return major_status;
    }

    for (i = 0; i < iov_count; i++) {
	switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
	case GSS_IOV_BUFFER_TYPE_DATA:
	    break;
	default:
	    continue;
	}

	data_len += iov[i].buffer.length;
    }

    if (padding) {
	data_len += 1;
    }

    if (IS_DCE_STYLE(ctx)) {
	size_t unwrapped_len;
	unwrapped_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE;
	_gssapi_encap_length(unwrapped_len,
			     &make_len,
			     &header_len,
			     GSS_KRB5_MECHANISM);
    } else {
	size_t unwrapped_len;
	unwrapped_len = GSS_ARCFOUR_WRAP_TOKEN_SIZE + data_len;
	_gssapi_encap_length(unwrapped_len,
			     &make_len,
			     &header_len,
			     GSS_KRB5_MECHANISM);
	header_len -= data_len;
    }

    if (GSS_IOV_BUFFER_FLAGS(header->type) & GSS_IOV_BUFFER_FLAG_ALLOCATE) {
	major_status = _gk_allocate_buffer(minor_status, header,
					   header_len);
	if (major_status != GSS_S_COMPLETE)
	    goto failure;
    } else if (header->buffer.length < header_len) {
	*minor_status = KRB5_BAD_MSIZE;
	major_status = GSS_S_FAILURE;
	goto failure;
    } else {
	header->buffer.length = header_len;
    }

    if (padding) {
	if (GSS_IOV_BUFFER_FLAGS(padding->type) & GSS_IOV_BUFFER_FLAG_ALLOCATE) {
	    major_status = _gk_allocate_buffer(minor_status, padding, 1);
	    if (major_status != GSS_S_COMPLETE)
		goto failure;
	} else if (padding->buffer.length < 1) {
	    *minor_status = KRB5_BAD_MSIZE;
	    major_status = GSS_S_FAILURE;
	    goto failure;
	} else {
	    padding->buffer.length = 1;
	}
	memset(padding->buffer.value, 1, 1);
    }

    if (trailer) {
	trailer->buffer.length = 0;
	trailer->buffer.value = NULL;
    }

    p0 = _gssapi_make_mech_header(header->buffer.value,
				  make_len,
				  GSS_KRB5_MECHANISM);
    p = p0;

    *p++ = 0x02; /* TOK_ID */
    *p++ = 0x01;
    *p++ = 0x11; /* SGN_ALG */
    *p++ = 0x00;
    if (conf_req_flag) {
	*p++ = 0x10; /* SEAL_ALG */
	*p++ = 0x00;
    } else {
	*p++ = 0xff; /* SEAL_ALG */
	*p++ = 0xff;
    }
    *p++ = 0xff; /* Filler */
    *p++ = 0xff;

    p = NULL;

    HEIMDAL_MUTEX_lock(&ctx->ctx_id_mutex);
    krb5_auth_con_getlocalseqnumber(context,
				    ctx->auth_context,
				    &seq_number);
    _gsskrb5_encode_be_om_uint32(seq_number, p0 + 8);

    krb5_auth_con_setlocalseqnumber(context,
				    ctx->auth_context,
				    ++seq_number);
    HEIMDAL_MUTEX_unlock(&ctx->ctx_id_mutex);

    memset(p0 + 8 + 4,
           (ctx->more_flags & LOCAL) ? 0 : 0xff,
           4);

    krb5_generate_random_block(p0 + 24, 8); /* fill in Confounder */

    /* Sign Data */
    kret = arcfour_mic_cksum_iov(context,
				 key, KRB5_KU_USAGE_SEAL,
				 p0 + 16, 8, /* SGN_CKSUM */
				 p0, 8, /* TOK_ID, SGN_ALG, SEAL_ALG, Filler */
				 p0 + 24, 8, /* Confounder */
				 iov, iov_count, /* Data + SignOnly */
				 padding); /* padding */
    if (kret) {
	*minor_status = kret;
	major_status = GSS_S_FAILURE;
	goto failure;
    }

    Klocal.keytype = key->keytype;
    Klocal.keyvalue.data = Klocaldata;
    Klocal.keyvalue.length = sizeof(Klocaldata);

    for (i = 0; i < 16; i++) {
	Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }
    kret = arcfour_mic_key(context, &Klocal,
			   p0 + 8, 4, /* SND_SEQ */
			   k6_data, sizeof(k6_data));
    memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
    if (kret) {
	*minor_status = kret;
	major_status = GSS_S_FAILURE;
	goto failure;
    }

    if (conf_req_flag) {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif
	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}

	/* Confounder */
	EVP_Cipher(rc4_key, p0 + 24, p0 + 24, 8);

	/* Seal Data */
	for (i=0; i < iov_count; i++) {
	    switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
	    case GSS_IOV_BUFFER_TYPE_DATA:
		break;
	    default:
		continue;
	    }

	    EVP_Cipher(rc4_key, iov[i].buffer.value,
		       iov[i].buffer.value, iov[i].buffer.length);
	}

	/* Padding */
	if (padding) {
	    EVP_Cipher(rc4_key, padding->buffer.value,
		       padding->buffer.value, padding->buffer.length);
	}

#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif
    }
    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    kret = arcfour_mic_key(context, key,
			   p0 + 16, 8, /* SGN_CKSUM */
			   k6_data, sizeof(k6_data));
    if (kret) {
	*minor_status = kret;
	major_status = GSS_S_FAILURE;
        return major_status;
    }

    {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif
	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}
	EVP_Cipher(rc4_key, p0 + 8, p0 + 8, 8); /* SND_SEQ */
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif

	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    if (conf_state)
	*conf_state = conf_req_flag;

    *minor_status = 0;
    return GSS_S_COMPLETE;

failure:

    gss_release_iov_buffer(&junk, iov, iov_count);

    return major_status;
}

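/*
 * IOV variant of unwrap: verify the token in the HEADER buffer,
 * decrypt the DATA (and PADDING) buffers in place when the message is
 * sealed, then recompute SGN_CKSUM over header, confounder, DATA,
 * SIGN_ONLY and padding and compare it with the received checksum
 * before checking message ordering.
 */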
OM_uint32
_gssapi_unwrap_iov_arcfour(OM_uint32 *minor_status,
			   gsskrb5_ctx ctx,
			   krb5_context context,
			   int *pconf_state,
			   gss_qop_t *pqop_state,
			   gss_iov_buffer_desc *iov,
			   int iov_count,
			   krb5_keyblock *key)
{
    OM_uint32 major_status;
    gss_iov_buffer_desc *header, *padding, *trailer;
    krb5_keyblock Klocal;
    uint8_t Klocaldata[16];
    uint8_t k6_data[16], snd_seq[8], Confounder[8];
    uint8_t cksum_data[8];
    uint8_t *_p = NULL;
    const uint8_t *p, *p0;
    size_t verify_len = 0;
    uint32_t seq_number;
    size_t hlen = 0;
    int conf_state;
    int cmp;
    size_t i;
    krb5_error_code kret;
    OM_uint32 ret;

    if (pconf_state != NULL) {
	*pconf_state = 0;
    }
    if (pqop_state != NULL) {
	*pqop_state = 0;
    }

    header = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_HEADER);
    padding = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_PADDING);
    trailer = _gk_find_buffer(iov, iov_count, GSS_IOV_BUFFER_TYPE_TRAILER);

    /* Check if the packet is correct */
    major_status = _gk_verify_buffers(minor_status,
				      ctx,
				      header,
				      padding,
				      trailer,
				      FALSE); /* behaves as stream cipher */
    if (major_status != GSS_S_COMPLETE) {
	return major_status;
    }

    if (padding != NULL && padding->buffer.length != 1) {
	*minor_status = EINVAL;
	return GSS_S_FAILURE;
    }

    verify_len = header->buffer.length;

    if (!IS_DCE_STYLE(ctx)) {
	for (i = 0; i < iov_count; i++) {
	    /* length in header also includes data and padding */
	    if (GSS_IOV_BUFFER_TYPE(iov[i].type) == GSS_IOV_BUFFER_TYPE_DATA)
		verify_len += iov[i].buffer.length;
	}

	if (padding)
	    verify_len += padding->buffer.length;
    }

    _p = header->buffer.value;

    ret = _gssapi_verify_mech_header(&_p,
				     verify_len,
				     GSS_KRB5_MECHANISM);
    if (ret) {
	return ret;
    }
    p0 = _p;

    /* length of mech header */
    hlen = (p0 - (uint8_t *)header->buffer.value);
    hlen += GSS_ARCFOUR_WRAP_TOKEN_SIZE;

    if (hlen > header->buffer.length) {
	return GSS_S_BAD_MECH;
    }

    p = p0;

    if (memcmp(p, "\x02\x01", 2) != 0)
	return GSS_S_BAD_SIG;
    p += 2;
    if (memcmp(p, "\x11\x00", 2) != 0) /* SGN_ALG = HMAC MD5 ARCFOUR */
	return GSS_S_BAD_SIG;
    p += 2;

    if (memcmp (p, "\x10\x00", 2) == 0)
	conf_state = 1;
    else if (memcmp (p, "\xff\xff", 2) == 0)
	conf_state = 0;
    else
	return GSS_S_BAD_SIG;

    p += 2;
    if (memcmp (p, "\xff\xff", 2) != 0)
	return GSS_S_BAD_MIC;
    p = NULL;

    kret = arcfour_mic_key(context,
			   key,
			   p0 + 16, /* SGN_CKSUM */
			   8,       /* SGN_CKSUM_LEN */
			   k6_data,
			   sizeof(k6_data));
    if (kret) {
	*minor_status = kret;
	return GSS_S_FAILURE;
    }

    {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif

	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}
	EVP_Cipher(rc4_key, snd_seq, p0 + 8, 8); /* SND_SEQ */
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif

	memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));
    }

    _gsskrb5_decode_be_om_uint32(snd_seq, &seq_number);

    if (ctx->more_flags & LOCAL) {
	cmp = (ct_memcmp(&snd_seq[4], "\xff\xff\xff\xff", 4) != 0);
    } else {
	cmp = (ct_memcmp(&snd_seq[4], "\x00\x00\x00\x00", 4) != 0);
    }
    if (cmp != 0) {
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    /* keyblock */
    Klocal.keytype = key->keytype;
    Klocal.keyvalue.data = Klocaldata;
    Klocal.keyvalue.length = sizeof(Klocaldata);

    for (i = 0; i < 16; i++) {
	Klocaldata[i] = ((u_char *)key->keyvalue.data)[i] ^ 0xF0;
    }

    kret = arcfour_mic_key(context,
			   &Klocal,
			   snd_seq,
			   4,
			   k6_data, sizeof(k6_data));
    memset_s(Klocaldata, sizeof(Klocaldata), 0, sizeof(Klocaldata));
    if (kret) {
	*minor_status = kret;
	return GSS_S_FAILURE;
    }

    if (conf_state == 1) {
	EVP_CIPHER_CTX *rc4_key;
#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX rc4_keys;
	rc4_key = &rc4_keys;
	EVP_CIPHER_CTX_init(rc4_key);
#else
	rc4_key = EVP_CIPHER_CTX_new();
#endif

	if (!EVP_CipherInit_ex(rc4_key, EVP_rc4(), NULL, k6_data, NULL, 1)) {
	    *minor_status = EINVAL;
	    return GSS_S_FAILURE;
	}

	/* Confounder */
	EVP_Cipher(rc4_key, Confounder, p0 + 24, 8);

	/* Data */
	for (i = 0; i < iov_count; i++) {
	    switch (GSS_IOV_BUFFER_TYPE(iov[i].type)) {
	    case GSS_IOV_BUFFER_TYPE_DATA:
		break;
	    default:
		continue;
	    }

	    EVP_Cipher(rc4_key, iov[i].buffer.value,
		       iov[i].buffer.value, iov[i].buffer.length);
	}

	/* Padding */
	if (padding) {
	    EVP_Cipher(rc4_key, padding->buffer.value,
		       padding->buffer.value, padding->buffer.length);
	}

#if OPENSSL_VERSION_NUMBER < 0x10100000UL
	EVP_CIPHER_CTX_cleanup(rc4_key);
#else
	EVP_CIPHER_CTX_free(rc4_key);
#endif
    } else {
	/* Confounder */
	memcpy(Confounder, p0 + 24, 8);
    }
    memset_s(k6_data, sizeof(k6_data), 0, sizeof(k6_data));

    /* Prepare the buffer for signing */
    kret = arcfour_mic_cksum_iov(context,
				 key, KRB5_KU_USAGE_SEAL,
				 cksum_data, sizeof(cksum_data),
				 p0, 8,
				 Confounder, sizeof(Confounder),
				 iov, iov_count,
				 padding);
    if (kret) {
	*minor_status = kret;
	return GSS_S_FAILURE;
    }

    cmp = ct_memcmp(cksum_data, p0 + 16, 8); /* SGN_CKSUM */
    if (cmp != 0) {
	*minor_status = 0;
	return GSS_S_BAD_MIC;
    }

    if (padding) {
	size_t plen;

	ret = _gssapi_verify_pad(&padding->buffer, 1, &plen);
	if (ret) {
	    *minor_status = 0;
	    return ret;
	}
    }

    HEIMDAL_MUTEX_lock(&ctx->ctx_id_mutex);
    ret = _gssapi_msg_order_check(ctx->order, seq_number);
    HEIMDAL_MUTEX_unlock(&ctx->ctx_id_mutex);
    if (ret != 0) {
	return ret;
    }

    if (pconf_state) {
	*pconf_state = conf_state;
    }

    *minor_status = 0;
    return GSS_S_COMPLETE;
}