/*	$NetBSD: bf_enc_686.S,v 1.1 2001/09/09 11:01:02 tls Exp $	*/
/*	$FreeBSD$	*/

/* Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com)
 * All rights reserved.
 *
 * This package is an SSL implementation written
 * by Eric Young (eay@cryptsoft.com).
 * The implementation was written so as to conform with Netscapes SSL.
 *
 * This library is free for commercial and non-commercial use as long as
 * the following conditions are aheared to.  The following conditions
 * apply to all code found in this distribution, be it the RC4, RSA,
 * lhash, DES, etc., code; not just the SSL code.  The SSL documentation
 * included with this distribution is covered by the same copyright terms
 * except that the holder is Tim Hudson (tjh@cryptsoft.com).
 *
 * Copyright remains Eric Young's, and as such any Copyright notices in
 * the code are not to be removed.
 * If this package is used in a product, Eric Young should be given attribution
 * as the author of the parts of the library used.
 * This can be in the form of a textual message at program startup or
 * in documentation (online or textual) provided with the package.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *    "This product includes cryptographic software written by
 *     Eric Young (eay@cryptsoft.com)"
 *    The word 'cryptographic' can be left out if the rouines from the library
 *    being used are not cryptographic related :-).
 * 4. If you include any Windows specific code (or a derivative thereof) from
 *    the apps directory (application code) you must include an acknowledgement:
 *    "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
 *
 * THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * The licence and distribution terms for any publically available version or
 * derivative of this code cannot be changed.  i.e. this code cannot simply be
 * copied and put under another distribution licence
 * [including the GNU Public Licence.]
 */

/*
 * Modified from the output of `perl bf-686.pl elf' by
 * Jason R. Thorpe <thorpej@zembu.com> and Thor Lancelot Simon
 * <tls@netbsd.org>
 */

#include <i386/include/asm.h>
#define	_C_LABEL	CNAME

ENTRY(BF_encrypt)
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi


	/* Load the 2 words */
	movl	20(%esp),	%eax
	movl	(%eax),		%ecx
	movl	4(%eax),	%edx

	/* P pointer, s and enc flag */
	movl	24(%esp),	%edi
	xorl	%eax,		%eax
	xorl	%ebx,		%ebx
	xorl	(%edi),		%ecx
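
	/*
	 * After the four pushes above the return address sits at 16(%esp),
	 * so the data pointer is at 20(%esp) and the key schedule at
	 * 24(%esp).  The displacements used in the rounds below assume the
	 * standard OpenSSL BF_KEY layout: 18 32-bit P subkeys at offsets
	 * 0-68, followed by the four 256-entry S-boxes at offsets 72,
	 * 1096, 2120 and 3144.
	 */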

	/* Round 0 */
	rorl	$16,		%ecx
	movl	4(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx
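
	/*
	 * Each round repeats the pattern above: one half is XORed with the
	 * next P subkey and then with F() of the other half, where
	 *   F(x) = ((S0[x>>24] + S1[(x>>16)&0xff]) ^ S2[(x>>8)&0xff])
	 *          + S3[x&0xff]   (mod 2^32)
	 * The two rorl $16 instructions expose the upper two bytes of the
	 * input half in %ch/%cl so that all four S-box indices can be
	 * picked up through byte registers.  The remaining rounds are the
	 * same code unrolled with the next subkey offset and with the
	 * roles of %ecx and %edx exchanged.
	 */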

	/* Round 1 */
	rorl	$16,		%edx
	movl	8(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 2 */
	rorl	$16,		%ecx
	movl	12(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 3 */
	rorl	$16,		%edx
	movl	16(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 4 */
	rorl	$16,		%ecx
	movl	20(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 5 */
	rorl	$16,		%edx
	movl	24(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 6 */
	rorl	$16,		%ecx
	movl	28(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 7 */
	rorl	$16,		%edx
	movl	32(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 8 */
	rorl	$16,		%ecx
	movl	36(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 9 */
	rorl	$16,		%edx
	movl	40(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 10 */
	rorl	$16,		%ecx
	movl	44(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 11 */
	rorl	$16,		%edx
	movl	48(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 12 */
	rorl	$16,		%ecx
	movl	52(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 13 */
	rorl	$16,		%edx
	movl	56(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 14 */
	rorl	$16,		%ecx
	movl	60(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 15 */
	rorl	$16,		%edx
	movl	64(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx
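
	/*
	 * End of the 16 rounds: the half in %edx picks up the last subkey
	 * P[17] below, and the two halves are written back swapped
	 * (%edx into data[0], %ecx into data[1]), which is the final
	 * Blowfish swap of the output words.
	 */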
	xorl	68(%edi),	%edx
	movl	20(%esp),	%eax
	movl	%edx,		(%eax)
	movl	%ecx,		4(%eax)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.L_BF_encrypt_end:
	.size   _C_LABEL(BF_encrypt),.L_BF_encrypt_end-_C_LABEL(BF_encrypt)

ENTRY(BF_decrypt)
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi


	/* Load the 2 words */
	movl	20(%esp),	%eax
	movl	(%eax),		%ecx
	movl	4(%eax),	%edx

	/* P pointer, s and enc flag */
	movl	24(%esp),	%edi
	xorl	%eax,		%eax
	xorl	%ebx,		%ebx
	xorl	68(%edi),	%ecx
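
	/*
	 * BF_decrypt is the same Feistel network as BF_encrypt with the
	 * subkeys applied in reverse order: the input is whitened with
	 * P[17] above, the rounds below walk the P array downward, and
	 * the final XOR uses P[0].
	 */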

	/* Round 16 */
	rorl	$16,		%ecx
	movl	64(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 15 */
	rorl	$16,		%edx
	movl	60(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 14 */
	rorl	$16,		%ecx
	movl	56(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 13 */
	rorl	$16,		%edx
	movl	52(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 12 */
	rorl	$16,		%ecx
	movl	48(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 11 */
	rorl	$16,		%edx
	movl	44(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 10 */
	rorl	$16,		%ecx
	movl	40(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 9 */
	rorl	$16,		%edx
	movl	36(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 8 */
	rorl	$16,		%ecx
	movl	32(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 7 */
	rorl	$16,		%edx
	movl	28(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 6 */
	rorl	$16,		%ecx
	movl	24(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 5 */
	rorl	$16,		%edx
	movl	20(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 4 */
	rorl	$16,		%ecx
	movl	16(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 3 */
	rorl	$16,		%edx
	movl	12(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx

	/* Round 2 */
	rorl	$16,		%ecx
	movl	8(%edi),	%esi
	movb	%ch,		%al
	movb	%cl,		%bl
	rorl	$16,		%ecx
	xorl	%esi,		%edx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%ch,		%al
	movb	%cl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%edx

	/* Round 1 */
	rorl	$16,		%edx
	movl	4(%edi),	%esi
	movb	%dh,		%al
	movb	%dl,		%bl
	rorl	$16,		%edx
	xorl	%esi,		%ecx
	movl	72(%edi,%eax,4),%esi
	movl	1096(%edi,%ebx,4),%ebp
	movb	%dh,		%al
	movb	%dl,		%bl
	addl	%ebp,		%esi
	movl	2120(%edi,%eax,4),%eax
	xorl	%eax,		%esi
	movl	3144(%edi,%ebx,4),%ebp
	addl	%ebp,		%esi
	xorl	%eax,		%eax
	xorl	%esi,		%ecx
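
	/*
	 * Decryption finishes symmetrically: %edx is XORed with P[0] and
	 * the halves are written back swapped into the data block.
	 */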
	xorl	(%edi),		%edx
	movl	20(%esp),	%eax
	movl	%edx,		(%eax)
	movl	%ecx,		4(%eax)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.L_BF_decrypt_end:
	.size   _C_LABEL(BF_decrypt),.L_BF_decrypt_end-_C_LABEL(BF_decrypt)