#include <machine/asm.h>
.text
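/*
 * GHASH for x86, CRYPTOGAMS by <appro@openssl.org> (per the ID string in
 * the .byte data at the end of this file). Three generations of the same
 * primitive: 4-bit table-driven GHASH in plain-x86 and MMX variants, and
 * PCLMULQDQ-based routines for CLMUL-capable CPUs.
 */

/*
 * Prototype, per OpenSSL's gcm128.c:
 *	void gcm_gmult_4bit_x86(u64 Xi[2], const u128 Htable[16])
 * One GHASH multiplication, Xi = Xi * H, via 4-bit tables (Shoup's method).
 * The rem_4bit reduction constants are staged on the stack at 16(%esp);
 * they match the .Lrem_4bit table at the end of the file.
 */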
.globl	gcm_gmult_4bit_x86
.type	gcm_gmult_4bit_x86,@function
.align	16
gcm_gmult_4bit_x86:
.L_gcm_gmult_4bit_x86_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	subl	$84,%esp
	movl	104(%esp),%edi
	movl	108(%esp),%esi
	movl	(%edi),%ebp
	movl	4(%edi),%edx
	movl	8(%edi),%ecx
	movl	12(%edi),%ebx
	movl	$0,16(%esp)
	movl	$471859200,20(%esp)
	movl	$943718400,24(%esp)
	movl	$610271232,28(%esp)
	movl	$1887436800,32(%esp)
	movl	$1822425088,36(%esp)
	movl	$1220542464,40(%esp)
	movl	$1423966208,44(%esp)
	movl	$3774873600,48(%esp)
	movl	$4246732800,52(%esp)
	movl	$3644850176,56(%esp)
	movl	$3311403008,60(%esp)
	movl	$2441084928,64(%esp)
	movl	$2376073216,68(%esp)
	movl	$2847932416,72(%esp)
	movl	$3051356160,76(%esp)
	movl	%ebp,(%esp)
	movl	%edx,4(%esp)
	movl	%ecx,8(%esp)
	movl	%ebx,12(%esp)
	shrl	$20,%ebx
	andl	$240,%ebx
	movl	4(%esi,%ebx,1),%ebp
	movl	(%esi,%ebx,1),%edx
	movl	12(%esi,%ebx,1),%ecx
	movl	8(%esi,%ebx,1),%ebx
	xorl	%eax,%eax
	movl	$15,%edi
	jmp	.L000x86_loop
.align	16
.L000x86_loop:
	movb	%bl,%al
	shrdl	$4,%ecx,%ebx
	andb	$15,%al
	shrdl	$4,%edx,%ecx
	shrdl	$4,%ebp,%edx
	shrl	$4,%ebp
	xorl	16(%esp,%eax,4),%ebp
	movb	(%esp,%edi,1),%al
	andb	$240,%al
	xorl	8(%esi,%eax,1),%ebx
	xorl	12(%esi,%eax,1),%ecx
	xorl	(%esi,%eax,1),%edx
	xorl	4(%esi,%eax,1),%ebp
	decl	%edi
	js	.L001x86_break
	movb	%bl,%al
	shrdl	$4,%ecx,%ebx
	andb	$15,%al
	shrdl	$4,%edx,%ecx
	shrdl	$4,%ebp,%edx
	shrl	$4,%ebp
	xorl	16(%esp,%eax,4),%ebp
	movb	(%esp,%edi,1),%al
	shlb	$4,%al
	xorl	8(%esi,%eax,1),%ebx
	xorl	12(%esi,%eax,1),%ecx
	xorl	(%esi,%eax,1),%edx
	xorl	4(%esi,%eax,1),%ebp
	jmp	.L000x86_loop
.align	16
.L001x86_break:
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	bswap	%ebp
	movl	104(%esp),%edi
	movl	%ebx,12(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,4(%edi)
	movl	%ebp,(%edi)
	addl	$84,%esp
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_gmult_4bit_x86,.-.L_gcm_gmult_4bit_x86_begin
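/*
 * Prototype, per OpenSSL's gcm128.c:
 *	void gcm_ghash_4bit_x86(u64 Xi[2], const u128 Htable[16],
 *	                        const u8 *inp, size_t len)
 * Bulk version of the routine above: folds len bytes of input into Xi,
 * one 16-byte block per trip around .L002x86_outer_loop.
 */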
.globl	gcm_ghash_4bit_x86
.type	gcm_ghash_4bit_x86,@function
.align	16
gcm_ghash_4bit_x86:
.L_gcm_ghash_4bit_x86_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	subl	$84,%esp
	movl	104(%esp),%ebx
	movl	108(%esp),%esi
	movl	112(%esp),%edi
	movl	116(%esp),%ecx
	addl	%edi,%ecx
	movl	%ecx,116(%esp)
	movl	(%ebx),%ebp
	movl	4(%ebx),%edx
	movl	8(%ebx),%ecx
	movl	12(%ebx),%ebx
	movl	$0,16(%esp)
	movl	$471859200,20(%esp)
	movl	$943718400,24(%esp)
	movl	$610271232,28(%esp)
	movl	$1887436800,32(%esp)
	movl	$1822425088,36(%esp)
	movl	$1220542464,40(%esp)
	movl	$1423966208,44(%esp)
	movl	$3774873600,48(%esp)
	movl	$4246732800,52(%esp)
	movl	$3644850176,56(%esp)
	movl	$3311403008,60(%esp)
	movl	$2441084928,64(%esp)
	movl	$2376073216,68(%esp)
	movl	$2847932416,72(%esp)
	movl	$3051356160,76(%esp)
.align	16
.L002x86_outer_loop:
	xorl	12(%edi),%ebx
	xorl	8(%edi),%ecx
	xorl	4(%edi),%edx
	xorl	(%edi),%ebp
	movl	%ebx,12(%esp)
	movl	%ecx,8(%esp)
	movl	%edx,4(%esp)
	movl	%ebp,(%esp)
	shrl	$20,%ebx
	andl	$240,%ebx
	movl	4(%esi,%ebx,1),%ebp
	movl	(%esi,%ebx,1),%edx
	movl	12(%esi,%ebx,1),%ecx
	movl	8(%esi,%ebx,1),%ebx
	xorl	%eax,%eax
	movl	$15,%edi
	jmp	.L003x86_loop
.align	16
.L003x86_loop:
	movb	%bl,%al
	shrdl	$4,%ecx,%ebx
	andb	$15,%al
	shrdl	$4,%edx,%ecx
	shrdl	$4,%ebp,%edx
	shrl	$4,%ebp
	xorl	16(%esp,%eax,4),%ebp
	movb	(%esp,%edi,1),%al
	andb	$240,%al
	xorl	8(%esi,%eax,1),%ebx
	xorl	12(%esi,%eax,1),%ecx
	xorl	(%esi,%eax,1),%edx
	xorl	4(%esi,%eax,1),%ebp
	decl	%edi
	js	.L004x86_break
	movb	%bl,%al
	shrdl	$4,%ecx,%ebx
	andb	$15,%al
	shrdl	$4,%edx,%ecx
	shrdl	$4,%ebp,%edx
	shrl	$4,%ebp
	xorl	16(%esp,%eax,4),%ebp
	movb	(%esp,%edi,1),%al
	shlb	$4,%al
	xorl	8(%esi,%eax,1),%ebx
	xorl	12(%esi,%eax,1),%ecx
	xorl	(%esi,%eax,1),%edx
	xorl	4(%esi,%eax,1),%ebp
	jmp	.L003x86_loop
.align	16
.L004x86_break:
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	bswap	%ebp
	movl	112(%esp),%edi
	leal	16(%edi),%edi
	cmpl	116(%esp),%edi
	movl	%edi,112(%esp)
	jb	.L002x86_outer_loop
	movl	104(%esp),%edi
	movl	%ebx,12(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,4(%edi)
	movl	%ebp,(%edi)
	addl	$84,%esp
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_ghash_4bit_x86,.-.L_gcm_ghash_4bit_x86_begin
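/*
 * Prototype, per OpenSSL's gcm128.c:
 *	void gcm_gmult_4bit_mmx(u64 Xi[2], const u128 Htable[16])
 * Same 4-bit algorithm with the 128-bit product kept in the %mm0/%mm1
 * pair; reduction uses the .Lrem_4bit table, located PIC-style via the
 * call/pop at .L005pic_point.
 */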
.globl	gcm_gmult_4bit_mmx
.type	gcm_gmult_4bit_mmx,@function
.align	16
gcm_gmult_4bit_mmx:
.L_gcm_gmult_4bit_mmx_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%edi
	movl	24(%esp),%esi
	call	.L005pic_point
.L005pic_point:
	popl	%eax
	leal	.Lrem_4bit-.L005pic_point(%eax),%eax
	movzbl	15(%edi),%ebx
	xorl	%ecx,%ecx
	movl	%ebx,%edx
	movb	%dl,%cl
	movl	$14,%ebp
	shlb	$4,%cl
	andl	$240,%edx
	movq	8(%esi,%ecx,1),%mm0
	movq	(%esi,%ecx,1),%mm1
	movd	%mm0,%ebx
	jmp	.L006mmx_loop
.align	16
.L006mmx_loop:
	psrlq	$4,%mm0
	andl	$15,%ebx
	movq	%mm1,%mm2
	psrlq	$4,%mm1
	pxor	8(%esi,%edx,1),%mm0
	movb	(%edi,%ebp,1),%cl
	psllq	$60,%mm2
	pxor	(%eax,%ebx,8),%mm1
	decl	%ebp
	movd	%mm0,%ebx
	pxor	(%esi,%edx,1),%mm1
	movl	%ecx,%edx
	pxor	%mm2,%mm0
	js	.L007mmx_break
	shlb	$4,%cl
	andl	$15,%ebx
	psrlq	$4,%mm0
	andl	$240,%edx
	movq	%mm1,%mm2
	psrlq	$4,%mm1
	pxor	8(%esi,%ecx,1),%mm0
	psllq	$60,%mm2
	pxor	(%eax,%ebx,8),%mm1
	movd	%mm0,%ebx
	pxor	(%esi,%ecx,1),%mm1
	pxor	%mm2,%mm0
	jmp	.L006mmx_loop
.align	16
.L007mmx_break:
	shlb	$4,%cl
	andl	$15,%ebx
	psrlq	$4,%mm0
	andl	$240,%edx
	movq	%mm1,%mm2
	psrlq	$4,%mm1
	pxor	8(%esi,%ecx,1),%mm0
	psllq	$60,%mm2
	pxor	(%eax,%ebx,8),%mm1
	movd	%mm0,%ebx
	pxor	(%esi,%ecx,1),%mm1
	pxor	%mm2,%mm0
	psrlq	$4,%mm0
	andl	$15,%ebx
	movq	%mm1,%mm2
	psrlq	$4,%mm1
	pxor	8(%esi,%edx,1),%mm0
	psllq	$60,%mm2
	pxor	(%eax,%ebx,8),%mm1
	movd	%mm0,%ebx
	pxor	(%esi,%edx,1),%mm1
	pxor	%mm2,%mm0
	psrlq	$32,%mm0
	movd	%mm1,%edx
	psrlq	$32,%mm1
	movd	%mm0,%ecx
	movd	%mm1,%ebp
	bswap	%ebx
	bswap	%edx
	bswap	%ecx
	bswap	%ebp
	emms
	movl	%ebx,12(%edi)
	movl	%edx,4(%edi)
	movl	%ecx,8(%edi)
	movl	%ebp,(%edi)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_gmult_4bit_mmx,.-.L_gcm_gmult_4bit_mmx_begin
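/*
 * Prototype, per OpenSSL's gcm128.c:
 *	void gcm_ghash_4bit_mmx(u64 Xi[2], const u128 Htable[16],
 *	                        const u8 *inp, size_t len)
 * MMX bulk hash. The prologue below expands Htable into pre-shifted copies
 * on a 64-byte-aligned stack frame (the caller's %esp is saved at
 * 556(%esp)); reduction then goes through the wider .Lrem_8bit table.
 */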
.globl	gcm_ghash_4bit_mmx
.type	gcm_ghash_4bit_mmx,@function
.align	16
gcm_ghash_4bit_mmx:
.L_gcm_ghash_4bit_mmx_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%eax
	movl	24(%esp),%ebx
	movl	28(%esp),%ecx
	movl	32(%esp),%edx
	movl	%esp,%ebp
	call	.L008pic_point
.L008pic_point:
	popl	%esi
	leal	.Lrem_8bit-.L008pic_point(%esi),%esi
	subl	$544,%esp
	andl	$-64,%esp
	subl	$16,%esp
	addl	%ecx,%edx
	movl	%eax,544(%esp)
	movl	%edx,552(%esp)
	movl	%ebp,556(%esp)
	addl	$128,%ebx
	leal	144(%esp),%edi
	leal	400(%esp),%ebp
	movl	-120(%ebx),%edx
	movq	-120(%ebx),%mm0
	movq	-128(%ebx),%mm3
	shll	$4,%edx
	movb	%dl,(%esp)
	movl	-104(%ebx),%edx
	movq	-104(%ebx),%mm2
	movq	-112(%ebx),%mm5
	movq	%mm0,-128(%edi)
	psrlq	$4,%mm0
	movq	%mm3,(%edi)
	movq	%mm3,%mm7
	psrlq	$4,%mm3
	shll	$4,%edx
	movb	%dl,1(%esp)
	movl	-88(%ebx),%edx
	movq	-88(%ebx),%mm1
	psllq	$60,%mm7
	movq	-96(%ebx),%mm4
	por	%mm7,%mm0
	movq	%mm2,-120(%edi)
	psrlq	$4,%mm2
	movq	%mm5,8(%edi)
	movq	%mm5,%mm6
	movq	%mm0,-128(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,(%ebp)
	shll	$4,%edx
	movb	%dl,2(%esp)
	movl	-72(%ebx),%edx
	movq	-72(%ebx),%mm0
	psllq	$60,%mm6
	movq	-80(%ebx),%mm3
	por	%mm6,%mm2
	movq	%mm1,-112(%edi)
	psrlq	$4,%mm1
	movq	%mm4,16(%edi)
	movq	%mm4,%mm7
	movq	%mm2,-120(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,8(%ebp)
	shll	$4,%edx
	movb	%dl,3(%esp)
	movl	-56(%ebx),%edx
	movq	-56(%ebx),%mm2
	psllq	$60,%mm7
	movq	-64(%ebx),%mm5
	por	%mm7,%mm1
	movq	%mm0,-104(%edi)
	psrlq	$4,%mm0
	movq	%mm3,24(%edi)
	movq	%mm3,%mm6
	movq	%mm1,-112(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,16(%ebp)
	shll	$4,%edx
	movb	%dl,4(%esp)
	movl	-40(%ebx),%edx
	movq	-40(%ebx),%mm1
	psllq	$60,%mm6
	movq	-48(%ebx),%mm4
	por	%mm6,%mm0
	movq	%mm2,-96(%edi)
	psrlq	$4,%mm2
	movq	%mm5,32(%edi)
	movq	%mm5,%mm7
	movq	%mm0,-104(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,24(%ebp)
	shll	$4,%edx
	movb	%dl,5(%esp)
	movl	-24(%ebx),%edx
	movq	-24(%ebx),%mm0
	psllq	$60,%mm7
	movq	-32(%ebx),%mm3
	por	%mm7,%mm2
	movq	%mm1,-88(%edi)
	psrlq	$4,%mm1
	movq	%mm4,40(%edi)
	movq	%mm4,%mm6
	movq	%mm2,-96(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,32(%ebp)
	shll	$4,%edx
	movb	%dl,6(%esp)
	movl	-8(%ebx),%edx
	movq	-8(%ebx),%mm2
	psllq	$60,%mm6
	movq	-16(%ebx),%mm5
	por	%mm6,%mm1
	movq	%mm0,-80(%edi)
	psrlq	$4,%mm0
	movq	%mm3,48(%edi)
	movq	%mm3,%mm7
	movq	%mm1,-88(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,40(%ebp)
	shll	$4,%edx
	movb	%dl,7(%esp)
	movl	8(%ebx),%edx
	movq	8(%ebx),%mm1
	psllq	$60,%mm7
	movq	(%ebx),%mm4
	por	%mm7,%mm0
	movq	%mm2,-72(%edi)
	psrlq	$4,%mm2
	movq	%mm5,56(%edi)
	movq	%mm5,%mm6
	movq	%mm0,-80(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,48(%ebp)
	shll	$4,%edx
	movb	%dl,8(%esp)
	movl	24(%ebx),%edx
	movq	24(%ebx),%mm0
	psllq	$60,%mm6
	movq	16(%ebx),%mm3
	por	%mm6,%mm2
	movq	%mm1,-64(%edi)
	psrlq	$4,%mm1
	movq	%mm4,64(%edi)
	movq	%mm4,%mm7
	movq	%mm2,-72(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,56(%ebp)
	shll	$4,%edx
	movb	%dl,9(%esp)
	movl	40(%ebx),%edx
	movq	40(%ebx),%mm2
	psllq	$60,%mm7
	movq	32(%ebx),%mm5
	por	%mm7,%mm1
	movq	%mm0,-56(%edi)
	psrlq	$4,%mm0
	movq	%mm3,72(%edi)
	movq	%mm3,%mm6
	movq	%mm1,-64(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,64(%ebp)
	shll	$4,%edx
	movb	%dl,10(%esp)
	movl	56(%ebx),%edx
	movq	56(%ebx),%mm1
	psllq	$60,%mm6
	movq	48(%ebx),%mm4
	por	%mm6,%mm0
	movq	%mm2,-48(%edi)
	psrlq	$4,%mm2
	movq	%mm5,80(%edi)
	movq	%mm5,%mm7
	movq	%mm0,-56(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,72(%ebp)
	shll	$4,%edx
	movb	%dl,11(%esp)
	movl	72(%ebx),%edx
	movq	72(%ebx),%mm0
	psllq	$60,%mm7
	movq	64(%ebx),%mm3
	por	%mm7,%mm2
	movq	%mm1,-40(%edi)
	psrlq	$4,%mm1
	movq	%mm4,88(%edi)
	movq	%mm4,%mm6
	movq	%mm2,-48(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,80(%ebp)
	shll	$4,%edx
	movb	%dl,12(%esp)
	movl	88(%ebx),%edx
	movq	88(%ebx),%mm2
	psllq	$60,%mm6
	movq	80(%ebx),%mm5
	por	%mm6,%mm1
	movq	%mm0,-32(%edi)
	psrlq	$4,%mm0
	movq	%mm3,96(%edi)
	movq	%mm3,%mm7
	movq	%mm1,-40(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,88(%ebp)
	shll	$4,%edx
	movb	%dl,13(%esp)
	movl	104(%ebx),%edx
	movq	104(%ebx),%mm1
	psllq	$60,%mm7
	movq	96(%ebx),%mm4
	por	%mm7,%mm0
	movq	%mm2,-24(%edi)
	psrlq	$4,%mm2
	movq	%mm5,104(%edi)
	movq	%mm5,%mm6
	movq	%mm0,-32(%ebp)
	psrlq	$4,%mm5
	movq	%mm3,96(%ebp)
	shll	$4,%edx
	movb	%dl,14(%esp)
	movl	120(%ebx),%edx
	movq	120(%ebx),%mm0
	psllq	$60,%mm6
	movq	112(%ebx),%mm3
	por	%mm6,%mm2
	movq	%mm1,-16(%edi)
	psrlq	$4,%mm1
	movq	%mm4,112(%edi)
	movq	%mm4,%mm7
	movq	%mm2,-24(%ebp)
	psrlq	$4,%mm4
	movq	%mm5,104(%ebp)
	shll	$4,%edx
	movb	%dl,15(%esp)
	psllq	$60,%mm7
	por	%mm7,%mm1
	movq	%mm0,-8(%edi)
	psrlq	$4,%mm0
	movq	%mm3,120(%edi)
	movq	%mm3,%mm6
	movq	%mm1,-16(%ebp)
	psrlq	$4,%mm3
	movq	%mm4,112(%ebp)
	psllq	$60,%mm6
	por	%mm6,%mm0
	movq	%mm0,-8(%ebp)
	movq	%mm3,120(%ebp)
	movq	(%eax),%mm6
	movl	8(%eax),%ebx
	movl	12(%eax),%edx
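/*
 * Table setup is done: %mm6 holds bytes 0..7 of Xi, %ebx/%edx bytes
 * 8..15. Each .L009outer iteration XORs in one 16-byte input block and
 * multiplies by H, consuming two nibbles per unrolled step and folding
 * in .Lrem_8bit reduction words via pinsrw.
 */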
.align	16
.L009outer:
	xorl	12(%ecx),%edx
	xorl	8(%ecx),%ebx
	pxor	(%ecx),%mm6
	leal	16(%ecx),%ecx
	movl	%ebx,536(%esp)
	movq	%mm6,528(%esp)
	movl	%ecx,548(%esp)
	xorl	%eax,%eax
	roll	$8,%edx
	movb	%dl,%al
	movl	%eax,%ebp
	andb	$15,%al
	shrl	$4,%ebp
	pxor	%mm0,%mm0
	roll	$8,%edx
	pxor	%mm1,%mm1
	pxor	%mm2,%mm2
	movq	16(%esp,%eax,8),%mm7
	movq	144(%esp,%eax,8),%mm6
	movb	%dl,%al
	movd	%mm7,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	shrl	$4,%edi
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movl	536(%esp),%edx
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm1,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm0
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm0,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movl	532(%esp),%edx
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm1,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm0
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm0,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm1,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm0
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movl	528(%esp),%edx
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm0,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm1,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm0
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	movb	%dl,%al
	movd	%mm7,%ecx
	movzbl	%bl,%ebx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%ebp
	psrlq	$8,%mm6
	pxor	272(%esp,%edi,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm0,%mm6
	shrl	$4,%ebp
	pinsrw	$2,(%esi,%ebx,2),%mm2
	pxor	16(%esp,%eax,8),%mm7
	roll	$8,%edx
	pxor	144(%esp,%eax,8),%mm6
	pxor	%mm3,%mm7
	pxor	400(%esp,%edi,8),%mm6
	xorb	(%esp,%edi,1),%cl
	movb	%dl,%al
	movl	524(%esp),%edx
	movd	%mm7,%ebx
	movzbl	%cl,%ecx
	psrlq	$8,%mm7
	movq	%mm6,%mm3
	movl	%eax,%edi
	psrlq	$8,%mm6
	pxor	272(%esp,%ebp,8),%mm7
	andb	$15,%al
	psllq	$56,%mm3
	pxor	%mm2,%mm6
	shrl	$4,%edi
	pinsrw	$2,(%esi,%ecx,2),%mm1
	pxor	16(%esp,%eax,8),%mm7
	pxor	144(%esp,%eax,8),%mm6
	xorb	(%esp,%ebp,1),%bl
	pxor	%mm3,%mm7
	pxor	400(%esp,%ebp,8),%mm6
	movzbl	%bl,%ebx
	pxor	%mm2,%mm2
	psllq	$4,%mm1
	movd	%mm7,%ecx
	psrlq	$4,%mm7
	movq	%mm6,%mm3
	psrlq	$4,%mm6
	shll	$4,%ecx
	pxor	16(%esp,%edi,8),%mm7
	psllq	$60,%mm3
	movzbl	%cl,%ecx
	pxor	%mm3,%mm7
	pxor	144(%esp,%edi,8),%mm6
	pinsrw	$2,(%esi,%ebx,2),%mm0
	pxor	%mm1,%mm6
	movd	%mm7,%edx
	pinsrw	$3,(%esi,%ecx,2),%mm2
	psllq	$12,%mm0
	pxor	%mm0,%mm6
	psrlq	$32,%mm7
	pxor	%mm2,%mm6
	movl	548(%esp),%ecx
	movd	%mm7,%ebx
	movq	%mm6,%mm3
	psllw	$8,%mm6
	psrlw	$8,%mm3
	por	%mm3,%mm6
	bswap	%edx
	pshufw	$27,%mm6,%mm6
	bswap	%ebx
	cmpl	552(%esp),%ecx
	jne	.L009outer
	movl	544(%esp),%eax
	movl	%edx,12(%eax)
	movl	%ebx,8(%eax)
	movq	%mm6,(%eax)
	movl	556(%esp),%esp
	emms
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_ghash_4bit_mmx,.-.L_gcm_ghash_4bit_mmx_begin
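/*
 * Prototype, per OpenSSL's gcm128.c:
 *	void gcm_init_clmul(u128 Htable[16], const u64 Xi[2])
 * PCLMULQDQ path setup: derives H from the raw hash key, squares it, and
 * stores H, H^2 and their Karatsuba "xor'ed halves" at Htable+0, +16 and
 * +32. The .byte 102,15,58,68,... sequences are pclmulqdq opcodes emitted
 * as raw bytes for the benefit of older assemblers.
 */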
.globl	gcm_init_clmul
.type	gcm_init_clmul,@function
.align	16
gcm_init_clmul:
.L_gcm_init_clmul_begin:
	movl	4(%esp),%edx
	movl	8(%esp),%eax
	call	.L010pic
.L010pic:
	popl	%ecx
	leal	.Lbswap-.L010pic(%ecx),%ecx
	movdqu	(%eax),%xmm2
	pshufd	$78,%xmm2,%xmm2
	pshufd	$255,%xmm2,%xmm4
	movdqa	%xmm2,%xmm3
	psllq	$1,%xmm2
	pxor	%xmm5,%xmm5
	psrlq	$63,%xmm3
	pcmpgtd	%xmm4,%xmm5
	pslldq	$8,%xmm3
	por	%xmm3,%xmm2
	pand	16(%ecx),%xmm5
	pxor	%xmm5,%xmm2
	movdqa	%xmm2,%xmm0
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pshufd	$78,%xmm2,%xmm4
	pxor	%xmm0,%xmm3
	pxor	%xmm2,%xmm4
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,220,0
	xorps	%xmm0,%xmm3
	xorps	%xmm1,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0
	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1
	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
	pshufd	$78,%xmm2,%xmm3
	pshufd	$78,%xmm0,%xmm4
	pxor	%xmm2,%xmm3
	movdqu	%xmm2,(%edx)
	pxor	%xmm0,%xmm4
	movdqu	%xmm0,16(%edx)
.byte	102,15,58,15,227,8
	movdqu	%xmm4,32(%edx)
	ret
.size	gcm_init_clmul,.-.L_gcm_init_clmul_begin
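/*
 * Prototype, per OpenSSL's gcm128.c:
 *	void gcm_gmult_clmul(u64 Xi[2], const u128 Htable[16])
 * Single carry-less multiplication plus reduction; pshufb against the
 * .Lbswap mask (.byte 102,15,56,0,197) converts Xi to and from the
 * bit-reflected representation on entry and exit.
 */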
.globl	gcm_gmult_clmul
.type	gcm_gmult_clmul,@function
.align	16
gcm_gmult_clmul:
.L_gcm_gmult_clmul_begin:
	movl	4(%esp),%eax
	movl	8(%esp),%edx
	call	.L011pic
.L011pic:
	popl	%ecx
	leal	.Lbswap-.L011pic(%ecx),%ecx
	movdqu	(%eax),%xmm0
	movdqa	(%ecx),%xmm5
	movups	(%edx),%xmm2
.byte	102,15,56,0,197
	movups	32(%edx),%xmm4
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pxor	%xmm0,%xmm3
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,220,0
	xorps	%xmm0,%xmm3
	xorps	%xmm1,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0
	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1
	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
.byte	102,15,56,0,197
	movdqu	%xmm0,(%eax)
	ret
.size	gcm_gmult_clmul,.-.L_gcm_gmult_clmul_begin
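/*
 * Prototype, per OpenSSL's gcm128.c:
 *	void gcm_ghash_clmul(u64 Xi[2], const u128 Htable[16],
 *	                     const u8 *inp, size_t len)
 * Bulk PCLMULQDQ hash: two blocks per .L015mod_loop iteration using H and
 * H^2, with dedicated even/odd tail paths for the remainder.
 */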
.globl	gcm_ghash_clmul
.type	gcm_ghash_clmul,@function
.align	16
gcm_ghash_clmul:
.L_gcm_ghash_clmul_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%eax
	movl	24(%esp),%edx
	movl	28(%esp),%esi
	movl	32(%esp),%ebx
	call	.L012pic
.L012pic:
	popl	%ecx
	leal	.Lbswap-.L012pic(%ecx),%ecx
	movdqu	(%eax),%xmm0
	movdqa	(%ecx),%xmm5
	movdqu	(%edx),%xmm2
.byte	102,15,56,0,197
	subl	$16,%ebx
	jz	.L013odd_tail
	movdqu	(%esi),%xmm3
	movdqu	16(%esi),%xmm6
.byte	102,15,56,0,221
.byte	102,15,56,0,245
	movdqu	32(%edx),%xmm5
	pxor	%xmm3,%xmm0
	pshufd	$78,%xmm6,%xmm3
	movdqa	%xmm6,%xmm7
	pxor	%xmm6,%xmm3
	leal	32(%esi),%esi
.byte	102,15,58,68,242,0
.byte	102,15,58,68,250,17
.byte	102,15,58,68,221,0
	movups	16(%edx),%xmm2
	nop
	subl	$32,%ebx
	jbe	.L014even_tail
	jmp	.L015mod_loop
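/*
 * In the loop below, reduction of the previous double-block result is
 * interleaved with the Karatsuba multiply of the next pair of blocks,
 * which is why the multiplication constants (%xmm2/%xmm5) are reloaded
 * from Htable mid-stream.
 */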
.align	32
.L015mod_loop:
	pshufd	$78,%xmm0,%xmm4
	movdqa	%xmm0,%xmm1
	pxor	%xmm0,%xmm4
	nop
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,229,16
	movups	(%edx),%xmm2
	xorps	%xmm6,%xmm0
	movdqa	(%ecx),%xmm5
	xorps	%xmm7,%xmm1
	movdqu	(%esi),%xmm7
	pxor	%xmm0,%xmm3
	movdqu	16(%esi),%xmm6
	pxor	%xmm1,%xmm3
.byte	102,15,56,0,253
	pxor	%xmm3,%xmm4
	movdqa	%xmm4,%xmm3
	psrldq	$8,%xmm4
	pslldq	$8,%xmm3
	pxor	%xmm4,%xmm1
	pxor	%xmm3,%xmm0
.byte	102,15,56,0,245
	pxor	%xmm7,%xmm1
	movdqa	%xmm6,%xmm7
	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
.byte	102,15,58,68,242,0
	movups	32(%edx),%xmm5
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1
	pshufd	$78,%xmm7,%xmm3
	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm7,%xmm3
	pxor	%xmm4,%xmm1
.byte	102,15,58,68,250,17
	movups	16(%edx),%xmm2
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
.byte	102,15,58,68,221,0
	leal	32(%esi),%esi
	subl	$32,%ebx
	ja	.L015mod_loop
.L014even_tail:
	pshufd	$78,%xmm0,%xmm4
	movdqa	%xmm0,%xmm1
	pxor	%xmm0,%xmm4
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,229,16
	movdqa	(%ecx),%xmm5
	xorps	%xmm6,%xmm0
	xorps	%xmm7,%xmm1
	pxor	%xmm0,%xmm3
	pxor	%xmm1,%xmm3
	pxor	%xmm3,%xmm4
	movdqa	%xmm4,%xmm3
	psrldq	$8,%xmm4
	pslldq	$8,%xmm3
	pxor	%xmm4,%xmm1
	pxor	%xmm3,%xmm0
	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1
	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
	testl	%ebx,%ebx
	jnz	.L016done
	movups	(%edx),%xmm2
.L013odd_tail:
	movdqu	(%esi),%xmm3
.byte	102,15,56,0,221
	pxor	%xmm3,%xmm0
	movdqa	%xmm0,%xmm1
	pshufd	$78,%xmm0,%xmm3
	pshufd	$78,%xmm2,%xmm4
	pxor	%xmm0,%xmm3
	pxor	%xmm2,%xmm4
.byte	102,15,58,68,194,0
.byte	102,15,58,68,202,17
.byte	102,15,58,68,220,0
	xorps	%xmm0,%xmm3
	xorps	%xmm1,%xmm3
	movdqa	%xmm3,%xmm4
	psrldq	$8,%xmm3
	pslldq	$8,%xmm4
	pxor	%xmm3,%xmm1
	pxor	%xmm4,%xmm0
	movdqa	%xmm0,%xmm4
	movdqa	%xmm0,%xmm3
	psllq	$5,%xmm0
	pxor	%xmm0,%xmm3
	psllq	$1,%xmm0
	pxor	%xmm3,%xmm0
	psllq	$57,%xmm0
	movdqa	%xmm0,%xmm3
	pslldq	$8,%xmm0
	psrldq	$8,%xmm3
	pxor	%xmm4,%xmm0
	pxor	%xmm3,%xmm1
	movdqa	%xmm0,%xmm4
	psrlq	$1,%xmm0
	pxor	%xmm4,%xmm1
	pxor	%xmm0,%xmm4
	psrlq	$5,%xmm0
	pxor	%xmm4,%xmm0
	psrlq	$1,%xmm0
	pxor	%xmm1,%xmm0
.L016done:
.byte	102,15,56,0,197
	movdqu	%xmm0,(%eax)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	gcm_ghash_clmul,.-.L_gcm_ghash_clmul_begin
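/*
 * Constant data. .Lbswap is a 16-byte byte-swap mask followed by the GHASH
 * reduction constant (0xc2 in the top byte); .Lrem_8bit serves
 * gcm_ghash_4bit_mmx and .Lrem_4bit serves gcm_gmult_4bit_mmx (the plain
 * x86 routines inline the same constants as immediates). The trailing
 * .byte string decodes to "GHASH for x86, CRYPTOGAMS by <appro@openssl.org>".
 */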
.align	64
.Lbswap:
.byte	15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.byte	1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,194
.align	64
.Lrem_8bit:
.value	0,450,900,582,1800,1738,1164,1358
.value	3600,4050,3476,3158,2328,2266,2716,2910
.value	7200,7650,8100,7782,6952,6890,6316,6510
.value	4656,5106,4532,4214,5432,5370,5820,6014
.value	14400,14722,15300,14854,16200,16010,15564,15630
.value	13904,14226,13780,13334,12632,12442,13020,13086
.value	9312,9634,10212,9766,9064,8874,8428,8494
.value	10864,11186,10740,10294,11640,11450,12028,12094
.value	28800,28994,29444,29382,30600,30282,29708,30158
.value	32400,32594,32020,31958,31128,30810,31260,31710
.value	27808,28002,28452,28390,27560,27242,26668,27118
.value	25264,25458,24884,24822,26040,25722,26172,26622
.value	18624,18690,19268,19078,20424,19978,19532,19854
.value	18128,18194,17748,17558,16856,16410,16988,17310
.value	21728,21794,22372,22182,21480,21034,20588,20910
.value	23280,23346,22900,22710,24056,23610,24188,24510
.value	57600,57538,57988,58182,58888,59338,58764,58446
.value	61200,61138,60564,60758,59416,59866,60316,59998
.value	64800,64738,65188,65382,64040,64490,63916,63598
.value	62256,62194,61620,61814,62520,62970,63420,63102
.value	55616,55426,56004,56070,56904,57226,56780,56334
.value	55120,54930,54484,54550,53336,53658,54236,53790
.value	50528,50338,50916,50982,49768,50090,49644,49198
.value	52080,51890,51444,51510,52344,52666,53244,52798
.value	37248,36930,37380,37830,38536,38730,38156,38094
.value	40848,40530,39956,40406,39064,39258,39708,39646
.value	36256,35938,36388,36838,35496,35690,35116,35054
.value	33712,33394,32820,33270,33976,34170,34620,34558
.value	43456,43010,43588,43910,44744,44810,44364,44174
.value	42960,42514,42068,42390,41176,41242,41820,41630
.value	46560,46114,46692,47014,45800,45866,45420,45230
.value	48112,47666,47220,47542,48376,48442,49020,48830
.align	64
.Lrem_4bit:
.long	0,0,0,471859200,0,943718400,0,610271232
.long	0,1887436800,0,1822425088,0,1220542464,0,1423966208
.long	0,3774873600,0,4246732800,0,3644850176,0,3311403008
.long	0,2441084928,0,2376073216,0,2847932416,0,3051356160
.byte	71,72,65,83,72,32,102,111,114,32,120,56,54,44,32,67
.byte	82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112
.byte	112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62
.byte	0