/* fold-const.c revision 72562 */
1/* Fold a constant sub-tree into a single node for C-compiler
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000 Free Software Foundation, Inc.
4
5This file is part of GNU CC.
6
7GNU CC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 2, or (at your option)
10any later version.
11
12GNU CC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GNU CC; see the file COPYING.  If not, write to
19the Free Software Foundation, 59 Temple Place - Suite 330,
20Boston, MA 02111-1307, USA.  */
21
22/*@@ This file should be rewritten to use an arbitrary precision
23  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25  @@ The routines that translate from the ap rep should
26  @@ warn if precision et. al. is lost.
27  @@ This would also make life easier when this technology is used
28  @@ for cross-compilers.  */
29
30
31/* The entry points in this file are fold, size_int_wide, size_binop
32   and force_fit_type.
33
34   fold takes a tree as argument and returns a simplified tree.
35
36   size_binop takes a tree code for an arithmetic operation
37   and two operands that are trees, and produces a tree for the
38   result, assuming the type comes from `sizetype'.
39
40   size_int takes an integer value, and creates a tree constant
41   with type from `sizetype'.
42
43   force_fit_type takes a constant and prior overflow indicator, and
44   forces the value to fit the type.  It returns an overflow indicator.  */
45
46#include "config.h"
47#include "system.h"
48#include <setjmp.h>
49#include "flags.h"
50#include "tree.h"
51#include "rtl.h"
52#include "toplev.h"
53
54static void encode		PROTO((HOST_WIDE_INT *,
55				       HOST_WIDE_INT, HOST_WIDE_INT));
56static void decode		PROTO((HOST_WIDE_INT *,
57				       HOST_WIDE_INT *, HOST_WIDE_INT *));
58int div_and_round_double	PROTO((enum tree_code, int, HOST_WIDE_INT,
59				       HOST_WIDE_INT, HOST_WIDE_INT,
60				       HOST_WIDE_INT, HOST_WIDE_INT *,
61				       HOST_WIDE_INT *, HOST_WIDE_INT *,
62				       HOST_WIDE_INT *));
63static int split_tree		PROTO((tree, enum tree_code, tree *,
64				       tree *, int *));
65static tree int_const_binop	PROTO((enum tree_code, tree, tree, int, int));
66static tree const_binop		PROTO((enum tree_code, tree, tree, int));
67static tree fold_convert	PROTO((tree, tree));
68static enum tree_code invert_tree_comparison PROTO((enum tree_code));
69static enum tree_code swap_tree_comparison PROTO((enum tree_code));
70static int truth_value_p	PROTO((enum tree_code));
71static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
72static int twoval_comparison_p	PROTO((tree, tree *, tree *, int *));
73static tree eval_subst		PROTO((tree, tree, tree, tree, tree));
74static tree omit_one_operand	PROTO((tree, tree, tree));
75static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
76static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
77static tree make_bit_field_ref	PROTO((tree, tree, int, int, int));
78static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
79					      tree, tree));
80static tree decode_field_reference PROTO((tree, int *, int *,
81					  enum machine_mode *, int *,
82					  int *, tree *, tree *));
83static int all_ones_mask_p	PROTO((tree, int));
84static int simple_operand_p	PROTO((tree));
85static tree range_binop		PROTO((enum tree_code, tree, tree, int,
86				       tree, int));
87static tree make_range		PROTO((tree, int *, tree *, tree *));
88static tree build_range_check	PROTO((tree, tree, int, tree, tree));
89static int merge_ranges		PROTO((int *, tree *, tree *, int, tree, tree,
90				       int, tree, tree));
91static tree fold_range_test	PROTO((tree));
92static tree unextend		PROTO((tree, int, int, tree));
93static tree fold_truthop	PROTO((enum tree_code, tree, tree, tree));
94static tree strip_compound_expr PROTO((tree, tree));
95static int multiple_of_p	PROTO((tree, tree, tree));
96static tree constant_boolean_node PROTO((int, tree));
97static int count_cond		PROTO((tree, int));
98static void const_binop_1	PROTO((PTR));
99static void fold_convert_1	PROTO((PTR));
100
101#ifndef BRANCH_COST
102#define BRANCH_COST 1
103#endif
104
105/* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
106   Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
107   Then this yields nonzero if overflow occurred during the addition.
108   Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
109   Use `^' to test whether signs differ, and `< 0' to isolate the sign.  */
110#define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
111
112/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
113   We do that by representing the two-word integer in 4 words, with only
114   HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number.  */
115
116#define LOWPART(x) \
117  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
118#define HIGHPART(x) \
119  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
120#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
121
122/* Unpack a two-word integer into 4 words.
123   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
124   WORDS points to the array of HOST_WIDE_INTs.  */
125
126static void
127encode (words, low, hi)
128     HOST_WIDE_INT *words;
129     HOST_WIDE_INT low, hi;
130{
131  words[0] = LOWPART (low);
132  words[1] = HIGHPART (low);
133  words[2] = LOWPART (hi);
134  words[3] = HIGHPART (hi);
135}
136
137/* Pack an array of 4 words into a two-word integer.
138   WORDS points to the array of words.
139   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */
140
141static void
142decode (words, low, hi)
143     HOST_WIDE_INT *words;
144     HOST_WIDE_INT *low, *hi;
145{
146  *low = words[0] | words[1] * BASE;
147  *hi = words[2] | words[3] * BASE;
148}
149
150/* Make the integer constant T valid for its type
151   by setting to 0 or 1 all the bits in the constant
152   that don't belong in the type.
153   Yield 1 if a signed overflow occurs, 0 otherwise.
154   If OVERFLOW is nonzero, a signed overflow has already occurred
155   in calculating T, so propagate it.
156
157   Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
158   if it exists.  */
159
int
force_fit_type (t, overflow)
     tree t;
     int overflow;
{
  HOST_WIDE_INT low, high;
  register int prec;

  /* Real constants are only bounds-checked, and only when the target
     provides CHECK_FLOAT_VALUE.  */
  if (TREE_CODE (t) == REAL_CST)
    {
#ifdef CHECK_FLOAT_VALUE
      CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
			 overflow);
#endif
      return overflow;
    }

  /* Nothing to do for any other non-integer constant.  */
  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  /* Remember the incoming value so we can tell at the end whether
     truncation or sign extension changed any bits.  */
  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  /* Pointer constants are sized by the target pointer width, not by
     TYPE_PRECISION.  */
  if (POINTER_TYPE_P (TREE_TYPE (t)))
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;				/* Every stored bit is significant.  */
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      TREE_INT_CST_HIGH (t)
	&= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
    }
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow.  */
  if (TREE_UNSIGNED (TREE_TYPE (t)))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? (TREE_INT_CST_HIGH (t)
	     & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	{
	  TREE_INT_CST_HIGH (t)
	    |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
	}
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Yield nonzero if signed overflow occurred: either it was flagged on
     entry, or forcing the value to fit changed some bits.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
235
236/* Add two doubleword integers with doubleword result.
237   Each argument is given as two `HOST_WIDE_INT' pieces.
238   One argument is L1 and H1; the other, L2 and H2.
239   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
240
241int
242add_double (l1, h1, l2, h2, lv, hv)
243     HOST_WIDE_INT l1, h1, l2, h2;
244     HOST_WIDE_INT *lv, *hv;
245{
246  HOST_WIDE_INT l, h;
247
248  l = l1 + l2;
249  h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
250
251  *lv = l;
252  *hv = h;
253  return overflow_sum_sign (h1, h2, h);
254}
255
256/* Negate a doubleword integer with doubleword result.
257   Return nonzero if the operation overflows, assuming it's signed.
258   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
259   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
260
261int
262neg_double (l1, h1, lv, hv)
263     HOST_WIDE_INT l1, h1;
264     HOST_WIDE_INT *lv, *hv;
265{
266  if (l1 == 0)
267    {
268      *lv = 0;
269      *hv = - h1;
270      return (*hv & h1) < 0;
271    }
272  else
273    {
274      *lv = - l1;
275      *hv = ~ h1;
276      return 0;
277    }
278}
279
280/* Multiply two doubleword integers with doubleword result.
281   Return nonzero if the operation overflows, assuming it's signed.
282   Each argument is given as two `HOST_WIDE_INT' pieces.
283   One argument is L1 and H1; the other, L2 and H2.
284   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
285
286int
287mul_double (l1, h1, l2, h2, lv, hv)
288     HOST_WIDE_INT l1, h1, l2, h2;
289     HOST_WIDE_INT *lv, *hv;
290{
291  HOST_WIDE_INT arg1[4];
292  HOST_WIDE_INT arg2[4];
293  HOST_WIDE_INT prod[4 * 2];
294  register unsigned HOST_WIDE_INT carry;
295  register int i, j, k;
296  HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
297
298  encode (arg1, l1, h1);
299  encode (arg2, l2, h2);
300
301  bzero ((char *) prod, sizeof prod);
302
303  for (i = 0; i < 4; i++)
304    {
305      carry = 0;
306      for (j = 0; j < 4; j++)
307	{
308	  k = i + j;
309	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
310	  carry += arg1[i] * arg2[j];
311	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
312	  carry += prod[k];
313	  prod[k] = LOWPART (carry);
314	  carry = HIGHPART (carry);
315	}
316      prod[i + 4] = carry;
317    }
318
319  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */
320
321  /* Check for overflow by calculating the top half of the answer in full;
322     it should agree with the low half's sign bit.  */
323  decode (prod+4, &toplow, &tophigh);
324  if (h1 < 0)
325    {
326      neg_double (l2, h2, &neglow, &neghigh);
327      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
328    }
329  if (h2 < 0)
330    {
331      neg_double (l1, h1, &neglow, &neghigh);
332      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
333    }
334  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
335}
336
337/* Shift the doubleword integer in L1, H1 left by COUNT places
338   keeping only PREC bits of result.
339   Shift right if COUNT is negative.
340   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
341   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
342
void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     HOST_WIDE_INT l1, h1, count;
     int prec;
     HOST_WIDE_INT *lv, *hv;
     int arith;
{
  /* A negative count means shift right instead.  */
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The whole low word shifts into (or past) the high word.  */
      *hv = (unsigned HOST_WIDE_INT) l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The two-step right shift of L1, by (WIDTH - COUNT - 1) and then
	 by 1, is equivalent to a single shift by (WIDTH - COUNT) but
	 avoids undefined behavior (a shift by the full word width)
	 when COUNT is 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | ((unsigned HOST_WIDE_INT) l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = (unsigned HOST_WIDE_INT) l1 << count;
    }
}
373
374/* Shift the doubleword integer in L1, H1 right by COUNT places
375   keeping only PREC bits of result.  COUNT must be positive.
376   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
377   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
378
379void
380rshift_double (l1, h1, count, prec, lv, hv, arith)
381     HOST_WIDE_INT l1, h1, count;
382     int prec ATTRIBUTE_UNUSED;
383     HOST_WIDE_INT *lv, *hv;
384     int arith;
385{
386  unsigned HOST_WIDE_INT signmask;
387  signmask = (arith
388	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
389	      : 0);
390
391#ifdef SHIFT_COUNT_TRUNCATED
392  if (SHIFT_COUNT_TRUNCATED)
393    count %= prec;
394#endif
395
396  if (count >= HOST_BITS_PER_WIDE_INT)
397    {
398      *hv = signmask;
399      *lv = ((signmask << (2 * HOST_BITS_PER_WIDE_INT - count - 1) << 1)
400	     | ((unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT)));
401    }
402  else
403    {
404      *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
405	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
406      *hv = ((signmask << (HOST_BITS_PER_WIDE_INT - count))
407	     | ((unsigned HOST_WIDE_INT) h1 >> count));
408    }
409}
410
411/* Rotate the doubleword integer in L1, H1 left by COUNT places
412   keeping only PREC bits of result.
413   Rotate right if COUNT is negative.
414   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
415
416void
417lrotate_double (l1, h1, count, prec, lv, hv)
418     HOST_WIDE_INT l1, h1, count;
419     int prec;
420     HOST_WIDE_INT *lv, *hv;
421{
422  HOST_WIDE_INT s1l, s1h, s2l, s2h;
423
424  count %= prec;
425  if (count < 0)
426    count += prec;
427
428  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
429  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
430  *lv = s1l | s2l;
431  *hv = s1h | s2h;
432}
433
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
437
438void
439rrotate_double (l1, h1, count, prec, lv, hv)
440     HOST_WIDE_INT l1, h1, count;
441     int prec;
442     HOST_WIDE_INT *lv, *hv;
443{
444  HOST_WIDE_INT s1l, s1h, s2l, s2h;
445
446  count %= prec;
447  if (count < 0)
448    count += prec;
449
450  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
451  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
452  *lv = s1l | s2l;
453  *hv = s1h | s2h;
454}
455
456/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
457   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
458   CODE is a tree code for a kind of division, one of
459   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
460   or EXACT_DIV_EXPR
461   It controls how the quotient is rounded to a integer.
462   Return nonzero if the operation overflows.
463   UNS nonzero says do unsigned division.  */
464
465int
466div_and_round_double (code, uns,
467		      lnum_orig, hnum_orig, lden_orig, hden_orig,
468		      lquo, hquo, lrem, hrem)
469     enum tree_code code;
470     int uns;
471     HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
472     HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
473     HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
474{
475  int quo_neg = 0;
476  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
477  HOST_WIDE_INT den[4], quo[4];
478  register int i, j;
479  unsigned HOST_WIDE_INT work;
480  register unsigned HOST_WIDE_INT carry = 0;
481  HOST_WIDE_INT lnum = lnum_orig;
482  HOST_WIDE_INT hnum = hnum_orig;
483  HOST_WIDE_INT lden = lden_orig;
484  HOST_WIDE_INT hden = hden_orig;
485  int overflow = 0;
486
487  if ((hden == 0) && (lden == 0))
488    overflow = 1, lden = 1;
489
490  /* calculate quotient sign and convert operands to unsigned.  */
491  if (!uns)
492    {
493      if (hnum < 0)
494	{
495	  quo_neg = ~ quo_neg;
496	  /* (minimum integer) / (-1) is the only overflow case.  */
497	  if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
498	    overflow = 1;
499	}
500      if (hden < 0)
501	{
502	  quo_neg = ~ quo_neg;
503	  neg_double (lden, hden, &lden, &hden);
504	}
505    }
506
507  if (hnum == 0 && hden == 0)
508    {				/* single precision */
509      *hquo = *hrem = 0;
510      /* This unsigned division rounds toward zero.  */
511      *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
512      goto finish_up;
513    }
514
515  if (hnum == 0)
516    {				/* trivial case: dividend < divisor */
517      /* hden != 0 already checked.  */
518      *hquo = *lquo = 0;
519      *hrem = hnum;
520      *lrem = lnum;
521      goto finish_up;
522    }
523
524  bzero ((char *) quo, sizeof quo);
525
526  bzero ((char *) num, sizeof num);	/* to zero 9th element */
527  bzero ((char *) den, sizeof den);
528
529  encode (num, lnum, hnum);
530  encode (den, lden, hden);
531
532  /* Special code for when the divisor < BASE.  */
533  if (hden == 0 && lden < (HOST_WIDE_INT) BASE)
534    {
535      /* hnum != 0 already checked.  */
536      for (i = 4 - 1; i >= 0; i--)
537	{
538	  work = num[i] + carry * BASE;
539	  quo[i] = work / (unsigned HOST_WIDE_INT) lden;
540	  carry = work % (unsigned HOST_WIDE_INT) lden;
541	}
542    }
543  else
544    {
545      /* Full double precision division,
546	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
547    int num_hi_sig, den_hi_sig;
548    unsigned HOST_WIDE_INT quo_est, scale;
549
550    /* Find the highest non-zero divisor digit.  */
551    for (i = 4 - 1; ; i--)
552      if (den[i] != 0) {
553	den_hi_sig = i;
554	break;
555      }
556
557    /* Insure that the first digit of the divisor is at least BASE/2.
558       This is required by the quotient digit estimation algorithm.  */
559
560    scale = BASE / (den[den_hi_sig] + 1);
561    if (scale > 1) {		/* scale divisor and dividend */
562      carry = 0;
563      for (i = 0; i <= 4 - 1; i++) {
564	work = (num[i] * scale) + carry;
565	num[i] = LOWPART (work);
566	carry = HIGHPART (work);
567      } num[4] = carry;
568      carry = 0;
569      for (i = 0; i <= 4 - 1; i++) {
570	work = (den[i] * scale) + carry;
571	den[i] = LOWPART (work);
572	carry = HIGHPART (work);
573	if (den[i] != 0) den_hi_sig = i;
574      }
575    }
576
577    num_hi_sig = 4;
578
579    /* Main loop */
580    for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
581      /* guess the next quotient digit, quo_est, by dividing the first
582	 two remaining dividend digits by the high order quotient digit.
583	 quo_est is never low and is at most 2 high.  */
584      unsigned HOST_WIDE_INT tmp;
585
586      num_hi_sig = i + den_hi_sig + 1;
587      work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
588      if (num[num_hi_sig] != den[den_hi_sig])
589	quo_est = work / den[den_hi_sig];
590      else
591	quo_est = BASE - 1;
592
593      /* refine quo_est so it's usually correct, and at most one high.   */
594      tmp = work - quo_est * den[den_hi_sig];
595      if (tmp < BASE
596	  && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
597	quo_est--;
598
599      /* Try QUO_EST as the quotient digit, by multiplying the
600         divisor by QUO_EST and subtracting from the remaining dividend.
601	 Keep in mind that QUO_EST is the I - 1st digit.  */
602
603      carry = 0;
604      for (j = 0; j <= den_hi_sig; j++)
605	{
606	  work = quo_est * den[j] + carry;
607	  carry = HIGHPART (work);
608	  work = num[i + j] - LOWPART (work);
609	  num[i + j] = LOWPART (work);
610	  carry += HIGHPART (work) != 0;
611	}
612
613      /* if quo_est was high by one, then num[i] went negative and
614	 we need to correct things.  */
615
616      if (num[num_hi_sig] < carry)
617	{
618	  quo_est--;
619	  carry = 0;		/* add divisor back in */
620	  for (j = 0; j <= den_hi_sig; j++)
621	    {
622	      work = num[i + j] + den[j] + carry;
623	      carry = HIGHPART (work);
624	      num[i + j] = LOWPART (work);
625	    }
626	  num [num_hi_sig] += carry;
627	}
628
629      /* store the quotient digit.  */
630      quo[i] = quo_est;
631    }
632  }
633
634  decode (quo, lquo, hquo);
635
636 finish_up:
637  /* if result is negative, make it so.  */
638  if (quo_neg)
639    neg_double (*lquo, *hquo, lquo, hquo);
640
641  /* compute trial remainder:  rem = num - (quo * den)  */
642  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
643  neg_double (*lrem, *hrem, lrem, hrem);
644  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
645
646  switch (code)
647    {
648    case TRUNC_DIV_EXPR:
649    case TRUNC_MOD_EXPR:	/* round toward zero */
650    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
651      return overflow;
652
653    case FLOOR_DIV_EXPR:
654    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
655      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
656	{
657	  /* quo = quo - 1;  */
658	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
659		      lquo, hquo);
660	}
661      else return overflow;
662      break;
663
664    case CEIL_DIV_EXPR:
665    case CEIL_MOD_EXPR:		/* round toward positive infinity */
666      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
667	{
668	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
669		      lquo, hquo);
670	}
671      else return overflow;
672      break;
673
674    case ROUND_DIV_EXPR:
675    case ROUND_MOD_EXPR:	/* round to closest integer */
676      {
677	HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
678	HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
679
680	/* get absolute values */
681	if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
682	if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
683
684	/* if (2 * abs (lrem) >= abs (lden)) */
685	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
686		    labs_rem, habs_rem, &ltwice, &htwice);
687	if (((unsigned HOST_WIDE_INT) habs_den
688	     < (unsigned HOST_WIDE_INT) htwice)
689	    || (((unsigned HOST_WIDE_INT) habs_den
690		 == (unsigned HOST_WIDE_INT) htwice)
691		&& ((HOST_WIDE_INT unsigned) labs_den
692		    < (unsigned HOST_WIDE_INT) ltwice)))
693	  {
694	    if (*hquo < 0)
695	      /* quo = quo - 1;  */
696	      add_double (*lquo, *hquo,
697			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
698	    else
699	      /* quo = quo + 1; */
700	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
701			  lquo, hquo);
702	  }
703	else return overflow;
704      }
705      break;
706
707    default:
708      abort ();
709    }
710
711  /* compute true remainder:  rem = num - (quo * den)  */
712  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
713  neg_double (*lrem, *hrem, lrem, hrem);
714  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
715  return overflow;
716}
717
718#ifndef REAL_ARITHMETIC
719/* Effectively truncate a real value to represent the nearest possible value
720   in a narrower mode.  The result is actually represented in the same data
721   type as the argument, but its value is usually different.
722
723   A trap may occur during the FP operations and it is the responsibility
724   of the calling function to have a handler established.  */
725
REAL_VALUE_TYPE
real_value_truncate (mode, arg)
     enum machine_mode mode;
     REAL_VALUE_TYPE arg;
{
  /* REAL_VALUE_TRUNCATE does the mode-dependent narrowing; any FP trap
     it raises is the caller's responsibility (see the comment above).  */
  return REAL_VALUE_TRUNCATE (mode, arg);
}
733
734#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
735
736/* Check for infinity in an IEEE double precision number.  */
737
738int
739target_isinf (x)
740     REAL_VALUE_TYPE x;
741{
742  /* The IEEE 64-bit double format.  */
743  union {
744    REAL_VALUE_TYPE d;
745    struct {
746      unsigned sign      :  1;
747      unsigned exponent  : 11;
748      unsigned mantissa1 : 20;
749      unsigned mantissa2;
750    } little_endian;
751    struct {
752      unsigned mantissa2;
753      unsigned mantissa1 : 20;
754      unsigned exponent  : 11;
755      unsigned sign      :  1;
756    } big_endian;
757  } u;
758
759  u.d = dconstm1;
760  if (u.big_endian.sign == 1)
761    {
762      u.d = x;
763      return (u.big_endian.exponent == 2047
764	      && u.big_endian.mantissa1 == 0
765	      && u.big_endian.mantissa2 == 0);
766    }
767  else
768    {
769      u.d = x;
770      return (u.little_endian.exponent == 2047
771	      && u.little_endian.mantissa1 == 0
772	      && u.little_endian.mantissa2 == 0);
773    }
774}
775
776/* Check whether an IEEE double precision number is a NaN.  */
777
778int
779target_isnan (x)
780     REAL_VALUE_TYPE x;
781{
782  /* The IEEE 64-bit double format.  */
783  union {
784    REAL_VALUE_TYPE d;
785    struct {
786      unsigned sign      :  1;
787      unsigned exponent  : 11;
788      unsigned mantissa1 : 20;
789      unsigned mantissa2;
790    } little_endian;
791    struct {
792      unsigned mantissa2;
793      unsigned mantissa1 : 20;
794      unsigned exponent  : 11;
795      unsigned sign      :  1;
796    } big_endian;
797  } u;
798
799  u.d = dconstm1;
800  if (u.big_endian.sign == 1)
801    {
802      u.d = x;
803      return (u.big_endian.exponent == 2047
804	      && (u.big_endian.mantissa1 != 0
805		  || u.big_endian.mantissa2 != 0));
806    }
807  else
808    {
809      u.d = x;
810      return (u.little_endian.exponent == 2047
811	      && (u.little_endian.mantissa1 != 0
812		  || u.little_endian.mantissa2 != 0));
813    }
814}
815
816/* Check for a negative IEEE double precision number.  */
817
818int
819target_negative (x)
820     REAL_VALUE_TYPE x;
821{
822  /* The IEEE 64-bit double format.  */
823  union {
824    REAL_VALUE_TYPE d;
825    struct {
826      unsigned sign      :  1;
827      unsigned exponent  : 11;
828      unsigned mantissa1 : 20;
829      unsigned mantissa2;
830    } little_endian;
831    struct {
832      unsigned mantissa2;
833      unsigned mantissa1 : 20;
834      unsigned exponent  : 11;
835      unsigned sign      :  1;
836    } big_endian;
837  } u;
838
839  u.d = dconstm1;
840  if (u.big_endian.sign == 1)
841    {
842      u.d = x;
843      return u.big_endian.sign;
844    }
845  else
846    {
847      u.d = x;
848      return u.little_endian.sign;
849    }
850}
851#else /* Target not IEEE */
852
853/* Let's assume other float formats don't have infinity.
854   (This can be overridden by redefining REAL_VALUE_ISINF.)  */
855
856target_isinf (x)
857     REAL_VALUE_TYPE x;
858{
859  return 0;
860}
861
862/* Let's assume other float formats don't have NaNs.
863   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */
864
865target_isnan (x)
866     REAL_VALUE_TYPE x;
867{
868  return 0;
869}
870
871/* Let's assume other float formats don't have minus zero.
872   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */
873
874target_negative (x)
875     REAL_VALUE_TYPE x;
876{
877  return x < 0;
878}
879#endif /* Target not IEEE */
880
881/* Try to change R into its exact multiplicative inverse in machine mode
882   MODE.  Return nonzero function value if successful.  */
883
int
exact_real_inverse (mode, r)
     enum machine_mode mode;
     REAL_VALUE_TYPE *r;
{
  jmp_buf float_error;
  union
    {
      double d;
      unsigned short i[4];
    }x, t, y;
  int i;

  /* Usually disable if bounds checks are not reliable.  */
  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT) && !flag_pretend_float)
    return 0;

  /* Set array index to the less significant bits in the unions, depending
     on the endian-ness of the host doubles.
     Disable if insufficient information on the data structure.  */
#if HOST_FLOAT_FORMAT == UNKNOWN_FLOAT_FORMAT
  return 0;
#else
#if HOST_FLOAT_FORMAT == VAX_FLOAT_FORMAT
#define K 2
#else
#if HOST_FLOAT_FORMAT == IBM_FLOAT_FORMAT
#define K 2
#else
#define K (2 * HOST_FLOAT_WORDS_BIG_ENDIAN)
#endif
#endif
#endif

  /* Any host FP exception raised below longjmps back here; in that
     case give up on the optimization.  */
  if (setjmp (float_error))
    {
      /* Don't do the optimization if there was an arithmetic error.  */
fail:
      set_float_handler (NULL_PTR);
      return 0;
    }
  set_float_handler (float_error);

  /* Domain check the argument.  */
  x.d = *r;
  if (x.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (x.d) || REAL_VALUE_ISNAN (x.d))
    goto fail;
#endif

  /* Compute the reciprocal and check for numerical exactness.
     It is unnecessary to check all the significand bits to determine
     whether X is a power of 2.  If X is not, then it is impossible for
     the bottom half significand of both X and 1/X to be all zero bits.
     Hence we ignore the data structure of the top half and examine only
     the low order bits of the two significands.  */
  t.d = 1.0 / x.d;
  if (x.i[K] != 0 || x.i[K + 1] != 0 || t.i[K] != 0 || t.i[K + 1] != 0)
    goto fail;

  /* Truncate to the required mode and range-check the result.  */
  y.d = REAL_VALUE_TRUNCATE (mode, t.d);
#ifdef CHECK_FLOAT_VALUE
  /* I is both the "overflow" in/out flag for CHECK_FLOAT_VALUE and
     otherwise unused.  */
  i = 0;
  if (CHECK_FLOAT_VALUE (mode, y.d, i))
    goto fail;
#endif

  /* Fail if truncation changed the value.  */
  if (y.d != t.d || y.d == 0.0)
    goto fail;

#ifdef REAL_INFINITY
  if (REAL_VALUE_ISINF (y.d) || REAL_VALUE_ISNAN (y.d))
    goto fail;
#endif

  /* Output the reciprocal and return success flag.  */
  set_float_handler (NULL_PTR);
  *r = y.d;
  return 1;
}
969
970
971/* Convert C9X hexadecimal floating point string constant S.  Return
972   real value type in mode MODE.  This function uses the host computer's
973   fp arithmetic when there is no REAL_ARITHMETIC.  */
974
975REAL_VALUE_TYPE
976real_hex_to_f (s, mode)
977   char *s;
978   enum machine_mode mode;
979{
980   REAL_VALUE_TYPE ip;
981   char *p = s;
982   unsigned HOST_WIDE_INT low, high;
983   int frexpon, expon, shcount, nrmcount, k;
984   int sign, expsign, decpt, isfloat, isldouble, gotp, lost;
985   char c;
986
987   isldouble = 0;
988   isfloat = 0;
989   frexpon = 0;
990   expon = 0;
991   expsign = 1;
992   ip = 0.0;
993
994   while (*p == ' ' || *p == '\t')
995     ++p;
996
997   /* Sign, if any, comes first.  */
998   sign = 1;
999   if (*p == '-')
1000     {
1001       sign = -1;
1002       ++p;
1003     }
1004
1005   /* The string is supposed to start with 0x or 0X .  */
1006   if (*p == '0')
1007     {
1008       ++p;
1009       if (*p == 'x' || *p == 'X')
1010	 ++p;
1011       else
1012	 abort ();
1013     }
1014   else
1015     abort ();
1016
1017   while (*p == '0')
1018     ++p;
1019
1020   high = 0;
1021   low = 0;
1022   lost = 0; /* Nonzero low order bits shifted out and discarded.  */
1023   frexpon = 0;  /* Bits after the decimal point.  */
1024   expon = 0;  /* Value of exponent.  */
1025   decpt = 0;  /* How many decimal points.  */
1026   gotp = 0;  /* How many P's.  */
1027   shcount = 0;
1028   while ((c = *p) != '\0')
1029     {
1030       if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F')
1031	   || (c >= 'a' && c <= 'f'))
1032	 {
1033	   k = c & 0x7f;
1034	   if (k >= 'a')
1035	     k = k - 'a' + 10;
1036	   else if (k >= 'A')
1037	     k = k - 'A' + 10;
1038	   else
1039	     k = k - '0';
1040
1041	   if ((high & 0xf0000000) == 0)
1042	     {
1043	       high = (high << 4) + ((low >> 28) & 15);
1044	       low = (low << 4) + k;
1045	       shcount += 4;
1046	       if (decpt)
1047		 frexpon += 4;
1048	     }
1049	   else
1050	     {
1051	       /* Record nonzero lost bits.  */
1052	       lost |= k;
1053	       if (!decpt)
1054		 frexpon -= 4;
1055	     }
1056	   ++p;
1057	 }
1058       else if ( c == '.')
1059	 {
1060	   ++decpt;
1061	   ++p;
1062	 }
1063       else if (c == 'p' || c == 'P')
1064	 {
1065	   ++gotp;
1066	   ++p;
1067	   /* Sign of exponent.  */
1068	   if (*p == '-')
1069	     {
1070	       expsign = -1;
1071	       ++p;
1072	     }
1073	   /* Value of exponent.
1074	      The exponent field is a decimal integer.  */
1075	   while (isdigit(*p))
1076	     {
1077	       k = (*p++ & 0x7f) - '0';
1078	       expon = 10 * expon + k;
1079	     }
1080	   expon *= expsign;
1081	   /* F suffix is ambiguous in the significand part
1082	      so it must appear after the decimal exponent field.  */
1083	   if (*p == 'f' || *p == 'F')
1084	     {
1085	       isfloat = 1;
1086	       ++p;
1087	       break;
1088	     }
1089	 }
1090       else if (c == 'l' || c == 'L')
1091	 {
1092	   isldouble = 1;
1093	   ++p;
1094	   break;
1095	 }
1096       else
1097	 break;
1098     }
1099   /* Abort if last character read was not legitimate.  */
1100   c = *p;
1101   if ((c != '\0' && c != ' ' && c != '\n' && c != '\r') || (decpt > 1))
1102     abort ();
1103   /* There must be either one decimal point or one p.  */
1104   if (decpt == 0 && gotp == 0)
1105     abort ();
1106   shcount -= 4;
1107   if ((high == 0) && (low == 0))
1108     {
1109       return dconst0;
1110     }
1111
1112   /* Normalize.  */
1113   nrmcount = 0;
1114   if (high == 0)
1115     {
1116       high = low;
1117       low = 0;
1118       nrmcount += 32;
1119     }
1120   /* Leave a high guard bit for carry-out.  */
1121   if ((high & 0x80000000) != 0)
1122     {
1123       lost |= low & 1;
1124       low = (low >> 1) | (high << 31);
1125       high = high >> 1;
1126       nrmcount -= 1;
1127     }
1128   if ((high & 0xffff8000) == 0)
1129     {
1130       high = (high << 16) + ((low >> 16) & 0xffff);
1131       low = low << 16;
1132       nrmcount += 16;
1133     }
1134   while ((high & 0xc0000000) == 0)
1135     {
1136       high = (high << 1) + ((low >> 31) & 1);
1137       low = low << 1;
1138       nrmcount += 1;
1139     }
1140   if (isfloat || GET_MODE_SIZE(mode) == UNITS_PER_WORD)
1141     {
1142       /* Keep 24 bits precision, bits 0x7fffff80.
1143	  Rounding bit is 0x40.  */
1144       lost = lost | low | (high & 0x3f);
1145       low = 0;
1146       if (high & 0x40)
1147	 {
1148	   if ((high & 0x80) || lost)
1149	     high += 0x40;
1150	 }
1151       high &= 0xffffff80;
1152     }
1153   else
1154     {
1155       /* We need real.c to do long double formats, so here default
1156	  to double precision.  */
1157#if HOST_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
1158       /* IEEE double.
1159	  Keep 53 bits precision, bits 0x7fffffff fffffc00.
1160	  Rounding bit is low word 0x200.  */
1161       lost = lost | (low & 0x1ff);
1162       if (low & 0x200)
1163	 {
1164	   if ((low & 0x400) || lost)
1165	     {
1166	       low = (low + 0x200) & 0xfffffc00;
1167	       if (low == 0)
1168		 high += 1;
1169	     }
1170	 }
1171       low &= 0xfffffc00;
1172#else
1173       /* Assume it's a VAX with 56-bit significand,
1174          bits 0x7fffffff ffffff80.  */
1175       lost = lost | (low & 0x7f);
1176       if (low & 0x40)
1177	 {
1178	   if ((low & 0x80) || lost)
1179	     {
1180	       low = (low + 0x40) & 0xffffff80;
1181	       if (low == 0)
1182		 high += 1;
1183	     }
1184	 }
1185       low &= 0xffffff80;
1186#endif
1187     }
1188   ip = (double) high;
1189   ip =  REAL_VALUE_LDEXP (ip, 32) + (double) low;
1190   /* Apply shifts and exponent value as power of 2.  */
1191   ip = REAL_VALUE_LDEXP (ip, expon - (nrmcount + frexpon));
1192
1193   if (sign < 0)
1194     ip = -ip;
1195   return ip;
1196}
1197
1198#endif /* no REAL_ARITHMETIC */
1199
1200/* Split a tree IN into a constant and a variable part
1201   that could be combined with CODE to make IN.
1202   CODE must be a commutative arithmetic operation.
1203   Store the constant part into *CONP and the variable in &VARP.
1204   Return 1 if this was done; zero means the tree IN did not decompose
1205   this way.
1206
1207   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
1208   Therefore, we must tell the caller whether the variable part
1209   was subtracted.  We do this by storing 1 or -1 into *VARSIGNP.
1210   The value stored is the coefficient for the variable term.
1211   The constant term we return should always be added;
1212   we negate it if necessary.  */
1213
1214static int
1215split_tree (in, code, varp, conp, varsignp)
1216     tree in;
1217     enum tree_code code;
1218     tree *varp, *conp;
1219     int *varsignp;
1220{
1221  register tree outtype = TREE_TYPE (in);
1222  *varp = 0;
1223  *conp = 0;
1224
1225  /* Strip any conversions that don't change the machine mode.  */
1226  while ((TREE_CODE (in) == NOP_EXPR
1227	  || TREE_CODE (in) == CONVERT_EXPR)
1228	 && (TYPE_MODE (TREE_TYPE (in))
1229	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
1230    in = TREE_OPERAND (in, 0);
1231
1232  if (TREE_CODE (in) == code
1233      || (! FLOAT_TYPE_P (TREE_TYPE (in))
1234	  /* We can associate addition and subtraction together
1235	     (even though the C standard doesn't say so)
1236	     for integers because the value is not affected.
1237	     For reals, the value might be affected, so we can't.  */
1238	  && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1239	      || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1240    {
1241      enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
1242      if (code == INTEGER_CST)
1243	{
1244	  *conp = TREE_OPERAND (in, 0);
1245	  *varp = TREE_OPERAND (in, 1);
1246	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1247	      && TREE_TYPE (*varp) != outtype)
1248	    *varp = convert (outtype, *varp);
1249	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1250	  return 1;
1251	}
1252      if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
1253	{
1254	  *conp = TREE_OPERAND (in, 1);
1255	  *varp = TREE_OPERAND (in, 0);
1256	  *varsignp = 1;
1257	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1258	      && TREE_TYPE (*varp) != outtype)
1259	    *varp = convert (outtype, *varp);
1260	  if (TREE_CODE (in) == MINUS_EXPR)
1261	    {
1262	      /* If operation is subtraction and constant is second,
1263		 must negate it to get an additive constant.
1264		 And this cannot be done unless it is a manifest constant.
1265		 It could also be the address of a static variable.
1266		 We cannot negate that, so give up.  */
1267	      if (TREE_CODE (*conp) == INTEGER_CST)
1268		/* Subtracting from integer_zero_node loses for long long.  */
1269		*conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
1270	      else
1271		return 0;
1272	    }
1273	  return 1;
1274	}
1275      if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
1276	{
1277	  *conp = TREE_OPERAND (in, 0);
1278	  *varp = TREE_OPERAND (in, 1);
1279	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
1280	      && TREE_TYPE (*varp) != outtype)
1281	    *varp = convert (outtype, *varp);
1282	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
1283	  return 1;
1284	}
1285    }
1286  return 0;
1287}
1288
1289/* Combine two integer constants ARG1 and ARG2 under operation CODE
1290   to produce a new constant.
1291
1292   If NOTRUNC is nonzero, do not truncate the result to fit the data type.
1293   If FORSIZE is nonzero, compute overflow for unsigned types.  */
1294
static tree
int_const_binop (code, arg1, arg2, notrunc, forsize)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc, forsize;
{
  /* Operands and result as two-word (HIGH:LOW) integers.  */
  HOST_WIDE_INT int1l, int1h, int2l, int2h;
  HOST_WIDE_INT low, hi;
  HOST_WIDE_INT garbagel, garbageh;
  register tree t;
  int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case BIT_ANDTC_EXPR:
      low = int1l & ~int2l, hi = int1h & ~int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count.  */
      int2l = - int2l;
      /* ... fall through ... */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l,
		     TYPE_PRECISION (TREE_TYPE (arg1)),
		     &low, &hi,
		     !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      /* A right rotate is a left rotate by a negated count.  */
      int2l = - int2l;
      /* ... fall through ... */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l,
		      TYPE_PRECISION (TREE_TYPE (arg1)),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtraction is addition of the negation; overflow of the
	 sum is detected from the sign pattern of the operands.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = overflow_sum_sign (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.
	 Both operands nonnegative and fitting in one word: use
	 host division directly.  */
      if (int2h == 0 && int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;
	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ... */

    case ROUND_DIV_EXPR:
      /* Division by one and division of a value by itself need no
	 arithmetic.  */
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.
	 Same single-word fast path as for division above.  */
      if (int2h == 0 && int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ... */

    case ROUND_MOD_EXPR:
      /* For the remainder, discard the quotient and keep the
	 remainder outputs of div_and_round_double.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW temporarily holds the result of the comparison
	 ARG1 < ARG2, done signed or unsigned as appropriate.  */
      if (uns)
	{
	  low = (((unsigned HOST_WIDE_INT) int1h
		  < (unsigned HOST_WIDE_INT) int2h)
		 || (((unsigned HOST_WIDE_INT) int1h
		      == (unsigned HOST_WIDE_INT) int2h)
		     && ((unsigned HOST_WIDE_INT) int1l
			 < (unsigned HOST_WIDE_INT) int2l)));
	}
      else
	{
	  low = ((int1h < int2h)
		 || ((int1h == int2h)
		     && ((unsigned HOST_WIDE_INT) int1l
			 < (unsigned HOST_WIDE_INT) int2l)));
	}
      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* For small nonnegative sizetype results, reuse the cached nodes
     made by size_int.  */
  if (TREE_TYPE (arg1) == sizetype && hi == 0
      && low >= 0
      && (TYPE_MAX_VALUE (sizetype) == NULL
	  || low <= TREE_INT_CST_LOW (TYPE_MAX_VALUE (sizetype)))
      && ! overflow
      && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    t = size_int (low);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc ? (!uns || forsize) && overflow
	: force_fit_type (t, (!uns || forsize) && overflow) && ! no_overflow)
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));
  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (forsize
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;
  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}
1481
/* Argument block passed through do_float_handler to const_binop_1,
   so real-constant folding can run under a float trap handler.  */

struct cb_args
{
  /* Input */
  tree arg1;			/* Supplies the result's type.  */
  REAL_VALUE_TYPE d1, d2;	/* The two real operand values.  */
  enum tree_code code;		/* The operation to perform.  */
  /* Output */
  tree t;			/* The folded REAL_CST result.  */
};
1491
/* Worker for const_binop on REAL_CST operands.  DATA points to a
   struct cb_args carrying the operands and operation; the folded
   REAL_CST is stored back in DATA->t.  This is called through
   do_float_handler so the caller can trap floating point
   exceptions raised while evaluating the operation.  */

static void
const_binop_1 (data)
  PTR data;
{
  struct cb_args * args = (struct cb_args *) data;
  REAL_VALUE_TYPE value;

#ifdef REAL_ARITHMETIC
  /* Software floating point: let the emulator do the operation.  */
  REAL_ARITHMETIC (value, args->code, args->d1, args->d2);
#else
  /* No emulator: compute with the host's native arithmetic.  */
  switch (args->code)
    {
    case PLUS_EXPR:
      value = args->d1 + args->d2;
      break;

    case MINUS_EXPR:
      value = args->d1 - args->d2;
      break;

    case MULT_EXPR:
      value = args->d1 * args->d2;
      break;

    case RDIV_EXPR:
#ifndef REAL_INFINITY
      if (args->d2 == 0)
	abort ();
#endif

      value = args->d1 / args->d2;
      break;

    case MIN_EXPR:
      value = MIN (args->d1, args->d2);
      break;

    case MAX_EXPR:
      value = MAX (args->d1, args->d2);
      break;

    default:
      abort ();
    }
#endif /* no REAL_ARITHMETIC */
  /* Truncate to the target mode's precision before building the node.  */
  args->t =
    build_real (TREE_TYPE (args->arg1),
		real_value_truncate (TYPE_MODE (TREE_TYPE (args->arg1)),
				     value));
}
1542
1543/* Combine two constants ARG1 and ARG2 under operation CODE
1544   to produce a new constant.
1545   We assume ARG1 and ARG2 have the same data type,
1546   or at least are the same kind of constant and the same machine mode.
1547
1548   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1549
static tree
const_binop (code, arg1, arg2, notrunc)
     enum tree_code code;
     register tree arg1, arg2;
     int notrunc;
{
  STRIP_NOPS (arg1); STRIP_NOPS (arg2);

  /* Integer constants are handled by the two-word integer folder.  */
  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc, 0);

#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      int overflow = 0;
      tree t;
      struct cb_args args;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      /* Setup input for const_binop_1() */
      args.arg1 = arg1;
      args.d1 = d1;
      args.d2 = d2;
      args.code = code;

      if (do_float_handler (const_binop_1, (PTR) &args))
	{
	  /* Receive output from const_binop_1() */
	  t = args.t;
	}
      else
	{
	  /* We got an exception from const_binop_1(); flag the result
	     as an overflow rather than folding to a bogus value.  */
	  t = copy_node (arg1);
	  overflow = 1;
	}

      TREE_OVERFLOW (t)
	= (force_fit_type (t, overflow)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree type = TREE_TYPE (arg1);
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  /* (r1 + i1*i) * (r2 + i2*i)
	     = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    /* Multiply numerator and denominator by the conjugate of
	       the divisor; the denominator becomes |r2 + i2*i|^2.  */
	    register tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  /* Not a constant kind we know how to fold.  */
  return 0;
}
1686
1687/* Return an INTEGER_CST with value V .  The type is determined by bit_p:
1688   if it is zero, the type is taken from sizetype; if it is one, the type
1689   is taken from bitsizetype.  */
1690
1691tree
1692size_int_wide (number, high, bit_p)
1693     unsigned HOST_WIDE_INT number, high;
1694     int bit_p;
1695{
1696  register tree t;
1697  /* Type-size nodes already made for small sizes.  */
1698  static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1][2];
1699
1700  if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && ! high
1701      && size_table[number][bit_p] != 0)
1702    return size_table[number][bit_p];
1703  if (number < 2*HOST_BITS_PER_WIDE_INT + 1 && ! high)
1704    {
1705      push_obstacks_nochange ();
1706      /* Make this a permanent node.  */
1707      end_temporary_allocation ();
1708      t = build_int_2 (number, 0);
1709      TREE_TYPE (t) = bit_p ? bitsizetype : sizetype;
1710      size_table[number][bit_p] = t;
1711      pop_obstacks ();
1712    }
1713  else
1714    {
1715      t = build_int_2 (number, high);
1716      TREE_TYPE (t) = bit_p ? bitsizetype : sizetype;
1717      TREE_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (t) = force_fit_type (t, 0);
1718    }
1719  return t;
1720}
1721
1722/* Combine operands OP1 and OP2 with arithmetic operation CODE.
1723   CODE is a tree code.  Data type is taken from `sizetype',
1724   If the operands are constant, so is the result.  */
1725
1726tree
1727size_binop (code, arg0, arg1)
1728     enum tree_code code;
1729     tree arg0, arg1;
1730{
1731  /* Handle the special case of two integer constants faster.  */
1732  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1733    {
1734      /* And some specific cases even faster than that.  */
1735      if (code == PLUS_EXPR && integer_zerop (arg0))
1736	return arg1;
1737      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1738	       && integer_zerop (arg1))
1739	return arg0;
1740      else if (code == MULT_EXPR && integer_onep (arg0))
1741	return arg1;
1742
1743      /* Handle general case of two integer constants.  */
1744      return int_const_binop (code, arg0, arg1, 0, 1);
1745    }
1746
1747  if (arg0 == error_mark_node || arg1 == error_mark_node)
1748    return error_mark_node;
1749
1750  return fold (build (code, sizetype, arg0, arg1));
1751}
1752
1753/* Combine operands OP1 and OP2 with arithmetic operation CODE.
1754   CODE is a tree code.  Data type is taken from `ssizetype',
1755   If the operands are constant, so is the result.  */
1756
1757tree
1758ssize_binop (code, arg0, arg1)
1759     enum tree_code code;
1760     tree arg0, arg1;
1761{
1762  /* Handle the special case of two integer constants faster.  */
1763  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1764    {
1765      /* And some specific cases even faster than that.  */
1766      if (code == PLUS_EXPR && integer_zerop (arg0))
1767	return arg1;
1768      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1769	       && integer_zerop (arg1))
1770	return arg0;
1771      else if (code == MULT_EXPR && integer_onep (arg0))
1772	return arg1;
1773
1774      /* Handle general case of two integer constants.  We convert
1775         arg0 to ssizetype because int_const_binop uses its type for the
1776	 return value.  */
1777      arg0 = convert (ssizetype, arg0);
1778      return int_const_binop (code, arg0, arg1, 0, 0);
1779    }
1780
1781  if (arg0 == error_mark_node || arg1 == error_mark_node)
1782    return error_mark_node;
1783
1784  return fold (build (code, ssizetype, arg0, arg1));
1785}
1786
/* Argument block passed through do_float_handler to fold_convert_1.  */

struct fc_args
{
  /* Input */
  tree arg1;			/* The REAL_CST being converted.  */
  tree type;			/* The target real type.  */
  /* Output */
  tree t;			/* The truncated REAL_CST result.  */
};
1794
1795static void
1796fold_convert_1 (data)
1797  PTR data;
1798{
1799  struct fc_args * args = (struct fc_args *) data;
1800
1801  args->t = build_real (args->type,
1802			real_value_truncate (TYPE_MODE (args->type),
1803					     TREE_REAL_CST (args->arg1)));
1804}
1805
1806/* Given T, a tree representing type conversion of ARG1, a constant,
1807   return a constant tree representing the result of conversion.  */
1808
static tree
fold_convert (t, arg1)
     register tree t;
     register tree arg1;
{
  register tree type = TREE_TYPE (t);
  int overflow = 0;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* If we would build a constant wider than GCC supports,
	     leave the conversion unfolded.  */
	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
	    return t;

	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  /* Indicate an overflow if (1) ARG1 already overflowed,
	     or (2) force_fit_type indicates an overflow.
	     Tell force_fit_type that an overflow has already occurred
	     if ARG1 is a too-large unsigned value and T is signed.
	     But don't indicate an overflow if converting a pointer.  */
	  TREE_OVERFLOW (t)
	    = ((force_fit_type (t,
				(TREE_INT_CST_HIGH (arg1) < 0
				 && (TREE_UNSIGNED (type)
				    < TREE_UNSIGNED (TREE_TYPE (arg1)))))
		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
	       || TREE_OVERFLOW (arg1));
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Real-to-integer conversion: range check against the
	     integer type's bounds, then truncate toward zero.  */
	  /* Don't initialize these, use assignments.
	     Initialized local aggregates don't work on old compilers.  */
	  REAL_VALUE_TYPE x;
	  REAL_VALUE_TYPE l;
	  REAL_VALUE_TYPE u;
	  tree type1 = TREE_TYPE (arg1);
	  int no_upper_bound;

	  x = TREE_REAL_CST (arg1);
	  l = real_value_from_int_cst (type1, TYPE_MIN_VALUE (type));

	  no_upper_bound = (TYPE_MAX_VALUE (type) == NULL);
	  if (!no_upper_bound)
	    u = real_value_from_int_cst (type1, TYPE_MAX_VALUE (type));

	  /* See if X will be in range after truncation towards 0.
	     To compensate for truncation, move the bounds away from 0,
	     but reject if X exactly equals the adjusted bounds.  */
#ifdef REAL_ARITHMETIC
	  REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
	  if (!no_upper_bound)
	    REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
#else
	  l--;
	  if (!no_upper_bound)
	    u++;
#endif
	  /* If X is a NaN, use zero instead and show we have an overflow.
	     Otherwise, range check.  */
	  if (REAL_VALUE_ISNAN (x))
	    overflow = 1, x = dconst0;
	  else if (! (REAL_VALUES_LESS (l, x)
		      && !no_upper_bound
		      && REAL_VALUES_LESS (x, u)))
	    /* NOTE(review): when no_upper_bound is set this condition is
	       always true, so such conversions are always flagged as
	       overflow even when X is above L — looks suspicious; confirm
	       intended semantics before relying on it.  */
	    overflow = 1;

#ifndef REAL_ARITHMETIC
	  {
	    /* Split X into two host words by dividing by 2^(bits/2)
	       twice; handle the sign separately via neg_double.  */
	    HOST_WIDE_INT low, high;
	    HOST_WIDE_INT half_word
	      = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);

	    if (x < 0)
	      x = -x;

	    high = (HOST_WIDE_INT) (x / half_word / half_word);
	    x -= (REAL_VALUE_TYPE) high * half_word * half_word;
	    if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
	      {
		low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
		low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
	      }
	    else
	      low = (HOST_WIDE_INT) x;
	    if (TREE_REAL_CST (arg1) < 0)
	      neg_double (low, high, &low, &high);
	    t = build_int_2 (low, high);
	  }
#else
	  {
	    HOST_WIDE_INT low, high;
	    REAL_VALUE_TO_INT (&low, &high, x);
	    t = build_int_2 (low, high);
	  }
#endif
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	}
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* A NaN converts to a NaN of the new type unchanged.  */
	  struct fc_args args;

	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
	    {
	      t = arg1;
	      TREE_TYPE (arg1) = type;
	      return t;
	    }

	  /* Setup input for fold_convert_1() */
	  args.arg1 = arg1;
	  args.type = type;

	  if (do_float_handler (fold_convert_1, (PTR) &args))
	    {
	      /* Receive output from fold_convert_1() */
	      t = args.t;
	    }
	  else
	    {
	      /* We got an exception from fold_convert_1() */
	      overflow = 1;
	      t = copy_node (arg1);
	    }

	  TREE_OVERFLOW (t)
	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
	  TREE_CONSTANT_OVERFLOW (t)
	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
	  return t;
	}
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
1966
1967/* Return an expr equal to X but certainly not valid as an lvalue.  */
1968
1969tree
1970non_lvalue (x)
1971     tree x;
1972{
1973  tree result;
1974
1975  /* These things are certainly not lvalues.  */
1976  if (TREE_CODE (x) == NON_LVALUE_EXPR
1977      || TREE_CODE (x) == INTEGER_CST
1978      || TREE_CODE (x) == REAL_CST
1979      || TREE_CODE (x) == STRING_CST
1980      || TREE_CODE (x) == ADDR_EXPR)
1981    return x;
1982
1983  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1984  TREE_CONSTANT (result) = TREE_CONSTANT (x);
1985  return result;
1986}
1987
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by
   pedantic_non_lvalue below.  */

int pedantic_lvalues;
1992
1993/* When pedantic, return an expr equal to X but certainly not valid as a
1994   pedantic lvalue.  Otherwise, return X.  */
1995
1996tree
1997pedantic_non_lvalue (x)
1998     tree x;
1999{
2000  if (pedantic_lvalues)
2001    return non_lvalue (x);
2002  else
2003    return x;
2004}
2005
2006/* Given a tree comparison code, return the code that is the logical inverse
2007   of the given code.  It is not safe to do this for floating-point
2008   comparisons, except for NE_EXPR and EQ_EXPR.  */
2009
2010static enum tree_code
2011invert_tree_comparison (code)
2012     enum tree_code code;
2013{
2014  switch (code)
2015    {
2016    case EQ_EXPR:
2017      return NE_EXPR;
2018    case NE_EXPR:
2019      return EQ_EXPR;
2020    case GT_EXPR:
2021      return LE_EXPR;
2022    case GE_EXPR:
2023      return LT_EXPR;
2024    case LT_EXPR:
2025      return GE_EXPR;
2026    case LE_EXPR:
2027      return GT_EXPR;
2028    default:
2029      abort ();
2030    }
2031}
2032
2033/* Similar, but return the comparison that results if the operands are
2034   swapped.  This is safe for floating-point.  */
2035
2036static enum tree_code
2037swap_tree_comparison (code)
2038     enum tree_code code;
2039{
2040  switch (code)
2041    {
2042    case EQ_EXPR:
2043    case NE_EXPR:
2044      return code;
2045    case GT_EXPR:
2046      return LT_EXPR;
2047    case GE_EXPR:
2048      return LE_EXPR;
2049    case LT_EXPR:
2050      return GT_EXPR;
2051    case LE_EXPR:
2052      return GE_EXPR;
2053    default:
2054      abort ();
2055    }
2056}
2057
2058/* Return nonzero if CODE is a tree code that represents a truth value.  */
2059
2060static int
2061truth_value_p (code)
2062     enum tree_code code;
2063{
2064  return (TREE_CODE_CLASS (code) == '<'
2065	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2066	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2067	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2068}
2069
2070/* Return nonzero if two operands are necessarily equal.
2071   If ONLY_CONST is non-zero, only return non-zero for constants.
2072   This function tests whether the operands are indistinguishable;
2073   it does not test whether they are equal using C's == operation.
2074   The distinction is important for IEEE floating point, because
2075   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2076   (2) two NaNs may be indistinguishable, but NaN!=NaN.  */
2077
int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us. In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! only_const
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	/* Integer constants must agree in both halves of their
	   double-word value, and neither may have overflowed.  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
		&& TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1));

      case REAL_CST:
	/* Use IDENTICAL, not ==, so that -0.0 and 0.0 are distinguished
	   (see the function comment above).  */
	return (! TREE_CONSTANT_OVERFLOW (arg0)
		&& ! TREE_CONSTANT_OVERFLOW (arg1)
		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
					  TREE_REAL_CST (arg1)));

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 only_const)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    only_const));

      case STRING_CST:
	/* Two string constants are equal iff their lengths and bytes
	   match exactly.  */
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			      TREE_STRING_POINTER (arg1),
			      TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (only_const)
    return 0;

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
			      0))
	return 1;

      /* For commutative ops, allow the other order.  */
      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
	       || TREE_CODE (arg0) == BIT_IOR_EXPR
	       || TREE_CODE (arg0) == BIT_XOR_EXPR
	       || TREE_CODE (arg0) == BIT_AND_EXPR
	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), 0));

    case 'r':
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contains a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  /* Object, bit size and bit position must all match.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	default:
	  return 0;
	}

    case 'e':
      /* The only expression we handle here is RTL_EXPR, compared by
	 its underlying RTL.  */
      if (TREE_CODE (arg0) == RTL_EXPR)
	return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
      return 0;

    default:
      return 0;
    }
}
2220
2221/* Similar to operand_equal_p, but see if ARG0 might have been made by
2222   shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2223
2224   When in doubt, return 0.  */
2225
static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned correct_width;

  /* Identical operands need no further analysis.  */
  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* The shorten_compare transformation only applies to integral
     types, so anything else cannot match.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert (signed_or_unsigned_type (unsignedp1,
						  TREE_TYPE (primarg1)),
			 primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  /* When in doubt, say they are not equal.  */
  return 0;
}
2278
2279/* See if ARG is an expression that is either a comparison or is performing
2280   arithmetic on comparisons.  The comparisons must only be comparing
2281   two different values, which will be stored in *CVAL1 and *CVAL2; if
2282   they are non-zero it means that some operands have already been found.
2283   No variables may be used anywhere else in the expression except in the
2284   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2285   the expression and save_expr needs to be called with CVAL1 and CVAL2.
2286
2287   If this is true, return 1.  Otherwise, return zero.  */
2288
static int
twoval_comparison_p (arg, cval1, cval2, save_p)
     tree arg;
     tree *cval1, *cval2;
     int *save_p;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  TRUTH_NOT_EXPR is
     treated as a unary operation; the short-circuit and compound
     operators as binary ones.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  /* ??? Disable this since the SAVE_EXPR might already be in use outside
     the expression.  There may be no way to make this work, but it needs
     to be looked at again for 2.6.  */
#if 0
  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = '1';
      *save_p = 1;
    }
#endif

  switch (class)
    {
    case '1':
      /* A unary operation qualifies if its operand does.  */
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      /* A binary operation qualifies if both operands do.  */
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case 'c':
      /* Constants contain no variables, so they are always acceptable.  */
      return 1;

    case 'e':
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2382
2383/* ARG is a tree that is known to contain just arithmetic operations and
2384   comparisons.  Evaluate the operations in the tree substituting NEW0 for
2385   any occurrence of OLD0 as an operand of a comparison and likewise for
2386   NEW1 and OLD1.  */
2387
static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here, mirroring the
     classification in twoval_comparison_p.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* Substitute within the single operand and rebuild.  */
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      /* Substitute within both operands and rebuild.  */
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value (second) operand matters for evaluation.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	default:
	  break;
	}
      /* fall through - ??? */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }

    default:
      /* Anything else is returned unchanged.  */
      return arg;
    }
}
2466
2467/* Return a tree for the case when the result of an expression is RESULT
2468   converted to TYPE and OMITTED was previously an operand of the expression
2469   but is now not needed (e.g., we folded OMITTED * 0).
2470
2471   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
2472   the conversion of RESULT to TYPE.  */
2473
2474static tree
2475omit_one_operand (type, result, omitted)
2476     tree type, result, omitted;
2477{
2478  tree t = convert (type, result);
2479
2480  if (TREE_SIDE_EFFECTS (omitted))
2481    return build (COMPOUND_EXPR, type, omitted, t);
2482
2483  return non_lvalue (t);
2484}
2485
2486/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
2487
2488static tree
2489pedantic_omit_one_operand (type, result, omitted)
2490     tree type, result, omitted;
2491{
2492  tree t = convert (type, result);
2493
2494  if (TREE_SIDE_EFFECTS (omitted))
2495    return build (COMPOUND_EXPR, type, omitted, t);
2496
2497  return pedantic_non_lvalue (t);
2498}
2499
2500
2501
2502/* Return a simplified tree node for the truth-negation of ARG.  This
2503   never alters ARG itself.  We assume that ARG is an operation that
2504   returns a truth value (0 or 1).  */
2505
tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* Pass error nodes through untouched.  */
  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
	  && !flag_fast_math && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	return build (invert_tree_comparison (code), type,
		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      /* A constant inverts to 1 iff both halves of its value are zero.  */
      return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
					 && TREE_INT_CST_HIGH (arg) == 0, 0));

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build (TRUTH_XOR_EXPR, type,
		      invert_truthvalue (TREE_OPERAND (arg, 0)),
		      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels out.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      /* Invert both arms of the conditional; the condition is kept.  */
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the value (second) operand needs inverting.  */
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the inversion inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* Only X & 1 is a truth value we know how to invert: it becomes
	 a test against zero.  */
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }
  /* Anything not handled above must be a boolean-typed truth value;
     anything else indicates an internal inconsistency.  */
  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();
  return build1 (TRUTH_NOT_EXPR, type, arg);
}
2610
2611/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2612   operands are another bit-wise operation with a common input.  If so,
2613   distribute the bit operations to save an operation and possibly two if
2614   constants are involved.  For example, convert
2615   	(A | B) & (A | C) into A | (B & C)
2616   Further simplification will occur if B and C are constants.
2617
2618   If this optimization cannot be done, 0 will be returned.  */
2619
2620static tree
2621distribute_bit_expr (code, type, arg0, arg1)
2622     enum tree_code code;
2623     tree type;
2624     tree arg0, arg1;
2625{
2626  tree common;
2627  tree left, right;
2628
2629  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2630      || TREE_CODE (arg0) == code
2631      || (TREE_CODE (arg0) != BIT_AND_EXPR
2632	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
2633    return 0;
2634
2635  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2636    {
2637      common = TREE_OPERAND (arg0, 0);
2638      left = TREE_OPERAND (arg0, 1);
2639      right = TREE_OPERAND (arg1, 1);
2640    }
2641  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2642    {
2643      common = TREE_OPERAND (arg0, 0);
2644      left = TREE_OPERAND (arg0, 1);
2645      right = TREE_OPERAND (arg1, 0);
2646    }
2647  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2648    {
2649      common = TREE_OPERAND (arg0, 1);
2650      left = TREE_OPERAND (arg0, 0);
2651      right = TREE_OPERAND (arg1, 1);
2652    }
2653  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2654    {
2655      common = TREE_OPERAND (arg0, 1);
2656      left = TREE_OPERAND (arg0, 0);
2657      right = TREE_OPERAND (arg1, 0);
2658    }
2659  else
2660    return 0;
2661
2662  return fold (build (TREE_CODE (arg0), type, common,
2663		      fold (build (code, type, left, right))));
2664}
2665
2666/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2667   starting at BITPOS.  The field is unsigned if UNSIGNEDP is non-zero.  */
2668
2669static tree
2670make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2671     tree inner;
2672     tree type;
2673     int bitsize, bitpos;
2674     int unsignedp;
2675{
2676  tree result = build (BIT_FIELD_REF, type, inner,
2677		       size_int (bitsize), bitsize_int (bitpos, 0L));
2678
2679  TREE_UNSIGNED (result) = unsignedp;
2680
2681  return result;
2682}
2683
2684/* Optimize a bit-field compare.
2685
2686   There are two cases:  First is a compare against a constant and the
2687   second is a comparison of two items where the fields are at the same
2688   bit position relative to the start of a chunk (byte, halfword, word)
2689   large enough to contain it.  In these cases we can avoid the shift
2690   implicit in bitfield extractions.
2691
2692   For constants, we emit a compare of the shifted constant with the
2693   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2694   compared.  For two fields at the same position, we do the ANDs with the
2695   similar mask and compare the result of the ANDs.
2696
2697   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2698   COMPARE_TYPE is the type of the comparison, and LHS and RHS
2699   are the left and right operands of the comparison, respectively.
2700
2701   If the optimization described above can be done, we return the resulting
2702   tree.  Otherwise we return zero.  */
2703
static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  int lbitpos, lbitsize, rbitpos, rbitsize;
  int lnbitpos, lnbitsize, rnbitpos = 0, rnbitsize = 0;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, lnmode, rnmode = VOIDmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  int alignment;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, &alignment);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.   */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, &alignment);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  lnmode = get_best_mode (lbitsize, lbitpos,
			  TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
			  lvolatilep);
  if (lnmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (lnmode, 0);
  unsigned_type = type_for_mode (lnmode, 1);

  if (! const_p)
    {
      rnmode = get_best_mode (rbitsize, rbitpos,
			      TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
			      rvolatilep);
      if (rnmode == VOIDmode)
	return 0;
    }

  /* Compute the bit position and size for the new reference and our offset
     within it. If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = lbitpos & ~ (lnbitsize - 1);
  lbitpos -= lnbitpos;
  if (lnbitsize == lbitsize)
    return 0;

  if (! const_p)
    {
      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = rbitpos & ~ (rnbitsize - 1);
      rbitpos -= rnbitpos;
      if (rnbitsize == rbitsize)
	return 0;
    }

  /* On big-endian targets the bit numbering within the containing
     unit runs the other way.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = lnbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = convert (unsigned_type, mask);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (lnbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (linner, unsigned_type,
					     lnbitsize, lnbitpos, 1),
			 mask),
		  build (BIT_AND_EXPR, unsigned_type,
			 make_bit_field_ref (rinner, unsigned_type,
					     rnbitsize, rnbitpos, 1),
			 mask));

  /* Otherwise, we are handling the constant case. See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning ("comparison is always %d due to width of bitfield",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %d due to width of bitfield",
		   code == NE_EXPR);
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = fold (const_binop (BIT_AND_EXPR,
			   const_binop (LSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitpos), 0),
			   mask, 0));

  return build (code, compare_type,
		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
}
2872
2873/* Subroutine for fold_truthop: decode a field reference.
2874
2875   If EXP is a comparison reference, we return the innermost reference.
2876
2877   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2878   set to the starting bit number.
2879
2880   If the innermost field can be completely contained in a mode-sized
2881   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
2882
2883   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2884   otherwise it is not changed.
2885
2886   *PUNSIGNEDP is set to the signedness of the field.
2887
2888   *PMASK is set to the mask used.  This is either contained in a
2889   BIT_AND_EXPR or derived from the width of the field.
2890
2891   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2892
2893   Return 0 if this is not a component reference or is one that we can't
2894   do anything with.  */
2895
static tree
decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
			pvolatilep, pmask, pand_mask)
     tree exp;
     int *pbitsize, *pbitpos;
     enum machine_mode *pmode;
     int *punsignedp, *pvolatilep;
     tree *pmask;
     tree *pand_mask;
{
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  int precision;
  int alignment;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  STRIP_NOPS (exp);

  /* Peel off an outer BIT_AND_EXPR with a constant mask, remembering
     the mask for merging below.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }


  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, &alignment);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0)
    return 0;

  /* Compute the mask to access the bitfield.  */
  unsigned_type = type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  /* Build an all-ones value and shift out everything beyond the
     field's width.  */
  mask = build_int_2 (~0, ~0);
  TREE_TYPE (mask) = unsigned_type;
  force_fit_type (mask, 0);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold (build (BIT_AND_EXPR, unsigned_type,
			convert (unsigned_type, and_mask), mask));

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
2955
2956/* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2957   bit positions.  */
2958
2959static int
2960all_ones_mask_p (mask, size)
2961     tree mask;
2962     int size;
2963{
2964  tree type = TREE_TYPE (mask);
2965  int precision = TYPE_PRECISION (type);
2966  tree tmask;
2967
2968  tmask = build_int_2 (~0, ~0);
2969  TREE_TYPE (tmask) = signed_type (type);
2970  force_fit_type (tmask, 0);
2971  return
2972    tree_int_cst_equal (mask,
2973			const_binop (RSHIFT_EXPR,
2974				     const_binop (LSHIFT_EXPR, tmask,
2975						  size_int (precision - size),
2976						  0),
2977				     size_int (precision - size), 0));
2978}
2979
2980/* Subroutine for fold_truthop: determine if an operand is simple enough
2981   to be evaluated unconditionally.  */
2982
2983static int
2984simple_operand_p (exp)
2985     tree exp;
2986{
2987  /* Strip any conversions that don't change the machine mode.  */
2988  while ((TREE_CODE (exp) == NOP_EXPR
2989	  || TREE_CODE (exp) == CONVERT_EXPR)
2990	 && (TYPE_MODE (TREE_TYPE (exp))
2991	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2992    exp = TREE_OPERAND (exp, 0);
2993
2994  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2995	  || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2996	      && ! TREE_ADDRESSABLE (exp)
2997	      && ! TREE_THIS_VOLATILE (exp)
2998	      && ! DECL_NONLOCAL (exp)
2999	      /* Don't regard global variables as simple.  They may be
3000		 allocated in ways unknown to the compiler (shared memory,
3001		 #pragma weak, etc).  */
3002	      && ! TREE_PUBLIC (exp)
3003	      && ! DECL_EXTERNAL (exp)
3004	      /* Loading a static variable is unduly expensive, but global
3005		 registers aren't expensive.  */
3006	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3007}
3008
3009/* The following functions are subroutines to fold_range_test and allow it to
3010   try to change a logical combination of comparisons into a range test.
3011
3012   For example, both
3013   	X == 2 && X == 3 && X == 4 && X == 5
3014   and
3015   	X >= 2 && X <= 5
3016   are converted to
3017	(unsigned) (X - 2) <= 3
3018
3019   We describe each set of comparisons as being either inside or outside
3020   a range, using a variable named like IN_P, and then describe the
3021   range with a lower and upper bound.  If one of the bounds is omitted,
3022   it represents either the highest or lowest value of the type.
3023
3024   In the comments below, we represent a range by two numbers in brackets
3025   preceded by a "+" to designate being inside that range, or a "-" to
3026   designate being outside that range, so the condition can be inverted by
3027   flipping the prefix.  An omitted bound is represented by a "-".  For
3028   example, "- [-, 10]" means being outside the range starting at the lowest
3029   possible value and ending at 10, in other words, being greater than 10.
3030   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3031   always false.
3032
3033   We set up things so that the missing bounds are handled in a consistent
3034   manner so neither a missing bound nor "true" and "false" need to be
3035   handled using a special case.  */
3036
3037/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3038   of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3039   and UPPER1_P are nonzero if the respective argument is an upper bound
3040   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3041   must be specified for a comparison.  ARG1 will be converted to ARG0's
3042   type if both are specified.  */
3043
3044static tree
3045range_binop (code, type, arg0, upper0_p, arg1, upper1_p)
3046     enum tree_code code;
3047     tree type;
3048     tree arg0, arg1;
3049     int upper0_p, upper1_p;
3050{
3051  tree tem;
3052  int result;
3053  int sgn0, sgn1;
3054
3055  /* If neither arg represents infinity, do the normal operation.
3056     Else, if not a comparison, return infinity.  Else handle the special
3057     comparison rules. Note that most of the cases below won't occur, but
3058     are handled for consistency.  */
3059
3060  if (arg0 != 0 && arg1 != 0)
3061    {
3062      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3063			 arg0, convert (TREE_TYPE (arg0), arg1)));
3064      STRIP_NOPS (tem);
3065      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3066    }
3067
3068  if (TREE_CODE_CLASS (code) != '<')
3069    return 0;
3070
3071  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3072     for neither.  In real maths, we cannot assume open ended ranges are
3073     the same. But, this is computer arithmetic, where numbers are finite.
3074     We can therefore make the transformation of any unbounded range with
3075     the value Z, Z being greater than any representable number. This permits
3076     us to treat unbounded ranges as equal. */
3077  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3078  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3079  switch (code)
3080    {
3081    case EQ_EXPR:
3082      result = sgn0 == sgn1;
3083      break;
3084    case NE_EXPR:
3085      result = sgn0 != sgn1;
3086      break;
3087    case LT_EXPR:
3088      result = sgn0 < sgn1;
3089      break;
3090    case LE_EXPR:
3091      result = sgn0 <= sgn1;
3092      break;
3093    case GT_EXPR:
3094      result = sgn0 > sgn1;
3095      break;
3096    case GE_EXPR:
3097      result = sgn0 >= sgn1;
3098      break;
3099    default:
3100      abort ();
3101    }
3102
3103  return convert (type, result ? integer_one_node : integer_zero_node);
3104}
3105
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will have been made the same
   type as the returned expression.  If EXP is not a comparison, we will
   most likely not be returning a useful value and range.  */

static tree
make_range (exp, pin_p, plow, phigh)
     tree exp;
     int *pin_p;
     tree *plow, *phigh;
{
  enum tree_code code;
  /* Operands of the current EXP node and the type it operates in.  */
  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
  /* The first non-null TYPE seen; guards against narrowing through a
     widening cast in the NOP/CONVERT case below.  */
  tree orig_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);

  while (1)
    {
      code = TREE_CODE (exp);

      /* Pick up the operands of this node and the type they are
	 compared or computed in, when EXP is an expression node.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == '<'
	      || TREE_CODE_CLASS (code) == '1'
	      || TREE_CODE_CLASS (code) == '2')
	    type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == '2'
	      || TREE_CODE_CLASS (code) == '<'
	      || (TREE_CODE_CLASS (code) == 'e'
		  && tree_code_length[(int) code] > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
	 lose a cast by accident.  */
      if (type != NULL_TREE && orig_type == NULL_TREE)
	orig_type = type;

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      abort ();
	    }

	  exp = arg0;

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  */
	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
				  1, convert (type, integer_zero_node),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, reverse the range so it
		 goes from zero to the low bound minus 1.  */
	      if (high == 0)
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = convert (type, integer_zero_node);
		}
	    }
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, type,
			       convert (type, integer_zero_node), 0, high, 1);
	  n_high = range_binop (MINUS_EXPR, type,
				convert (type, integer_zero_node), 0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; re-express and let the loop handle the
	     MINUS_EXPR and NEGATE_EXPR cases on later iterations.  */
	  exp = build (MINUS_EXPR, type, build1 (NEGATE_EXPR, type, arg0),
		       convert (type, integer_one_node));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it +/- [ -, - ].
	      */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* Refuse to look through a cast to a wider type than the one
	     the range was originally expressed in.  */
	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
	    break;

	  if (! INTEGRAL_TYPE_P (type)
	      || (low != 0 && ! int_fits_type_p (low, type))
	      || (high != 0 && ! int_fits_type_p (high, type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = convert (type, n_low);

	  if (n_high != 0)
	    n_high = convert (type, n_high);

	  /* If we're converting from an unsigned to a signed type,
	     we will be doing the comparison as unsigned.  The tests above
	     have already verified that LOW and HIGH are both positive.

	     So we have to make sure that the original unsigned value will
	     be interpreted as positive.  */
	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
	    {
	      tree equiv_type = type_for_mode (TYPE_MODE (type), 1);
	      tree high_positive;

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		  the max value for the destination type.  */

	      high_positive = TYPE_MAX_VALUE (equiv_type);
	      if (!high_positive)
		{
		  high_positive = TYPE_MAX_VALUE (type);
		  if (!high_positive)
		    abort();
		}
	      high_positive = fold (build (RSHIFT_EXPR, type,
					   convert (type, high_positive),
					   convert (type, integer_one_node)));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high,
				      1, convert (type, integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
3376
3377/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3378   type, TYPE, return an expression to test if EXP is in (or out of, depending
3379   on IN_P) the range.  */
3380
3381static tree
3382build_range_check (type, exp, in_p, low, high)
3383     tree type;
3384     tree exp;
3385     int in_p;
3386     tree low, high;
3387{
3388  tree etype = TREE_TYPE (exp);
3389  tree utype, value;
3390
3391  if (! in_p
3392      && (0 != (value = build_range_check (type, exp, 1, low, high))))
3393    return invert_truthvalue (value);
3394
3395  else if (low == 0 && high == 0)
3396    return convert (type, integer_one_node);
3397
3398  else if (low == 0)
3399    return fold (build (LE_EXPR, type, exp, high));
3400
3401  else if (high == 0)
3402    return fold (build (GE_EXPR, type, exp, low));
3403
3404  else if (operand_equal_p (low, high, 0))
3405    return fold (build (EQ_EXPR, type, exp, low));
3406
3407  else if (TREE_UNSIGNED (etype) && integer_zerop (low))
3408    return build_range_check (type, exp, 1, 0, high);
3409
3410  else if (integer_zerop (low))
3411    {
3412      utype = unsigned_type (etype);
3413      return build_range_check (type, convert (utype, exp), 1, 0,
3414				convert (utype, high));
3415    }
3416
3417  else if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3418	   && ! TREE_OVERFLOW (value))
3419    return build_range_check (type,
3420			      fold (build (MINUS_EXPR, etype, exp, low)),
3421			      1, convert (etype, integer_zero_node), value);
3422  else
3423    return 0;
3424}
3425
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.
   IN0_P/LOW0/HIGH0 and IN1_P/LOW1/HIGH1 describe the two input ranges;
   a zero bound means the range is open on that end.  */

static int
merge_ranges (pin_p, plow, phigh, in0_p, low0, high0, in1_p, low1, high1)
     int *pin_p;
     tree *plow, *phigh;
     int in0_p, in1_p;
     tree low0, high0, low1, high1;
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Nonzero when the two ranges share the same lower (resp. upper)
     bound, counting two omitted bounds as equal.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the first range to the end of the second.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  in_p = 1, high = high0;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
			     integer_one_node, 0);
	}
      else if (! subset || highequal)
	{
	  in_p = 1, low = low0;
	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
			      integer_one_node, 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset)
	in_p = 0, low = high = 0;
      else
	{
	  in_p = 1, high = high1;
	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
			     integer_one_node, 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  /* Adjacent iff HIGH0 + 1 == LOW1.  */
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_binop (PLUS_EXPR, NULL_TREE,
						      high0, 1,
						      integer_one_node, 1),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    return 0;
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
3561
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so, 0 if not.  */

static tree
fold_range_test (exp)
     tree exp;
{
  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  /* Decompose each operand into an "(in/out of) [low, high]" range.  */
  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (BRANCH_COST >= 2
	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
		      TREE_OPERAND (exp, 1));

      else if (current_function_decl != 0
	       && ! contains_placeholder_p (lhs))
	{
	  tree common = save_expr (lhs);

	  /* Rebuild both range checks against the saved common operand
	     so each side can be evaluated unconditionally.  */
	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			  TREE_TYPE (exp), lhs, rhs);
	}
    }

  return 0;
}
3631
3632/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3633   bit value.  Arrange things so the extra bits will be set to zero if and
3634   only if C is signed-extended to its full width.  If MASK is nonzero,
3635   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
3636
3637static tree
3638unextend (c, p, unsignedp, mask)
3639     tree c;
3640     int p;
3641     int unsignedp;
3642     tree mask;
3643{
3644  tree type = TREE_TYPE (c);
3645  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3646  tree temp;
3647
3648  if (p == modesize || unsignedp)
3649    return c;
3650
3651  /* We work by getting just the sign bit into the low-order bit, then
3652     into the high-order bit, then sign-extend.  We then XOR that value
3653     with C.  */
3654  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3655  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3656
3657  /* We must use a signed type in order to get an arithmetic right shift.
3658     However, we must also avoid introducing accidental overflows, so that
3659     a subsequent call to integer_zerop will work.  Hence we must
3660     do the type conversion here.  At this point, the constant is either
3661     zero or one, and the conversion to a signed type can never overflow.
3662     We could get an overflow if this conversion is done anywhere else.  */
3663  if (TREE_UNSIGNED (type))
3664    temp = convert (signed_type (type), temp);
3665
3666  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3667  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3668  if (mask != 0)
3669    temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3670  /* If necessary, convert the type back to match the type of C.  */
3671  if (TREE_UNSIGNED (type))
3672    temp = convert (type, temp);
3673
3674  return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3675}
3676
3677/* Find ways of folding logical expressions of LHS and RHS:
3678   Try to merge two comparisons to the same innermost item.
3679   Look for range tests like "ch >= '0' && ch <= '9'".
3680   Look for combinations of simple terms on machines with expensive branches
3681   and evaluate the RHS unconditionally.
3682
3683   For example, if we have p->a == 2 && p->b == 4 and we can make an
3684   object large enough to span both A and B, we can do this with a comparison
3685   against the object ANDed with the a mask.
3686
3687   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3688   operations to do this with one comparison.
3689
   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.
3692
3693   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
3694   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3695
3696   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3697   two operands.
3698
3699   We return the simplified tree or 0 if no optimization is possible.  */
3700
3701static tree
3702fold_truthop (code, truth_type, lhs, rhs)
3703     enum tree_code code;
3704     tree truth_type, lhs, rhs;
3705{
3706  /* If this is the "or" of two comparisons, we can do something if we
3707     the comparisons are NE_EXPR.  If this is the "and", we can do something
3708     if the comparisons are EQ_EXPR.  I.e.,
3709     	(a->b == 2 && a->c == 4) can become (a->new == NEW).
3710
3711     WANTED_CODE is this operation code.  For single bit fields, we can
3712     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3713     comparison for one-bit fields.  */
3714
3715  enum tree_code wanted_code;
3716  enum tree_code lcode, rcode;
3717  tree ll_arg, lr_arg, rl_arg, rr_arg;
3718  tree ll_inner, lr_inner, rl_inner, rr_inner;
3719  int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3720  int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3721  int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3722  int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3723  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3724  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3725  enum machine_mode lnmode, rnmode;
3726  tree ll_mask, lr_mask, rl_mask, rr_mask;
3727  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3728  tree l_const, r_const;
3729  tree lntype, rntype, result;
3730  int first_bit, end_bit;
3731  int volatilep;
3732
3733  /* Start by getting the comparison codes.  Fail if anything is volatile.
3734     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3735     it were surrounded with a NE_EXPR.  */
3736
3737  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3738    return 0;
3739
3740  lcode = TREE_CODE (lhs);
3741  rcode = TREE_CODE (rhs);
3742
3743  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3744    lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3745
3746  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3747    rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3748
3749  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3750    return 0;
3751
3752  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3753	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3754
3755  ll_arg = TREE_OPERAND (lhs, 0);
3756  lr_arg = TREE_OPERAND (lhs, 1);
3757  rl_arg = TREE_OPERAND (rhs, 0);
3758  rr_arg = TREE_OPERAND (rhs, 1);
3759
3760  /* If the RHS can be evaluated unconditionally and its operands are
3761     simple, it wins to evaluate the RHS unconditionally on machines
3762     with expensive branches.  In this case, this isn't a comparison
3763     that can be merged.  */
3764
3765  /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
3766     are with zero (tmw).  */
3767
3768  if (BRANCH_COST >= 2
3769      && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
3770      && simple_operand_p (rl_arg)
3771      && simple_operand_p (rr_arg))
3772    return build (code, truth_type, lhs, rhs);
3773
3774  /* See if the comparisons can be merged.  Then get all the parameters for
3775     each side.  */
3776
3777  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3778      || (rcode != EQ_EXPR && rcode != NE_EXPR))
3779    return 0;
3780
3781  volatilep = 0;
3782  ll_inner = decode_field_reference (ll_arg,
3783				     &ll_bitsize, &ll_bitpos, &ll_mode,
3784				     &ll_unsignedp, &volatilep, &ll_mask,
3785				     &ll_and_mask);
3786  lr_inner = decode_field_reference (lr_arg,
3787				     &lr_bitsize, &lr_bitpos, &lr_mode,
3788				     &lr_unsignedp, &volatilep, &lr_mask,
3789				     &lr_and_mask);
3790  rl_inner = decode_field_reference (rl_arg,
3791				     &rl_bitsize, &rl_bitpos, &rl_mode,
3792				     &rl_unsignedp, &volatilep, &rl_mask,
3793				     &rl_and_mask);
3794  rr_inner = decode_field_reference (rr_arg,
3795				     &rr_bitsize, &rr_bitpos, &rr_mode,
3796				     &rr_unsignedp, &volatilep, &rr_mask,
3797				     &rr_and_mask);
3798
3799  /* It must be true that the inner operation on the lhs of each
3800     comparison must be the same if we are to be able to do anything.
3801     Then see if we have constants.  If not, the same must be true for
3802     the rhs's.  */
3803  if (volatilep || ll_inner == 0 || rl_inner == 0
3804      || ! operand_equal_p (ll_inner, rl_inner, 0))
3805    return 0;
3806
3807  if (TREE_CODE (lr_arg) == INTEGER_CST
3808      && TREE_CODE (rr_arg) == INTEGER_CST)
3809    l_const = lr_arg, r_const = rr_arg;
3810  else if (lr_inner == 0 || rr_inner == 0
3811	   || ! operand_equal_p (lr_inner, rr_inner, 0))
3812    return 0;
3813  else
3814    l_const = r_const = 0;
3815
3816  /* If either comparison code is not correct for our logical operation,
3817     fail.  However, we can convert a one-bit comparison against zero into
3818     the opposite comparison against that bit being set in the field.  */
3819
3820  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3821  if (lcode != wanted_code)
3822    {
3823      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3824	{
3825	  /* Make the left operand unsigned, since we are only interested
3826	     in the value of one bit.  Otherwise we are doing the wrong
3827	     thing below.  */
3828	  ll_unsignedp = 1;
3829	  l_const = ll_mask;
3830	}
3831      else
3832	return 0;
3833    }
3834
3835  /* This is analogous to the code for l_const above.  */
3836  if (rcode != wanted_code)
3837    {
3838      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3839	{
3840	  rl_unsignedp = 1;
3841	  r_const = rl_mask;
3842	}
3843      else
3844	return 0;
3845    }
3846
3847  /* See if we can find a mode that contains both fields being compared on
3848     the left.  If we can't, fail.  Otherwise, update all constants and masks
3849     to be relative to a field of that size.  */
3850  first_bit = MIN (ll_bitpos, rl_bitpos);
3851  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3852  lnmode = get_best_mode (end_bit - first_bit, first_bit,
3853			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3854			  volatilep);
3855  if (lnmode == VOIDmode)
3856    return 0;
3857
3858  lnbitsize = GET_MODE_BITSIZE (lnmode);
3859  lnbitpos = first_bit & ~ (lnbitsize - 1);
3860  lntype = type_for_size (lnbitsize, 1);
3861  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3862
3863  if (BYTES_BIG_ENDIAN)
3864    {
3865      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3866      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3867    }
3868
3869  ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3870			 size_int (xll_bitpos), 0);
3871  rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
3872			 size_int (xrl_bitpos), 0);
3873
3874  if (l_const)
3875    {
3876      l_const = convert (lntype, l_const);
3877      l_const = unextend (l_const,  ll_bitsize, ll_unsignedp, ll_and_mask);
3878      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
3879      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
3880					fold (build1 (BIT_NOT_EXPR,
3881						      lntype, ll_mask)),
3882					0)))
3883	{
3884	  warning ("comparison is always %d", wanted_code == NE_EXPR);
3885
3886	  return convert (truth_type,
3887			  wanted_code == NE_EXPR
3888			  ? integer_one_node : integer_zero_node);
3889	}
3890    }
3891  if (r_const)
3892    {
3893      r_const = convert (lntype, r_const);
3894      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
3895      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
3896      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
3897					fold (build1 (BIT_NOT_EXPR,
3898						      lntype, rl_mask)),
3899					0)))
3900	{
3901	  warning ("comparison is always %d", wanted_code == NE_EXPR);
3902
3903	  return convert (truth_type,
3904			  wanted_code == NE_EXPR
3905			  ? integer_one_node : integer_zero_node);
3906	}
3907    }
3908
3909  /* If the right sides are not constant, do the same for it.  Also,
3910     disallow this optimization if a size or signedness mismatch occurs
3911     between the left and right sides.  */
3912  if (l_const == 0)
3913    {
3914      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
3915	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
3916	  /* Make sure the two fields on the right
3917	     correspond to the left without being swapped.  */
3918	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
3919	return 0;
3920
3921      first_bit = MIN (lr_bitpos, rr_bitpos);
3922      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
3923      rnmode = get_best_mode (end_bit - first_bit, first_bit,
3924			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
3925			      volatilep);
3926      if (rnmode == VOIDmode)
3927	return 0;
3928
3929      rnbitsize = GET_MODE_BITSIZE (rnmode);
3930      rnbitpos = first_bit & ~ (rnbitsize - 1);
3931      rntype = type_for_size (rnbitsize, 1);
3932      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
3933
3934      if (BYTES_BIG_ENDIAN)
3935	{
3936	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
3937	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
3938	}
3939
3940      lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
3941			     size_int (xlr_bitpos), 0);
3942      rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
3943			     size_int (xrr_bitpos), 0);
3944
3945      /* Make a mask that corresponds to both fields being compared.
3946	 Do this for both items being compared.  If the operands are the
3947	 same size and the bits being compared are in the same position
3948	 then we can do this by masking both and comparing the masked
3949	 results.  */
3950      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3951      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
3952      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
3953	{
3954	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
3955				    ll_unsignedp || rl_unsignedp);
3956	  if (! all_ones_mask_p (ll_mask, lnbitsize))
3957	    lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
3958
3959	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
3960				    lr_unsignedp || rr_unsignedp);
3961	  if (! all_ones_mask_p (lr_mask, rnbitsize))
3962	    rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
3963
3964	  return build (wanted_code, truth_type, lhs, rhs);
3965	}
3966
3967      /* There is still another way we can do something:  If both pairs of
3968	 fields being compared are adjacent, we may be able to make a wider
3969	 field containing them both.
3970
3971	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
3972	 the mask must be shifted to account for the shift done by
3973	 make_bit_field_ref.  */
3974      if ((ll_bitsize + ll_bitpos == rl_bitpos
3975	   && lr_bitsize + lr_bitpos == rr_bitpos)
3976	  || (ll_bitpos == rl_bitpos + rl_bitsize
3977	      && lr_bitpos == rr_bitpos + rr_bitsize))
3978	{
3979	  tree type;
3980
3981	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
3982				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
3983	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
3984				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
3985
3986	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
3987				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
3988	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
3989				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
3990
3991	  /* Convert to the smaller type before masking out unwanted bits.  */
3992	  type = lntype;
3993	  if (lntype != rntype)
3994	    {
3995	      if (lnbitsize > rnbitsize)
3996		{
3997		  lhs = convert (rntype, lhs);
3998		  ll_mask = convert (rntype, ll_mask);
3999		  type = rntype;
4000		}
4001	      else if (lnbitsize < rnbitsize)
4002		{
4003		  rhs = convert (lntype, rhs);
4004		  lr_mask = convert (lntype, lr_mask);
4005		  type = lntype;
4006		}
4007	    }
4008
4009	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4010	    lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4011
4012	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4013	    rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4014
4015	  return build (wanted_code, truth_type, lhs, rhs);
4016	}
4017
4018      return 0;
4019    }
4020
4021  /* Handle the case of comparisons with constants.  If there is something in
4022     common between the masks, those bits of the constants must be the same.
4023     If not, the condition is always false.  Test for this to avoid generating
4024     incorrect code below.  */
4025  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4026  if (! integer_zerop (result)
4027      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4028			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4029    {
4030      if (wanted_code == NE_EXPR)
4031	{
4032	  warning ("`or' of unmatched not-equal tests is always 1");
4033	  return convert (truth_type, integer_one_node);
4034	}
4035      else
4036	{
4037	  warning ("`and' of mutually exclusive equal-tests is always 0");
4038	  return convert (truth_type, integer_zero_node);
4039	}
4040    }
4041
4042  /* Construct the expression we will return.  First get the component
4043     reference we will make.  Unless the mask is all ones the width of
4044     that field, perform the mask operation.  Then compare with the
4045     merged constant.  */
4046  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4047			       ll_unsignedp || rl_unsignedp);
4048
4049  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4050  if (! all_ones_mask_p (ll_mask, lnbitsize))
4051    result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4052
4053  return build (wanted_code, truth_type, result,
4054		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4055}
4056
4057/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4058   S, a SAVE_EXPR, return the expression actually being evaluated.   Note
4059   that we may sometimes modify the tree.  */
4060
4061static tree
4062strip_compound_expr (t, s)
4063     tree t;
4064     tree s;
4065{
4066  enum tree_code code = TREE_CODE (t);
4067
4068  /* See if this is the COMPOUND_EXPR we want to eliminate.  */
4069  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4070      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4071    return TREE_OPERAND (t, 1);
4072
4073  /* See if this is a COND_EXPR or a simple arithmetic operator.   We
4074     don't bother handling any other types.  */
4075  else if (code == COND_EXPR)
4076    {
4077      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4078      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4079      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4080    }
4081  else if (TREE_CODE_CLASS (code) == '1')
4082    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4083  else if (TREE_CODE_CLASS (code) == '<'
4084	   || TREE_CODE_CLASS (code) == '2')
4085    {
4086      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4087      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4088    }
4089
4090  return t;
4091}
4092
4093/* Return a node which has the indicated constant VALUE (either 0 or
4094   1), and is of the indicated TYPE.  */
4095
4096static tree
4097constant_boolean_node (value, type)
4098     int value;
4099     tree type;
4100{
4101  if (type == integer_type_node)
4102    return value ? integer_one_node : integer_zero_node;
4103  else if (TREE_CODE (type) == BOOLEAN_TYPE)
4104    return truthvalue_conversion (value ? integer_one_node :
4105				  integer_zero_node);
4106  else
4107    {
4108      tree t = build_int_2 (value, 0);
4109      TREE_TYPE (t) = type;
4110      return t;
4111    }
4112}
4113
4114/* Utility function for the following routine, to see how complex a nesting of
4115   COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
   we don't care (to avoid spending too much time on complex expressions).  */
4117
4118static int
4119count_cond (expr, lim)
4120     tree expr;
4121     int lim;
4122{
4123  int true, false;
4124
4125  if (TREE_CODE (expr) != COND_EXPR)
4126    return 0;
4127  else if (lim <= 0)
4128    return 0;
4129
4130  true = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4131  false = count_cond (TREE_OPERAND (expr, 2), lim - 1 - true);
4132  return MIN (lim, 1 + true + false);
4133}
4134
4135/* Perform constant folding and related simplification of EXPR.
4136   The related simplifications include x*1 => x, x*0 => 0, etc.,
4137   and application of the associative law.
4138   NOP_EXPR conversions may be removed freely (as long as we
4139   are careful not to change the C type of the overall expression)
4140   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
4141   but we can constant-fold them if they have constant operands.  */
4142
4143tree
4144fold (expr)
4145     tree expr;
4146{
4147  register tree t = expr;
4148  tree t1 = NULL_TREE;
4149  tree tem;
4150  tree type = TREE_TYPE (expr);
4151  register tree arg0 = NULL_TREE, arg1 = NULL_TREE;
4152  register enum tree_code code = TREE_CODE (t);
4153  register int kind;
4154  int invert;
4155
4156  /* WINS will be nonzero when the switch is done
4157     if all operands are constant.  */
4158
4159  int wins = 1;
4160
4161  /* Don't try to process an RTL_EXPR since its operands aren't trees.
4162     Likewise for a SAVE_EXPR that's already been evaluated.  */
4163  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t)) != 0)
4164    return t;
4165
4166  /* Return right away if already constant.  */
4167  if (TREE_CONSTANT (t))
4168    {
4169      if (code == CONST_DECL)
4170	return DECL_INITIAL (t);
4171      return t;
4172    }
4173
4174#ifdef MAX_INTEGER_COMPUTATION_MODE
4175  check_max_integer_computation_mode (expr);
4176#endif
4177
4178  kind = TREE_CODE_CLASS (code);
4179  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
4180    {
4181      tree subop;
4182
4183      /* Special case for conversion ops that can have fixed point args.  */
4184      arg0 = TREE_OPERAND (t, 0);
4185
4186      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
4187      if (arg0 != 0)
4188	STRIP_TYPE_NOPS (arg0);
4189
4190      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
4191	subop = TREE_REALPART (arg0);
4192      else
4193	subop = arg0;
4194
4195      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
4196#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4197	  && TREE_CODE (subop) != REAL_CST
4198#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4199	  )
4200	/* Note that TREE_CONSTANT isn't enough:
4201	   static var addresses are constant but we can't
4202	   do arithmetic on them.  */
4203	wins = 0;
4204    }
4205  else if (kind == 'e' || kind == '<'
4206	   || kind == '1' || kind == '2' || kind == 'r')
4207    {
4208      register int len = tree_code_length[(int) code];
4209      register int i;
4210      for (i = 0; i < len; i++)
4211	{
4212	  tree op = TREE_OPERAND (t, i);
4213	  tree subop;
4214
4215	  if (op == 0)
4216	    continue;		/* Valid for CALL_EXPR, at least.  */
4217
4218	  if (kind == '<' || code == RSHIFT_EXPR)
4219	    {
4220	      /* Signedness matters here.  Perhaps we can refine this
4221		 later.  */
4222	      STRIP_TYPE_NOPS (op);
4223	    }
4224	  else
4225	    {
4226	      /* Strip any conversions that don't change the mode.  */
4227	      STRIP_NOPS (op);
4228	    }
4229
4230	  if (TREE_CODE (op) == COMPLEX_CST)
4231	    subop = TREE_REALPART (op);
4232	  else
4233	    subop = op;
4234
4235	  if (TREE_CODE (subop) != INTEGER_CST
4236#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
4237	      && TREE_CODE (subop) != REAL_CST
4238#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
4239	      )
4240	    /* Note that TREE_CONSTANT isn't enough:
4241	       static var addresses are constant but we can't
4242	       do arithmetic on them.  */
4243	    wins = 0;
4244
4245	  if (i == 0)
4246	    arg0 = op;
4247	  else if (i == 1)
4248	    arg1 = op;
4249	}
4250    }
4251
4252  /* If this is a commutative operation, and ARG0 is a constant, move it
4253     to ARG1 to reduce the number of tests below.  */
4254  if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
4255       || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
4256       || code == BIT_AND_EXPR)
4257      && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
4258    {
4259      tem = arg0; arg0 = arg1; arg1 = tem;
4260
4261      tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
4262      TREE_OPERAND (t, 1) = tem;
4263    }
4264
4265  /* Now WINS is set as described above,
4266     ARG0 is the first operand of EXPR,
4267     and ARG1 is the second operand (if it has more than one operand).
4268
4269     First check for cases where an arithmetic operation is applied to a
4270     compound, conditional, or comparison operation.  Push the arithmetic
4271     operation inside the compound or conditional to see if any folding
4272     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
4274     expand_expr.
4275
4276     Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
4277     one of the operands is a comparison and the other is a comparison, a
4278     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
4279     code below would make the expression more complex.  Change it to a
4280     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
4281     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
4282
4283  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
4284       || code == EQ_EXPR || code == NE_EXPR)
4285      && ((truth_value_p (TREE_CODE (arg0))
4286	   && (truth_value_p (TREE_CODE (arg1))
4287	       || (TREE_CODE (arg1) == BIT_AND_EXPR
4288		   && integer_onep (TREE_OPERAND (arg1, 1)))))
4289	  || (truth_value_p (TREE_CODE (arg1))
4290	      && (truth_value_p (TREE_CODE (arg0))
4291		  || (TREE_CODE (arg0) == BIT_AND_EXPR
4292		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
4293    {
4294      t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
4295		       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
4296		       : TRUTH_XOR_EXPR,
4297		       type, arg0, arg1));
4298
4299      if (code == EQ_EXPR)
4300	t = invert_truthvalue (t);
4301
4302      return t;
4303    }
4304
4305  if (TREE_CODE_CLASS (code) == '1')
4306    {
4307      if (TREE_CODE (arg0) == COMPOUND_EXPR)
4308	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4309		      fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
4310      else if (TREE_CODE (arg0) == COND_EXPR)
4311	{
4312	  t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
4313			   fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
4314			   fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
4315
4316	  /* If this was a conversion, and all we did was to move into
4317	     inside the COND_EXPR, bring it back out.  But leave it if
4318	     it is a conversion from integer to integer and the
4319	     result precision is no wider than a word since such a
4320	     conversion is cheap and may be optimized away by combine,
4321	     while it couldn't if it were outside the COND_EXPR.  Then return
4322	     so we don't get into an infinite recursion loop taking the
4323	     conversion out and then back in.  */
4324
4325	  if ((code == NOP_EXPR || code == CONVERT_EXPR
4326	       || code == NON_LVALUE_EXPR)
4327	      && TREE_CODE (t) == COND_EXPR
4328	      && TREE_CODE (TREE_OPERAND (t, 1)) == code
4329	      && TREE_CODE (TREE_OPERAND (t, 2)) == code
4330	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
4331		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
4332	      && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
4333		    && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
4334		    && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
4335	    t = build1 (code, type,
4336			build (COND_EXPR,
4337			       TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
4338			       TREE_OPERAND (t, 0),
4339			       TREE_OPERAND (TREE_OPERAND (t, 1), 0),
4340			       TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
4341	  return t;
4342	}
4343      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
4344	return fold (build (COND_EXPR, type, arg0,
4345			    fold (build1 (code, type, integer_one_node)),
4346			    fold (build1 (code, type, integer_zero_node))));
4347   }
4348  else if (TREE_CODE_CLASS (code) == '2'
4349	   || TREE_CODE_CLASS (code) == '<')
4350    {
4351      if (TREE_CODE (arg1) == COMPOUND_EXPR)
4352	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4353		      fold (build (code, type,
4354				   arg0, TREE_OPERAND (arg1, 1))));
4355      else if ((TREE_CODE (arg1) == COND_EXPR
4356		|| (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
4357		    && TREE_CODE_CLASS (code) != '<'))
4358	       && (TREE_CODE (arg0) != COND_EXPR
4359		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4360	       && (! TREE_SIDE_EFFECTS (arg0)
4361		   || (current_function_decl != 0
4362		       && ! contains_placeholder_p (arg0))))
4363	{
4364	  tree test, true_value, false_value;
4365	  tree lhs = 0, rhs = 0;
4366
4367	  if (TREE_CODE (arg1) == COND_EXPR)
4368	    {
4369	      test = TREE_OPERAND (arg1, 0);
4370	      true_value = TREE_OPERAND (arg1, 1);
4371	      false_value = TREE_OPERAND (arg1, 2);
4372	    }
4373	  else
4374	    {
4375	      tree testtype = TREE_TYPE (arg1);
4376	      test = arg1;
4377	      true_value = convert (testtype, integer_one_node);
4378	      false_value = convert (testtype, integer_zero_node);
4379	    }
4380
4381	  /* If ARG0 is complex we want to make sure we only evaluate
4382	     it once.  Though this is only required if it is volatile, it
4383	     might be more efficient even if it is not.  However, if we
4384	     succeed in folding one part to a constant, we do not need
4385	     to make this SAVE_EXPR.  Since we do this optimization
4386	     primarily to see if we do end up with constant and this
4387	     SAVE_EXPR interferes with later optimizations, suppressing
4388	     it when we can is important.
4389
4390	     If we are not in a function, we can't make a SAVE_EXPR, so don't
4391	     try to do so.  Don't try to see if the result is a constant
4392	     if an arm is a COND_EXPR since we get exponential behavior
4393	     in that case.  */
4394
4395	  if (TREE_CODE (arg0) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4396	      && current_function_decl != 0
4397	      && ((TREE_CODE (arg0) != VAR_DECL
4398		   && TREE_CODE (arg0) != PARM_DECL)
4399		  || TREE_SIDE_EFFECTS (arg0)))
4400	    {
4401	      if (TREE_CODE (true_value) != COND_EXPR)
4402		lhs = fold (build (code, type, arg0, true_value));
4403
4404	      if (TREE_CODE (false_value) != COND_EXPR)
4405		rhs = fold (build (code, type, arg0, false_value));
4406
4407	      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4408		  && (rhs == 0 || !TREE_CONSTANT (rhs)))
4409		arg0 = save_expr (arg0), lhs = rhs = 0;
4410	    }
4411
4412	  if (lhs == 0)
4413	    lhs = fold (build (code, type, arg0, true_value));
4414	  if (rhs == 0)
4415	    rhs = fold (build (code, type, arg0, false_value));
4416
4417	  test = fold (build (COND_EXPR, type, test, lhs, rhs));
4418
4419	  if (TREE_CODE (arg0) == SAVE_EXPR)
4420	    return build (COMPOUND_EXPR, type,
4421			  convert (void_type_node, arg0),
4422			  strip_compound_expr (test, arg0));
4423	  else
4424	    return convert (type, test);
4425	}
4426
4427      else if (TREE_CODE (arg0) == COMPOUND_EXPR)
4428	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4429		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4430      else if ((TREE_CODE (arg0) == COND_EXPR
4431		|| (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4432		    && TREE_CODE_CLASS (code) != '<'))
4433	       && (TREE_CODE (arg1) != COND_EXPR
4434		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
4435	       && (! TREE_SIDE_EFFECTS (arg1)
4436		   || (current_function_decl != 0
4437		       && ! contains_placeholder_p (arg1))))
4438	{
4439	  tree test, true_value, false_value;
4440	  tree lhs = 0, rhs = 0;
4441
4442	  if (TREE_CODE (arg0) == COND_EXPR)
4443	    {
4444	      test = TREE_OPERAND (arg0, 0);
4445	      true_value = TREE_OPERAND (arg0, 1);
4446	      false_value = TREE_OPERAND (arg0, 2);
4447	    }
4448	  else
4449	    {
4450	      tree testtype = TREE_TYPE (arg0);
4451	      test = arg0;
4452	      true_value = convert (testtype, integer_one_node);
4453	      false_value = convert (testtype, integer_zero_node);
4454	    }
4455
4456	  if (TREE_CODE (arg1) != SAVE_EXPR && ! TREE_CONSTANT (arg0)
4457	      && current_function_decl != 0
4458	      && ((TREE_CODE (arg1) != VAR_DECL
4459		   && TREE_CODE (arg1) != PARM_DECL)
4460		  || TREE_SIDE_EFFECTS (arg1)))
4461	    {
4462	      if (TREE_CODE (true_value) != COND_EXPR)
4463		lhs = fold (build (code, type, true_value, arg1));
4464
4465	      if (TREE_CODE (false_value) != COND_EXPR)
4466		rhs = fold (build (code, type, false_value, arg1));
4467
4468	      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4469		  && (rhs == 0 || !TREE_CONSTANT (rhs)))
4470		arg1 = save_expr (arg1), lhs = rhs = 0;
4471	    }
4472
4473	  if (lhs == 0)
4474	    lhs = fold (build (code, type, true_value, arg1));
4475
4476	  if (rhs == 0)
4477	    rhs = fold (build (code, type, false_value, arg1));
4478
4479	  test = fold (build (COND_EXPR, type, test, lhs, rhs));
4480	  if (TREE_CODE (arg1) == SAVE_EXPR)
4481	    return build (COMPOUND_EXPR, type,
4482			  convert (void_type_node, arg1),
4483			  strip_compound_expr (test, arg1));
4484	  else
4485	    return convert (type, test);
4486	}
4487    }
4488  else if (TREE_CODE_CLASS (code) == '<'
4489	   && TREE_CODE (arg0) == COMPOUND_EXPR)
4490    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
4491		  fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
4492  else if (TREE_CODE_CLASS (code) == '<'
4493	   && TREE_CODE (arg1) == COMPOUND_EXPR)
4494    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
4495		  fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
4496
4497  switch (code)
4498    {
4499    case INTEGER_CST:
4500    case REAL_CST:
4501    case STRING_CST:
4502    case COMPLEX_CST:
4503    case CONSTRUCTOR:
4504      return t;
4505
4506    case CONST_DECL:
4507      return fold (DECL_INITIAL (t));
4508
4509    case NOP_EXPR:
4510    case FLOAT_EXPR:
4511    case CONVERT_EXPR:
4512    case FIX_TRUNC_EXPR:
4513      /* Other kinds of FIX are not handled properly by fold_convert.  */
4514
4515      if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
4516	return TREE_OPERAND (t, 0);
4517
4518      /* Handle cases of two conversions in a row.  */
4519      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
4520	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
4521	{
4522	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4523	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
4524	  tree final_type = TREE_TYPE (t);
4525	  int inside_int = INTEGRAL_TYPE_P (inside_type);
4526	  int inside_ptr = POINTER_TYPE_P (inside_type);
4527	  int inside_float = FLOAT_TYPE_P (inside_type);
4528	  int inside_prec = TYPE_PRECISION (inside_type);
4529	  int inside_unsignedp = TREE_UNSIGNED (inside_type);
4530	  int inter_int = INTEGRAL_TYPE_P (inter_type);
4531	  int inter_ptr = POINTER_TYPE_P (inter_type);
4532	  int inter_float = FLOAT_TYPE_P (inter_type);
4533	  int inter_prec = TYPE_PRECISION (inter_type);
4534	  int inter_unsignedp = TREE_UNSIGNED (inter_type);
4535	  int final_int = INTEGRAL_TYPE_P (final_type);
4536	  int final_ptr = POINTER_TYPE_P (final_type);
4537	  int final_float = FLOAT_TYPE_P (final_type);
4538	  int final_prec = TYPE_PRECISION (final_type);
4539	  int final_unsignedp = TREE_UNSIGNED (final_type);
4540
4541	  /* In addition to the cases of two conversions in a row
4542	     handled below, if we are converting something to its own
4543	     type via an object of identical or wider precision, neither
4544	     conversion is needed.  */
4545	  if (inside_type == final_type
4546	      && ((inter_int && final_int) || (inter_float && final_float))
4547	      && inter_prec >= final_prec)
4548	    return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
4549
4550	  /* Likewise, if the intermediate and final types are either both
4551	     float or both integer, we don't need the middle conversion if
4552	     it is wider than the final type and doesn't change the signedness
4553	     (for integers).  Avoid this if the final type is a pointer
4554	     since then we sometimes need the inner conversion.  Likewise if
4555	     the outer has a precision not equal to the size of its mode.  */
4556	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
4557	       || (inter_float && inside_float))
4558	      && inter_prec >= inside_prec
4559	      && (inter_float || inter_unsignedp == inside_unsignedp)
4560	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4561		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4562	      && ! final_ptr)
4563	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4564
4565	  /* If we have a sign-extension of a zero-extended value, we can
4566	     replace that by a single zero-extension.  */
4567	  if (inside_int && inter_int && final_int
4568	      && inside_prec < inter_prec && inter_prec < final_prec
4569	      && inside_unsignedp && !inter_unsignedp)
4570	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4571
4572	  /* Two conversions in a row are not needed unless:
4573	     - some conversion is floating-point (overstrict for now), or
4574	     - the intermediate type is narrower than both initial and
4575	       final, or
4576	     - the intermediate type and innermost type differ in signedness,
4577	       and the outermost type is wider than the intermediate, or
4578	     - the initial type is a pointer type and the precisions of the
4579	       intermediate and final types differ, or
4580	     - the final type is a pointer type and the precisions of the
4581	       initial and intermediate types differ.  */
4582	  if (! inside_float && ! inter_float && ! final_float
4583	      && (inter_prec > inside_prec || inter_prec > final_prec)
4584	      && ! (inside_int && inter_int
4585		    && inter_unsignedp != inside_unsignedp
4586		    && inter_prec < final_prec)
4587	      && ((inter_unsignedp && inter_prec > inside_prec)
4588		  == (final_unsignedp && final_prec > inter_prec))
4589	      && ! (inside_ptr && inter_prec != final_prec)
4590	      && ! (final_ptr && inside_prec != inter_prec)
4591	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
4592		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
4593	      && ! final_ptr)
4594	    return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
4595	}
4596
4597      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
4598	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
4599	  /* Detect assigning a bitfield.  */
4600	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
4601	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
4602	{
4603	  /* Don't leave an assignment inside a conversion
4604	     unless assigning a bitfield.  */
4605	  tree prev = TREE_OPERAND (t, 0);
4606	  TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
4607	  /* First do the assignment, then return converted constant.  */
4608	  t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
4609	  TREE_USED (t) = 1;
4610	  return t;
4611	}
4612      if (!wins)
4613	{
4614	  TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
4615	  return t;
4616	}
4617      return fold_convert (t, arg0);
4618
4619#if 0  /* This loses on &"foo"[0].  */
4620    case ARRAY_REF:
4621	{
4622	  int i;
4623
4624	  /* Fold an expression like: "foo"[2] */
4625	  if (TREE_CODE (arg0) == STRING_CST
4626	      && TREE_CODE (arg1) == INTEGER_CST
4627	      && !TREE_INT_CST_HIGH (arg1)
4628	      && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4629	    {
4630	      t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4631	      TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
4632	      force_fit_type (t, 0);
4633	    }
4634	}
4635      return t;
4636#endif /* 0 */
4637
4638    case COMPONENT_REF:
4639      if (TREE_CODE (arg0) == CONSTRUCTOR)
4640	{
4641	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
4642	  if (m)
4643	    t = TREE_VALUE (m);
4644	}
4645      return t;
4646
4647    case RANGE_EXPR:
4648      TREE_CONSTANT (t) = wins;
4649      return t;
4650
    case NEGATE_EXPR:
      /* Fold -CST at compile time; otherwise simplify - -A to A and
	 -(A-B) to (B-A) for non-floats.  */
      if (wins)
	{
	  if (TREE_CODE (arg0) == INTEGER_CST)
	    {
	      HOST_WIDE_INT low, high;
	      int overflow = neg_double (TREE_INT_CST_LOW (arg0),
					 TREE_INT_CST_HIGH (arg0),
					 &low, &high);
	      t = build_int_2 (low, high);
	      TREE_TYPE (t) = type;
	      /* Negation of the most negative value overflows for signed
		 types; propagate both overflow flags from the operand.  */
	      TREE_OVERFLOW (t)
		= (TREE_OVERFLOW (arg0)
		   | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
	      TREE_CONSTANT_OVERFLOW (t)
		= TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
	    }
	  else if (TREE_CODE (arg0) == REAL_CST)
	    t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
	}
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return TREE_OPERAND (arg0, 0);

      /* Convert - (a - b) to (b - a) for non-floating-point.  */
      else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
	return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
		      TREE_OPERAND (arg0, 0));

      return t;
4680
    case ABS_EXPR:
      /* Fold |CST| at compile time; also |(|A|)| and |-A| become |A|.  */
      if (wins)
	{
	  if (TREE_CODE (arg0) == INTEGER_CST)
	    {
	      /* Only negate when the constant is actually negative
		 (high word sign bit set) and the type is signed.  */
	      if (! TREE_UNSIGNED (type)
		  && TREE_INT_CST_HIGH (arg0) < 0)
		{
		  HOST_WIDE_INT low, high;
		  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
					     TREE_INT_CST_HIGH (arg0),
					     &low, &high);
		  t = build_int_2 (low, high);
		  TREE_TYPE (t) = type;
		  TREE_OVERFLOW (t)
		    = (TREE_OVERFLOW (arg0)
		       | force_fit_type (t, overflow));
		  TREE_CONSTANT_OVERFLOW (t)
		    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
		}
	    }
	  else if (TREE_CODE (arg0) == REAL_CST)
	    {
	      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
		t = build_real (type,
				REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
	    }
	}
      else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
	return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      return t;
4712
    case CONJ_EXPR:
      /* Complex conjugate: a no-op on non-complex values; otherwise
	 negate the imaginary part, distribute over +/-, and cancel a
	 nested conjugate.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return arg0;
      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return build (COMPLEX_EXPR, TREE_TYPE (arg0),
		      TREE_OPERAND (arg0, 0),
		      fold (build1 (NEGATE_EXPR,
				    TREE_TYPE (TREE_TYPE (arg0)),
				    TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COMPLEX_CST)
	return build_complex (type, TREE_OPERAND (arg0, 0),
			      fold (build1 (NEGATE_EXPR,
					    TREE_TYPE (TREE_TYPE (arg0)),
					    TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build1 (CONJ_EXPR, type,
					  TREE_OPERAND (arg0, 0))),
			    fold (build1 (CONJ_EXPR,
					  type, TREE_OPERAND (arg0, 1)))));
      else if (TREE_CODE (arg0) == CONJ_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
4736
    case BIT_NOT_EXPR:
      /* Fold ~CST at compile time; ~~A simplifies to A.  */
      if (wins)
	{
	  t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
			   ~ TREE_INT_CST_HIGH (arg0));
	  TREE_TYPE (t) = type;
	  force_fit_type (t, 0);
	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
	}
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      return t;
4750
    case PLUS_EXPR:
      /* A + (-B) -> A - B */
      if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
      else if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
	     with a constant, and the two constants have no bits in common,
	     we should treat this as a BIT_IOR_EXPR since this may produce more
	     simplifications.  */
	  if (TREE_CODE (arg0) == BIT_AND_EXPR
	      && TREE_CODE (arg1) == BIT_AND_EXPR
	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (BIT_AND_EXPR,
					     TREE_OPERAND (arg0, 1),
					     TREE_OPERAND (arg1, 1), 0)))
	    {
	      code = BIT_IOR_EXPR;
	      goto bit_ior;
	    }

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
	    {
	      tree arg00, arg01, arg10, arg11;
	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

	      /* (A * C) + (B * C) -> (A+B) * C.
		 We are most concerned about the case where C is a constant,
		 but other combinations show up during loop reduction.  Since
		 it is not difficult, try all four possibilities.  */

	      arg00 = TREE_OPERAND (arg0, 0);
	      arg01 = TREE_OPERAND (arg0, 1);
	      arg10 = TREE_OPERAND (arg1, 0);
	      arg11 = TREE_OPERAND (arg1, 1);
	      same = NULL_TREE;

	      if (operand_equal_p (arg01, arg11, 0))
		same = arg01, alt0 = arg00, alt1 = arg10;
	      else if (operand_equal_p (arg00, arg10, 0))
		same = arg00, alt0 = arg01, alt1 = arg11;
	      else if (operand_equal_p (arg00, arg11, 0))
		same = arg00, alt0 = arg01, alt1 = arg10;
	      else if (operand_equal_p (arg01, arg10, 0))
		same = arg01, alt0 = arg00, alt1 = arg11;

	      if (same)
	        return fold (build (MULT_EXPR, type,
				    fold (build (PLUS_EXPR, type, alt0, alt1)),
				    same));
	    }
	}
      /* In IEEE floating point, x+0 may not equal x.  */
      else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
		|| flag_fast_math)
	       && real_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* NOTE: the ASSOCIATE label below is shared; MINUS_EXPR, MULT_EXPR,
	 the bitwise cases and MIN/MAX jump here via "goto associate".  */
    associate:
      /* In most languages, can't associate operations on floats
	 through parentheses.  Rather than remember where the parentheses
	 were, we don't associate floats at all.  It shouldn't matter much.
	 However, associating multiplications is only very slightly
	 inaccurate, so do that if -ffast-math is specified.  */
      if (FLOAT_TYPE_P (type)
	  && ! (flag_fast_math && code == MULT_EXPR))
	goto binary;

      /* The varsign == -1 cases happen only for addition and subtraction.
	 It says that the arg that was split was really CON minus VAR.
	 The rest of the code applies to all associative operations.  */
      if (!wins)
	{
	  tree var, con;
	  int varsign;

	  if (split_tree (arg0, code, &var, &con, &varsign))
	    {
	      if (varsign == -1)
		{
		  /* EXPR is (CON-VAR) +- ARG1.  */
		  /* If it is + and VAR==ARG1, return just CONST.  */
		  if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
		    return convert (TREE_TYPE (t), con);

		  /* If ARG0 is a constant, don't change things around;
		     instead keep all the constant computations together.  */

		  if (TREE_CONSTANT (arg0))
		    return t;

		  /* Otherwise return (CON +- ARG1) - VAR.  */
		  t = build (MINUS_EXPR, type,
			     fold (build (code, type, con, arg1)), var);
		}
	      else
		{
		  /* EXPR is (VAR+CON) +- ARG1.  */
		  /* If it is - and VAR==ARG1, return just CONST.  */
		  if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
		    return convert (TREE_TYPE (t), con);

		  /* If ARG0 is a constant, don't change things around;
		     instead keep all the constant computations together.  */

		  if (TREE_CONSTANT (arg0))
		    return t;

		  /* Otherwise return VAR +- (ARG1 +- CON).  */
		  tem = fold (build (code, type, arg1, con));
		  t = build (code, type, var, tem);

		  if (integer_zerop (tem)
		      && (code == PLUS_EXPR || code == MINUS_EXPR))
		    return convert (type, var);
		  /* If we have x +/- (c - d) [c an explicit integer]
		     change it to x -/+ (d - c) since if d is relocatable
		     then the latter can be a single immediate insn
		     and the former cannot.  */
		  if (TREE_CODE (tem) == MINUS_EXPR
		      && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
		    {
		      tree tem1 = TREE_OPERAND (tem, 1);
		      TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
		      TREE_OPERAND (tem, 0) = tem1;
		      TREE_SET_CODE (t,
				     (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
		    }
		}
	      return t;
	    }

	  if (split_tree (arg1, code, &var, &con, &varsign))
	    {
	      if (TREE_CONSTANT (arg1))
		return t;

	      if (varsign == -1)
		TREE_SET_CODE (t,
			       (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));

	      /* EXPR is ARG0 +- (CON +- VAR).  */
	      if (TREE_CODE (t) == MINUS_EXPR
		  && operand_equal_p (var, arg0, 0))
		{
		  /* If VAR and ARG0 cancel, return just CON or -CON.  */
		  if (code == PLUS_EXPR)
		    return convert (TREE_TYPE (t), con);
		  return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
				       convert (TREE_TYPE (t), con)));
		}

	      t = build (TREE_CODE (t), type,
			 fold (build (code, TREE_TYPE (t), arg0, con)), var);

	      if (integer_zerop (TREE_OPERAND (t, 0))
		  && TREE_CODE (t) == PLUS_EXPR)
		return convert (TREE_TYPE (t), var);
	      return t;
	    }
	}
      /* NOTE: the BINARY label is also a shared goto target: it performs
	 the final constant-folding step for any binary operator whose
	 operands both folded to constants (WINS).  */
    binary:
#if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == REAL_CST)
	return t;
#endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
      if (wins)
	t1 = const_binop (code, arg0, arg1, 0);
      if (t1 != NULL_TREE)
	{
	  /* The return value should always have
	     the same type as the original expression.  */
	  if (TREE_TYPE (t1) != TREE_TYPE (t))
	    t1 = convert (TREE_TYPE (t), t1);

	  return t1;
	}
      return t;
4932
    case MINUS_EXPR:
      /* Integer identities for subtraction; IEEE-safe identities for
	 floats; then fall into the shared associate logic.  */
      if (! FLOAT_TYPE_P (type))
	{
	  if (! wins && integer_zerop (arg0))
	    return build1 (NEGATE_EXPR, type, arg1);
	  if (integer_zerop (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
	     about the case where C is a constant, just try one of the
	     four possibilities.  */

	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0))
	    return fold (build (MULT_EXPR, type,
				fold (build (MINUS_EXPR, type,
					     TREE_OPERAND (arg0, 0),
					     TREE_OPERAND (arg1, 0))),
				TREE_OPERAND (arg0, 1)));
	}
      /* Convert A - (-B) to A + B.  */
      else if (TREE_CODE (arg1) == NEGATE_EXPR)
	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));

      else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	       || flag_fast_math)
	{
	  /* Except with IEEE floating point, 0-x equals -x.  */
	  if (! wins && real_zerop (arg0))
	    return build1 (NEGATE_EXPR, type, arg1);
	  /* Except with IEEE floating point, x-0 equals x.  */
	  if (real_zerop (arg1))
	    return non_lvalue (convert (type, arg0));
	}

      /* Fold &x - &x.  This can happen from &x.foo - &x.
	 This is unsafe for certain floats even in non-IEEE formats.
	 In IEEE, it is unsafe because it does wrong for NaNs.
	 Also note that operand_equal_p is always false if an operand
	 is volatile.  */

      if ((! FLOAT_TYPE_P (type) || flag_fast_math)
	  && operand_equal_p (arg0, arg1, 0))
	return convert (type, integer_zero_node);

      goto associate;
4980
    case MULT_EXPR:
      /* Multiplication identities: x*0, x*1, (a/C)*C for exact division,
	 a*(1<<b) as a shift; float-specific variants below.  */
      if (! FLOAT_TYPE_P (type))
	{
	  if (integer_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  if (integer_onep (arg1))
	    return non_lvalue (convert (type, arg0));

	  /* ((A / C) * C) is A if the division is an
	     EXACT_DIV_EXPR.   Since C is normally a constant,
	     just check for one of the four possibilities.  */

	  if (TREE_CODE (arg0) == EXACT_DIV_EXPR
	      && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
	    return TREE_OPERAND (arg0, 0);

	  /* (a * (1 << b)) is (a << b)  */
	  if (TREE_CODE (arg1) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg1, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg0,
				TREE_OPERAND (arg1, 1)));
	  if (TREE_CODE (arg0) == LSHIFT_EXPR
	      && integer_onep (TREE_OPERAND (arg0, 0)))
	    return fold (build (LSHIFT_EXPR, type, arg1,
				TREE_OPERAND (arg0, 1)));
	}
      else
	{
	  /* x*0 is 0, except for IEEE floating point.  */
	  if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
	       || flag_fast_math)
	      && real_zerop (arg1))
	    return omit_one_operand (type, arg1, arg0);
	  /* In IEEE floating point, x*1 is not equivalent to x for snans.
	     However, ANSI says we can drop signals,
	     so we can do this anyway.  */
	  if (real_onep (arg1))
	    return non_lvalue (convert (type, arg0));
	  /* x*2 is x+x */
	  if (! wins && real_twop (arg1) && current_function_decl != 0
	      && ! contains_placeholder_p (arg0))
	    {
	      /* save_expr so ARG0 is evaluated only once in X+X.  */
	      tree arg = save_expr (arg0);
	      return build (PLUS_EXPR, type, arg, arg);
	    }
	}
      goto associate;
5028
    case BIT_IOR_EXPR:
    bit_ior:
      {
      register enum tree_code code0, code1;

      /* OR identities, distribution, then rotate recognition.  */
      if (integer_all_onesp (arg1))
	return omit_one_operand (type, arg1, arg0);
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;

      /* (A << C1) | (A >> C2) if A is unsigned and C1+C2 is the size of A
	 is a rotate of A by C1 bits.  */
      /* (A << B) | (A >> (Z - B)) if A is unsigned and Z is the size of A
	 is a rotate of A by B bits.  */

      code0 = TREE_CODE (arg0);
      code1 = TREE_CODE (arg1);
      if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
	  || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
	  && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  register tree tree01, tree11;
	  register enum tree_code code01, code11;

	  tree01 = TREE_OPERAND (arg0, 1);
	  tree11 = TREE_OPERAND (arg1, 1);
	  STRIP_NOPS (tree01);
	  STRIP_NOPS (tree11);
	  code01 = TREE_CODE (tree01);
	  code11 = TREE_CODE (tree11);
	  if (code01 == INTEGER_CST
	    && code11 == INTEGER_CST
	    && TREE_INT_CST_HIGH (tree01) == 0
	    && TREE_INT_CST_HIGH (tree11) == 0
	    && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
	    return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
		      code0 == LSHIFT_EXPR ? tree01 : tree11);
	  else if (code11 == MINUS_EXPR)
	    {
	      /* Variable count:  (A << B) | (A >> (Z - B)).  */
	      tree tree110, tree111;
	      tree110 = TREE_OPERAND (tree11, 0);
	      tree111 = TREE_OPERAND (tree11, 1);
	      STRIP_NOPS (tree110);
	      STRIP_NOPS (tree111);
	      if (TREE_CODE (tree110) == INTEGER_CST
		  && TREE_INT_CST_HIGH (tree110) == 0
		  && (TREE_INT_CST_LOW (tree110)
		      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))
		  && operand_equal_p (tree01, tree111, 0))
		return build ((code0 == LSHIFT_EXPR
			       ? LROTATE_EXPR
			       : RROTATE_EXPR),
			      type, TREE_OPERAND (arg0, 0), tree01);
	    }
	  else if (code01 == MINUS_EXPR)
	    {
	      /* Mirror image:  (A << (Z - B)) | (A >> B).  */
	      tree tree010, tree011;
	      tree010 = TREE_OPERAND (tree01, 0);
	      tree011 = TREE_OPERAND (tree01, 1);
	      STRIP_NOPS (tree010);
	      STRIP_NOPS (tree011);
	      if (TREE_CODE (tree010) == INTEGER_CST
		  && TREE_INT_CST_HIGH (tree010) == 0
		  && (TREE_INT_CST_LOW (tree010)
		      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0))))
		  && operand_equal_p (tree11, tree011, 0))
		return build ((code0 != LSHIFT_EXPR
			       ? LROTATE_EXPR
			       : RROTATE_EXPR),
			       type, TREE_OPERAND (arg0, 0), tree11);
	    }
	}

      goto associate;
      }
5109
    case BIT_XOR_EXPR:
      /* x ^ 0 is x; x ^ ~0 is ~x.  */
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_all_onesp (arg1))
	return fold (build1 (BIT_NOT_EXPR, type, arg0));
      goto associate;
5116
    case BIT_AND_EXPR:
    bit_and:
      /* AND identities, distribution, and dropping a mask that is
	 already implied by a narrowing unsigned conversion.  */
      if (integer_all_onesp (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return omit_one_operand (type, arg1, arg0);
      t1 = distribute_bit_expr (code, type, arg0, arg1);
      if (t1 != NULL_TREE)
	return t1;
      /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char.  */
      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
	{
	  int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg0)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
	}
      /* Same transformation with the operands swapped.  */
      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
	      && (~TREE_INT_CST_LOW (arg1)
		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
	    return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
	}
      goto associate;
5146
    case BIT_ANDTC_EXPR:
      /* A AND-with-complement B == A & ~B; with constant B, rewrite as
	 a plain AND so the BIT_AND_EXPR simplifications above apply.  */
      if (integer_all_onesp (arg0))
	return non_lvalue (convert (type, arg1));
      if (integer_zerop (arg0))
	return omit_one_operand (type, arg0, arg1);
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
	  code = BIT_AND_EXPR;
	  goto bit_and;
	}
      goto binary;
5159
    case RDIV_EXPR:
      /* Floating-point division: x/1 is x, and division by a constant
	 may become multiplication by its reciprocal.  */
      /* In most cases, do nothing with a divide by zero.  */
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
#ifndef REAL_INFINITY
      if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
	return t;
#endif
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */

      /* In IEEE floating point, x/1 is not equivalent to x for snans.
	 However, ANSI says we can drop signals, so we can do this anyway.  */
      if (real_onep (arg1))
	return non_lvalue (convert (type, arg0));

      /* If ARG1 is a constant, we can convert this to a multiply by the
	 reciprocal.  This does not have the same rounding properties,
	 so only do this if -ffast-math.  We can actually always safely
	 do it if ARG1 is a power of two, but it's hard to tell if it is
	 or not in a portable manner.  */
      if (TREE_CODE (arg1) == REAL_CST)
	{
	  if (flag_fast_math
	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
					  arg1, 0)))
	    return fold (build (MULT_EXPR, type, arg0, tem));
	  /* Find the reciprocal if optimizing and the result is exact. */
	  else if (optimize)
	    {
	      REAL_VALUE_TYPE r;
	      r = TREE_REAL_CST (arg1);
	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
		  {
		    tem = build_real (type, r);
		    return fold (build (MULT_EXPR, type, arg0, tem));
		  }
	    }
	}
      goto binary;
5198
    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Integer division simplifications: x/1, division by zero left
	 alone, exact-division rewrites, combining nested divisions, and
	 simplifying (a*C1 [+C2]) / C3.  */
      if (integer_onep (arg1))
	return non_lvalue (convert (type, arg0));
      if (integer_zerop (arg1))
	return t;

      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
	 operation, EXACT_DIV_EXPR.

	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, it's not clear if they do
	 after the last round of changes to the DIV code in expmed.c.  */
      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
	  && multiple_of_p (type, arg0, arg1))
	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));

      /* If we have ((a / C1) / C2) where both division are the same type, try
	 to simplify.  First see if C1 * C2 overflows or not.  */
      if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	{
	  tree new_divisor;

	  new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
	  tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);

	  /* (C1*C2)/C2 == C1 only when the multiplication did not
	     overflow; compare both halves of the double-int.  */
	  if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
	      && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
	    {
	      /* If no overflow, divide by C1*C2.  */
	      return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
	    }
	}

      /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
	 where C1 % C3 == 0 or C3 % C1 == 0.  We can simplify these
	 expressions, which often appear in the offsets or sizes of
	 objects with a varying size.  Only deal with positive divisors
	 and multiplicands.   If C2 is negative, we must have C2 % C3 == 0.

	 Look for NOPs and SAVE_EXPRs inside.  */

      if (TREE_CODE (arg1) == INTEGER_CST
	  && tree_int_cst_sgn (arg1) >= 0)
	{
	  int have_save_expr = 0;
	  tree c2 = integer_zero_node;
	  tree xarg0 = arg0;

	  if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
	    have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);

	  STRIP_NOPS (xarg0);

	  /* Look inside the dividend and simplify using EXACT_DIV_EXPR
	     if possible.  */
	  if (TREE_CODE (xarg0) == MULT_EXPR
	      && multiple_of_p (type, TREE_OPERAND (xarg0, 0), arg1))
	    {
	      tree t;

	      t = fold (build (MULT_EXPR, type,
			       fold (build (EXACT_DIV_EXPR, type,
					    TREE_OPERAND (xarg0, 0), arg1)),
			       TREE_OPERAND (xarg0, 1)));
	      if (have_save_expr)
		t = save_expr (t);
	      return t;

	    }

	  if (TREE_CODE (xarg0) == MULT_EXPR
	      && multiple_of_p (type, TREE_OPERAND (xarg0, 1), arg1))
	    {
	      tree t;

	      t = fold (build (MULT_EXPR, type,
			       fold (build (EXACT_DIV_EXPR, type,
					    TREE_OPERAND (xarg0, 1), arg1)),
			       TREE_OPERAND (xarg0, 0)));
	      if (have_save_expr)
		t = save_expr (t);
	      return t;
	    }

	  if (TREE_CODE (xarg0) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
	    c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
	  else if (TREE_CODE (xarg0) == MINUS_EXPR
		   && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
		   /* If we are doing this computation unsigned, the negate
		      is incorrect.  */
		   && ! TREE_UNSIGNED (type))
	    {
	      c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
	      xarg0 = TREE_OPERAND (xarg0, 0);
	    }

	  if (TREE_CODE (xarg0) == SAVE_EXPR && SAVE_EXPR_RTL (xarg0) == 0)
	    have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);

	  STRIP_NOPS (xarg0);

	  if (TREE_CODE (xarg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
	      && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
	      && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
					      TREE_OPERAND (xarg0, 1), arg1, 1))
		  || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
						 TREE_OPERAND (xarg0, 1), 1)))
	      && (tree_int_cst_sgn (c2) >= 0
		  || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
						 arg1, 1))))
	    {
	      tree outer_div = integer_one_node;
	      tree c1 = TREE_OPERAND (xarg0, 1);
	      tree c3 = arg1;

	      /* If C3 > C1, set them equal and do a divide by
		 C3/C1 at the end of the operation.  */
	      if (tree_int_cst_lt (c1, c3))
		outer_div = const_binop (code, c3, c1, 0), c3 = c1;

	      /* The result is A * (C1/C3) + (C2/C3).  */
	      t = fold (build (PLUS_EXPR, type,
			       fold (build (MULT_EXPR, type,
					    TREE_OPERAND (xarg0, 0),
					    const_binop (code, c1, c3, 1))),
			       const_binop (code, c2, c3, 1)));

	      if (! integer_onep (outer_div))
		t = fold (build (code, type, t, convert (type, outer_div)));

	      if (have_save_expr)
		t = save_expr (t);

	      return t;
	    }
	}

      goto binary;
5344
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* Modulus simplifications: x%1 is 0, modulus by zero left alone,
	 and ((a*C1 [+C2]) % C3) when C1 % C3 == 0.  */
      if (integer_onep (arg1))
	return omit_one_operand (type, integer_zero_node, arg0);
      if (integer_zerop (arg1))
	return t;

      /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
	 where C1 % C3 == 0.  Handle similarly to the division case,
	 but don't bother with SAVE_EXPRs.  */

      if (TREE_CODE (arg1) == INTEGER_CST
	  && ! integer_zerop (arg1))
	{
	  tree c2 = integer_zero_node;
	  tree xarg0 = arg0;

	  if (TREE_CODE (xarg0) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
	    c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
	  else if (TREE_CODE (xarg0) == MINUS_EXPR
		   && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
		   /* For unsigned arithmetic the negation would be wrong.  */
		   && ! TREE_UNSIGNED (type))
	    {
	      c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
	      xarg0 = TREE_OPERAND (xarg0, 0);
	    }

	  STRIP_NOPS (xarg0);

	  if (TREE_CODE (xarg0) == MULT_EXPR
	      && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
	      && integer_zerop (const_binop (TRUNC_MOD_EXPR,
					     TREE_OPERAND (xarg0, 1),
					     arg1, 1))
	      && tree_int_cst_sgn (c2) >= 0)
	    /* The result is (C2%C3).  */
	    return omit_one_operand (type, const_binop (code, c2, arg1, 1),
				     TREE_OPERAND (xarg0, 0));
	}

      goto binary;
5389
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* Shift and rotate simplifications: zero count, canonicalizing
	 left rotates to right rotates, commuting a rotate with a bitwise
	 operation, and cancelling complementary rotates.  */
      if (integer_zerop (arg1))
	return non_lvalue (convert (type, arg0));
      /* Since negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
	return t;
      /* Rewrite an LROTATE_EXPR by a constant into an
	 RROTATE_EXPR by a new constant.  */
      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
	{
	  TREE_SET_CODE (t, RROTATE_EXPR);
	  code = RROTATE_EXPR;
	  TREE_OPERAND (t, 1) = arg1
	    = const_binop
	      (MINUS_EXPR,
	       convert (TREE_TYPE (arg1),
			build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
	       arg1, 0);
	  if (tree_int_cst_sgn (arg1) < 0)
	    return t;
	}

      /* If we have a rotate of a bit operation with the rotate count and
	 the second operand of the bit operation both constant,
	 permute the two operations.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && (TREE_CODE (arg0) == BIT_AND_EXPR
	      || TREE_CODE (arg0) == BIT_ANDTC_EXPR
	      || TREE_CODE (arg0) == BIT_IOR_EXPR
	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	return fold (build (TREE_CODE (arg0), type,
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 0), arg1)),
			    fold (build (code, type,
					 TREE_OPERAND (arg0, 1), arg1))));

      /* Two consecutive rotates adding up to the width of the mode can
	 be ignored.  */
      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
	  && TREE_CODE (arg0) == RROTATE_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (arg1) == 0
	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
	  && ((TREE_INT_CST_LOW (arg1)
	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
	      == GET_MODE_BITSIZE (TYPE_MODE (type))))
	return TREE_OPERAND (arg0, 0);

      goto binary;
5444
    case MIN_EXPR:
      /* min (a, a) is a; min (a, TYPE_MIN) is TYPE_MIN.  */
      if (operand_equal_p (arg0, arg1, 0))
	return arg0;
      if (INTEGRAL_TYPE_P (type)
	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;

    case MAX_EXPR:
      /* max (a, a) is a; max (a, TYPE_MAX) is TYPE_MAX (when the type
	 has a maximum value recorded).  */
      if (operand_equal_p (arg0, arg1, 0))
	return arg0;
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_MAX_VALUE (type)
	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
	return omit_one_operand (type, arg1, arg0);
      goto associate;
5461
    case TRUTH_NOT_EXPR:
      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = invert_truthvalue (arg0);
      /* Avoid infinite recursion.  */
      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
	return t;
      return convert (type, tem);
5472
5473    case TRUTH_ANDIF_EXPR:
5474      /* Note that the operands of this must be ints
5475	 and their values must be 0 or 1.
5476	 ("true" is a fixed value perhaps depending on the language.)  */
5477      /* If first arg is constant zero, return it.  */
5478      if (integer_zerop (arg0))
5479	return arg0;
5480    case TRUTH_AND_EXPR:
5481      /* If either arg is constant true, drop it.  */
5482      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5483	return non_lvalue (arg1);
5484      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5485	return non_lvalue (arg0);
5486      /* If second arg is constant zero, result is zero, but first arg
5487	 must be evaluated.  */
5488      if (integer_zerop (arg1))
5489	return omit_one_operand (type, arg1, arg0);
5490      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
5491	 case will be handled here.  */
5492      if (integer_zerop (arg0))
5493	return omit_one_operand (type, arg0, arg1);
5494
5495    truth_andor:
5496      /* We only do these simplifications if we are optimizing.  */
5497      if (!optimize)
5498	return t;
5499
5500      /* Check for things like (A || B) && (A || C).  We can convert this
5501	 to A || (B && C).  Note that either operator can be any of the four
5502	 truth and/or operations and the transformation will still be
5503	 valid.   Also note that we only care about order for the
5504	 ANDIF and ORIF operators.  If B contains side effects, this
5505	 might change the truth-value of A. */
5506      if (TREE_CODE (arg0) == TREE_CODE (arg1)
5507	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
5508	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
5509	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
5510	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
5511	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
5512	{
5513	  tree a00 = TREE_OPERAND (arg0, 0);
5514	  tree a01 = TREE_OPERAND (arg0, 1);
5515	  tree a10 = TREE_OPERAND (arg1, 0);
5516	  tree a11 = TREE_OPERAND (arg1, 1);
5517	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
5518			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
5519			     && (code == TRUTH_AND_EXPR
5520				 || code == TRUTH_OR_EXPR));
5521
5522	  if (operand_equal_p (a00, a10, 0))
5523	    return fold (build (TREE_CODE (arg0), type, a00,
5524				fold (build (code, type, a01, a11))));
5525	  else if (commutative && operand_equal_p (a00, a11, 0))
5526	    return fold (build (TREE_CODE (arg0), type, a00,
5527				fold (build (code, type, a01, a10))));
5528	  else if (commutative && operand_equal_p (a01, a10, 0))
5529	    return fold (build (TREE_CODE (arg0), type, a01,
5530				fold (build (code, type, a00, a11))));
5531
	  /* This case is tricky because we must either have commutative
	     operators or else A10 must not have side-effects.  */
5534
5535	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
5536		   && operand_equal_p (a01, a11, 0))
5537	    return fold (build (TREE_CODE (arg0), type,
5538				fold (build (code, type, a00, a10)),
5539				a01));
5540	}
5541
5542      /* See if we can build a range comparison.  */
5543      if (0 != (tem = fold_range_test (t)))
5544	return tem;
5545
5546      /* Check for the possibility of merging component references.  If our
5547	 lhs is another similar operation, try to merge its rhs with our
5548	 rhs.  Then try to merge our lhs and rhs.  */
5549      if (TREE_CODE (arg0) == code
5550	  && 0 != (tem = fold_truthop (code, type,
5551				       TREE_OPERAND (arg0, 1), arg1)))
5552	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
5553
5554      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
5555	return tem;
5556
5557      return t;
5558
5559    case TRUTH_ORIF_EXPR:
5560      /* Note that the operands of this must be ints
5561	 and their values must be 0 or true.
5562	 ("true" is a fixed value perhaps depending on the language.)  */
5563      /* If first arg is constant true, return it.  */
5564      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5565	return arg0;
5566    case TRUTH_OR_EXPR:
5567      /* If either arg is constant zero, drop it.  */
5568      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
5569	return non_lvalue (arg1);
5570      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
5571	return non_lvalue (arg0);
5572      /* If second arg is constant true, result is true, but we must
5573	 evaluate first arg.  */
5574      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
5575	return omit_one_operand (type, arg1, arg0);
5576      /* Likewise for first arg, but note this only occurs here for
5577	 TRUTH_OR_EXPR.  */
5578      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
5579	return omit_one_operand (type, arg0, arg1);
5580      goto truth_andor;
5581
5582    case TRUTH_XOR_EXPR:
5583      /* If either arg is constant zero, drop it.  */
5584      if (integer_zerop (arg0))
5585	return non_lvalue (arg1);
5586      if (integer_zerop (arg1))
5587	return non_lvalue (arg0);
5588      /* If either arg is constant true, this is a logical inversion.  */
5589      if (integer_onep (arg0))
5590	return non_lvalue (invert_truthvalue (arg1));
5591      if (integer_onep (arg1))
5592	return non_lvalue (invert_truthvalue (arg0));
5593      return t;
5594
5595    case EQ_EXPR:
5596    case NE_EXPR:
5597    case LT_EXPR:
5598    case GT_EXPR:
5599    case LE_EXPR:
5600    case GE_EXPR:
5601      /* If one arg is a constant integer, put it last.  */
5602      if (TREE_CODE (arg0) == INTEGER_CST
5603	  && TREE_CODE (arg1) != INTEGER_CST)
5604	{
5605	  TREE_OPERAND (t, 0) = arg1;
5606	  TREE_OPERAND (t, 1) = arg0;
5607	  arg0 = TREE_OPERAND (t, 0);
5608	  arg1 = TREE_OPERAND (t, 1);
5609	  code = swap_tree_comparison (code);
5610	  TREE_SET_CODE (t, code);
5611	}
5612
5613      /* Convert foo++ == CONST into ++foo == CONST + INCR.
5614	 First, see if one arg is constant; find the constant arg
5615	 and the other one.  */
5616      {
5617	tree constop = 0, varop = NULL_TREE;
5618	int constopnum = -1;
5619
5620	if (TREE_CONSTANT (arg1))
5621	  constopnum = 1, constop = arg1, varop = arg0;
5622	if (TREE_CONSTANT (arg0))
5623	  constopnum = 0, constop = arg0, varop = arg1;
5624
5625	if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
5626	  {
5627	    /* This optimization is invalid for ordered comparisons
5628	       if CONST+INCR overflows or if foo+incr might overflow.
5629	       This optimization is invalid for floating point due to rounding.
5630	       For pointer types we assume overflow doesn't happen.  */
5631	    if (POINTER_TYPE_P (TREE_TYPE (varop))
5632		|| (! FLOAT_TYPE_P (TREE_TYPE (varop))
5633		    && (code == EQ_EXPR || code == NE_EXPR)))
5634	      {
5635		tree newconst
5636		  = fold (build (PLUS_EXPR, TREE_TYPE (varop),
5637				 constop, TREE_OPERAND (varop, 1)));
5638
5639		/* Do not overwrite the current varop to be a preincrement,
5640		   create a new node so that we won't confuse our caller who
5641		   might create trees and throw them away, reusing the
5642		   arguments that they passed to build.  This shows up in
5643		   the THEN or ELSE parts of ?: being postincrements.  */
5644		varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
5645			       TREE_OPERAND (varop, 0),
5646			       TREE_OPERAND (varop, 1));
5647
5648		/* If VAROP is a reference to a bitfield, we must mask
5649		   the constant by the width of the field.  */
5650		if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5651		    && DECL_BIT_FIELD(TREE_OPERAND
5652				      (TREE_OPERAND (varop, 0), 1)))
5653		  {
5654		    int size
5655		      = TREE_INT_CST_LOW (DECL_SIZE
5656					  (TREE_OPERAND
5657					   (TREE_OPERAND (varop, 0), 1)));
5658		    tree mask, unsigned_type;
5659		    int precision;
5660		    tree folded_compare;
5661
5662		    /* First check whether the comparison would come out
5663		       always the same.  If we don't do that we would
5664		       change the meaning with the masking.  */
5665		    if (constopnum == 0)
5666		      folded_compare = fold (build (code, type, constop,
5667						    TREE_OPERAND (varop, 0)));
5668		    else
5669		      folded_compare = fold (build (code, type,
5670						    TREE_OPERAND (varop, 0),
5671						    constop));
5672		    if (integer_zerop (folded_compare)
5673			|| integer_onep (folded_compare))
5674		      return omit_one_operand (type, folded_compare, varop);
5675
5676		    unsigned_type = type_for_size (size, 1);
5677		    precision = TYPE_PRECISION (unsigned_type);
5678		    mask = build_int_2 (~0, ~0);
5679		    TREE_TYPE (mask) = unsigned_type;
5680		    force_fit_type (mask, 0);
5681		    mask = const_binop (RSHIFT_EXPR, mask,
5682					size_int (precision - size), 0);
5683		    newconst = fold (build (BIT_AND_EXPR,
5684					    TREE_TYPE (varop), newconst,
5685					    convert (TREE_TYPE (varop),
5686						     mask)));
5687		  }
5688
5689
5690		t = build (code, type,
5691			   (constopnum == 0) ? newconst : varop,
5692			   (constopnum == 1) ? newconst : varop);
5693		return t;
5694	      }
5695	  }
5696	else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
5697	  {
5698	    if (POINTER_TYPE_P (TREE_TYPE (varop))
5699		|| (! FLOAT_TYPE_P (TREE_TYPE (varop))
5700		    && (code == EQ_EXPR || code == NE_EXPR)))
5701	      {
5702		tree newconst
5703		  = fold (build (MINUS_EXPR, TREE_TYPE (varop),
5704				 constop, TREE_OPERAND (varop, 1)));
5705
5706		/* Do not overwrite the current varop to be a predecrement,
5707		   create a new node so that we won't confuse our caller who
5708		   might create trees and throw them away, reusing the
5709		   arguments that they passed to build.  This shows up in
5710		   the THEN or ELSE parts of ?: being postdecrements.  */
5711		varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
5712			       TREE_OPERAND (varop, 0),
5713			       TREE_OPERAND (varop, 1));
5714
5715		if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
5716		    && DECL_BIT_FIELD(TREE_OPERAND
5717				      (TREE_OPERAND (varop, 0), 1)))
5718		  {
5719		    int size
5720		      = TREE_INT_CST_LOW (DECL_SIZE
5721					  (TREE_OPERAND
5722					   (TREE_OPERAND (varop, 0), 1)));
5723		    tree mask, unsigned_type;
5724		    int precision;
5725		    tree folded_compare;
5726
5727		    if (constopnum == 0)
5728		      folded_compare = fold (build (code, type, constop,
5729						    TREE_OPERAND (varop, 0)));
5730		    else
5731		      folded_compare = fold (build (code, type,
5732						    TREE_OPERAND (varop, 0),
5733						    constop));
5734		    if (integer_zerop (folded_compare)
5735			|| integer_onep (folded_compare))
5736		      return omit_one_operand (type, folded_compare, varop);
5737
5738		    unsigned_type = type_for_size (size, 1);
5739		    precision = TYPE_PRECISION (unsigned_type);
5740		    mask = build_int_2 (~0, ~0);
5741		    TREE_TYPE (mask) = TREE_TYPE (varop);
5742		    force_fit_type (mask, 0);
5743		    mask = const_binop (RSHIFT_EXPR, mask,
5744					size_int (precision - size), 0);
5745		    newconst = fold (build (BIT_AND_EXPR,
5746					    TREE_TYPE (varop), newconst,
5747					    convert (TREE_TYPE (varop),
5748						     mask)));
5749		  }
5750
5751
5752		t = build (code, type,
5753			   (constopnum == 0) ? newconst : varop,
5754			   (constopnum == 1) ? newconst : varop);
5755		return t;
5756	      }
5757	  }
5758      }
5759
5760      /* Change X >= CST to X > (CST - 1) if CST is positive.  */
5761      if (TREE_CODE (arg1) == INTEGER_CST
5762	  && TREE_CODE (arg0) != INTEGER_CST
5763	  && tree_int_cst_sgn (arg1) > 0)
5764	{
5765	  switch (TREE_CODE (t))
5766	    {
5767	    case GE_EXPR:
5768	      code = GT_EXPR;
5769	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5770	      t = build (code, type, TREE_OPERAND (t, 0), arg1);
5771	      break;
5772
5773	    case LT_EXPR:
5774	      code = LE_EXPR;
5775	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
5776	      t = build (code, type, TREE_OPERAND (t, 0), arg1);
5777	      break;
5778
5779	    default:
5780	      break;
5781	    }
5782	}
5783
5784      /* If this is an EQ or NE comparison with zero and ARG0 is
5785	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
5786	 two operations, but the latter can be done in one less insn
5787	 on machines that have only two-operand insns or on which a
5788	 constant cannot be the first operand.  */
5789      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
5790	  && TREE_CODE (arg0) == BIT_AND_EXPR)
5791	{
5792	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
5793	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
5794	    return
5795	      fold (build (code, type,
5796			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
5797				  build (RSHIFT_EXPR,
5798					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
5799					 TREE_OPERAND (arg0, 1),
5800					 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
5801				  convert (TREE_TYPE (arg0),
5802					   integer_one_node)),
5803			   arg1));
5804	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
5805		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
5806	    return
5807	      fold (build (code, type,
5808			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
5809				  build (RSHIFT_EXPR,
5810					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
5811					 TREE_OPERAND (arg0, 0),
5812					 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
5813				  convert (TREE_TYPE (arg0),
5814					   integer_one_node)),
5815			   arg1));
5816	}
5817
5818      /* If this is an NE or EQ comparison of zero against the result of a
5819	 signed MOD operation whose second operand is a power of 2, make
5820	 the MOD operation unsigned since it is simpler and equivalent.  */
5821      if ((code == NE_EXPR || code == EQ_EXPR)
5822	  && integer_zerop (arg1)
5823	  && ! TREE_UNSIGNED (TREE_TYPE (arg0))
5824	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
5825	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
5826	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
5827	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
5828	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
5829	{
5830	  tree newtype = unsigned_type (TREE_TYPE (arg0));
5831	  tree newmod = build (TREE_CODE (arg0), newtype,
5832			       convert (newtype, TREE_OPERAND (arg0, 0)),
5833			       convert (newtype, TREE_OPERAND (arg0, 1)));
5834
5835	  return build (code, type, newmod, convert (newtype, arg1));
5836	}
5837
5838      /* If this is an NE comparison of zero with an AND of one, remove the
5839	 comparison since the AND will give the correct value.  */
5840      if (code == NE_EXPR && integer_zerop (arg1)
5841	  && TREE_CODE (arg0) == BIT_AND_EXPR
5842	  && integer_onep (TREE_OPERAND (arg0, 1)))
5843	return convert (type, arg0);
5844
5845      /* If we have (A & C) == C where C is a power of 2, convert this into
5846	 (A & C) != 0.  Similarly for NE_EXPR.  */
5847      if ((code == EQ_EXPR || code == NE_EXPR)
5848	  && TREE_CODE (arg0) == BIT_AND_EXPR
5849	  && integer_pow2p (TREE_OPERAND (arg0, 1))
5850	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
5851	return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
5852		      arg0, integer_zero_node);
5853
5854      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
5855	 and similarly for >= into !=.  */
5856      if ((code == LT_EXPR || code == GE_EXPR)
5857	  && TREE_UNSIGNED (TREE_TYPE (arg0))
5858	  && TREE_CODE (arg1) == LSHIFT_EXPR
5859	  && integer_onep (TREE_OPERAND (arg1, 0)))
5860	return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5861		      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5862			     TREE_OPERAND (arg1, 1)),
5863		      convert (TREE_TYPE (arg0), integer_zero_node));
5864
5865      else if ((code == LT_EXPR || code == GE_EXPR)
5866	       && TREE_UNSIGNED (TREE_TYPE (arg0))
5867	       && (TREE_CODE (arg1) == NOP_EXPR
5868		   || TREE_CODE (arg1) == CONVERT_EXPR)
5869	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
5870	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
5871	return
5872	  build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
5873		 convert (TREE_TYPE (arg0),
5874			  build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
5875				 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
5876		 convert (TREE_TYPE (arg0), integer_zero_node));
5877
5878      /* Simplify comparison of something with itself.  (For IEEE
5879	 floating-point, we can only do some of these simplifications.)  */
5880      if (operand_equal_p (arg0, arg1, 0))
5881	{
5882	  switch (code)
5883	    {
5884	    case EQ_EXPR:
5885	    case GE_EXPR:
5886	    case LE_EXPR:
5887	      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5888		return constant_boolean_node (1, type);
5889	      code = EQ_EXPR;
5890	      TREE_SET_CODE (t, code);
5891	      break;
5892
5893	    case NE_EXPR:
5894	      /* For NE, we can only do this simplification if integer.  */
5895	      if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
5896		break;
5897	      /* ... fall through ...  */
5898	    case GT_EXPR:
5899	    case LT_EXPR:
5900	      return constant_boolean_node (0, type);
5901	    default:
5902	      abort ();
5903	    }
5904	}
5905
5906      /* An unsigned comparison against 0 can be simplified.  */
5907      if (integer_zerop (arg1)
5908	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5909	      || POINTER_TYPE_P (TREE_TYPE (arg1)))
5910	  && TREE_UNSIGNED (TREE_TYPE (arg1)))
5911	{
5912	  switch (TREE_CODE (t))
5913	    {
5914	    case GT_EXPR:
5915	      code = NE_EXPR;
5916	      TREE_SET_CODE (t, NE_EXPR);
5917	      break;
5918	    case LE_EXPR:
5919	      code = EQ_EXPR;
5920	      TREE_SET_CODE (t, EQ_EXPR);
5921	      break;
5922	    case GE_EXPR:
5923	      return omit_one_operand (type,
5924				       convert (type, integer_one_node),
5925				       arg0);
5926	    case LT_EXPR:
5927	      return omit_one_operand (type,
5928				       convert (type, integer_zero_node),
5929				       arg0);
5930	    default:
5931	      break;
5932	    }
5933	}
5934
5935      /* An unsigned <= 0x7fffffff can be simplified.  */
5936      {
5937	int width = TYPE_PRECISION (TREE_TYPE (arg1));
5938	if (TREE_CODE (arg1) == INTEGER_CST
5939	    && ! TREE_CONSTANT_OVERFLOW (arg1)
5940	    && width <= HOST_BITS_PER_WIDE_INT
5941	    && TREE_INT_CST_LOW (arg1) == ((HOST_WIDE_INT) 1 << (width - 1)) - 1
5942	    && TREE_INT_CST_HIGH (arg1) == 0
5943	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
5944		|| POINTER_TYPE_P (TREE_TYPE (arg1)))
5945	    && TREE_UNSIGNED (TREE_TYPE (arg1)))
5946	  {
5947	    switch (TREE_CODE (t))
5948	      {
5949	      case LE_EXPR:
5950		return fold (build (GE_EXPR, type,
5951				    convert (signed_type (TREE_TYPE (arg0)),
5952					     arg0),
5953				    convert (signed_type (TREE_TYPE (arg1)),
5954					     integer_zero_node)));
5955	      case GT_EXPR:
5956		return fold (build (LT_EXPR, type,
5957				    convert (signed_type (TREE_TYPE (arg0)),
5958					     arg0),
5959				    convert (signed_type (TREE_TYPE (arg1)),
5960					     integer_zero_node)));
5961	      default:
5962		break;
5963	      }
5964	  }
5965      }
5966
5967      /* If we are comparing an expression that just has comparisons
5968	 of two integer values, arithmetic expressions of those comparisons,
5969	 and constants, we can simplify it.  There are only three cases
5970	 to check: the two values can either be equal, the first can be
5971	 greater, or the second can be greater.  Fold the expression for
5972	 those three values.  Since each value must be 0 or 1, we have
5973	 eight possibilities, each of which corresponds to the constant 0
5974	 or 1 or one of the six possible comparisons.
5975
5976	 This handles common cases like (a > b) == 0 but also handles
5977	 expressions like  ((x > y) - (y > x)) > 0, which supposedly
5978	 occur in macroized code.  */
5979
5980      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
5981	{
5982	  tree cval1 = 0, cval2 = 0;
5983	  int save_p = 0;
5984
5985	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
5986	      /* Don't handle degenerate cases here; they should already
5987		 have been handled anyway.  */
5988	      && cval1 != 0 && cval2 != 0
5989	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
5990	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
5991	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
5992	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
5993	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
5994	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
5995				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
5996	    {
5997	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
5998	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
5999
6000	      /* We can't just pass T to eval_subst in case cval1 or cval2
6001		 was the same as ARG1.  */
6002
6003	      tree high_result
6004		= fold (build (code, type,
6005			       eval_subst (arg0, cval1, maxval, cval2, minval),
6006			       arg1));
6007	      tree equal_result
6008		= fold (build (code, type,
6009			       eval_subst (arg0, cval1, maxval, cval2, maxval),
6010			       arg1));
6011	      tree low_result
6012		= fold (build (code, type,
6013			       eval_subst (arg0, cval1, minval, cval2, maxval),
6014			       arg1));
6015
6016	      /* All three of these results should be 0 or 1.  Confirm they
6017		 are.  Then use those values to select the proper code
6018		 to use.  */
6019
6020	      if ((integer_zerop (high_result)
6021		   || integer_onep (high_result))
6022		  && (integer_zerop (equal_result)
6023		      || integer_onep (equal_result))
6024		  && (integer_zerop (low_result)
6025		      || integer_onep (low_result)))
6026		{
6027		  /* Make a 3-bit mask with the high-order bit being the
6028		     value for `>', the next for '=', and the low for '<'.  */
6029		  switch ((integer_onep (high_result) * 4)
6030			  + (integer_onep (equal_result) * 2)
6031			  + integer_onep (low_result))
6032		    {
6033		    case 0:
6034		      /* Always false.  */
6035		      return omit_one_operand (type, integer_zero_node, arg0);
6036		    case 1:
6037		      code = LT_EXPR;
6038		      break;
6039		    case 2:
6040		      code = EQ_EXPR;
6041		      break;
6042		    case 3:
6043		      code = LE_EXPR;
6044		      break;
6045		    case 4:
6046		      code = GT_EXPR;
6047		      break;
6048		    case 5:
6049		      code = NE_EXPR;
6050		      break;
6051		    case 6:
6052		      code = GE_EXPR;
6053		      break;
6054		    case 7:
6055		      /* Always true.  */
6056		      return omit_one_operand (type, integer_one_node, arg0);
6057		    }
6058
6059		  t = build (code, type, cval1, cval2);
6060		  if (save_p)
6061		    return save_expr (t);
6062		  else
6063		    return fold (t);
6064		}
6065	    }
6066	}
6067
6068      /* If this is a comparison of a field, we may be able to simplify it.  */
6069      if ((TREE_CODE (arg0) == COMPONENT_REF
6070	   || TREE_CODE (arg0) == BIT_FIELD_REF)
6071	  && (code == EQ_EXPR || code == NE_EXPR)
6072	  /* Handle the constant case even without -O
6073	     to make sure the warnings are given.  */
6074	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
6075	{
6076	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
6077	  return t1 ? t1 : t;
6078	}
6079
6080      /* If this is a comparison of complex values and either or both sides
6081	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
6082	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
6083	 This may prevent needless evaluations.  */
6084      if ((code == EQ_EXPR || code == NE_EXPR)
6085	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
6086	  && (TREE_CODE (arg0) == COMPLEX_EXPR
6087	      || TREE_CODE (arg1) == COMPLEX_EXPR
6088	      || TREE_CODE (arg0) == COMPLEX_CST
6089	      || TREE_CODE (arg1) == COMPLEX_CST))
6090	{
6091	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
6092	  tree real0, imag0, real1, imag1;
6093
6094	  arg0 = save_expr (arg0);
6095	  arg1 = save_expr (arg1);
6096	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
6097	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
6098	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
6099	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
6100
6101	  return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
6102			       : TRUTH_ORIF_EXPR),
6103			      type,
6104			      fold (build (code, type, real0, real1)),
6105			      fold (build (code, type, imag0, imag1))));
6106	}
6107
6108      /* From here on, the only cases we handle are when the result is
6109	 known to be a constant.
6110
6111	 To compute GT, swap the arguments and do LT.
6112	 To compute GE, do LT and invert the result.
6113	 To compute LE, swap the arguments, do LT and invert the result.
6114	 To compute NE, do EQ and invert the result.
6115
6116	 Therefore, the code below must handle only EQ and LT.  */
6117
6118      if (code == LE_EXPR || code == GT_EXPR)
6119	{
6120	  tem = arg0, arg0 = arg1, arg1 = tem;
6121	  code = swap_tree_comparison (code);
6122	}
6123
6124      /* Note that it is safe to invert for real values here because we
6125	 will check below in the one case that it matters.  */
6126
6127      invert = 0;
6128      if (code == NE_EXPR || code == GE_EXPR)
6129	{
6130	  invert = 1;
6131	  code = invert_tree_comparison (code);
6132	}
6133
6134      /* Compute a result for LT or EQ if args permit;
6135	 otherwise return T.  */
6136      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6137	{
6138	  if (code == EQ_EXPR)
6139	    t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
6140			       == TREE_INT_CST_LOW (arg1))
6141			      && (TREE_INT_CST_HIGH (arg0)
6142				  == TREE_INT_CST_HIGH (arg1)),
6143			      0);
6144	  else
6145	    t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
6146			       ? INT_CST_LT_UNSIGNED (arg0, arg1)
6147			       : INT_CST_LT (arg0, arg1)),
6148			      0);
6149	}
6150
6151#if 0 /* This is no longer useful, but breaks some real code.  */
6152      /* Assume a nonexplicit constant cannot equal an explicit one,
6153	 since such code would be undefined anyway.
6154	 Exception: on sysvr4, using #pragma weak,
6155	 a label can come out as 0.  */
6156      else if (TREE_CODE (arg1) == INTEGER_CST
6157	       && !integer_zerop (arg1)
6158	       && TREE_CONSTANT (arg0)
6159	       && TREE_CODE (arg0) == ADDR_EXPR
6160	       && code == EQ_EXPR)
6161	t1 = build_int_2 (0, 0);
6162#endif
6163      /* Two real constants can be compared explicitly.  */
6164      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6165	{
6166	  /* If either operand is a NaN, the result is false with two
6167	     exceptions: First, an NE_EXPR is true on NaNs, but that case
6168	     is already handled correctly since we will be inverting the
6169	     result for NE_EXPR.  Second, if we had inverted a LE_EXPR
6170	     or a GE_EXPR into a LT_EXPR, we must return true so that it
6171	     will be inverted into false.  */
6172
6173	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
6174	      || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
6175	    t1 = build_int_2 (invert && code == LT_EXPR, 0);
6176
6177	  else if (code == EQ_EXPR)
6178	    t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
6179						 TREE_REAL_CST (arg1)),
6180			      0);
6181	  else
6182	    t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
6183						TREE_REAL_CST (arg1)),
6184			      0);
6185	}
6186
6187      if (t1 == NULL_TREE)
6188	return t;
6189
6190      if (invert)
6191	TREE_INT_CST_LOW (t1) ^= 1;
6192
6193      TREE_TYPE (t1) = type;
6194      if (TREE_CODE (type) == BOOLEAN_TYPE)
6195	return truthvalue_conversion (t1);
6196      return t1;
6197
6198    case COND_EXPR:
6199      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
6200	 so all simple results must be passed through pedantic_non_lvalue.  */
6201      if (TREE_CODE (arg0) == INTEGER_CST)
6202	return pedantic_non_lvalue
6203	  (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
6204      else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
6205	return pedantic_omit_one_operand (type, arg1, arg0);
6206
6207      /* If the second operand is zero, invert the comparison and swap
6208	 the second and third operands.  Likewise if the second operand
6209	 is constant and the third is not or if the third operand is
6210	 equivalent to the first operand of the comparison.  */
6211
6212      if (integer_zerop (arg1)
6213	  || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
6214	  || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6215	      && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6216						 TREE_OPERAND (t, 2),
6217						 TREE_OPERAND (arg0, 1))))
6218	{
6219	  /* See if this can be inverted.  If it can't, possibly because
6220	     it was a floating-point inequality comparison, don't do
6221	     anything.  */
6222	  tem = invert_truthvalue (arg0);
6223
6224	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6225	    {
6226	      t = build (code, type, tem,
6227			 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6228	      arg0 = tem;
6229	      /* arg1 should be the first argument of the new T.  */
6230	      arg1 = TREE_OPERAND (t, 1);
6231	      STRIP_NOPS (arg1);
6232	    }
6233	}
6234
6235      /* If we have A op B ? A : C, we may be able to convert this to a
6236	 simpler expression, depending on the operation and the values
6237	 of B and C.  IEEE floating point prevents this though,
6238	 because A or B might be -0.0 or a NaN.  */
6239
6240      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
6241	  && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
6242	      || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
6243	      || flag_fast_math)
6244	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
6245					     arg1, TREE_OPERAND (arg0, 1)))
6246	{
6247	  tree arg2 = TREE_OPERAND (t, 2);
6248	  enum tree_code comp_code = TREE_CODE (arg0);
6249
6250	  STRIP_NOPS (arg2);
6251
6252	  /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
6253	     depending on the comparison operation.  */
6254	  if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
6255	       ? real_zerop (TREE_OPERAND (arg0, 1))
6256	       : integer_zerop (TREE_OPERAND (arg0, 1)))
6257	      && TREE_CODE (arg2) == NEGATE_EXPR
6258	      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
6259	    switch (comp_code)
6260	      {
6261	      case EQ_EXPR:
6262		return pedantic_non_lvalue
6263		  (fold (build1 (NEGATE_EXPR, type, arg1)));
6264	      case NE_EXPR:
6265		return pedantic_non_lvalue (convert (type, arg1));
6266	      case GE_EXPR:
6267	      case GT_EXPR:
6268		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6269		  arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6270		return pedantic_non_lvalue
6271		  (convert (type, fold (build1 (ABS_EXPR,
6272						TREE_TYPE (arg1), arg1))));
6273	      case LE_EXPR:
6274	      case LT_EXPR:
6275		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
6276		  arg1 = convert (signed_type (TREE_TYPE (arg1)), arg1);
6277		return pedantic_non_lvalue
6278		  (fold (build1 (NEGATE_EXPR, type,
6279				 convert (type,
6280					  fold (build1 (ABS_EXPR,
6281							TREE_TYPE (arg1),
6282							arg1))))));
6283	      default:
6284		abort ();
6285	      }
6286
6287	  /* If this is A != 0 ? A : 0, this is simply A.  For ==, it is
6288	     always zero.  */
6289
6290	  if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
6291	    {
6292	      if (comp_code == NE_EXPR)
6293		return pedantic_non_lvalue (convert (type, arg1));
6294	      else if (comp_code == EQ_EXPR)
6295		return pedantic_non_lvalue (convert (type, integer_zero_node));
6296	    }
6297
6298	  /* If this is A op B ? A : B, this is either A, B, min (A, B),
6299	     or max (A, B), depending on the operation.  */
6300
6301	  if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
6302					      arg2, TREE_OPERAND (arg0, 0)))
6303	    {
6304	      tree comp_op0 = TREE_OPERAND (arg0, 0);
6305	      tree comp_op1 = TREE_OPERAND (arg0, 1);
6306	      tree comp_type = TREE_TYPE (comp_op0);
6307
6308	      switch (comp_code)
6309		{
6310		case EQ_EXPR:
6311		  return pedantic_non_lvalue (convert (type, arg2));
6312		case NE_EXPR:
6313		  return pedantic_non_lvalue (convert (type, arg1));
6314		case LE_EXPR:
6315		case LT_EXPR:
6316		  /* In C++ a ?: expression can be an lvalue, so put the
6317		     operand which will be used if they are equal first
6318		     so that we can convert this back to the
6319		     corresponding COND_EXPR.  */
6320		  return pedantic_non_lvalue
6321		    (convert (type, (fold (build (MIN_EXPR, comp_type,
6322						  (comp_code == LE_EXPR
6323						   ? comp_op0 : comp_op1),
6324						  (comp_code == LE_EXPR
6325						   ? comp_op1 : comp_op0))))));
6326		  break;
6327		case GE_EXPR:
6328		case GT_EXPR:
6329		  return pedantic_non_lvalue
6330		    (convert (type, fold (build (MAX_EXPR, comp_type,
6331						 (comp_code == GE_EXPR
6332						  ? comp_op0 : comp_op1),
6333						 (comp_code == GE_EXPR
6334						  ? comp_op1 : comp_op0)))));
6335		  break;
6336		default:
6337		  abort ();
6338		}
6339	    }
6340
6341	  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
6342	     we might still be able to simplify this.  For example,
6343	     if C1 is one less or one more than C2, this might have started
6344	     out as a MIN or MAX and been transformed by this function.
6345	     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
6346
6347	  if (INTEGRAL_TYPE_P (type)
6348	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6349	      && TREE_CODE (arg2) == INTEGER_CST)
6350	    switch (comp_code)
6351	      {
6352	      case EQ_EXPR:
6353		/* We can replace A with C1 in this case.  */
6354		arg1 = convert (type, TREE_OPERAND (arg0, 1));
6355		t = build (code, type, TREE_OPERAND (t, 0), arg1,
6356			   TREE_OPERAND (t, 2));
6357		break;
6358
6359	      case LT_EXPR:
6360		/* If C1 is C2 + 1, this is min(A, C2).  */
6361		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6362		    && operand_equal_p (TREE_OPERAND (arg0, 1),
6363					const_binop (PLUS_EXPR, arg2,
6364						     integer_one_node, 0), 1))
6365		  return pedantic_non_lvalue
6366		    (fold (build (MIN_EXPR, type, arg1, arg2)));
6367		break;
6368
6369	      case LE_EXPR:
6370		/* If C1 is C2 - 1, this is min(A, C2).  */
6371		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6372		    && operand_equal_p (TREE_OPERAND (arg0, 1),
6373					const_binop (MINUS_EXPR, arg2,
6374						     integer_one_node, 0), 1))
6375		  return pedantic_non_lvalue
6376		    (fold (build (MIN_EXPR, type, arg1, arg2)));
6377		break;
6378
6379	      case GT_EXPR:
6380		/* If C1 is C2 - 1, this is max(A, C2).  */
6381		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
6382		    && operand_equal_p (TREE_OPERAND (arg0, 1),
6383					const_binop (MINUS_EXPR, arg2,
6384						     integer_one_node, 0), 1))
6385		  return pedantic_non_lvalue
6386		    (fold (build (MAX_EXPR, type, arg1, arg2)));
6387		break;
6388
6389	      case GE_EXPR:
6390		/* If C1 is C2 + 1, this is max(A, C2).  */
6391		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
6392		    && operand_equal_p (TREE_OPERAND (arg0, 1),
6393					const_binop (PLUS_EXPR, arg2,
6394						     integer_one_node, 0), 1))
6395		  return pedantic_non_lvalue
6396		    (fold (build (MAX_EXPR, type, arg1, arg2)));
6397		break;
6398	      case NE_EXPR:
6399		break;
6400	      default:
6401		abort ();
6402	      }
6403	}
6404
6405      /* If the second operand is simpler than the third, swap them
6406	 since that produces better jump optimization results.  */
6407      if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
6408	   || TREE_CODE (arg1) == SAVE_EXPR)
6409	  && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
6410		|| TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
6411		|| TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
6412	{
6413	  /* See if this can be inverted.  If it can't, possibly because
6414	     it was a floating-point inequality comparison, don't do
6415	     anything.  */
6416	  tem = invert_truthvalue (arg0);
6417
6418	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
6419	    {
6420	      t = build (code, type, tem,
6421			 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
6422	      arg0 = tem;
6423	      /* arg1 should be the first argument of the new T.  */
6424	      arg1 = TREE_OPERAND (t, 1);
6425	      STRIP_NOPS (arg1);
6426	    }
6427	}
6428
6429      /* Convert A ? 1 : 0 to simply A.  */
6430      if (integer_onep (TREE_OPERAND (t, 1))
6431	  && integer_zerop (TREE_OPERAND (t, 2))
6432	  /* If we try to convert TREE_OPERAND (t, 0) to our type, the
6433	     call to fold will try to move the conversion inside
6434	     a COND, which will recurse.  In that case, the COND_EXPR
6435	     is probably the best choice, so leave it alone.  */
6436	  && type == TREE_TYPE (arg0))
6437	return pedantic_non_lvalue (arg0);
6438
6439      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
6440	 operation is simply A & 2.  */
6441
6442      if (integer_zerop (TREE_OPERAND (t, 2))
6443	  && TREE_CODE (arg0) == NE_EXPR
6444	  && integer_zerop (TREE_OPERAND (arg0, 1))
6445	  && integer_pow2p (arg1)
6446	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
6447	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
6448			      arg1, 1))
6449	return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
6450
6451      return t;
6452
6453    case COMPOUND_EXPR:
6454      /* When pedantic, a compound expression can be neither an lvalue
6455	 nor an integer constant expression.  */
6456      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
6457	return t;
6458      /* Don't let (0, 0) be null pointer constant.  */
6459      if (integer_zerop (arg1))
6460	return build1 (NOP_EXPR, TREE_TYPE (arg1), arg1);
6461      return arg1;
6462
6463    case COMPLEX_EXPR:
6464      if (wins)
6465	return build_complex (type, arg0, arg1);
6466      return t;
6467
6468    case REALPART_EXPR:
6469      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6470	return t;
6471      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6472	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
6473				 TREE_OPERAND (arg0, 1));
6474      else if (TREE_CODE (arg0) == COMPLEX_CST)
6475	return TREE_REALPART (arg0);
6476      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6477	return fold (build (TREE_CODE (arg0), type,
6478			    fold (build1 (REALPART_EXPR, type,
6479					  TREE_OPERAND (arg0, 0))),
6480			    fold (build1 (REALPART_EXPR,
6481					  type, TREE_OPERAND (arg0, 1)))));
6482      return t;
6483
6484    case IMAGPART_EXPR:
6485      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6486	return convert (type, integer_zero_node);
6487      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6488	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
6489				 TREE_OPERAND (arg0, 0));
6490      else if (TREE_CODE (arg0) == COMPLEX_CST)
6491	return TREE_IMAGPART (arg0);
6492      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6493	return fold (build (TREE_CODE (arg0), type,
6494			    fold (build1 (IMAGPART_EXPR, type,
6495					  TREE_OPERAND (arg0, 0))),
6496			    fold (build1 (IMAGPART_EXPR, type,
6497					  TREE_OPERAND (arg0, 1)))));
6498      return t;
6499
6500      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
6501         appropriate.  */
6502    case CLEANUP_POINT_EXPR:
6503      if (! has_cleanups (arg0))
6504	return TREE_OPERAND (t, 0);
6505
6506      {
6507	enum tree_code code0 = TREE_CODE (arg0);
6508	int kind0 = TREE_CODE_CLASS (code0);
6509	tree arg00 = TREE_OPERAND (arg0, 0);
6510	tree arg01;
6511
6512	if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
6513	  return fold (build1 (code0, type,
6514			       fold (build1 (CLEANUP_POINT_EXPR,
6515					     TREE_TYPE (arg00), arg00))));
6516
6517	if (kind0 == '<' || kind0 == '2'
6518	    || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
6519	    || code0 == TRUTH_AND_EXPR   || code0 == TRUTH_OR_EXPR
6520	    || code0 == TRUTH_XOR_EXPR)
6521	  {
6522	    arg01 = TREE_OPERAND (arg0, 1);
6523
6524	    if (TREE_CONSTANT (arg00)
6525		|| ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
6526		    && ! has_cleanups (arg00)))
6527	      return fold (build (code0, type, arg00,
6528				  fold (build1 (CLEANUP_POINT_EXPR,
6529						TREE_TYPE (arg01), arg01))));
6530
6531	    if (TREE_CONSTANT (arg01))
6532	      return fold (build (code0, type,
6533				  fold (build1 (CLEANUP_POINT_EXPR,
6534						TREE_TYPE (arg00), arg00)),
6535				  arg01));
6536	  }
6537
6538	return t;
6539      }
6540
6541    default:
6542      return t;
6543    } /* switch (code) */
6544}
6545
/* Determine if first argument is a multiple of second argument.
   Return 0 if it is not, or if that cannot easily be determined.
6548
6549   An example of the sort of thing we care about (at this point --
6550   this routine could surely be made more general, and expanded
6551   to do what the *_DIV_EXPR's fold() cases do now) is discovering
6552   that
6553
6554     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6555
6556   is a multiple of
6557
6558     SAVE_EXPR (J * 8)
6559
6560   when we know that the two `SAVE_EXPR (J * 8)' nodes are the
6561   same node (which means they will have the same value at run
6562   time, even though we don't know when they'll be assigned).
6563
6564   This code also handles discovering that
6565
6566     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
6567
6568   is a multiple of
6569
6570     8
6571
6572   (of course) so we don't have to worry about dealing with a
6573   possible remainder.
6574
6575   Note that we _look_ inside a SAVE_EXPR only to determine
6576   how it was calculated; it is not safe for fold() to do much
6577   of anything else with the internals of a SAVE_EXPR, since
6578   fold() cannot know when it will be evaluated at run time.
6579   For example, the latter example above _cannot_ be implemented
6580   as
6581
6582     SAVE_EXPR (I) * J
6583
6584   or any variant thereof, since the value of J at evaluation time
6585   of the original SAVE_EXPR is not necessarily the same at the time
6586   the new expression is evaluated.  The only optimization of this
6587   sort that would be valid is changing
6588
6589     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
6590   divided by
6591     8
6592
6593   to
6594
6595     SAVE_EXPR (I) * SAVE_EXPR (J)
6596
6597   (where the same SAVE_EXPR (J) is used in the original and the
6598   transformed version).  */
6599
6600static int
6601multiple_of_p (type, top, bottom)
6602     tree type;
6603     tree top;
6604     tree bottom;
6605{
6606  if (operand_equal_p (top, bottom, 0))
6607    return 1;
6608
6609  if (TREE_CODE (type) != INTEGER_TYPE)
6610    return 0;
6611
6612  switch (TREE_CODE (top))
6613    {
6614    case MULT_EXPR:
6615      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6616	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6617
6618    case PLUS_EXPR:
6619    case MINUS_EXPR:
6620      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
6621	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
6622
6623    case NOP_EXPR:
6624      /* Punt if conversion from non-integral or wider integral type.  */
6625      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
6626	  || (TYPE_PRECISION (type)
6627	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
6628	return 0;
6629      /* Fall through. */
6630    case SAVE_EXPR:
6631      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
6632
6633    case INTEGER_CST:
6634      if ((TREE_CODE (bottom) != INTEGER_CST)
6635	  || (tree_int_cst_sgn (top) < 0)
6636	  || (tree_int_cst_sgn (bottom) < 0))
6637	return 0;
6638      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
6639					 top, bottom, 0));
6640
6641    default:
6642      return 0;
6643    }
6644}
6645
6646