fold-const.c revision 132718
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static tree fold_convert (tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
			 tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
						 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
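
/* Worked example (editorial, not in the original source): with 8-bit
   signed words, a = 100 and b = 100 give sum = 200, which wraps to -56.
   Then ~(a ^ b) is all ones (a and b agree in sign) while (a ^ sum) has
   the sign bit set (a and sum disagree), so the AND is negative and the
   macro reports the overflow.  */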

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
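
/* Example (editorial): with a 32-bit HOST_WIDE_INT, BASE is 0x10000, so
   LOWPART (0x12345678) == 0x5678 and HIGHPART (0x12345678) == 0x1234,
   and indeed 0x5678 + 0x1234 * BASE recovers 0x12345678.  */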

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
	HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
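
/* Example (editorial): with a 32-bit HOST_WIDE_INT, encoding the pair
   low = 0x12345678, hi = 0x9ABCDEF0 yields
   words = { 0x5678, 0x1234, 0xDEF0, 0x9ABC }, and decode recovers the
   pair as low = words[0] + words[1] * BASE and hi = words[2]
   + words[3] * BASE.  Half-word digits guarantee that the product of
   any two digits fits in an unsigned HOST_WIDE_INT without overflow.  */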

/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
	 Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
	  ? 0 != (TREE_INT_CST_HIGH (t)
		  & ((HOST_WIDE_INT) 1
		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
	  : 0 != (TREE_INT_CST_LOW (t)
		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
	TREE_INT_CST_HIGH (t)
	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_WIDE_INT)
	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
	}
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
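
/* Example (editorial): fitting the value 300 into a signed 8-bit type
   masks it to 300 & 0xFF == 44; bit 7 is clear, so no sign extension is
   done, and since the stored words changed the function reports a
   signed overflow.  Fitting -1 into the same type masks to 0xFF, sees
   bit 7 set, sign-extends back to -1, and reports no overflow.  */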

/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
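
/* Example (editorial): the carry out of the low word is detected with
   the unsigned comparison l < l1; e.g. with a 32-bit HOST_WIDE_INT,
   l1 = 0xFFFFFFFF and l2 = 1 give l = 0, so (l < l1) contributes the
   carry of 1 into the high word.  Signed overflow is then judged on the
   high words alone by OVERFLOW_SUM_SIGN.  */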

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
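
/* Example (editorial): in two's complement, -x == ~x + 1.  When L1 is
   nonzero the + 1 cannot carry out of the low word, so the high word is
   simply ~h1 and overflow is impossible.  When L1 is zero the negation
   reduces to -h1; the only overflow case is negating the most negative
   value, where -h1 == h1 and both share the sign bit, which is exactly
   what (*hv & h1) < 0 detects.  */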

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
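
/* Note (editorial): this is schoolbook long multiplication in base
   2**(HOST_BITS_PER_WIDE_INT / 2): digit i of one operand times digit j
   of the other contributes to result digit i + j.  The final test says
   the signed product fits in a doubleword iff the discarded top half is
   the sign extension of the kept low half: all ones when *hv is
   negative, all zeros otherwise.  */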

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
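
/* Note (editorial): the double shift `x >> (HOST_BITS_PER_WIDE_INT -
   count - 1) >> 1' used above is a portable spelling of
   `x >> (HOST_BITS_PER_WIDE_INT - count)' that remains well defined
   when count == 0, since shifting by the full word width is undefined
   in C.  For arithmetic right shifts, signmask is all ones exactly when
   the sign bit of H1 is set, so the vacated bits replicate the sign.  */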

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
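
/* Worked example (editorial): for -7 / 2 the truncating quotient is -3
   with remainder -1.  FLOOR_DIV_EXPR adjusts the quotient to -4
   (remainder 1), CEIL_DIV_EXPR keeps -3, and ROUND_DIV_EXPR compares
   2 * |rem| = 2 against |den| = 2 and, as it is not smaller, moves the
   quotient away from zero to -4.  For 7 / 2 the same rules give 3, 3,
   4 and 4 respectively.  */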

/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}


/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
	return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
	{
	  if (TREE_INT_CST_LOW (t) != 0)
	    return true;
	  prec -= HOST_BITS_PER_WIDE_INT;
	  val = TREE_INT_CST_HIGH (t);
	}
      else
	val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
	val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT low;
	HOST_WIDE_INT high;
	int overflow = neg_double (TREE_INT_CST_LOW (t),
				   TREE_INT_CST_HIGH (t),
				   &low, &high);
	tem = build_int_2 (low, high);
	TREE_TYPE (tem) = type;
	TREE_OVERFLOW (tem)
	  = (TREE_OVERFLOW (t)
	     | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
	TREE_CONSTANT_OVERFLOW (tem)
	  = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
      }
      if (! TREE_OVERFLOW (tem)
	  || TREE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_convert (type,
			     fold (build (MINUS_EXPR, TREE_TYPE (t),
					  TREE_OPERAND (t, 1),
					  TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build (TREE_CODE (t), TREE_TYPE (t),
					      TREE_OPERAND (t, 0),
					      negate_expr (tem))));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_convert (type,
				 fold (build (TREE_CODE (t), TREE_TYPE (t),
					      negate_expr (tem),
					      TREE_OPERAND (t, 1))));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert (type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    default:
      break;
    }

  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
  return fold_convert (type, tem);
}
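
/* Example (editorial): negate_expr rewrites -(a - b) as b - a, x / y
   as x / -y when the divisor is cheaply negatable, and, since sin is an
   odd function, -sin(x) as sin(-x) through the CALL_EXPR case above.
   The INTEGER_CST case is conservative under -ftrapv: negating the most
   negative value would overflow, so it falls back to an explicit
   NEGATE_EXPR that can trap at run time.  */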

/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
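
/* Example (editorial): splitting IN = a - 5 with CODE == PLUS_EXPR
   returns VAR = a and records the literal in *MINUS_LITP, since it was
   subtracted; splitting a + 5 returns VAR = a with *LITP = 5 and *CONP
   null.  associate_trees below recombines such pieces once the constant
   parts have been folded together.  */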

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build (MINUS_EXPR, type, fold_convert (type, t2),
			  fold_convert (type, TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build (MINUS_EXPR, type, fold_convert (type, t1),
			  fold_convert (type, TREE_OPERAND (t2, 0)));
	}
      return build (code, type, fold_convert (type, t1),
		    fold_convert (type, t2));
    }

  return fold (build (code, type, fold_convert (type, t1),
		      fold_convert (type, t2)));
}

/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
	? (!uns || is_sizetype) && overflow
	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
	   && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
	  || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
				| TREE_CONSTANT_OVERFLOW (arg1)
				| TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
	= (force_fit_type (t, 0)
	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (type,
			     const_binop (PLUS_EXPR, r1, r2, notrunc),
			     const_binop (PLUS_EXPR, i1, i2, notrunc));
	  break;

	case MINUS_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR, r1, r2, notrunc),
			     const_binop (MINUS_EXPR, i1, i2, notrunc));
	  break;

	case MULT_EXPR:
	  t = build_complex (type,
			     const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, r2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, i2, notrunc),
					  notrunc),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR,
						       r1, i2, notrunc),
					  const_binop (MULT_EXPR,
						       i1, r2, notrunc),
					  notrunc));
	  break;

	case RDIV_EXPR:
	  {
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);

	    t = build_complex (type,
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (PLUS_EXPR,
					     const_binop (MULT_EXPR, r1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, i1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc),
			       const_binop
			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
				? TRUNC_DIV_EXPR : RDIV_EXPR,
				const_binop (MINUS_EXPR,
					     const_binop (MULT_EXPR, i1, r2,
							  notrunc),
					     const_binop (MULT_EXPR, r1, i2,
							  notrunc),
					     notrunc),
				magsquared, notrunc));
	  }
	  break;

	default:
	  abort ();
	}
      return t;
    }
  return 0;
}
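
/* Worked formulas (editorial): the complex cases above implement
   (a + bi) * (c + di) = (ac - bd) + (ad + bc)i and
   (a + bi) / (c + di) = ((ac + bd) + (bc - ad)i) / (c*c + d*d),
   each component computed by recursive calls to const_binop on the
   real (or integer) constants.  */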

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code for X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
	  ^ htab_hash_pointer (TREE_TYPE (t))
	  ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, also an INTEGER_CST tree node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
	  && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
	  && TREE_TYPE (xt) == TREE_TYPE (yt)
	  && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}

/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a sizetype.
1589   If the operands are constant, so is the result.  */
1590
1591tree
1592size_binop (enum tree_code code, tree arg0, tree arg1)
1593{
1594  tree type = TREE_TYPE (arg0);
1595
1596  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1597      || type != TREE_TYPE (arg1))
1598    abort ();
1599
1600  /* Handle the special case of two integer constants faster.  */
1601  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1602    {
1603      /* And some specific cases even faster than that.  */
1604      if (code == PLUS_EXPR && integer_zerop (arg0))
1605	return arg1;
1606      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1607	       && integer_zerop (arg1))
1608	return arg0;
1609      else if (code == MULT_EXPR && integer_onep (arg0))
1610	return arg1;
1611
1612      /* Handle general case of two integer constants.  */
1613      return int_const_binop (code, arg0, arg1, 0);
1614    }
1615
1616  if (arg0 == error_mark_node || arg1 == error_mark_node)
1617    return error_mark_node;
1618
1619  return fold (build (code, type, arg0, arg1));
1620}
1621
1622/* Given two values, either both of sizetype or both of bitsizetype,
1623   compute the difference between the two values.  Return the value
1624   in signed type corresponding to the type of the operands.  */
1625
1626tree
1627size_diffop (tree arg0, tree arg1)
1628{
1629  tree type = TREE_TYPE (arg0);
1630  tree ctype;
1631
1632  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1633      || type != TREE_TYPE (arg1))
1634    abort ();
1635
1636  /* If the type is already signed, just do the simple thing.  */
1637  if (! TREE_UNSIGNED (type))
1638    return size_binop (MINUS_EXPR, arg0, arg1);
1639
1640  ctype = (type == bitsizetype || type == ubitsizetype
1641	   ? sbitsizetype : ssizetype);
1642
1643  /* If either operand is not a constant, do the conversions to the signed
1644     type and subtract.  The hardware will do the right thing with any
1645     overflow in the subtraction.  */
1646  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1647    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1648		       fold_convert (ctype, arg1));
1649
1650  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1651     Otherwise, subtract the other way, convert to CTYPE (we know that can't
1652     overflow) and negate (which can't either).  Special-case a result
1653     of zero while we're here.  */
1654  if (tree_int_cst_equal (arg0, arg1))
1655    return fold_convert (ctype, integer_zero_node);
1656  else if (tree_int_cst_lt (arg1, arg0))
1657    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1658  else
1659    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1660		       fold_convert (ctype, size_binop (MINUS_EXPR,
1661							arg1, arg0)));
1662}
1663
1664
1665/* Attempt to fold type conversion operation CODE of expression ARG1 to
1666   type TYPE.  If no simplification can be done return NULL_TREE.  */
1667
1668static tree
1669fold_convert_const (enum tree_code code ATTRIBUTE_UNUSED, tree type,
1670		    tree arg1)
1671{
1672  int overflow = 0;
1673  tree t;
1674
1675  if (TREE_TYPE (arg1) == type)
1676    return arg1;
1677
1678  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1679    {
1680      if (TREE_CODE (arg1) == INTEGER_CST)
1681	{
1682	  /* If we would build a constant wider than GCC supports,
1683	     leave the conversion unfolded.  */
1684	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1685	    return NULL_TREE;
1686
1687	  /* If we are trying to make a sizetype for a small integer, use
1688	     size_int to pick up cached types to reduce duplicate nodes.  */
1689	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1690	      && !TREE_CONSTANT_OVERFLOW (arg1)
1691	      && compare_tree_int (arg1, 10000) < 0)
1692	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1693
1694	  /* Given an integer constant, make new constant with new type,
1695	     appropriately sign-extended or truncated.  */
1696	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
1697			   TREE_INT_CST_HIGH (arg1));
1698	  TREE_TYPE (t) = type;
1699	  /* Indicate an overflow if (1) ARG1 already overflowed,
1700	     or (2) force_fit_type indicates an overflow.
1701	     Tell force_fit_type that an overflow has already occurred
1702	     if ARG1 is a too-large unsigned value and T is signed.
1703	     But don't indicate an overflow if converting a pointer.  */
1704	  TREE_OVERFLOW (t)
1705	    = ((force_fit_type (t,
1706				(TREE_INT_CST_HIGH (arg1) < 0
1707				 && (TREE_UNSIGNED (type)
1708				    < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1709		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1710	       || TREE_OVERFLOW (arg1));
1711	  TREE_CONSTANT_OVERFLOW (t)
1712	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1713	  return t;
1714	}
1715      else if (TREE_CODE (arg1) == REAL_CST)
1716	{
1717	  /* The following code implements the floating-point to integer
1718	     conversion rules required by the Java Language Specification:
1719	     IEEE NaNs are mapped to zero, and values that overflow the
1720	     target precision saturate, i.e. values greater than INT_MAX
1721	     are mapped to INT_MAX, and values less than INT_MIN are mapped
1722	     to INT_MIN.  These semantics are also permitted by the C and
1723	     C++ standards, which leave the behavior of out-of-range
1724	     FP-to-integer conversions undefined.  */
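	  /* A few illustrative cases of these saturating semantics, assuming
	     a 32-bit target "int":

		(int) 1.0e30	-> 2147483647 (INT_MAX), overflow noted
		(int) -1.0e30	-> -2147483648 (INT_MIN), overflow noted
		(int) (0.0/0.0)	-> 0 (NaN), overflow noted
		(int) 2.9	-> 2 (plain truncation towards zero).  */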
1725
1726	  HOST_WIDE_INT high, low;
1727
1728	  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1729	  /* If x is NaN, return zero and show we have an overflow.  */
1730	  if (REAL_VALUE_ISNAN (x))
1731	    {
1732	      overflow = 1;
1733	      high = 0;
1734	      low = 0;
1735	    }
1736
1737	  /* See if X will be in range after truncation towards 0.
1738	     To compensate for truncation, move the bounds away from 0,
1739	     but reject if X exactly equals the adjusted bounds.  */
1740
1741	  if (! overflow)
1742	    {
1743	      tree lt = TYPE_MIN_VALUE (type);
1744	      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1745	      REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1746	      if (! REAL_VALUES_LESS (l, x))
1747		{
1748		  overflow = 1;
1749		  high = TREE_INT_CST_HIGH (lt);
1750		  low = TREE_INT_CST_LOW (lt);
1751		}
1752	    }
1753
1754	  if (! overflow)
1755	    {
1756	      tree ut = TYPE_MAX_VALUE (type);
1757	      if (ut)
1758		{
1759		  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1760		  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1761		  if (! REAL_VALUES_LESS (x, u))
1762		    {
1763		      overflow = 1;
1764		      high = TREE_INT_CST_HIGH (ut);
1765		      low = TREE_INT_CST_LOW (ut);
1766		    }
1767		}
1768	    }
1769
1770	  if (! overflow)
1771	    REAL_VALUE_TO_INT (&low, &high, x);
1772
1773	  t = build_int_2 (low, high);
1774	  TREE_TYPE (t) = type;
1775	  TREE_OVERFLOW (t)
1776	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1777	  TREE_CONSTANT_OVERFLOW (t)
1778	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1779	  return t;
1780	}
1781    }
1782  else if (TREE_CODE (type) == REAL_TYPE)
1783    {
1784      if (TREE_CODE (arg1) == INTEGER_CST)
1785	return build_real_from_int_cst (type, arg1);
1786      if (TREE_CODE (arg1) == REAL_CST)
1787	{
1788	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1789	    {
1790	      /* We make a copy of ARG1 so that we don't modify an
1791		 existing constant tree.  */
1792	      t = copy_node (arg1);
1793	      TREE_TYPE (t) = type;
1794	      return t;
1795	    }
1796
1797	  t = build_real (type,
1798			  real_value_truncate (TYPE_MODE (type),
1799					       TREE_REAL_CST (arg1)));
1800
1801	  TREE_OVERFLOW (t)
1802	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1803	  TREE_CONSTANT_OVERFLOW (t)
1804	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1805	  return t;
1806	}
1807    }
1808  return NULL_TREE;
1809}
1810
1811/* Convert expression ARG to type TYPE.  Used by the middle-end for
1812   simple conversions in preference to calling the front-end's convert.  */
1813
1814static tree
1815fold_convert (tree type, tree arg)
1816{
1817  tree orig = TREE_TYPE (arg);
1818  tree tem;
1819
1820  if (type == orig)
1821    return arg;
1822
1823  if (TREE_CODE (arg) == ERROR_MARK
1824      || TREE_CODE (type) == ERROR_MARK
1825      || TREE_CODE (orig) == ERROR_MARK)
1826    return error_mark_node;
1827
1828  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1829    return fold (build1 (NOP_EXPR, type, arg));
1830
1831  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type))
1832    {
1833      if (TREE_CODE (arg) == INTEGER_CST)
1834	{
1835	  tem = fold_convert_const (NOP_EXPR, type, arg);
1836	  if (tem != NULL_TREE)
1837	    return tem;
1838	}
1839      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1840        return fold (build1 (NOP_EXPR, type, arg));
1841      if (TREE_CODE (orig) == COMPLEX_TYPE)
1842	{
1843	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1844	  return fold_convert (type, tem);
1845	}
1846      if (TREE_CODE (orig) == VECTOR_TYPE
1847	  && GET_MODE_SIZE (TYPE_MODE (type))
1848	     == GET_MODE_SIZE (TYPE_MODE (orig)))
1849	return fold (build1 (NOP_EXPR, type, arg));
1850    }
1851  else if (TREE_CODE (type) == REAL_TYPE)
1852    {
1853      if (TREE_CODE (arg) == INTEGER_CST)
1854	{
1855	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
1856	  if (tem != NULL_TREE)
1857	    return tem;
1858	}
1859      else if (TREE_CODE (arg) == REAL_CST)
1860	{
1861	  tem = fold_convert_const (NOP_EXPR, type, arg);
1862	  if (tem != NULL_TREE)
1863	    return tem;
1864	}
1865
1866      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1867        return fold (build1 (FLOAT_EXPR, type, arg));
1868      if (TREE_CODE (orig) == REAL_TYPE)
1869	return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1870			     type, arg));
1871      if (TREE_CODE (orig) == COMPLEX_TYPE)
1872	{
1873	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1874	  return fold_convert (type, tem);
1875	}
1876    }
1877  else if (TREE_CODE (type) == COMPLEX_TYPE)
1878    {
1879      if (INTEGRAL_TYPE_P (orig)
1880	  || POINTER_TYPE_P (orig)
1881	  || TREE_CODE (orig) == REAL_TYPE)
1882	return build (COMPLEX_EXPR, type,
1883		      fold_convert (TREE_TYPE (type), arg),
1884		      fold_convert (TREE_TYPE (type), integer_zero_node));
1885      if (TREE_CODE (orig) == COMPLEX_TYPE)
1886	{
1887	  tree rpart, ipart;
1888
1889	  if (TREE_CODE (arg) == COMPLEX_EXPR)
1890	    {
1891	      rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1892	      ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1893	      return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1894	    }
1895
1896	  arg = save_expr (arg);
1897	  rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1898	  ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1899	  rpart = fold_convert (TREE_TYPE (type), rpart);
1900	  ipart = fold_convert (TREE_TYPE (type), ipart);
1901	  return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1902	}
1903    }
1904  else if (TREE_CODE (type) == VECTOR_TYPE)
1905    {
1906      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1907	  && GET_MODE_SIZE (TYPE_MODE (type))
1908	     == GET_MODE_SIZE (TYPE_MODE (orig)))
1909	return fold (build1 (NOP_EXPR, type, arg));
1910      if (TREE_CODE (orig) == VECTOR_TYPE
1911	  && GET_MODE_SIZE (TYPE_MODE (type))
1912	     == GET_MODE_SIZE (TYPE_MODE (orig)))
1913	return fold (build1 (NOP_EXPR, type, arg));
1914    }
1915  else if (VOID_TYPE_P (type))
1916    return fold (build1 (CONVERT_EXPR, type, arg));
1917  abort ();
1918}
1919
1920/* Return an expr equal to X but certainly not valid as an lvalue.  */
1921
1922tree
1923non_lvalue (tree x)
1924{
1925  tree result;
1926
1927  /* These things are certainly not lvalues.  */
1928  if (TREE_CODE (x) == NON_LVALUE_EXPR
1929      || TREE_CODE (x) == INTEGER_CST
1930      || TREE_CODE (x) == REAL_CST
1931      || TREE_CODE (x) == STRING_CST
1932      || TREE_CODE (x) == ADDR_EXPR)
1933    return x;
1934
1935  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1936  TREE_CONSTANT (result) = TREE_CONSTANT (x);
1937  return result;
1938}
1939
1940/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1941   Zero means allow extended lvalues.  */
1942
1943int pedantic_lvalues;
1944
1945/* When pedantic, return an expr equal to X but certainly not valid as a
1946   pedantic lvalue.  Otherwise, return X.  */
1947
1948tree
1949pedantic_non_lvalue (tree x)
1950{
1951  if (pedantic_lvalues)
1952    return non_lvalue (x);
1953  else
1954    return x;
1955}
1956
1957/* Given a tree comparison code, return the code that is the logical inverse
1958   of the given code.  It is not safe to do this for floating-point
1959   comparisons, except for NE_EXPR and EQ_EXPR.  */
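/* The reason floating-point inversion is unsafe: with IEEE NaNs both
   "a < b" and "a >= b" are false when either operand is a NaN, so
   rewriting !(a < b) as "a >= b" would change the result of an unordered
   comparison.  Only EQ_EXPR and NE_EXPR are exact complements there.  */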
1960
1961static enum tree_code
1962invert_tree_comparison (enum tree_code code)
1963{
1964  switch (code)
1965    {
1966    case EQ_EXPR:
1967      return NE_EXPR;
1968    case NE_EXPR:
1969      return EQ_EXPR;
1970    case GT_EXPR:
1971      return LE_EXPR;
1972    case GE_EXPR:
1973      return LT_EXPR;
1974    case LT_EXPR:
1975      return GE_EXPR;
1976    case LE_EXPR:
1977      return GT_EXPR;
1978    default:
1979      abort ();
1980    }
1981}
1982
1983/* Similar, but return the comparison that results if the operands are
1984   swapped.  This is safe for floating-point.  */
1985
1986static enum tree_code
1987swap_tree_comparison (enum tree_code code)
1988{
1989  switch (code)
1990    {
1991    case EQ_EXPR:
1992    case NE_EXPR:
1993      return code;
1994    case GT_EXPR:
1995      return LT_EXPR;
1996    case GE_EXPR:
1997      return LE_EXPR;
1998    case LT_EXPR:
1999      return GT_EXPR;
2000    case LE_EXPR:
2001      return GE_EXPR;
2002    default:
2003      abort ();
2004    }
2005}
2006
2007
2008/* Convert a comparison tree code from an enum tree_code representation
2009   into a compcode bit-based encoding.  This function is the inverse of
2010   compcode_to_comparison.  */
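/* In the COMPCODE_* encoding defined earlier in this file, bit 0 means
   "less", bit 1 "equal" and bit 2 "greater", so for instance COMPCODE_LE
   is COMPCODE_LT | COMPCODE_EQ.  Merging two comparisons of the same
   operands then reduces to a bitwise OR (for ||) or AND (for &&) of
   their compcodes.  */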
2011
2012static int
2013comparison_to_compcode (enum tree_code code)
2014{
2015  switch (code)
2016    {
2017    case LT_EXPR:
2018      return COMPCODE_LT;
2019    case EQ_EXPR:
2020      return COMPCODE_EQ;
2021    case LE_EXPR:
2022      return COMPCODE_LE;
2023    case GT_EXPR:
2024      return COMPCODE_GT;
2025    case NE_EXPR:
2026      return COMPCODE_NE;
2027    case GE_EXPR:
2028      return COMPCODE_GE;
2029    default:
2030      abort ();
2031    }
2032}
2033
2034/* Convert a compcode bit-based encoding of a comparison operator back
2035   to GCC's enum tree_code representation.  This function is the
2036   inverse of comparison_to_compcode.  */
2037
2038static enum tree_code
2039compcode_to_comparison (int code)
2040{
2041  switch (code)
2042    {
2043    case COMPCODE_LT:
2044      return LT_EXPR;
2045    case COMPCODE_EQ:
2046      return EQ_EXPR;
2047    case COMPCODE_LE:
2048      return LE_EXPR;
2049    case COMPCODE_GT:
2050      return GT_EXPR;
2051    case COMPCODE_NE:
2052      return NE_EXPR;
2053    case COMPCODE_GE:
2054      return GE_EXPR;
2055    default:
2056      abort ();
2057    }
2058}
2059
2060/* Return nonzero if CODE is a tree code that represents a truth value.  */
2061
2062static int
2063truth_value_p (enum tree_code code)
2064{
2065  return (TREE_CODE_CLASS (code) == '<'
2066	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2067	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2068	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2069}
2070
2071/* Return nonzero if two operands (typically of the same tree node)
2072   are necessarily equal.  If either argument has side-effects this
2073   function returns zero.
2074
2075   If ONLY_CONST is nonzero, only return nonzero for constants.
2076   This function tests whether the operands are indistinguishable;
2077   it does not test whether they are equal using C's == operation.
2078   The distinction is important for IEEE floating point, because
2079   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2080   (2) two NaNs may be indistinguishable, but NaN!=NaN.
2081
2082   If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2083   even though it may hold multiple values during a function.
2084   This is because a GCC tree node guarantees that nothing else is
2085   executed between the evaluation of its "operands" (which may often
2086   be evaluated in arbitrary order).  Hence if the operands themselves
2087   have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2088   same value in each operand/subexpression.  Hence a zero value for
2089   ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2090   If comparing arbitrary expression trees, such as from different
2091   statements, ONLY_CONST must usually be nonzero.  */
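/* For example, the REAL_CST case below uses REAL_VALUES_IDENTICAL rather
   than floating-point equality, so the trees for -0.0 and 0.0 are not
   considered equal operands even though -0.0 == 0.0 at run time; folding
   keyed on operand identity therefore stays safe for IEEE signed zeros.  */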
2092
2093int
2094operand_equal_p (tree arg0, tree arg1, int only_const)
2095{
2096  tree fndecl;
2097
2098  /* If both types don't have the same signedness, then we can't consider
2099     them equal.  We must check this before the STRIP_NOPS calls
2100     because they may change the signedness of the arguments.  */
2101  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2102    return 0;
2103
2104  STRIP_NOPS (arg0);
2105  STRIP_NOPS (arg1);
2106
2107  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2108      /* This is needed for conversions and for COMPONENT_REF.
2109	 Might as well play it safe and always test this.  */
2110      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2111      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2112      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2113    return 0;
2114
2115  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2116     We don't care about side effects in that case because the SAVE_EXPR
2117     takes care of that for us. In all other cases, two expressions are
2118     equal if they have no side effects.  If we have two identical
2119     expressions with side effects that should be treated the same due
2120     to the only side effects being identical SAVE_EXPR's, that will
2121     be detected in the recursive calls below.  */
2122  if (arg0 == arg1 && ! only_const
2123      && (TREE_CODE (arg0) == SAVE_EXPR
2124	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2125    return 1;
2126
2127  /* Next handle constant cases, those for which we can return 1 even
2128     if ONLY_CONST is set.  */
2129  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2130    switch (TREE_CODE (arg0))
2131      {
2132      case INTEGER_CST:
2133	return (! TREE_CONSTANT_OVERFLOW (arg0)
2134		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2135		&& tree_int_cst_equal (arg0, arg1));
2136
2137      case REAL_CST:
2138	return (! TREE_CONSTANT_OVERFLOW (arg0)
2139		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2140		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2141					  TREE_REAL_CST (arg1)));
2142
2143      case VECTOR_CST:
2144	{
2145	  tree v1, v2;
2146
2147	  if (TREE_CONSTANT_OVERFLOW (arg0)
2148	      || TREE_CONSTANT_OVERFLOW (arg1))
2149	    return 0;
2150
2151	  v1 = TREE_VECTOR_CST_ELTS (arg0);
2152	  v2 = TREE_VECTOR_CST_ELTS (arg1);
2153	  while (v1 && v2)
2154	    {
2155	      if (!operand_equal_p (v1, v2, only_const))
2156		return 0;
2157	      v1 = TREE_CHAIN (v1);
2158	      v2 = TREE_CHAIN (v2);
2159	    }
2160
2161	  return 1;
2162	}
2163
2164      case COMPLEX_CST:
2165	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2166				 only_const)
2167		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2168				    only_const));
2169
2170      case STRING_CST:
2171	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2172		&& ! memcmp (TREE_STRING_POINTER (arg0),
2173			      TREE_STRING_POINTER (arg1),
2174			      TREE_STRING_LENGTH (arg0)));
2175
2176      case ADDR_EXPR:
2177	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2178				0);
2179      default:
2180	break;
2181      }
2182
2183  if (only_const)
2184    return 0;
2185
2186  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2187    {
2188    case '1':
2189      /* Two conversions are equal only if signedness and modes match.  */
2190      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2191	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
2192	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
2193	return 0;
2194
2195      return operand_equal_p (TREE_OPERAND (arg0, 0),
2196			      TREE_OPERAND (arg1, 0), 0);
2197
2198    case '<':
2199    case '2':
2200      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2201	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2202			      0))
2203	return 1;
2204
2205      /* For commutative ops, allow the other order.  */
2206      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2207	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2208	       || TREE_CODE (arg0) == BIT_IOR_EXPR
2209	       || TREE_CODE (arg0) == BIT_XOR_EXPR
2210	       || TREE_CODE (arg0) == BIT_AND_EXPR
2211	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2212	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2213				  TREE_OPERAND (arg1, 1), 0)
2214	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2215				  TREE_OPERAND (arg1, 0), 0));
2216
2217    case 'r':
2218      /* If either of the pointer (or reference) expressions we are
2219	 dereferencing contain a side effect, these cannot be equal.  */
2220      if (TREE_SIDE_EFFECTS (arg0)
2221	  || TREE_SIDE_EFFECTS (arg1))
2222	return 0;
2223
2224      switch (TREE_CODE (arg0))
2225	{
2226	case INDIRECT_REF:
2227	  return operand_equal_p (TREE_OPERAND (arg0, 0),
2228				  TREE_OPERAND (arg1, 0), 0);
2229
2230	case COMPONENT_REF:
2231	case ARRAY_REF:
2232	case ARRAY_RANGE_REF:
2233	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2234				   TREE_OPERAND (arg1, 0), 0)
2235		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2236				      TREE_OPERAND (arg1, 1), 0));
2237
2238	case BIT_FIELD_REF:
2239	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2240				   TREE_OPERAND (arg1, 0), 0)
2241		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2242				      TREE_OPERAND (arg1, 1), 0)
2243		  && operand_equal_p (TREE_OPERAND (arg0, 2),
2244				      TREE_OPERAND (arg1, 2), 0));
2245	default:
2246	  return 0;
2247	}
2248
2249    case 'e':
2250      switch (TREE_CODE (arg0))
2251	{
2252	case ADDR_EXPR:
2253	case TRUTH_NOT_EXPR:
2254	  return operand_equal_p (TREE_OPERAND (arg0, 0),
2255				  TREE_OPERAND (arg1, 0), 0);
2256
2257	case RTL_EXPR:
2258	  return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2259
2260	case CALL_EXPR:
2261	  /* If the CALL_EXPRs call different functions, then they
2262	     clearly can not be equal.  */
2263	  if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2264				 TREE_OPERAND (arg1, 0), 0))
2265	    return 0;
2266
2267	  /* Only consider const functions equivalent.  */
2268	  fndecl = get_callee_fndecl (arg0);
2269	  if (fndecl == NULL_TREE
2270	      || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2271	    return 0;
2272
2273	  /* Now see if all the arguments are the same.  operand_equal_p
2274	     does not handle TREE_LIST, so we walk the operands here
2275	     feeding them to operand_equal_p.  */
2276	  arg0 = TREE_OPERAND (arg0, 1);
2277	  arg1 = TREE_OPERAND (arg1, 1);
2278	  while (arg0 && arg1)
2279	    {
2280	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2281		return 0;
2282
2283	      arg0 = TREE_CHAIN (arg0);
2284	      arg1 = TREE_CHAIN (arg1);
2285	    }
2286
2287	  /* If we get here and both argument lists are exhausted
2288	     then the CALL_EXPRs are equal.  */
2289	  return ! (arg0 || arg1);
2290
2291	default:
2292	  return 0;
2293	}
2294
2295    case 'd':
2296	/* Consider __builtin_sqrt equal to sqrt.  */
2297	return TREE_CODE (arg0) == FUNCTION_DECL
2298	       && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2299	       && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2300	       && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2301
2302    default:
2303      return 0;
2304    }
2305}
2306
2307/* Similar to operand_equal_p, but see if ARG0 might have been made by
2308   shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2309
2310   When in doubt, return 0.  */
2311
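/* For example, if ARG1 was "(int) c" for a char C compared against a small
   constant, shorten_compare may have narrowed the comparison so that ARG0
   is just C; this routine lets the caller still treat ARG0 and ARG1 as
   the same comparison operand in that situation.  */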
2312static int
2313operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2314{
2315  int unsignedp1, unsignedpo;
2316  tree primarg0, primarg1, primother;
2317  unsigned int correct_width;
2318
2319  if (operand_equal_p (arg0, arg1, 0))
2320    return 1;
2321
2322  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2323      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2324    return 0;
2325
2326  /* Discard any conversions that don't change the modes of ARG0 and ARG1
2327     and see if the inner values are the same.  This removes any
2328     signedness comparison, which doesn't matter here.  */
2329  primarg0 = arg0, primarg1 = arg1;
2330  STRIP_NOPS (primarg0);
2331  STRIP_NOPS (primarg1);
2332  if (operand_equal_p (primarg0, primarg1, 0))
2333    return 1;
2334
2335  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2336     actual comparison operand, ARG0.
2337
2338     First throw away any conversions to wider types
2339     already present in the operands.  */
2340
2341  primarg1 = get_narrower (arg1, &unsignedp1);
2342  primother = get_narrower (other, &unsignedpo);
2343
2344  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2345  if (unsignedp1 == unsignedpo
2346      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2347      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2348    {
2349      tree type = TREE_TYPE (arg0);
2350
2351      /* Make sure shorter operand is extended the right way
2352	 to match the longer operand.  */
2353      primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2354			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2355
2356      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2357	return 1;
2358    }
2359
2360  return 0;
2361}
2362
2363/* See if ARG is an expression that is either a comparison or is performing
2364   arithmetic on comparisons.  The comparisons must only be comparing
2365   two different values, which will be stored in *CVAL1 and *CVAL2; if
2366   they are nonzero it means that some operands have already been found.
2367   No variables may be used anywhere else in the expression except in the
2368   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2369   the expression and save_expr needs to be called with CVAL1 and CVAL2.
2370
2371   If this is true, return 1.  Otherwise, return zero.  */
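/* For example, "(a < b) | (a == b)" qualifies, with *CVAL1 == A and
   *CVAL2 == B, whereas "(a < b) | (a < c)" does not, since its
   comparisons involve three distinct values.  */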
2372
2373static int
2374twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2375{
2376  enum tree_code code = TREE_CODE (arg);
2377  char class = TREE_CODE_CLASS (code);
2378
2379  /* We can handle some of the 'e' cases here.  */
2380  if (class == 'e' && code == TRUTH_NOT_EXPR)
2381    class = '1';
2382  else if (class == 'e'
2383	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2384	       || code == COMPOUND_EXPR))
2385    class = '2';
2386
2387  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2388	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2389    {
2390      /* If we've already found a CVAL1 or CVAL2, this expression is
2391		 too complex to handle.  */
2392      if (*cval1 || *cval2)
2393	return 0;
2394
2395      class = '1';
2396      *save_p = 1;
2397    }
2398
2399  switch (class)
2400    {
2401    case '1':
2402      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2403
2404    case '2':
2405      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2406	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
2407				      cval1, cval2, save_p));
2408
2409    case 'c':
2410      return 1;
2411
2412    case 'e':
2413      if (code == COND_EXPR)
2414	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2415				     cval1, cval2, save_p)
2416		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
2417					cval1, cval2, save_p)
2418		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
2419					cval1, cval2, save_p));
2420      return 0;
2421
2422    case '<':
2423      /* First see if we can handle the first operand, then the second.  For
2424	 the second operand, we know *CVAL1 can't be zero.  It must be that
2425	 one side of the comparison is each of the values; test for the
2426	 case where this isn't true by failing if the two operands
2427	 are the same.  */
2428
2429      if (operand_equal_p (TREE_OPERAND (arg, 0),
2430			   TREE_OPERAND (arg, 1), 0))
2431	return 0;
2432
2433      if (*cval1 == 0)
2434	*cval1 = TREE_OPERAND (arg, 0);
2435      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2436	;
2437      else if (*cval2 == 0)
2438	*cval2 = TREE_OPERAND (arg, 0);
2439      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2440	;
2441      else
2442	return 0;
2443
2444      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2445	;
2446      else if (*cval2 == 0)
2447	*cval2 = TREE_OPERAND (arg, 1);
2448      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2449	;
2450      else
2451	return 0;
2452
2453      return 1;
2454
2455    default:
2456      return 0;
2457    }
2458}
2459
2460/* ARG is a tree that is known to contain just arithmetic operations and
2461   comparisons.  Evaluate the operations in the tree substituting NEW0 for
2462   any occurrence of OLD0 as an operand of a comparison and likewise for
2463   NEW1 and OLD1.  */
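/* For example, applied to "(a < b) | (a == b)" with OLD0 == A, NEW0 == 0,
   OLD1 == B and NEW1 == 1, this folds "(0 < 1) | (0 == 1)", i.e. the
   value the expression takes whenever A compares below B.  */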
2464
2465static tree
2466eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2467{
2468  tree type = TREE_TYPE (arg);
2469  enum tree_code code = TREE_CODE (arg);
2470  char class = TREE_CODE_CLASS (code);
2471
2472  /* We can handle some of the 'e' cases here.  */
2473  if (class == 'e' && code == TRUTH_NOT_EXPR)
2474    class = '1';
2475  else if (class == 'e'
2476	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2477    class = '2';
2478
2479  switch (class)
2480    {
2481    case '1':
2482      return fold (build1 (code, type,
2483			   eval_subst (TREE_OPERAND (arg, 0),
2484				       old0, new0, old1, new1)));
2485
2486    case '2':
2487      return fold (build (code, type,
2488			  eval_subst (TREE_OPERAND (arg, 0),
2489				      old0, new0, old1, new1),
2490			  eval_subst (TREE_OPERAND (arg, 1),
2491				      old0, new0, old1, new1)));
2492
2493    case 'e':
2494      switch (code)
2495	{
2496	case SAVE_EXPR:
2497	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2498
2499	case COMPOUND_EXPR:
2500	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2501
2502	case COND_EXPR:
2503	  return fold (build (code, type,
2504			      eval_subst (TREE_OPERAND (arg, 0),
2505					  old0, new0, old1, new1),
2506			      eval_subst (TREE_OPERAND (arg, 1),
2507					  old0, new0, old1, new1),
2508			      eval_subst (TREE_OPERAND (arg, 2),
2509					  old0, new0, old1, new1)));
2510	default:
2511	  break;
2512	}
2513      /* Fall through - ???  */
2514
2515    case '<':
2516      {
2517	tree arg0 = TREE_OPERAND (arg, 0);
2518	tree arg1 = TREE_OPERAND (arg, 1);
2519
2520	/* We need to check both for exact equality and tree equality.  The
2521	   former will be true if the operand has a side-effect.  In that
2522	   case, we know the operand occurred exactly once.  */
2523
2524	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2525	  arg0 = new0;
2526	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2527	  arg0 = new1;
2528
2529	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2530	  arg1 = new0;
2531	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2532	  arg1 = new1;
2533
2534	return fold (build (code, type, arg0, arg1));
2535      }
2536
2537    default:
2538      return arg;
2539    }
2540}
2541
2542/* Return a tree for the case when the result of an expression is RESULT
2543   converted to TYPE and OMITTED was previously an operand of the expression
2544   but is now not needed (e.g., we folded OMITTED * 0).
2545
2546   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
2547   the conversion of RESULT to TYPE.  */
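/* For example, folding "f () * 0" reaches here with RESULT == 0 and
   OMITTED == "f ()"; since the call has side effects, the folded tree is
   the COMPOUND_EXPR "(f (), 0)" rather than a bare constant zero.  */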
2548
2549tree
2550omit_one_operand (tree type, tree result, tree omitted)
2551{
2552  tree t = fold_convert (type, result);
2553
2554  if (TREE_SIDE_EFFECTS (omitted))
2555    return build (COMPOUND_EXPR, type, omitted, t);
2556
2557  return non_lvalue (t);
2558}
2559
2560/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
2561
2562static tree
2563pedantic_omit_one_operand (tree type, tree result, tree omitted)
2564{
2565  tree t = fold_convert (type, result);
2566
2567  if (TREE_SIDE_EFFECTS (omitted))
2568    return build (COMPOUND_EXPR, type, omitted, t);
2569
2570  return pedantic_non_lvalue (t);
2571}
2572
2573/* Return a simplified tree node for the truth-negation of ARG.  This
2574   never alters ARG itself.  We assume that ARG is an operation that
2575   returns a truth value (0 or 1).  */
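/* For example, inverting "a && b" produces "!a || !b" by De Morgan's law,
   and the recursion pushes each negation down until it reaches a
   comparison or some other leaf that can absorb it directly.  */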
2576
2577tree
2578invert_truthvalue (tree arg)
2579{
2580  tree type = TREE_TYPE (arg);
2581  enum tree_code code = TREE_CODE (arg);
2582
2583  if (code == ERROR_MARK)
2584    return arg;
2585
2586  /* If this is a comparison, we can simply invert it, except for
2587     floating-point non-equality comparisons, in which case we just
2588     enclose a TRUTH_NOT_EXPR around what we have.  */
2589
2590  if (TREE_CODE_CLASS (code) == '<')
2591    {
2592      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2593	  && !flag_unsafe_math_optimizations
2594	  && code != NE_EXPR
2595	  && code != EQ_EXPR)
2596	return build1 (TRUTH_NOT_EXPR, type, arg);
2597      else
2598	return build (invert_tree_comparison (code), type,
2599		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2600    }
2601
2602  switch (code)
2603    {
2604    case INTEGER_CST:
2605      return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2606
2607    case TRUTH_AND_EXPR:
2608      return build (TRUTH_OR_EXPR, type,
2609		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2610		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2611
2612    case TRUTH_OR_EXPR:
2613      return build (TRUTH_AND_EXPR, type,
2614		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2615		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2616
2617    case TRUTH_XOR_EXPR:
2618      /* Here we can invert either operand.  We invert the first operand
2619	 unless the second operand is a TRUTH_NOT_EXPR in which case our
2620	 result is the XOR of the first operand with the inside of the
2621	 negation of the second operand.  */
2622
2623      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2624	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2625		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2626      else
2627	return build (TRUTH_XOR_EXPR, type,
2628		      invert_truthvalue (TREE_OPERAND (arg, 0)),
2629		      TREE_OPERAND (arg, 1));
2630
2631    case TRUTH_ANDIF_EXPR:
2632      return build (TRUTH_ORIF_EXPR, type,
2633		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2634		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2635
2636    case TRUTH_ORIF_EXPR:
2637      return build (TRUTH_ANDIF_EXPR, type,
2638		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2639		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2640
2641    case TRUTH_NOT_EXPR:
2642      return TREE_OPERAND (arg, 0);
2643
2644    case COND_EXPR:
2645      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2646		    invert_truthvalue (TREE_OPERAND (arg, 1)),
2647		    invert_truthvalue (TREE_OPERAND (arg, 2)));
2648
2649    case COMPOUND_EXPR:
2650      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2651		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2652
2653    case WITH_RECORD_EXPR:
2654      return build (WITH_RECORD_EXPR, type,
2655		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2656		    TREE_OPERAND (arg, 1));
2657
2658    case NON_LVALUE_EXPR:
2659      return invert_truthvalue (TREE_OPERAND (arg, 0));
2660
2661    case NOP_EXPR:
2662    case CONVERT_EXPR:
2663    case FLOAT_EXPR:
2664      return build1 (TREE_CODE (arg), type,
2665		     invert_truthvalue (TREE_OPERAND (arg, 0)));
2666
2667    case BIT_AND_EXPR:
2668      if (!integer_onep (TREE_OPERAND (arg, 1)))
2669	break;
2670      return build (EQ_EXPR, type, arg,
2671		    fold_convert (type, integer_zero_node));
2672
2673    case SAVE_EXPR:
2674      return build1 (TRUTH_NOT_EXPR, type, arg);
2675
2676    case CLEANUP_POINT_EXPR:
2677      return build1 (CLEANUP_POINT_EXPR, type,
2678		     invert_truthvalue (TREE_OPERAND (arg, 0)));
2679
2680    default:
2681      break;
2682    }
2683  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2684    abort ();
2685  return build1 (TRUTH_NOT_EXPR, type, arg);
2686}
2687
2688/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2689   operands are another bit-wise operation with a common input.  If so,
2690   distribute the bit operations to save an operation and possibly two if
2691   constants are involved.  For example, convert
2692	(A | B) & (A | C) into A | (B & C)
2693   Further simplification will occur if B and C are constants.
2694
2695   If this optimization cannot be done, 0 will be returned.  */
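/* With constants this saves two operations: (X | 3) & (X | 5) becomes
   X | (3 & 5), and the inner operation immediately folds to X | 1.  */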
2696
2697static tree
2698distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2699{
2700  tree common;
2701  tree left, right;
2702
2703  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2704      || TREE_CODE (arg0) == code
2705      || (TREE_CODE (arg0) != BIT_AND_EXPR
2706	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
2707    return 0;
2708
2709  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2710    {
2711      common = TREE_OPERAND (arg0, 0);
2712      left = TREE_OPERAND (arg0, 1);
2713      right = TREE_OPERAND (arg1, 1);
2714    }
2715  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2716    {
2717      common = TREE_OPERAND (arg0, 0);
2718      left = TREE_OPERAND (arg0, 1);
2719      right = TREE_OPERAND (arg1, 0);
2720    }
2721  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2722    {
2723      common = TREE_OPERAND (arg0, 1);
2724      left = TREE_OPERAND (arg0, 0);
2725      right = TREE_OPERAND (arg1, 1);
2726    }
2727  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2728    {
2729      common = TREE_OPERAND (arg0, 1);
2730      left = TREE_OPERAND (arg0, 0);
2731      right = TREE_OPERAND (arg1, 0);
2732    }
2733  else
2734    return 0;
2735
2736  return fold (build (TREE_CODE (arg0), type, common,
2737		      fold (build (code, type, left, right))));
2738}
2739
2740/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2741   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
2742
2743static tree
2744make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2745		    int unsignedp)
2746{
2747  tree result = build (BIT_FIELD_REF, type, inner,
2748		       size_int (bitsize), bitsize_int (bitpos));
2749
2750  TREE_UNSIGNED (result) = unsignedp;
2751
2752  return result;
2753}
2754
2755/* Optimize a bit-field compare.
2756
2757   There are two cases:  First is a compare against a constant and the
2758   second is a comparison of two items where the fields are at the same
2759   bit position relative to the start of a chunk (byte, halfword, word)
2760   large enough to contain it.  In these cases we can avoid the shift
2761   implicit in bitfield extractions.
2762
2763   For constants, we emit a compare of the shifted constant with the
2764   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2765   compared.  For two fields at the same position, we do the ANDs with the
2766   similar mask and compare the result of the ANDs.
2767
2768   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2769   COMPARE_TYPE is the type of the comparison, and LHS and RHS
2770   are the left and right operands of the comparison, respectively.
2771
2772   If the optimization described above can be done, we return the resulting
2773   tree.  Otherwise we return zero.  */
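/* As a hypothetical illustration, given "struct s { unsigned a : 3, b : 5; }"
   the test "x.b == 9" can be rewritten along the lines of

	(containing_word (x) & MASK) == ((9 << 3) & MASK)

   where MASK selects B's bits, replacing the extraction shift with one
   masked compare.  (The exact bit positions depend on endianness and on
   the mode chosen by get_best_mode.)  */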
2774
2775static tree
2776optimize_bit_field_compare (enum tree_code code, tree compare_type,
2777			    tree lhs, tree rhs)
2778{
2779  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2780  tree type = TREE_TYPE (lhs);
2781  tree signed_type, unsigned_type;
2782  int const_p = TREE_CODE (rhs) == INTEGER_CST;
2783  enum machine_mode lmode, rmode, nmode;
2784  int lunsignedp, runsignedp;
2785  int lvolatilep = 0, rvolatilep = 0;
2786  tree linner, rinner = NULL_TREE;
2787  tree mask;
2788  tree offset;
2789
2790  /* Get all the information about the extractions being done.  If the bit size
2791     is the same as the size of the underlying object, we aren't doing an
2792     extraction at all and so can do nothing.  We also don't want to
2793     do anything if the inner expression is a PLACEHOLDER_EXPR since we
2794     then will no longer be able to replace it.  */
2795  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2796				&lunsignedp, &lvolatilep);
2797  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2798      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2799    return 0;
2800
2801  if (!const_p)
2802    {
2803      /* If this is not a constant, we can only do something if bit
2804	 positions, sizes, and signedness are the same.  */
2805      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2806				    &runsignedp, &rvolatilep);
2807
2808      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2809	  || lunsignedp != runsignedp || offset != 0
2810	  || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2811	return 0;
2812    }
2813
2814  /* See if we can find a mode to refer to this field.  We should be able to,
2815     but fail if we can't.  */
2816  nmode = get_best_mode (lbitsize, lbitpos,
2817			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2818			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2819				TYPE_ALIGN (TREE_TYPE (rinner))),
2820			 word_mode, lvolatilep || rvolatilep);
2821  if (nmode == VOIDmode)
2822    return 0;
2823
2824  /* Set signed and unsigned types of the precision of this mode for the
2825     shifts below.  */
2826  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2827  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2828
2829  /* Compute the bit position and size for the new reference and our offset
2830     within it. If the new reference is the same size as the original, we
2831     won't optimize anything, so return zero.  */
2832  nbitsize = GET_MODE_BITSIZE (nmode);
2833  nbitpos = lbitpos & ~ (nbitsize - 1);
2834  lbitpos -= nbitpos;
2835  if (nbitsize == lbitsize)
2836    return 0;
2837
2838  if (BYTES_BIG_ENDIAN)
2839    lbitpos = nbitsize - lbitsize - lbitpos;
2840
2841  /* Make the mask to be used against the extracted field.  */
2842  mask = build_int_2 (~0, ~0);
2843  TREE_TYPE (mask) = unsigned_type;
2844  force_fit_type (mask, 0);
2845  mask = fold_convert (unsigned_type, mask);
2846  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2847  mask = const_binop (RSHIFT_EXPR, mask,
2848		      size_int (nbitsize - lbitsize - lbitpos), 0);
2849
2850  if (! const_p)
2851    /* If not comparing with constant, just rework the comparison
2852       and return.  */
2853    return build (code, compare_type,
2854		  build (BIT_AND_EXPR, unsigned_type,
2855			 make_bit_field_ref (linner, unsigned_type,
2856					     nbitsize, nbitpos, 1),
2857			 mask),
2858		  build (BIT_AND_EXPR, unsigned_type,
2859			 make_bit_field_ref (rinner, unsigned_type,
2860					     nbitsize, nbitpos, 1),
2861			 mask));
2862
2863  /* Otherwise, we are handling the constant case. See if the constant is too
2864     big for the field.  Warn and return a tree for 0 (false) if so.  We do
2865     this not only for its own sake, but to avoid having to test for this
2866     error case below.  If we didn't, we might generate wrong code.
2867
2868     For unsigned fields, the constant shifted right by the field length should
2869     be all zero.  For signed fields, the high-order bits should agree with
2870     the sign bit.  */
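  /* For instance, comparing a 3-bit unsigned field against 9 can never
     succeed: the field only holds 0..7, and 9 >> 3 is nonzero, so we warn
     and fold the comparison to constant false (or true for NE_EXPR).  */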
2871
2872  if (lunsignedp)
2873    {
2874      if (! integer_zerop (const_binop (RSHIFT_EXPR,
2875					fold_convert (unsigned_type, rhs),
2876					size_int (lbitsize), 0)))
2877	{
2878	  warning ("comparison is always %d due to width of bit-field",
2879		   code == NE_EXPR);
2880	  return fold_convert (compare_type,
2881			       (code == NE_EXPR
2882				? integer_one_node : integer_zero_node));
2883	}
2884    }
2885  else
2886    {
2887      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2888			      size_int (lbitsize - 1), 0);
2889      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2890	{
2891	  warning ("comparison is always %d due to width of bit-field",
2892		   code == NE_EXPR);
2893	  return fold_convert (compare_type,
2894			       (code == NE_EXPR
2895				? integer_one_node : integer_zero_node));
2896	}
2897    }
2898
2899  /* Single-bit compares should always be against zero.  */
2900  if (lbitsize == 1 && ! integer_zerop (rhs))
2901    {
2902      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2903      rhs = fold_convert (type, integer_zero_node);
2904    }
2905
2906  /* Make a new bit-field reference, shift the constant over the
2907     appropriate number of bits, and mask it with the computed mask
2908     (in case this was a signed field).  */
2909  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2910  if (lvolatilep)
2911    {
2912      TREE_SIDE_EFFECTS (lhs) = 1;
2913      TREE_THIS_VOLATILE (lhs) = 1;
2914    }
2915
2916  rhs = fold (const_binop (BIT_AND_EXPR,
2917			   const_binop (LSHIFT_EXPR,
2918					fold_convert (unsigned_type, rhs),
2919					size_int (lbitpos), 0),
2920			   mask, 0));
2921
2922  return build (code, compare_type,
2923		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2924		rhs);
2925}
2926
2927/* Subroutine for fold_truthop: decode a field reference.
2928
2929   If EXP is a comparison reference, we return the innermost reference.
2930
2931   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2932   set to the starting bit number.
2933
2934   If the innermost field can be completely contained in a mode-sized
2935   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
2936
2937   *PVOLATILEP is set to 1 if any expression encountered is volatile;
2938   otherwise it is not changed.
2939
2940   *PUNSIGNEDP is set to the signedness of the field.
2941
2942   *PMASK is set to the mask used.  This is either contained in a
2943   BIT_AND_EXPR or derived from the width of the field.
2944
2945   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2946
2947   Return 0 if this is not a component reference or is one that we can't
2948   do anything with.  */
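/* As a hypothetical example, for EXP of the form "q->f & 3" where F is an
   8-bit unsigned field starting at bit 16, this returns the dereferenced
   record, sets *PBITSIZE to 8, *PBITPOS to 16, *PAND_MASK to the constant
   3, and *PMASK to 3 converted to the 8-bit unsigned type.  */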
2949
2950static tree
2951decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2952			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2953			int *punsignedp, int *pvolatilep,
2954			tree *pmask, tree *pand_mask)
2955{
2956  tree outer_type = 0;
2957  tree and_mask = 0;
2958  tree mask, inner, offset;
2959  tree unsigned_type;
2960  unsigned int precision;
2961
2962  /* All the optimizations using this function assume integer fields.
2963     There are problems with FP fields since the type_for_size call
2964     below can fail for, e.g., XFmode.  */
2965  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2966    return 0;
2967
2968  /* We are interested in the bare arrangement of bits, so strip everything
2969     that doesn't affect the machine mode.  However, record the type of the
2970     outermost expression if it may matter below.  */
2971  if (TREE_CODE (exp) == NOP_EXPR
2972      || TREE_CODE (exp) == CONVERT_EXPR
2973      || TREE_CODE (exp) == NON_LVALUE_EXPR)
2974    outer_type = TREE_TYPE (exp);
2975  STRIP_NOPS (exp);
2976
2977  if (TREE_CODE (exp) == BIT_AND_EXPR)
2978    {
2979      and_mask = TREE_OPERAND (exp, 1);
2980      exp = TREE_OPERAND (exp, 0);
2981      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2982      if (TREE_CODE (and_mask) != INTEGER_CST)
2983	return 0;
2984    }
2985
2986  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2987			       punsignedp, pvolatilep);
2988  if ((inner == exp && and_mask == 0)
2989      || *pbitsize < 0 || offset != 0
2990      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2991    return 0;
2992
2993  /* If the number of bits in the reference is the same as the bitsize of
2994     the outer type, then the outer type gives the signedness. Otherwise
2995     (in case of a small bitfield) the signedness is unchanged.  */
2996  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2997    *punsignedp = TREE_UNSIGNED (outer_type);
2998
2999  /* Compute the mask to access the bitfield.  */
3000  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3001  precision = TYPE_PRECISION (unsigned_type);
3002
3003  mask = build_int_2 (~0, ~0);
3004  TREE_TYPE (mask) = unsigned_type;
3005  force_fit_type (mask, 0);
3006  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3007  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3008
3009  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3010  if (and_mask != 0)
3011    mask = fold (build (BIT_AND_EXPR, unsigned_type,
3012			fold_convert (unsigned_type, and_mask), mask));
3013
3014  *pmask = mask;
3015  *pand_mask = and_mask;
3016  return inner;
3017}
3018
3019/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3020   bit positions.  */
3021
3022static int
3023all_ones_mask_p (tree mask, int size)
3024{
3025  tree type = TREE_TYPE (mask);
3026  unsigned int precision = TYPE_PRECISION (type);
3027  tree tmask;
3028
3029  tmask = build_int_2 (~0, ~0);
3030  TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3031  force_fit_type (tmask, 0);
3032  return
3033    tree_int_cst_equal (mask,
3034			const_binop (RSHIFT_EXPR,
3035				     const_binop (LSHIFT_EXPR, tmask,
3036						  size_int (precision - size),
3037						  0),
3038				     size_int (precision - size), 0));
3039}
3040
3041/* Subroutine for fold: determine if VAL is the INTEGER_CST that
3042   represents the sign bit of EXP's type.  If EXP represents a sign
3043   or zero extension, also test VAL against the unextended type.
3044   The return value is the (sub)expression whose sign bit is VAL,
3045   or NULL_TREE otherwise.  */
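/* For example, with a 32-bit type the sign bit constant is 0x80000000;
   for a 64-bit type on a host whose HOST_WIDE_INT is 32 bits it is the
   constant with high word 0x80000000 and low word 0, hence the separate
   HI/LO computations below.  */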
3046
3047static tree
3048sign_bit_p (tree exp, tree val)
3049{
3050  unsigned HOST_WIDE_INT mask_lo, lo;
3051  HOST_WIDE_INT mask_hi, hi;
3052  int width;
3053  tree t;
3054
3055  /* Tree EXP must have an integral type.  */
3056  t = TREE_TYPE (exp);
3057  if (! INTEGRAL_TYPE_P (t))
3058    return NULL_TREE;
3059
3060  /* Tree VAL must be an integer constant.  */
3061  if (TREE_CODE (val) != INTEGER_CST
3062      || TREE_CONSTANT_OVERFLOW (val))
3063    return NULL_TREE;
3064
3065  width = TYPE_PRECISION (t);
3066  if (width > HOST_BITS_PER_WIDE_INT)
3067    {
3068      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3069      lo = 0;
3070
3071      mask_hi = ((unsigned HOST_WIDE_INT) -1
3072		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3073      mask_lo = -1;
3074    }
3075  else
3076    {
3077      hi = 0;
3078      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3079
3080      mask_hi = 0;
3081      mask_lo = ((unsigned HOST_WIDE_INT) -1
3082		 >> (HOST_BITS_PER_WIDE_INT - width));
3083    }
3084
3085  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3086     treat VAL as if it were unsigned.  */
3087  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3088      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3089    return exp;
3090
3091  /* Handle extension from a narrower type.  */
3092  if (TREE_CODE (exp) == NOP_EXPR
3093      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3094    return sign_bit_p (TREE_OPERAND (exp, 0), val);
3095
3096  return NULL_TREE;
3097}
3098
3099/* Subroutine for fold_truthop: determine if an operand is simple enough
3100   to be evaluated unconditionally.  */
3101
3102static int
3103simple_operand_p (tree exp)
3104{
3105  /* Strip any conversions that don't change the machine mode.  */
3106  while ((TREE_CODE (exp) == NOP_EXPR
3107	  || TREE_CODE (exp) == CONVERT_EXPR)
3108	 && (TYPE_MODE (TREE_TYPE (exp))
3109	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3110    exp = TREE_OPERAND (exp, 0);
3111
3112  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3113	  || (DECL_P (exp)
3114	      && ! TREE_ADDRESSABLE (exp)
3115	      && ! TREE_THIS_VOLATILE (exp)
3116	      && ! DECL_NONLOCAL (exp)
3117	      /* Don't regard global variables as simple.  They may be
3118		 allocated in ways unknown to the compiler (shared memory,
3119		 #pragma weak, etc).  */
3120	      && ! TREE_PUBLIC (exp)
3121	      && ! DECL_EXTERNAL (exp)
3122	      /* Loading a static variable is unduly expensive, but global
3123		 registers aren't expensive.  */
3124	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3125}
3126
3127/* The following functions are subroutines to fold_range_test and allow it to
3128   try to change a logical combination of comparisons into a range test.
3129
3130   For example, both
3131	X == 2 || X == 3 || X == 4 || X == 5
3132   and
3133	X >= 2 && X <= 5
3134   are converted to
3135	(unsigned) (X - 2) <= 3
3136
3137   We describe each set of comparisons as being either inside or outside
3138   a range, using a variable named like IN_P, and then describe the
3139   range with a lower and upper bound.  If one of the bounds is omitted,
3140   it represents either the highest or lowest value of the type.
3141
3142   In the comments below, we represent a range by two numbers in brackets
3143   preceded by a "+" to designate being inside that range, or a "-" to
3144   designate being outside that range, so the condition can be inverted by
3145   flipping the prefix.  An omitted bound is represented by a "-".  For
3146   example, "- [-, 10]" means being outside the range starting at the lowest
3147   possible value and ending at 10, in other words, being greater than 10.
3148   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3149   always false.
3150
3151   We set up things so that the missing bounds are handled in a consistent
3152   manner so neither a missing bound nor "true" and "false" need to be
3153   handled using a special case.  */
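/* The unsigned subtraction trick works because X is in [2, 5] exactly when
   (unsigned) (X - 2) <= 3: for X below 2 the subtraction wraps to a value
   near the top of the unsigned range and fails the comparison, so a single
   unsigned compare replaces a pair of ordered compares.  */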
3154
3155/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3156   of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3157   and UPPER1_P are nonzero if the respective argument is an upper bound
3158   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3159   must be specified for a comparison.  ARG1 will be converted to ARG0's
3160   type if both are specified.  */
3161
3162static tree
3163range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3164	     tree arg1, int upper1_p)
3165{
3166  tree tem;
3167  int result;
3168  int sgn0, sgn1;
3169
3170  /* If neither arg represents infinity, do the normal operation.
3171     Else, if not a comparison, return infinity.  Else handle the special
3172     comparison rules. Note that most of the cases below won't occur, but
3173     are handled for consistency.  */
3174
3175  if (arg0 != 0 && arg1 != 0)
3176    {
3177      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3178			 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3179      STRIP_NOPS (tem);
3180      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3181    }
3182
3183  if (TREE_CODE_CLASS (code) != '<')
3184    return 0;
3185
3186  /* Set SGN[01] to -1 if ARG[01] is an omitted lower bound, 1 if an
3187     omitted upper bound, and 0 if the bound is present.  In real maths we
3188     cannot compare one open-ended range with another, but this is computer
3189     arithmetic, where numbers are finite: we can model every omitted upper
3190     bound by the same value Z, greater than any representable number (and
3191     every omitted lower bound by -Z), treating unbounded ranges as equal.  */
3192  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3193  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3194  switch (code)
3195    {
3196    case EQ_EXPR:
3197      result = sgn0 == sgn1;
3198      break;
3199    case NE_EXPR:
3200      result = sgn0 != sgn1;
3201      break;
3202    case LT_EXPR:
3203      result = sgn0 < sgn1;
3204      break;
3205    case LE_EXPR:
3206      result = sgn0 <= sgn1;
3207      break;
3208    case GT_EXPR:
3209      result = sgn0 > sgn1;
3210      break;
3211    case GE_EXPR:
3212      result = sgn0 >= sgn1;
3213      break;
3214    default:
3215      abort ();
3216    }
3217
3218  return fold_convert (type, result ? integer_one_node : integer_zero_node);
3219}
3220
3221/* Given EXP, a logical expression, set the range it is testing into
3222   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
3223   actually being tested.  *PLOW and *PHIGH will be made of the same type
3224   as the returned expression.  If EXP is not a comparison, we will most
3225   likely not be returning a useful value and range.  */
3226
3227static tree
3228make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3229{
3230  enum tree_code code;
3231  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3232  tree orig_type = NULL_TREE;
3233  int in_p, n_in_p;
3234  tree low, high, n_low, n_high;
3235
3236  /* Start with simply saying "EXP != 0" and then look at the code of EXP
3237     and see if we can refine the range.  Some of the cases below may not
3238     happen, but it doesn't seem worth worrying about this.  We "continue"
3239     the outer loop when we've changed something; otherwise we "break"
3240     the switch, which will "break" the while.  */
3241
3242  in_p = 0;
3243  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3244
3245  while (1)
3246    {
3247      code = TREE_CODE (exp);
3248
3249      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3250	{
3251	  if (first_rtl_op (code) > 0)
3252	    arg0 = TREE_OPERAND (exp, 0);
3253	  if (TREE_CODE_CLASS (code) == '<'
3254	      || TREE_CODE_CLASS (code) == '1'
3255	      || TREE_CODE_CLASS (code) == '2')
3256	    type = TREE_TYPE (arg0);
3257	  if (TREE_CODE_CLASS (code) == '2'
3258	      || TREE_CODE_CLASS (code) == '<'
3259	      || (TREE_CODE_CLASS (code) == 'e'
3260		  && TREE_CODE_LENGTH (code) > 1))
3261	    arg1 = TREE_OPERAND (exp, 1);
3262	}
3263
3264      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3265	 lose a cast by accident.  */
3266      if (type != NULL_TREE && orig_type == NULL_TREE)
3267	orig_type = type;
3268
3269      switch (code)
3270	{
3271	case TRUTH_NOT_EXPR:
3272	  in_p = ! in_p, exp = arg0;
3273	  continue;
3274
3275	case EQ_EXPR: case NE_EXPR:
3276	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3277	  /* We can only do something if the range is testing for zero
3278	     and if the second operand is an integer constant.  Note that
3279	     complementing IN_P, which was set up for the initial case of
3280	     complementing IN_P since it will set in the initial case of
3281	     being not equal to zero; "out" is leaving it alone.  */
3282	  if (low == 0 || high == 0
3283	      || ! integer_zerop (low) || ! integer_zerop (high)
3284	      || TREE_CODE (arg1) != INTEGER_CST)
3285	    break;
3286
3287	  switch (code)
3288	    {
3289	    case NE_EXPR:  /* - [c, c]  */
3290	      low = high = arg1;
3291	      break;
3292	    case EQ_EXPR:  /* + [c, c]  */
3293	      in_p = ! in_p, low = high = arg1;
3294	      break;
3295	    case GT_EXPR:  /* - [-, c] */
3296	      low = 0, high = arg1;
3297	      break;
3298	    case GE_EXPR:  /* + [c, -] */
3299	      in_p = ! in_p, low = arg1, high = 0;
3300	      break;
3301	    case LT_EXPR:  /* - [c, -] */
3302	      low = arg1, high = 0;
3303	      break;
3304	    case LE_EXPR:  /* + [-, c] */
3305	      in_p = ! in_p, low = 0, high = arg1;
3306	      break;
3307	    default:
3308	      abort ();
3309	    }
3310
3311	  exp = arg0;
3312
3313	  /* If this is an unsigned comparison, we also know that EXP is
3314	     greater than or equal to zero.  We base the range tests we make
3315	     on that fact, so we record it here so we can parse existing
3316	     range tests.  */
3317	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3318	    {
3319	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3320				  1, fold_convert (type, integer_zero_node),
3321				  NULL_TREE))
3322		break;
3323
3324	      in_p = n_in_p, low = n_low, high = n_high;
3325
3326	      /* If the high bound is missing, but we have a nonzero low
3327		 bound, reverse the range so it goes from zero to the low bound
3328		 minus 1.  */
3329	      if (high == 0 && low && ! integer_zerop (low))
3330		{
3331		  in_p = ! in_p;
3332		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3333				      integer_one_node, 0);
3334		  low = fold_convert (type, integer_zero_node);
3335		}
3336	    }
3337	  continue;
3338
3339	case NEGATE_EXPR:
3340	  /* (-x) IN [a,b] -> x in [-b, -a]  */
3341	  n_low = range_binop (MINUS_EXPR, type,
3342			       fold_convert (type, integer_zero_node),
3343			       0, high, 1);
3344	  n_high = range_binop (MINUS_EXPR, type,
3345				fold_convert (type, integer_zero_node),
3346				0, low, 0);
3347	  low = n_low, high = n_high;
3348	  exp = arg0;
3349	  continue;
3350
3351	case BIT_NOT_EXPR:
3352	  /* ~ X -> -X - 1  */
3353	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
3354		       fold_convert (type, integer_one_node));
3355	  continue;
3356
3357	case PLUS_EXPR:  case MINUS_EXPR:
3358	  if (TREE_CODE (arg1) != INTEGER_CST)
3359	    break;
3360
3361	  /* If EXP is signed, any overflow in the computation is undefined,
3362	     so we don't worry about it so long as our computations on
3363	     the bounds don't overflow.  For unsigned, overflow is defined
3364	     and this is exactly the right thing.  */
3365	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3366			       type, low, 0, arg1, 0);
3367	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3368				type, high, 1, arg1, 0);
3369	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
3370	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
3371	    break;
3372
3373	  /* Check for an unsigned range which has wrapped around the maximum
3374	     value thus making n_high < n_low, and normalize it.  */
3375	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3376	    {
3377	      low = range_binop (PLUS_EXPR, type, n_high, 0,
3378				 integer_one_node, 0);
3379	      high = range_binop (MINUS_EXPR, type, n_low, 0,
3380				  integer_one_node, 0);
3381
3382	      /* If the range is of the form +/- [ x+1, x ], we won't
3383		 be able to normalize it.  But then, it represents the
3384		 whole range or the empty set, so make it
3385		 +/- [ -, - ].  */
3386	      if (tree_int_cst_equal (n_low, low)
3387		  && tree_int_cst_equal (n_high, high))
3388		low = high = 0;
3389	      else
3390		in_p = ! in_p;
3391	    }
3392	  else
3393	    low = n_low, high = n_high;
3394
3395	  exp = arg0;
3396	  continue;
3397
3398	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
3399	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3400	    break;
3401
3402	  if (! INTEGRAL_TYPE_P (type)
3403	      || (low != 0 && ! int_fits_type_p (low, type))
3404	      || (high != 0 && ! int_fits_type_p (high, type)))
3405	    break;
3406
3407	  n_low = low, n_high = high;
3408
3409	  if (n_low != 0)
3410	    n_low = fold_convert (type, n_low);
3411
3412	  if (n_high != 0)
3413	    n_high = fold_convert (type, n_high);
3414
3415	  /* If we're converting from an unsigned to a signed type,
3416	     we will be doing the comparison as unsigned.  The tests above
3417	     have already verified that LOW and HIGH are both positive.
3418
3419	     So we have to make sure that the original unsigned value will
3420	     be interpreted as positive.  */
3421	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3422	    {
3423	      tree equiv_type = (*lang_hooks.types.type_for_mode)
3424		(TYPE_MODE (type), 1);
3425	      tree high_positive;
3426
3427	      /* A range without an upper bound is, naturally, unbounded.
3428		 Since convert would have cropped a very large value, use
3429		 the max value for the destination type.  */
3430	      high_positive
3431		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3432		  : TYPE_MAX_VALUE (type);
3433
3434	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3435	        high_positive = fold (build (RSHIFT_EXPR, type,
3436					     fold_convert (type,
3437							   high_positive),
3438					     fold_convert (type,
3439							   integer_one_node)));
3440
3441	      /* If the low bound is specified, "and" the range with the
3442		 range for which the original unsigned value will be
3443		 positive.  */
3444	      if (low != 0)
3445		{
3446		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3447				      1, n_low, n_high, 1,
3448				      fold_convert (type, integer_zero_node),
3449				      high_positive))
3450		    break;
3451
3452		  in_p = (n_in_p == in_p);
3453		}
3454	      else
3455		{
3456		  /* Otherwise, "or" the range with the range of the input
3457		     that will be interpreted as negative.  */
3458		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3459				      0, n_low, n_high, 1,
3460				      fold_convert (type, integer_zero_node),
3461				      high_positive))
3462		    break;
3463
3464		  in_p = (in_p != n_in_p);
3465		}
3466	    }
3467
3468	  exp = arg0;
3469	  low = n_low, high = n_high;
3470	  continue;
3471
3472	default:
3473	  break;
3474	}
3475
3476      break;
3477    }
3478
3479  /* If EXP is a constant, we can evaluate whether this is true or false.  */
3480  if (TREE_CODE (exp) == INTEGER_CST)
3481    {
3482      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3483						 exp, 0, low, 0))
3484		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
3485						    exp, 1, high, 1)));
3486      low = high = 0;
3487      exp = 0;
3488    }
3489
3490  *pin_p = in_p, *plow = low, *phigh = high;
3491  return exp;
3492}
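
/* Illustrative sketch (a hypothetical example, not part of the GCC
   sources): the NEGATE_EXPR case of make_range above relies on the
   identity (-X) IN [A, B] <=> X IN [-B, -A].  The helper below checks
   that identity for plain ints, assuming none of the negations
   overflow (i.e. no operand is INT_MIN).  */

static int
example_negate_range_identity (int x, int a, int b)
{
  int negated_in = -x >= a && -x <= b;	/* (-X) IN [A, B] */
  int flipped_in = x >= -b && x <= -a;	/* X IN [-B, -A] */
  return negated_in == flipped_in;	/* 1 whenever nothing overflows */
}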
3493
3494/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3495   type, TYPE, return an expression to test if EXP is in (or out of, depending
3496   on IN_P) the range.  */
3497
3498static tree
3499build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3500{
3501  tree etype = TREE_TYPE (exp);
3502  tree value;
3503
3504  if (! in_p
3505      && (0 != (value = build_range_check (type, exp, 1, low, high))))
3506    return invert_truthvalue (value);
3507
3508  if (low == 0 && high == 0)
3509    return fold_convert (type, integer_one_node);
3510
3511  if (low == 0)
3512    return fold (build (LE_EXPR, type, exp, high));
3513
3514  if (high == 0)
3515    return fold (build (GE_EXPR, type, exp, low));
3516
3517  if (operand_equal_p (low, high, 0))
3518    return fold (build (EQ_EXPR, type, exp, low));
3519
3520  if (integer_zerop (low))
3521    {
3522      if (! TREE_UNSIGNED (etype))
3523	{
3524	  etype = (*lang_hooks.types.unsigned_type) (etype);
3525	  high = fold_convert (etype, high);
3526	  exp = fold_convert (etype, exp);
3527	}
3528      return build_range_check (type, exp, 1, 0, high);
3529    }
3530
3531  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
3532  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3533    {
3534      unsigned HOST_WIDE_INT lo;
3535      HOST_WIDE_INT hi;
3536      int prec;
3537
3538      /* For enums the comparison will be done in the underlying type,
3539	 so using the enum's precision is wrong here.
3540	 Consider e.g. enum { A, B, C, D, E }, low == B and high == D.  */
3541      if (TREE_CODE (etype) == ENUMERAL_TYPE)
3542	prec = GET_MODE_BITSIZE (TYPE_MODE (etype));
3543      else
3544	prec = TYPE_PRECISION (etype);
3545      if (prec <= HOST_BITS_PER_WIDE_INT)
3546	{
3547	  hi = 0;
3548	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3549	}
3550      else
3551	{
3552	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3553	  lo = (unsigned HOST_WIDE_INT) -1;
3554	}
3555
3556      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3557	{
3558	  if (TREE_UNSIGNED (etype))
3559	    {
3560	      etype = (*lang_hooks.types.signed_type) (etype);
3561	      exp = fold_convert (etype, exp);
3562	    }
3563	  return fold (build (GT_EXPR, type, exp,
3564			      fold_convert (etype, integer_zero_node)));
3565	}
3566    }
3567
3568  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3569      && ! TREE_OVERFLOW (value))
3570    return build_range_check (type,
3571			      fold (build (MINUS_EXPR, etype, exp, low)),
3572			      1, fold_convert (etype, integer_zero_node),
3573			      value);
3574
3575  return 0;
3576}
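
/* Illustrative sketch (a hypothetical example, not part of the GCC
   sources): the final const_binop case of build_range_check above is
   the classic rewrite of a two-sided range test into one unsigned
   comparison: LOW <= EXP && EXP <= HIGH becomes
   (unsigned) (EXP - LOW) <= HIGH - LOW, because values below LOW wrap
   around to large unsigned numbers and fail the test.  A digit test
   makes it concrete, assuming C - '0' itself does not overflow.  */

static int
example_range_check_rewrite (int c)
{
  int two_compares = c >= '0' && c <= '9';
  int one_compare = (unsigned int) (c - '0') <= (unsigned int) ('9' - '0');
  return two_compares == one_compare;	/* always 1 */
}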
3577
3578/* Given two ranges, see if we can merge them into one.  Return 1 if we
3579   can, 0 if we can't.  Set the output range into the specified parameters.  */
3580
3581static int
3582merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3583	      tree high0, int in1_p, tree low1, tree high1)
3584{
3585  int no_overlap;
3586  int subset;
3587  int temp;
3588  tree tem;
3589  int in_p;
3590  tree low, high;
3591  int lowequal = ((low0 == 0 && low1 == 0)
3592		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3593						low0, 0, low1, 0)));
3594  int highequal = ((high0 == 0 && high1 == 0)
3595		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3596						 high0, 1, high1, 1)));
3597
3598  /* Make range 0 be the range that starts first, or ends last if they
3599     start at the same value.  Swap them if that is not the case.  */
3600  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3601				 low0, 0, low1, 0))
3602      || (lowequal
3603	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
3604					high1, 1, high0, 1))))
3605    {
3606      temp = in0_p, in0_p = in1_p, in1_p = temp;
3607      tem = low0, low0 = low1, low1 = tem;
3608      tem = high0, high0 = high1, high1 = tem;
3609    }
3610
3611  /* Now flag two cases, whether the ranges are disjoint or whether the
3612     second range is totally subsumed in the first.  Note that the tests
3613     below are simplified by the ones above.  */
3614  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3615					  high0, 1, low1, 0));
3616  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3617				      high1, 1, high0, 1));
3618
3619  /* We now have four cases, depending on whether we are including or
3620     excluding the two ranges.  */
3621  if (in0_p && in1_p)
3622    {
3623      /* If they don't overlap, the result is false.  If the second range
3624	 is a subset, it is the result.  Otherwise, the range is from the start
3625	 of the second to the end of the first.  */
3626      if (no_overlap)
3627	in_p = 0, low = high = 0;
3628      else if (subset)
3629	in_p = 1, low = low1, high = high1;
3630      else
3631	in_p = 1, low = low1, high = high0;
3632    }
3633
3634  else if (in0_p && ! in1_p)
3635    {
3636      /* If they don't overlap, the result is the first range.  If they are
3637	 equal, the result is false.  If the second range is a subset of the
3638	 first, and the ranges begin at the same place, we go from just after
3639	 the end of the first range to the end of the second.  If the second
3640	 range is not a subset of the first, or if it is a subset and both
3641	 ranges end at the same place, the range starts at the start of the
3642	 first range and ends just before the second range.
3643	 Otherwise, we can't describe this as a single range.  */
3644      if (no_overlap)
3645	in_p = 1, low = low0, high = high0;
3646      else if (lowequal && highequal)
3647	in_p = 0, low = high = 0;
3648      else if (subset && lowequal)
3649	{
3650	  in_p = 1, high = high0;
3651	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3652			     integer_one_node, 0);
3653	}
3654      else if (! subset || highequal)
3655	{
3656	  in_p = 1, low = low0;
3657	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3658			      integer_one_node, 0);
3659	}
3660      else
3661	return 0;
3662    }
3663
3664  else if (! in0_p && in1_p)
3665    {
3666      /* If they don't overlap, the result is the second range.  If the second
3667	 is a subset of the first, the result is false.  Otherwise,
3668	 the range starts just after the first range and ends at the
3669	 end of the second.  */
3670      if (no_overlap)
3671	in_p = 1, low = low1, high = high1;
3672      else if (subset || highequal)
3673	in_p = 0, low = high = 0;
3674      else
3675	{
3676	  in_p = 1, high = high1;
3677	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3678			     integer_one_node, 0);
3679	}
3680    }
3681
3682  else
3683    {
3684      /* The case where we are excluding both ranges.  Here the complex case
3685	 is if they don't overlap.  In that case, the only time we have a
3686	 range is if they are adjacent.  If the second is a subset of the
3687	 first, the result is the first.  Otherwise, the range to exclude
3688	 starts at the beginning of the first range and ends at the end of the
3689	 second.  */
3690      if (no_overlap)
3691	{
3692	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3693					 range_binop (PLUS_EXPR, NULL_TREE,
3694						      high0, 1,
3695						      integer_one_node, 1),
3696					 1, low1, 0)))
3697	    in_p = 0, low = low0, high = high1;
3698	  else
3699	    return 0;
3700	}
3701      else if (subset)
3702	in_p = 0, low = low0, high = high0;
3703      else
3704	in_p = 0, low = low0, high = high1;
3705    }
3706
3707  *pin_p = in_p, *plow = low, *phigh = high;
3708  return 1;
3709}
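
/* Illustrative sketch (a hypothetical example, not part of the GCC
   sources): for the "in0_p && in1_p" case of merge_ranges above, two
   included, overlapping ranges merge to the span from the later start
   to the earlier end; e.g. +[0, 9] intersected with +[5, 20] is
   +[5, 9].  */

static int
example_merge_included_ranges (int x)
{
  int conjunction = (x >= 0 && x <= 9) && (x >= 5 && x <= 20);
  int merged = x >= 5 && x <= 9;
  return conjunction == merged;		/* 1 for all x */
}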
3710
3711#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3712#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3713#endif
3714
3715/* EXP is some logical combination of boolean tests.  See if we can
3716   merge it into some range test.  Return the new tree if so.  */
3717
3718static tree
3719fold_range_test (tree exp)
3720{
3721  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3722	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
3723  int in0_p, in1_p, in_p;
3724  tree low0, low1, low, high0, high1, high;
3725  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3726  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3727  tree tem;
3728
3729  /* If this is an OR operation, invert both sides; we will invert
3730     again at the end.  */
3731  if (or_op)
3732    in0_p = ! in0_p, in1_p = ! in1_p;
3733
3734  /* If both expressions are the same, if we can merge the ranges, and we
3735     can build the range test, return it or it inverted.  If one of the
3736     ranges is always true or always false, consider it to be the same
3737     expression as the other.  */
3738  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3739      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3740		       in1_p, low1, high1)
3741      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3742					 lhs != 0 ? lhs
3743					 : rhs != 0 ? rhs : integer_zero_node,
3744					 in_p, low, high))))
3745    return or_op ? invert_truthvalue (tem) : tem;
3746
3747  /* On machines where branches are expensive, if this is a
3748     short-circuited branch and the underlying object on both sides
3749     is the same, make a non-short-circuit operation.  */
3750  else if (RANGE_TEST_NON_SHORT_CIRCUIT
3751	   && lhs != 0 && rhs != 0
3752	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3753	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3754	   && operand_equal_p (lhs, rhs, 0))
3755    {
3756      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
3757	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3758	 which cases we can't do this.  */
3759      if (simple_operand_p (lhs))
3760	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3761		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3762		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3763		      TREE_OPERAND (exp, 1));
3764
3765      else if ((*lang_hooks.decls.global_bindings_p) () == 0
3766	       && ! CONTAINS_PLACEHOLDER_P (lhs))
3767	{
3768	  tree common = save_expr (lhs);
3769
3770	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3771					     or_op ? ! in0_p : in0_p,
3772					     low0, high0))
3773	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3774						 or_op ? ! in1_p : in1_p,
3775						 low1, high1))))
3776	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3777			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3778			  TREE_TYPE (exp), lhs, rhs);
3779	}
3780    }
3781
3782  return 0;
3783}
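
/* Illustrative sketch (a hypothetical example, not part of the GCC
   sources): when RANGE_TEST_NON_SHORT_CIRCUIT holds and both operands
   of the short-circuit operator test the same simple object,
   fold_range_test above trades the conditional branch of "&&" for a
   straight-line bitwise evaluation.  Both comparisons yield 0 or 1, so
   "&" computes the same value as "&&"; the rewrite is safe because the
   operand is simple, so evaluating both sides cannot trap and has no
   side effects.  */

static int
example_non_short_circuit (int a)
{
  int short_circuit = a > 0 && a < 100;	   /* may branch between tests */
  int straight_line = (a > 0) & (a < 100); /* evaluates both, branch-free */
  return short_circuit == straight_line;   /* always 1 */
}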
3784
3785/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3786   bit value.  Arrange things so the extra bits will be set to zero if and
3787   only if C is signed-extended to its full width.  If MASK is nonzero,
3788   only if C is sign-extended to its full width.  If MASK is nonzero,
3789
3790static tree
3791unextend (tree c, int p, int unsignedp, tree mask)
3792{
3793  tree type = TREE_TYPE (c);
3794  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3795  tree temp;
3796
3797  if (p == modesize || unsignedp)
3798    return c;
3799
3800  /* We work by getting just the sign bit into the low-order bit, then
3801     into the high-order bit, then sign-extend.  We then XOR that value
3802     with C.  */
3803  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3804  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3805
3806  /* We must use a signed type in order to get an arithmetic right shift.
3807     However, we must also avoid introducing accidental overflows, so that
3808     a subsequent call to integer_zerop will work.  Hence we must
3809     do the type conversion here.  At this point, the constant is either
3810     zero or one, and the conversion to a signed type can never overflow.
3811     We could get an overflow if this conversion is done anywhere else.  */
3812  if (TREE_UNSIGNED (type))
3813    temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3814
3815  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3816  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3817  if (mask != 0)
3818    temp = const_binop (BIT_AND_EXPR, temp,
3819			fold_convert (TREE_TYPE (c), mask), 0);
3820  /* If necessary, convert the type back to match the type of C.  */
3821  if (TREE_UNSIGNED (type))
3822    temp = fold_convert (type, temp);
3823
3824  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3825}
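
/* Illustrative sketch (a hypothetical example, not part of the GCC
   sources): the shift sequence in unextend above isolates the sign bit
   of the P-bit field and smears it across the bits above the field.
   The helper below replays it for a P-bit field kept in a plain int,
   with 0 < P < bits-in-int, assuming two's complement and an
   arithmetic right shift of signed values (which unextend guarantees
   by converting to a signed type).  */

static int
example_unextend (int c, int p)
{
  int modesize = (int) sizeof (int) * 8;
  unsigned int sign = ((unsigned int) c >> (p - 1)) & 1;
					/* sign bit of the P-bit field */
  int temp = (int) (sign << (modesize - 1));
					/* moved to the high-order bit */
  temp >>= modesize - p - 1;		/* arithmetic shift copies it over
					   every bit above the field */
  return c ^ temp;			/* extra bits now zero iff C was the
					   sign-extension of its low P bits */
}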
3826
3827/* Find ways of folding logical expressions of LHS and RHS:
3828   Try to merge two comparisons to the same innermost item.
3829   Look for range tests like "ch >= '0' && ch <= '9'".
3830   Look for combinations of simple terms on machines with expensive branches
3831   and evaluate the RHS unconditionally.
3832
3833   For example, if we have p->a == 2 && p->b == 4 and we can make an
3834   object large enough to span both A and B, we can do this with a comparison
3835   against the object ANDed with a mask.
3836
3837   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3838   operations to do this with one comparison.
3839
3840   We check for both normal comparisons and the BIT_AND_EXPRs made by this
3841   function and the one above.
3842
3843   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
3844   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3845
3846   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3847   two operands.
3848
3849   We return the simplified tree or 0 if no optimization is possible.  */
3850
3851static tree
3852fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3853{
3854  /* If this is the "or" of two comparisons, we can do something if
3855     the comparisons are NE_EXPR.  If this is the "and", we can do something
3856     if the comparisons are EQ_EXPR.  I.e.,
3857	(a->b == 2 && a->c == 4) can become (a->new == NEW).
3858
3859     WANTED_CODE is this operation code.  For single bit fields, we can
3860     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3861     comparison for one-bit fields.  */
3862
3863  enum tree_code wanted_code;
3864  enum tree_code lcode, rcode;
3865  tree ll_arg, lr_arg, rl_arg, rr_arg;
3866  tree ll_inner, lr_inner, rl_inner, rr_inner;
3867  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3868  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3869  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3870  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3871  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3872  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3873  enum machine_mode lnmode, rnmode;
3874  tree ll_mask, lr_mask, rl_mask, rr_mask;
3875  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3876  tree l_const, r_const;
3877  tree lntype, rntype, result;
3878  int first_bit, end_bit;
3879  int volatilep;
3880
3881  /* Start by getting the comparison codes.  Fail if anything is volatile.
3882     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3883     it were surrounded with a NE_EXPR.  */
3884
3885  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3886    return 0;
3887
3888  lcode = TREE_CODE (lhs);
3889  rcode = TREE_CODE (rhs);
3890
3891  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3892    lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3893
3894  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3895    rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3896
3897  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3898    return 0;
3899
3900  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3901	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3902
3903  ll_arg = TREE_OPERAND (lhs, 0);
3904  lr_arg = TREE_OPERAND (lhs, 1);
3905  rl_arg = TREE_OPERAND (rhs, 0);
3906  rr_arg = TREE_OPERAND (rhs, 1);
3907
3908  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
3909  if (simple_operand_p (ll_arg)
3910      && simple_operand_p (lr_arg)
3911      && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3912    {
3913      int compcode;
3914
3915      if (operand_equal_p (ll_arg, rl_arg, 0)
3916          && operand_equal_p (lr_arg, rr_arg, 0))
3917        {
3918          int lcompcode, rcompcode;
3919
3920          lcompcode = comparison_to_compcode (lcode);
3921          rcompcode = comparison_to_compcode (rcode);
3922          compcode = (code == TRUTH_AND_EXPR)
3923                     ? lcompcode & rcompcode
3924                     : lcompcode | rcompcode;
3925        }
3926      else if (operand_equal_p (ll_arg, rr_arg, 0)
3927               && operand_equal_p (lr_arg, rl_arg, 0))
3928        {
3929          int lcompcode, rcompcode;
3930
3931          rcode = swap_tree_comparison (rcode);
3932          lcompcode = comparison_to_compcode (lcode);
3933          rcompcode = comparison_to_compcode (rcode);
3934          compcode = (code == TRUTH_AND_EXPR)
3935                     ? lcompcode & rcompcode
3936                     : lcompcode | rcompcode;
3937        }
3938      else
3939	compcode = -1;
3940
3941      if (compcode == COMPCODE_TRUE)
3942	return fold_convert (truth_type, integer_one_node);
3943      else if (compcode == COMPCODE_FALSE)
3944	return fold_convert (truth_type, integer_zero_node);
3945      else if (compcode != -1)
3946	return build (compcode_to_comparison (compcode),
3947		      truth_type, ll_arg, lr_arg);
3948    }
3949
3950  /* If the RHS can be evaluated unconditionally and its operands are
3951     simple, it wins to evaluate the RHS unconditionally on machines
3952     with expensive branches.  In this case, this isn't a comparison
3953     that can be merged.  Avoid doing this if the RHS is a floating-point
3954     comparison since those can trap.  */
3955
3956  if (BRANCH_COST >= 2
3957      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3958      && simple_operand_p (rl_arg)
3959      && simple_operand_p (rr_arg))
3960    {
3961      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
3962      if (code == TRUTH_OR_EXPR
3963	  && lcode == NE_EXPR && integer_zerop (lr_arg)
3964	  && rcode == NE_EXPR && integer_zerop (rr_arg)
3965	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3966	return build (NE_EXPR, truth_type,
3967		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3968			     ll_arg, rl_arg),
3969		      integer_zero_node);
3970
3971      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
3972      if (code == TRUTH_AND_EXPR
3973	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
3974	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
3975	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3976	return build (EQ_EXPR, truth_type,
3977		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3978			     ll_arg, rl_arg),
3979		      integer_zero_node);
3980
3981      return build (code, truth_type, lhs, rhs);
3982    }
3983
3984  /* See if the comparisons can be merged.  Then get all the parameters for
3985     each side.  */
3986
3987  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3988      || (rcode != EQ_EXPR && rcode != NE_EXPR))
3989    return 0;
3990
3991  volatilep = 0;
3992  ll_inner = decode_field_reference (ll_arg,
3993				     &ll_bitsize, &ll_bitpos, &ll_mode,
3994				     &ll_unsignedp, &volatilep, &ll_mask,
3995				     &ll_and_mask);
3996  lr_inner = decode_field_reference (lr_arg,
3997				     &lr_bitsize, &lr_bitpos, &lr_mode,
3998				     &lr_unsignedp, &volatilep, &lr_mask,
3999				     &lr_and_mask);
4000  rl_inner = decode_field_reference (rl_arg,
4001				     &rl_bitsize, &rl_bitpos, &rl_mode,
4002				     &rl_unsignedp, &volatilep, &rl_mask,
4003				     &rl_and_mask);
4004  rr_inner = decode_field_reference (rr_arg,
4005				     &rr_bitsize, &rr_bitpos, &rr_mode,
4006				     &rr_unsignedp, &volatilep, &rr_mask,
4007				     &rr_and_mask);
4008
4009  /* The inner operation on the lhs of each comparison must be the
4010     same if we are to be able to do anything.
4011     Then see if we have constants.  If not, the same must be true for
4012     the rhs's.  */
4013  if (volatilep || ll_inner == 0 || rl_inner == 0
4014      || ! operand_equal_p (ll_inner, rl_inner, 0))
4015    return 0;
4016
4017  if (TREE_CODE (lr_arg) == INTEGER_CST
4018      && TREE_CODE (rr_arg) == INTEGER_CST)
4019    l_const = lr_arg, r_const = rr_arg;
4020  else if (lr_inner == 0 || rr_inner == 0
4021	   || ! operand_equal_p (lr_inner, rr_inner, 0))
4022    return 0;
4023  else
4024    l_const = r_const = 0;
4025
4026  /* If either comparison code is not correct for our logical operation,
4027     fail.  However, we can convert a one-bit comparison against zero into
4028     the opposite comparison against that bit being set in the field.  */
4029
4030  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4031  if (lcode != wanted_code)
4032    {
4033      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4034	{
4035	  /* Make the left operand unsigned, since we are only interested
4036	     in the value of one bit.  Otherwise we are doing the wrong
4037	     thing below.  */
4038	  ll_unsignedp = 1;
4039	  l_const = ll_mask;
4040	}
4041      else
4042	return 0;
4043    }
4044
4045  /* This is analogous to the code for l_const above.  */
4046  if (rcode != wanted_code)
4047    {
4048      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4049	{
4050	  rl_unsignedp = 1;
4051	  r_const = rl_mask;
4052	}
4053      else
4054	return 0;
4055    }
4056
4057  /* After this point all optimizations will generate bit-field
4058     references, which we might not want.  */
4059  if (! (*lang_hooks.can_use_bit_fields_p) ())
4060    return 0;
4061
4062  /* See if we can find a mode that contains both fields being compared on
4063     the left.  If we can't, fail.  Otherwise, update all constants and masks
4064     to be relative to a field of that size.  */
4065  first_bit = MIN (ll_bitpos, rl_bitpos);
4066  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4067  lnmode = get_best_mode (end_bit - first_bit, first_bit,
4068			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4069			  volatilep);
4070  if (lnmode == VOIDmode)
4071    return 0;
4072
4073  lnbitsize = GET_MODE_BITSIZE (lnmode);
4074  lnbitpos = first_bit & ~ (lnbitsize - 1);
4075  lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4076  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4077
4078  if (BYTES_BIG_ENDIAN)
4079    {
4080      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4081      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4082    }
4083
4084  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4085			 size_int (xll_bitpos), 0);
4086  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4087			 size_int (xrl_bitpos), 0);
4088
4089  if (l_const)
4090    {
4091      l_const = fold_convert (lntype, l_const);
4092      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4093      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4094      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4095					fold (build1 (BIT_NOT_EXPR,
4096						      lntype, ll_mask)),
4097					0)))
4098	{
4099	  warning ("comparison is always %d", wanted_code == NE_EXPR);
4100
4101	  return fold_convert (truth_type,
4102			       wanted_code == NE_EXPR
4103			       ? integer_one_node : integer_zero_node);
4104	}
4105    }
4106  if (r_const)
4107    {
4108      r_const = fold_convert (lntype, r_const);
4109      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4110      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4111      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4112					fold (build1 (BIT_NOT_EXPR,
4113						      lntype, rl_mask)),
4114					0)))
4115	{
4116	  warning ("comparison is always %d", wanted_code == NE_EXPR);
4117
4118	  return fold_convert (truth_type,
4119			       wanted_code == NE_EXPR
4120			       ? integer_one_node : integer_zero_node);
4121	}
4122    }
4123
4124  /* If the right sides are not constant, do the same for them.  Also,
4125     disallow this optimization if a size or signedness mismatch occurs
4126     between the left and right sides.  */
4127  if (l_const == 0)
4128    {
4129      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4130	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4131	  /* Make sure the two fields on the right
4132	     correspond to the left without being swapped.  */
4133	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4134	return 0;
4135
4136      first_bit = MIN (lr_bitpos, rr_bitpos);
4137      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4138      rnmode = get_best_mode (end_bit - first_bit, first_bit,
4139			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4140			      volatilep);
4141      if (rnmode == VOIDmode)
4142	return 0;
4143
4144      rnbitsize = GET_MODE_BITSIZE (rnmode);
4145      rnbitpos = first_bit & ~ (rnbitsize - 1);
4146      rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4147      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4148
4149      if (BYTES_BIG_ENDIAN)
4150	{
4151	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4152	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4153	}
4154
4155      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4156			     size_int (xlr_bitpos), 0);
4157      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4158			     size_int (xrr_bitpos), 0);
4159
4160      /* Make a mask that corresponds to both fields being compared.
4161	 Do this for both items being compared.  If the operands are the
4162	 same size and the bits being compared are in the same position
4163	 then we can do this by masking both and comparing the masked
4164	 results.  */
4165      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4166      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4167      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4168	{
4169	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4170				    ll_unsignedp || rl_unsignedp);
4171	  if (! all_ones_mask_p (ll_mask, lnbitsize))
4172	    lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4173
4174	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4175				    lr_unsignedp || rr_unsignedp);
4176	  if (! all_ones_mask_p (lr_mask, rnbitsize))
4177	    rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4178
4179	  return build (wanted_code, truth_type, lhs, rhs);
4180	}
4181
4182      /* There is still another way we can do something:  If both pairs of
4183	 fields being compared are adjacent, we may be able to make a wider
4184	 field containing them both.
4185
4186	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
4187	 the mask must be shifted to account for the shift done by
4188	 make_bit_field_ref.  */
4189      if ((ll_bitsize + ll_bitpos == rl_bitpos
4190	   && lr_bitsize + lr_bitpos == rr_bitpos)
4191	  || (ll_bitpos == rl_bitpos + rl_bitsize
4192	      && lr_bitpos == rr_bitpos + rr_bitsize))
4193	{
4194	  tree type;
4195
4196	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4197				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4198	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4199				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4200
4201	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4202				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4203	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4204				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4205
4206	  /* Convert to the smaller type before masking out unwanted bits.  */
4207	  type = lntype;
4208	  if (lntype != rntype)
4209	    {
4210	      if (lnbitsize > rnbitsize)
4211		{
4212		  lhs = fold_convert (rntype, lhs);
4213		  ll_mask = fold_convert (rntype, ll_mask);
4214		  type = rntype;
4215		}
4216	      else if (lnbitsize < rnbitsize)
4217		{
4218		  rhs = fold_convert (lntype, rhs);
4219		  lr_mask = fold_convert (lntype, lr_mask);
4220		  type = lntype;
4221		}
4222	    }
4223
4224	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4225	    lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4226
4227	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4228	    rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4229
4230	  return build (wanted_code, truth_type, lhs, rhs);
4231	}
4232
4233      return 0;
4234    }
4235
4236  /* Handle the case of comparisons with constants.  If there is something in
4237     common between the masks, those bits of the constants must be the same.
4238     If not, the condition is always false.  Test for this to avoid generating
4239     incorrect code below.  */
4240  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4241  if (! integer_zerop (result)
4242      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4243			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4244    {
4245      if (wanted_code == NE_EXPR)
4246	{
4247	  warning ("`or' of unmatched not-equal tests is always 1");
4248	  return fold_convert (truth_type, integer_one_node);
4249	}
4250      else
4251	{
4252	  warning ("`and' of mutually exclusive equal-tests is always 0");
4253	  return fold_convert (truth_type, integer_zero_node);
4254	}
4255    }
4256
4257  /* Construct the expression we will return.  First get the component
4258     reference we will make.  Unless the mask is all ones across the width of
4259     that field, perform the mask operation.  Then compare with the
4260     merged constant.  */
4261  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4262			       ll_unsignedp || rl_unsignedp);
4263
4264  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4265  if (! all_ones_mask_p (ll_mask, lnbitsize))
4266    result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4267
4268  return build (wanted_code, truth_type, result,
4269		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4270}
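
/* Illustrative sketch (a hypothetical example, not part of the GCC
   sources): two of the cheap-branch rewrites in fold_truthop above
   rest on the identities checked here.  They require both operands to
   be free of side effects, which is why fold_truthop insists on simple
   operands first.  */

static int
example_truthop_identities (unsigned int a, unsigned int b)
{
  int or_case = ((a != 0) || (b != 0)) == ((a | b) != 0);
  int and_case = ((a == 0) && (b == 0)) == ((a | b) == 0);
  return or_case && and_case;		/* always 1 */
}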
4271
4272/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4273   constant.  */
4274
4275static tree
4276optimize_minmax_comparison (tree t)
4277{
4278  tree type = TREE_TYPE (t);
4279  tree arg0 = TREE_OPERAND (t, 0);
4280  enum tree_code op_code;
4281  tree comp_const = TREE_OPERAND (t, 1);
4282  tree minmax_const;
4283  int consts_equal, consts_lt;
4284  tree inner;
4285
4286  STRIP_SIGN_NOPS (arg0);
4287
4288  op_code = TREE_CODE (arg0);
4289  minmax_const = TREE_OPERAND (arg0, 1);
4290  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4291  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4292  inner = TREE_OPERAND (arg0, 0);
4293
4294  /* If something does not permit us to optimize, return the original tree.  */
4295  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4296      || TREE_CODE (comp_const) != INTEGER_CST
4297      || TREE_CONSTANT_OVERFLOW (comp_const)
4298      || TREE_CODE (minmax_const) != INTEGER_CST
4299      || TREE_CONSTANT_OVERFLOW (minmax_const))
4300    return t;
4301
4302  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
4303     and GT_EXPR, doing the rest with recursive calls using logical
4304     simplifications.  */
4305  switch (TREE_CODE (t))
4306    {
4307    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
4308      return
4309	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4310
4311    case GE_EXPR:
4312      return
4313	fold (build (TRUTH_ORIF_EXPR, type,
4314		     optimize_minmax_comparison
4315		     (build (EQ_EXPR, type, arg0, comp_const)),
4316		     optimize_minmax_comparison
4317		     (build (GT_EXPR, type, arg0, comp_const))));
4318
4319    case EQ_EXPR:
4320      if (op_code == MAX_EXPR && consts_equal)
4321	/* MAX (X, 0) == 0  ->  X <= 0  */
4322	return fold (build (LE_EXPR, type, inner, comp_const));
4323
4324      else if (op_code == MAX_EXPR && consts_lt)
4325	/* MAX (X, 0) == 5  ->  X == 5   */
4326	return fold (build (EQ_EXPR, type, inner, comp_const));
4327
4328      else if (op_code == MAX_EXPR)
4329	/* MAX (X, 0) == -1  ->  false  */
4330	return omit_one_operand (type, integer_zero_node, inner);
4331
4332      else if (consts_equal)
4333	/* MIN (X, 0) == 0  ->  X >= 0  */
4334	return fold (build (GE_EXPR, type, inner, comp_const));
4335
4336      else if (consts_lt)
4337	/* MIN (X, 0) == 5  ->  false  */
4338	return omit_one_operand (type, integer_zero_node, inner);
4339
4340      else
4341	/* MIN (X, 0) == -1  ->  X == -1  */
4342	return fold (build (EQ_EXPR, type, inner, comp_const));
4343
4344    case GT_EXPR:
4345      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4346	/* MAX (X, 0) > 0  ->  X > 0
4347	   MAX (X, 0) > 5  ->  X > 5  */
4348	return fold (build (GT_EXPR, type, inner, comp_const));
4349
4350      else if (op_code == MAX_EXPR)
4351	/* MAX (X, 0) > -1  ->  true  */
4352	return omit_one_operand (type, integer_one_node, inner);
4353
4354      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4355	/* MIN (X, 0) > 0  ->  false
4356	   MIN (X, 0) > 5  ->  false  */
4357	return omit_one_operand (type, integer_zero_node, inner);
4358
4359      else
4360	/* MIN (X, 0) > -1  ->  X > -1  */
4361	return fold (build (GT_EXPR, type, inner, comp_const));
4362
4363    default:
4364      return t;
4365    }
4366}
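
/* Illustrative sketch (a hypothetical example, not part of the GCC
   sources): a spot check of the EQ_EXPR rules in
   optimize_minmax_comparison above, for the "consts_equal" case where
   the constant inside MAX matches the comparison constant.  */

static int
example_minmax_fold (int x)
{
  int max_x_0 = x > 0 ? x : 0;		/* MAX (X, 0) */
  int original = max_x_0 == 0;		/* MAX (X, 0) == 0 */
  int folded = x <= 0;			/*  ->  X <= 0 */
  return original == folded;		/* 1 for all x */
}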
4367
4368/* T is an integer expression that is being multiplied or divided by, or
4369   taken modulo, a constant C (CODE says which operation and what kind of
4370   divide or modulus).  See if we can eliminate that operation by folding it with
4371   other operations already in T.  WIDE_TYPE, if non-null, is a type that
4372   should be used for the computation if wider than our type.
4373
4374   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4375   (X * 2) + (Y * 4).  We must, however, be assured that either the original
4376   expression would not overflow or that overflow is undefined for the type
4377   in the language in question.
4378
4379   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4380   the machine has a multiply-accumulate insn or that this is part of an
4381   addressing calculation.
4382
4383   If we return a non-null expression, it is an equivalent form of the
4384   original computation, but need not be in the original type.  */
4385
4386static tree
4387extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4388{
4389  /* To avoid exponential search depth, refuse to allow recursion past
4390     three levels.  Beyond that (1) it's highly unlikely that we'll find
4391     something interesting and (2) we've probably processed it before
4392     when we built the inner expression.  */
4393
4394  static int depth;
4395  tree ret;
4396
4397  if (depth > 3)
4398    return NULL;
4399
4400  depth++;
4401  ret = extract_muldiv_1 (t, c, code, wide_type);
4402  depth--;
4403
4404  return ret;
4405}
4406
4407static tree
4408extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4409{
4410  tree type = TREE_TYPE (t);
4411  enum tree_code tcode = TREE_CODE (t);
4412  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4413				   > GET_MODE_SIZE (TYPE_MODE (type)))
4414		? wide_type : type);
4415  tree t1, t2;
4416  int same_p = tcode == code;
4417  tree op0 = NULL_TREE, op1 = NULL_TREE;
4418
4419  /* Don't deal with constants of zero here; they confuse the code below.  */
4420  if (integer_zerop (c))
4421    return NULL_TREE;
4422
4423  if (TREE_CODE_CLASS (tcode) == '1')
4424    op0 = TREE_OPERAND (t, 0);
4425
4426  if (TREE_CODE_CLASS (tcode) == '2')
4427    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4428
4429  /* Note that we need not handle conditional operations here since fold
4430     already handles those cases.  So just do arithmetic here.  */
4431  switch (tcode)
4432    {
4433    case INTEGER_CST:
4434      /* For a constant, we can always simplify if we are a multiply
4435	 or (for divide and modulus) if it is a multiple of our constant.  */
4436      if (code == MULT_EXPR
4437	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4438	return const_binop (code, fold_convert (ctype, t),
4439			    fold_convert (ctype, c), 0);
4440      break;
4441
4442    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
4443      /* If op0 is an expression ...  */
4444      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4445	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4446	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4447	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4448	  /* ... and is unsigned, and its type is smaller than ctype,
4449	     then we cannot pass through this widening.  */
4450	  && ((TREE_UNSIGNED (TREE_TYPE (op0))
4451	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4452		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4453	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
4454	           > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4455	      /* ... or its type is larger than ctype,
4456		 then we cannot pass through this truncation.  */
4457	      || (GET_MODE_SIZE (TYPE_MODE (ctype))
4458		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4459	      /* ... or signedness changes for division or modulus,
4460		 then we cannot pass through this conversion.  */
4461	      || (code != MULT_EXPR
4462		  && (TREE_UNSIGNED (ctype)
4463		      != TREE_UNSIGNED (TREE_TYPE (op0))))))
4464	break;
4465
4466      /* Pass the constant down and see if we can make a simplification.  If
4467	 we can, replace this expression with the inner simplification for
4468	 possible later conversion to our or some other type.  */
4469      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4470	  && TREE_CODE (t2) == INTEGER_CST
4471	  && ! TREE_CONSTANT_OVERFLOW (t2)
4472	  && (0 != (t1 = extract_muldiv (op0, t2, code,
4473					 code == MULT_EXPR
4474					 ? ctype : NULL_TREE))))
4475	return t1;
4476      break;
4477
4478    case NEGATE_EXPR:  case ABS_EXPR:
4479      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4480	return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4481      break;
4482
4483    case MIN_EXPR:  case MAX_EXPR:
4484      /* If widening the type changes the signedness, then we can't perform
4485	 this optimization as that changes the result.  */
4486      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4487	break;
4488
4489      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
4490      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4491	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4492	{
4493	  if (tree_int_cst_sgn (c) < 0)
4494	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4495
4496	  return fold (build (tcode, ctype, fold_convert (ctype, t1),
4497			      fold_convert (ctype, t2)));
4498	}
4499      break;
4500
4501    case WITH_RECORD_EXPR:
4502      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4503	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4504		      TREE_OPERAND (t, 1));
4505      break;
4506
4507    case LSHIFT_EXPR:  case RSHIFT_EXPR:
4508      /* If the second operand is constant, this is a multiplication
4509	 or floor division by a power of two, so we can treat it that
4510	 way unless the multiplier or divisor overflows.  */
4511      if (TREE_CODE (op1) == INTEGER_CST
4512	  /* const_binop may not detect overflow correctly,
4513	     so check for it explicitly here.  */
4514	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4515	  && TREE_INT_CST_HIGH (op1) == 0
4516	  && 0 != (t1 = fold_convert (ctype,
4517				      const_binop (LSHIFT_EXPR,
4518						   size_one_node,
4519						   op1, 0)))
4520	  && ! TREE_OVERFLOW (t1))
4521	return extract_muldiv (build (tcode == LSHIFT_EXPR
4522				      ? MULT_EXPR : FLOOR_DIV_EXPR,
4523				      ctype, fold_convert (ctype, op0), t1),
4524			       c, code, wide_type);
4525      break;
4526
4527    case PLUS_EXPR:  case MINUS_EXPR:
4528      /* See if we can eliminate the operation on both sides.  If we can, we
4529	 can return a new PLUS or MINUS.  If we can't, the only remaining
4530	 cases where we can do anything are if the second operand is a
4531	 constant.  */
4532      t1 = extract_muldiv (op0, c, code, wide_type);
4533      t2 = extract_muldiv (op1, c, code, wide_type);
4534      if (t1 != 0 && t2 != 0
4535	  && (code == MULT_EXPR
4536	      /* If not multiplication, we can only do this if both operands
4537		 are divisible by c.  */
4538	      || (multiple_of_p (ctype, op0, c)
4539	          && multiple_of_p (ctype, op1, c))))
4540	return fold (build (tcode, ctype, fold_convert (ctype, t1),
4541			    fold_convert (ctype, t2)));
4542
4543      /* If this was a subtraction, negate OP1 and set it to be an addition.
4544	 This simplifies the logic below.  */
4545      if (tcode == MINUS_EXPR)
4546	tcode = PLUS_EXPR, op1 = negate_expr (op1);
4547
4548      if (TREE_CODE (op1) != INTEGER_CST)
4549	break;
4550
4551      /* If either OP1 or C are negative, this optimization is not safe for
4552	 some of the division and remainder types while for others we need
4553	 to change the code.  */
4554      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4555	{
4556	  if (code == CEIL_DIV_EXPR)
4557	    code = FLOOR_DIV_EXPR;
4558	  else if (code == FLOOR_DIV_EXPR)
4559	    code = CEIL_DIV_EXPR;
4560	  else if (code != MULT_EXPR
4561		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4562	    break;
4563	}
4564
4565      /* If it's a multiply or a division/modulus operation of a multiple
4566         of our constant, do the operation and verify it doesn't overflow.  */
4567      if (code == MULT_EXPR
4568	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4569	{
4570	  op1 = const_binop (code, fold_convert (ctype, op1),
4571			     fold_convert (ctype, c), 0);
4572	  /* We allow the constant to overflow with wrapping semantics.  */
4573	  if (op1 == 0
4574	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4575	    break;
4576	}
4577      else
4578	break;
4579
4580      /* If we have an unsigned type that is not a sizetype, we cannot widen
4581	 the operation since it will change the result if the original
4582	 computation overflowed.  */
4583      if (TREE_UNSIGNED (ctype)
4584	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4585	  && ctype != type)
4586	break;
4587
4588      /* If we were able to eliminate our operation from the first side,
4589	 apply our operation to the second side and reform the PLUS.  */
4590      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4591	return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4592
4593      /* The last case is if we are a multiply.  In that case, we can
4594	 apply the distributive law to commute the multiply and addition
4595	 if the multiplication of the constants doesn't overflow.  */
4596      if (code == MULT_EXPR)
4597	return fold (build (tcode, ctype,
4598			    fold (build (code, ctype,
4599					 fold_convert (ctype, op0),
4600					 fold_convert (ctype, c))),
4601			    op1));
4602
4603      break;
4604
4605    case MULT_EXPR:
4606      /* We have a special case here if we are doing something like
4607	 (C * 8) % 4 since we know that's zero.  */
4608      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4609	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4610	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4611	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4612	return omit_one_operand (type, integer_zero_node, op0);
4613
4614      /* ... fall through ...  */
4615
4616    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
4617    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
4618      /* If we can extract our operation from the LHS, do so and return a
4619	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
4620	 do something only if the second operand is a constant.  */
4621      if (same_p
4622	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4623	return fold (build (tcode, ctype, fold_convert (ctype, t1),
4624			    fold_convert (ctype, op1)));
4625      else if (tcode == MULT_EXPR && code == MULT_EXPR
4626	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4627	return fold (build (tcode, ctype, fold_convert (ctype, op0),
4628			    fold_convert (ctype, t1)));
4629      else if (TREE_CODE (op1) != INTEGER_CST)
4630	return 0;
4631
4632      /* If these are the same operation types, we can associate them
4633	 assuming no overflow.  */
4634      if (tcode == code
4635	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4636				     fold_convert (ctype, c), 0))
4637	  && ! TREE_OVERFLOW (t1))
4638	return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4639
4640      /* If these operations "cancel" each other, we have the main
4641	 optimizations of this pass, which occur when either constant is a
4642	 multiple of the other, in which case we replace this with an
4643	 operation of either CODE or TCODE.
4644
4645	 If we have an unsigned type that is not a sizetype, we cannot do
4646	 this since it will change the result if the original computation
4647	 overflowed.  */
4648      if ((! TREE_UNSIGNED (ctype)
4649	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4650	  && ! flag_wrapv
4651	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4652	      || (tcode == MULT_EXPR
4653		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4654		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4655	{
4656	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4657	    return fold (build (tcode, ctype, fold_convert (ctype, op0),
4658				fold_convert (ctype,
4659					      const_binop (TRUNC_DIV_EXPR,
4660							   op1, c, 0))));
4661	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4662	    return fold (build (code, ctype, fold_convert (ctype, op0),
4663				fold_convert (ctype,
4664					      const_binop (TRUNC_DIV_EXPR,
4665							   c, op1, 0))));
4666	}
4667      break;
4668
4669    default:
4670      break;
4671    }
4672
4673  return 0;
4674}
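
/* Illustrative sketch (a hypothetical example, not part of the GCC
   sources): the header comment's example for extract_muldiv, checked
   directly.  Every term of the dividend is a multiple of the divisor,
   so the truncating division loses nothing; the identity holds as long
   as none of the intermediate products overflow (for signed types,
   overflow is undefined anyway, which is what licenses the
   transformation).  */

static int
example_extract_muldiv (int x, int y)
{
  int original = (x * 8 + y * 16) / 4;
  int folded = x * 2 + y * 4;
  return original == folded;		/* 1 absent overflow */
}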
4675
4676/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4677   S, a SAVE_EXPR, return the expression actually being evaluated.   Note
4678   that we may sometimes modify the tree.  */
4679
4680static tree
4681strip_compound_expr (tree t, tree s)
4682{
4683  enum tree_code code = TREE_CODE (t);
4684
4685  /* See if this is the COMPOUND_EXPR we want to eliminate.  */
4686  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4687      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4688    return TREE_OPERAND (t, 1);
4689
4690  /* See if this is a COND_EXPR or a simple arithmetic operator.   We
4691     don't bother handling any other types.  */
4692  else if (code == COND_EXPR)
4693    {
4694      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4695      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4696      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4697    }
4698  else if (TREE_CODE_CLASS (code) == '1')
4699    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4700  else if (TREE_CODE_CLASS (code) == '<'
4701	   || TREE_CODE_CLASS (code) == '2')
4702    {
4703      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4704      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4705    }
4706
4707  return t;
4708}
4709
4710/* Return a node which has the indicated constant VALUE (either 0 or
4711   1), and is of the indicated TYPE.  */
4712
4713static tree
4714constant_boolean_node (int value, tree type)
4715{
4716  if (type == integer_type_node)
4717    return value ? integer_one_node : integer_zero_node;
4718  else if (TREE_CODE (type) == BOOLEAN_TYPE)
4719    return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4720						integer_zero_node);
4721  else
4722    {
4723      tree t = build_int_2 (value, 0);
4724
4725      TREE_TYPE (t) = type;
4726      return t;
4727    }
4728}
4729
4730/* Utility function for the following routine, to see how complex a nesting of
4731   COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
4732   we don't care (to avoid spending too much time on complex expressions).  */
4733
4734static int
4735count_cond (tree expr, int lim)
4736{
4737  int ctrue, cfalse;
4738
4739  if (TREE_CODE (expr) != COND_EXPR)
4740    return 0;
4741  else if (lim <= 0)
4742    return 0;
4743
4744  ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4745  cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4746  return MIN (lim, 1 + ctrue + cfalse);
4747}
4748
4749/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4750   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
4751   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4752   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
4753   COND is the first argument to CODE; otherwise (as in the example
4754   given here), it is the second argument.  TYPE is the type of the
4755   original expression.  */
4756
4757static tree
4758fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4759				     tree cond, tree arg, int cond_first_p)
4760{
4761  tree test, true_value, false_value;
4762  tree lhs = NULL_TREE;
4763  tree rhs = NULL_TREE;
4764  /* In the end, we'll produce a COND_EXPR.  Both arms of the
4765     conditional expression will be binary operations.  The left-hand
4766     side of the expression to be executed if the condition is true
4767     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
4768     of the expression to be executed if the condition is true will be
4769     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
4770     but apply to the expression to be executed if the conditional is
4771     false.  */
4772  tree *true_lhs;
4773  tree *true_rhs;
4774  tree *false_lhs;
4775  tree *false_rhs;
4776  /* These are the codes to use for the left-hand side and right-hand
4777     side of the COND_EXPR.  Normally, they are the same as CODE.  */
4778  enum tree_code lhs_code = code;
4779  enum tree_code rhs_code = code;
4780  /* And these are the types of the expressions.  */
4781  tree lhs_type = type;
4782  tree rhs_type = type;
4783  int save = 0;
4784
4785  if (cond_first_p)
4786    {
4787      true_rhs = false_rhs = &arg;
4788      true_lhs = &true_value;
4789      false_lhs = &false_value;
4790    }
4791  else
4792    {
4793      true_lhs = false_lhs = &arg;
4794      true_rhs = &true_value;
4795      false_rhs = &false_value;
4796    }
4797
4798  if (TREE_CODE (cond) == COND_EXPR)
4799    {
4800      test = TREE_OPERAND (cond, 0);
4801      true_value = TREE_OPERAND (cond, 1);
4802      false_value = TREE_OPERAND (cond, 2);
4803      /* If this operand is a throw-expression, then it does not make
4804	 sense to try to perform a logical or arithmetic operation
4805	 involving it.  Instead of building `a + throw 3' for example,
4806	 we simply build `a, throw 3'.  */
4807      if (VOID_TYPE_P (TREE_TYPE (true_value)))
4808	{
4809	  if (! cond_first_p)
4810	    {
4811	      lhs_code = COMPOUND_EXPR;
4812	      lhs_type = void_type_node;
4813	    }
4814	  else
4815	    lhs = true_value;
4816	}
4817      if (VOID_TYPE_P (TREE_TYPE (false_value)))
4818	{
4819	  if (! cond_first_p)
4820	    {
4821	      rhs_code = COMPOUND_EXPR;
4822	      rhs_type = void_type_node;
4823	    }
4824	  else
4825	    rhs = false_value;
4826	}
4827    }
4828  else
4829    {
4830      tree testtype = TREE_TYPE (cond);
4831      test = cond;
4832      true_value = fold_convert (testtype, integer_one_node);
4833      false_value = fold_convert (testtype, integer_zero_node);
4834    }
4835
4836  /* If ARG is complex we want to make sure we only evaluate it once.  Though
4837     this is only required if it is volatile, it might be more efficient even
4838     if it is not.  However, if we succeed in folding one part to a constant,
4839     we do not need to make this SAVE_EXPR.  Since we do this optimization
4840     primarily to see if we do end up with a constant and this SAVE_EXPR
4841     interferes with later optimizations, suppressing it when we can is
4842     important.
4843
4844     If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4845     do so.  Don't try to see if the result is a constant if an arm is a
4846     COND_EXPR since we get exponential behavior in that case.  */
4847
4848  if (saved_expr_p (arg))
4849    save = 1;
4850  else if (lhs == 0 && rhs == 0
4851	   && !TREE_CONSTANT (arg)
4852	   && (*lang_hooks.decls.global_bindings_p) () == 0
4853	   && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4854	       || TREE_SIDE_EFFECTS (arg)))
4855    {
4856      if (TREE_CODE (true_value) != COND_EXPR)
4857	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4858
4859      if (TREE_CODE (false_value) != COND_EXPR)
4860	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4861
4862      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4863	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
4864	{
4865	  arg = save_expr (arg);
4866	  lhs = rhs = 0;
4867	  save = saved_expr_p (arg);
4868	}
4869    }
4870
4871  if (lhs == 0)
4872    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4873  if (rhs == 0)
4874    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4875
4876  test = fold (build (COND_EXPR, type, test, lhs, rhs));
4877
4878  /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4879     ahead of the COND_EXPR we made.  Otherwise we would have it only
4880     evaluated in one branch, with the other branch using the result
4881     but missing the evaluation code.  Beware that the save_expr call
4882     above might not return a SAVE_EXPR, so testing the TREE_CODE
4883     of ARG is not enough to decide here.  */
4884  if (save)
4885    return build (COMPOUND_EXPR, type,
4886		  fold_convert (void_type_node, arg),
4887		  strip_compound_expr (test, arg));
4888  else
4889    return fold_convert (type, test);
4890}
4891
4892
4893/* Subroutine of fold() that checks for the addition of +/- 0.0.
4894
4895   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4896   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
4897   ADDEND is the same as X.
4898
4899   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4900   and finite.  The problematic cases are when X is zero, and its mode
4901   has signed zeros.  In the case of rounding towards -infinity,
4902   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
4903   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
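/* For example, when signed zeros are honored, `x + 0.0' cannot fold to
   `x', because (-0.0) + 0.0 is +0.0; `x - 0.0' can, unless rounding
   towards -infinity is in effect, where 0.0 - 0.0 is -0.0.  */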
4904
4905static bool
4906fold_real_zero_addition_p (tree type, tree addend, int negate)
4907{
4908  if (!real_zerop (addend))
4909    return false;
4910
4911  /* Don't allow the fold with -fsignaling-nans.  */
4912  if (HONOR_SNANS (TYPE_MODE (type)))
4913    return false;
4914
4915  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
4916  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4917    return true;
4918
4919  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
4920  if (TREE_CODE (addend) == REAL_CST
4921      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4922    negate = !negate;
4923
4924  /* The mode has signed zeros, and we have to honor their sign.
4925     In this situation, there is only one case we can return true for.
4926     X - 0 is the same as X unless rounding towards -infinity is
4927     supported.  */
4928  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4929}
4930
4931/* Subroutine of fold() that checks comparisons of built-in math
4932   functions against real constants.
4933
4934   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4935   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
4936   is the type of the result and ARG0 and ARG1 are the operands of the
4937   comparison.  ARG1 must be a TREE_REAL_CST.
4938
4939   The function returns the constant folded tree if a simplification
4940   can be made, and NULL_TREE otherwise.  */
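/* For example, `sqrt (x) > 2.0' becomes `x > 4.0' (both are false when
   x is negative or NaN), while `sqrt (x) < -1.0' folds to constant
   false, since sqrt never returns a negative value.  */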
4941
4942static tree
4943fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4944		     tree type, tree arg0, tree arg1)
4945{
4946  REAL_VALUE_TYPE c;
4947
4948  if (fcode == BUILT_IN_SQRT
4949      || fcode == BUILT_IN_SQRTF
4950      || fcode == BUILT_IN_SQRTL)
4951    {
4952      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4953      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4954
4955      c = TREE_REAL_CST (arg1);
4956      if (REAL_VALUE_NEGATIVE (c))
4957	{
4958	  /* sqrt(x) == y, < y and <= y are always false if y is negative.  */
4959	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4960	    return omit_one_operand (type,
4961				     fold_convert (type, integer_zero_node),
4962				     arg);
4963
4964	  /* sqrt(x) != y is always true if y is negative; so are > and
4965	     >= if we don't care about NaNs, i.e. negative values of x.  */
4966	  if (code == NE_EXPR || !HONOR_NANS (mode))
4967	    return omit_one_operand (type,
4968				     fold_convert (type, integer_one_node),
4969				     arg);
4970
4971	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
4972	  return fold (build (GE_EXPR, type, arg,
4973			      build_real (TREE_TYPE (arg), dconst0)));
4974	}
4975      else if (code == GT_EXPR || code == GE_EXPR)
4976	{
4977	  REAL_VALUE_TYPE c2;
4978
4979	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4980	  real_convert (&c2, mode, &c2);
4981
4982	  if (REAL_VALUE_ISINF (c2))
4983	    {
4984	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
4985	      if (HONOR_INFINITIES (mode))
4986		return fold (build (EQ_EXPR, type, arg,
4987				    build_real (TREE_TYPE (arg), c2)));
4988
4989	      /* sqrt(x) > y is always false, when y is very large
4990		 and we don't care about infinities.  */
4991	      return omit_one_operand (type,
4992				       fold_convert (type, integer_zero_node),
4993				       arg);
4994	    }
4995
4996	  /* sqrt(x) > c is the same as x > c*c.  */
4997	  return fold (build (code, type, arg,
4998			      build_real (TREE_TYPE (arg), c2)));
4999	}
5000      else if (code == LT_EXPR || code == LE_EXPR)
5001	{
5002	  REAL_VALUE_TYPE c2;
5003
5004	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5005	  real_convert (&c2, mode, &c2);
5006
5007	  if (REAL_VALUE_ISINF (c2))
5008	    {
5009	      /* sqrt(x) < y is always true, when y is a very large
5010		 value and we don't care about NaNs or Infinities.  */
5011	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5012		return omit_one_operand (type,
5013					 fold_convert (type, integer_one_node),
5014					 arg);
5015
5016	      /* sqrt(x) < y is x != +Inf when y is very large and we
5017		 don't care about NaNs.  */
5018	      if (! HONOR_NANS (mode))
5019		return fold (build (NE_EXPR, type, arg,
5020				    build_real (TREE_TYPE (arg), c2)));
5021
5022	      /* sqrt(x) < y is x >= 0 when y is very large and we
5023		 don't care about Infinities.  */
5024	      if (! HONOR_INFINITIES (mode))
5025		return fold (build (GE_EXPR, type, arg,
5026				    build_real (TREE_TYPE (arg), dconst0)));
5027
5028	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
5029	      if ((*lang_hooks.decls.global_bindings_p) () != 0
5030		  || CONTAINS_PLACEHOLDER_P (arg))
5031		return NULL_TREE;
5032
5033	      arg = save_expr (arg);
5034	      return fold (build (TRUTH_ANDIF_EXPR, type,
5035				  fold (build (GE_EXPR, type, arg,
5036					       build_real (TREE_TYPE (arg),
5037							   dconst0))),
5038				  fold (build (NE_EXPR, type, arg,
5039					       build_real (TREE_TYPE (arg),
5040							   c2)))));
5041	    }
5042
5043	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
5044	  if (! HONOR_NANS (mode))
5045	    return fold (build (code, type, arg,
5046				build_real (TREE_TYPE (arg), c2)));
5047
5048	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
5049	  if ((*lang_hooks.decls.global_bindings_p) () == 0
5050	      && ! CONTAINS_PLACEHOLDER_P (arg))
5051	    {
5052	      arg = save_expr (arg);
5053	      return fold (build (TRUTH_ANDIF_EXPR, type,
5054				  fold (build (GE_EXPR, type, arg,
5055					       build_real (TREE_TYPE (arg),
5056							   dconst0))),
5057				  fold (build (code, type, arg,
5058					       build_real (TREE_TYPE (arg),
5059							   c2)))));
5060	    }
5061	}
5062    }
5063
5064  return NULL_TREE;
5065}
5066
5067/* Subroutine of fold() that optimizes comparisons against Infinities,
5068   either +Inf or -Inf.
5069
5070   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5071   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
5072   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
5073
5074   The function returns the constant folded tree if a simplification
5075   can be made, and NULL_TREE otherwise.  */
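/* For example, `x < +Inf' becomes `x <= DBL_MAX' (using the maximum
   value of the mode's precision), and `x >= +Inf' becomes
   `x > DBL_MAX'; for -Inf the sense of the comparison is swapped
   first.  */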
5076
5077static tree
5078fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5079{
5080  enum machine_mode mode;
5081  REAL_VALUE_TYPE max;
5082  tree temp;
5083  bool neg;
5084
5085  mode = TYPE_MODE (TREE_TYPE (arg0));
5086
5087  /* For negative infinity swap the sense of the comparison.  */
5088  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5089  if (neg)
5090    code = swap_tree_comparison (code);
5091
5092  switch (code)
5093    {
5094    case GT_EXPR:
5095      /* x > +Inf is always false, if we ignore sNaNs.  */
5096      if (HONOR_SNANS (mode))
5097        return NULL_TREE;
5098      return omit_one_operand (type,
5099			       fold_convert (type, integer_zero_node),
5100			       arg0);
5101
5102    case LE_EXPR:
5103      /* x <= +Inf is always true, if we don't care about NaNs.  */
5104      if (! HONOR_NANS (mode))
5105	return omit_one_operand (type,
5106				 fold_convert (type, integer_one_node),
5107				 arg0);
5108
5109      /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
5110      if ((*lang_hooks.decls.global_bindings_p) () == 0
5111	  && ! CONTAINS_PLACEHOLDER_P (arg0))
5112	{
5113	  arg0 = save_expr (arg0);
5114	  return fold (build (EQ_EXPR, type, arg0, arg0));
5115	}
5116      break;
5117
5118    case EQ_EXPR:
5119    case GE_EXPR:
5120      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
5121      real_maxval (&max, neg, mode);
5122      return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5123			  arg0, build_real (TREE_TYPE (arg0), max)));
5124
5125    case LT_EXPR:
5126      /* x < +Inf is always equal to x <= DBL_MAX.  */
5127      real_maxval (&max, neg, mode);
5128      return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5129			  arg0, build_real (TREE_TYPE (arg0), max)));
5130
5131    case NE_EXPR:
5132      /* x != +Inf is always equal to !(x > DBL_MAX).  */
5133      real_maxval (&max, neg, mode);
5134      if (! HONOR_NANS (mode))
5135	return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5136			    arg0, build_real (TREE_TYPE (arg0), max)));
5137      temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5138			  arg0, build_real (TREE_TYPE (arg0), max)));
5139      return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5140
5141    default:
5142      break;
5143    }
5144
5145  return NULL_TREE;
5146}
5147
5148/* If CODE with arguments ARG0 and ARG1 represents a single bit
5149   equality/inequality test, then return a simplified form of
5150   the test using shifts and logical operations.  Otherwise return
5151   NULL.  TYPE is the desired result type.  */
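/* For example, `(x & 8) != 0' can be rewritten as `(x >> 3) & 1', and
   `(x & 8) == 0' as `((x >> 3) ^ 1) & 1', so no explicit comparison
   instruction is needed.  */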
5152
5153tree
5154fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5155		      tree result_type)
5156{
5157  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5158     operand 0.  */
5159  if (code == TRUTH_NOT_EXPR)
5160    {
5161      code = TREE_CODE (arg0);
5162      if (code != NE_EXPR && code != EQ_EXPR)
5163	return NULL_TREE;
5164
5165      /* Extract the arguments of the EQ/NE.  */
5166      arg1 = TREE_OPERAND (arg0, 1);
5167      arg0 = TREE_OPERAND (arg0, 0);
5168
5169      /* This requires us to invert the code.  */
5170      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5171    }
5172
5173  /* If this is testing a single bit, we can optimize the test.  */
5174  if ((code == NE_EXPR || code == EQ_EXPR)
5175      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5176      && integer_pow2p (TREE_OPERAND (arg0, 1)))
5177    {
5178      tree inner = TREE_OPERAND (arg0, 0);
5179      tree type = TREE_TYPE (arg0);
5180      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5181      enum machine_mode operand_mode = TYPE_MODE (type);
5182      int ops_unsigned;
5183      tree signed_type, unsigned_type, intermediate_type;
5184      tree arg00;
5185
5186      /* If we have (A & C) != 0 where C is the sign bit of A, convert
5187	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
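      /* For example, with 32-bit ints, (A & 0x80000000) != 0 becomes
	 A < 0, and (A & 0x80000000) == 0 becomes A >= 0.  */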
5188      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5189      if (arg00 != NULL_TREE
5190	  /* This is only a win if casting to a signed type is cheap,
5191	     i.e. when arg00's type is not a partial mode.  */
5192	  && TYPE_PRECISION (TREE_TYPE (arg00))
5193	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5194	{
5195	  tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5196	  return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5197			      fold_convert (stype, arg00),
5198			      fold_convert (stype, integer_zero_node)));
5199	}
5200
5201      /* Otherwise we have (A & C) != 0 where C is a single bit,
5202	 convert that into ((A >> C2) & 1), where C2 = log2(C).
5203	 Similarly for (A & C) == 0.  */
5204
5205      /* If INNER is a right shift by a constant and it plus BITNUM does
5206	 not overflow, adjust BITNUM and INNER.  */
5207      if (TREE_CODE (inner) == RSHIFT_EXPR
5208	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5209	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5210	  && bitnum < TYPE_PRECISION (type)
5211	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5212				   bitnum - TYPE_PRECISION (type)))
5213	{
5214	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5215	  inner = TREE_OPERAND (inner, 0);
5216	}
5217
5218      /* If we are going to be able to omit the AND below, we must do our
5219	 operations as unsigned.  If we must use the AND, we have a choice.
5220	 Normally unsigned is faster, but for some machines signed is.  */
5221#ifdef LOAD_EXTEND_OP
5222      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5223#else
5224      ops_unsigned = 1;
5225#endif
5226
5227      signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5228      unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5229      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5230      inner = fold_convert (intermediate_type, inner);
5231
5232      if (bitnum != 0)
5233	inner = build (RSHIFT_EXPR, intermediate_type,
5234		       inner, size_int (bitnum));
5235
5236      if (code == EQ_EXPR)
5237	inner = build (BIT_XOR_EXPR, intermediate_type,
5238		       inner, integer_one_node);
5239
5240      /* Put the AND last so it can combine with more things.  */
5241      inner = build (BIT_AND_EXPR, intermediate_type,
5242		     inner, integer_one_node);
5243
5244      /* Make sure to return the proper type.  */
5245      inner = fold_convert (result_type, inner);
5246
5247      return inner;
5248    }
5249  return NULL_TREE;
5250}
5251
5252/* Check whether we are allowed to reorder operands arg0 and arg1,
5253   such that the evaluation of arg1 occurs before arg0.  */
5254
5255static bool
5256reorder_operands_p (tree arg0, tree arg1)
5257{
5258  if (! flag_evaluation_order)
5259    return true;
5260  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5261    return true;
5262  return ! TREE_SIDE_EFFECTS (arg0)
5263	 && ! TREE_SIDE_EFFECTS (arg1);
5264}
5265
5266/* Test whether it is preferable to swap two operands, ARG0 and
5267   ARG1, for example because ARG0 is an integer constant and ARG1
5268   isn't.  If REORDER is true, only recommend swapping if we can
5269   evaluate the operands in reverse order.  */
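/* For example, this canonicalizes `2 + x' into `x + 2' for commutative
   codes, so the folding cases below only need to look for a constant
   in the second operand.  */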
5270
5271static bool
5272tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5273{
5274  STRIP_SIGN_NOPS (arg0);
5275  STRIP_SIGN_NOPS (arg1);
5276
5277  if (TREE_CODE (arg1) == INTEGER_CST)
5278    return 0;
5279  if (TREE_CODE (arg0) == INTEGER_CST)
5280    return 1;
5281
5282  if (TREE_CODE (arg1) == REAL_CST)
5283    return 0;
5284  if (TREE_CODE (arg0) == REAL_CST)
5285    return 1;
5286
5287  if (TREE_CODE (arg1) == COMPLEX_CST)
5288    return 0;
5289  if (TREE_CODE (arg0) == COMPLEX_CST)
5290    return 1;
5291
5292  if (TREE_CONSTANT (arg1))
5293    return 0;
5294  if (TREE_CONSTANT (arg0))
5295    return 1;
5296
5297  if (optimize_size)
5298    return 0;
5299
5300  if (reorder && flag_evaluation_order
5301      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5302    return 0;
5303
5304  if (DECL_P (arg1))
5305    return 0;
5306  if (DECL_P (arg0))
5307    return 1;
5308
5309  return 0;
5310}
5311
5312/* Perform constant folding and related simplification of EXPR.
5313   The related simplifications include x*1 => x, x*0 => 0, etc.,
5314   and application of the associative law.
5315   NOP_EXPR conversions may be removed freely (as long as we
5316   are careful not to change the C type of the overall expression).
5317   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5318   but we can constant-fold them if they have constant operands.  */
5319
5320#ifdef ENABLE_FOLD_CHECKING
5321# define fold(x) fold_1 (x)
5322static tree fold_1 (tree);
5323static
5324#endif
5325tree
5326fold (tree expr)
5327{
5328  tree t = expr, orig_t;
5329  tree t1 = NULL_TREE;
5330  tree tem;
5331  tree type = TREE_TYPE (expr);
5332  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5333  enum tree_code code = TREE_CODE (t);
5334  int kind = TREE_CODE_CLASS (code);
5335  int invert;
5336  /* WINS will be nonzero when the switch is done
5337     if all operands are constant.  */
5338  int wins = 1;
5339
5340  /* Don't try to process an RTL_EXPR since its operands aren't trees.
5341     Likewise for a SAVE_EXPR that's already been evaluated.  */
5342  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5343    return t;
5344
5345  /* Return right away if a constant.  */
5346  if (kind == 'c')
5347    return t;
5348
5349  orig_t = t;
5350
5351  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5352    {
5353      tree subop;
5354
5355      /* Special case for conversion ops that can have fixed point args.  */
5356      arg0 = TREE_OPERAND (t, 0);
5357
5358      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
5359      if (arg0 != 0)
5360	STRIP_SIGN_NOPS (arg0);
5361
5362      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5363	subop = TREE_REALPART (arg0);
5364      else
5365	subop = arg0;
5366
5367      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5368	  && TREE_CODE (subop) != REAL_CST)
5369	/* Note that TREE_CONSTANT isn't enough:
5370	   static var addresses are constant but we can't
5371	   do arithmetic on them.  */
5372	wins = 0;
5373    }
5374  else if (IS_EXPR_CODE_CLASS (kind))
5375    {
5376      int len = first_rtl_op (code);
5377      int i;
5378      for (i = 0; i < len; i++)
5379	{
5380	  tree op = TREE_OPERAND (t, i);
5381	  tree subop;
5382
5383	  if (op == 0)
5384	    continue;		/* Valid for CALL_EXPR, at least.  */
5385
5386	  if (kind == '<' || code == RSHIFT_EXPR)
5387	    {
5388	      /* Signedness matters here.  Perhaps we can refine this
5389		 later.  */
5390	      STRIP_SIGN_NOPS (op);
5391	    }
5392	  else
5393	    /* Strip any conversions that don't change the mode.  */
5394	    STRIP_NOPS (op);
5395
5396	  if (TREE_CODE (op) == COMPLEX_CST)
5397	    subop = TREE_REALPART (op);
5398	  else
5399	    subop = op;
5400
5401	  if (TREE_CODE (subop) != INTEGER_CST
5402	      && TREE_CODE (subop) != REAL_CST)
5403	    /* Note that TREE_CONSTANT isn't enough:
5404	       static var addresses are constant but we can't
5405	       do arithmetic on them.  */
5406	    wins = 0;
5407
5408	  if (i == 0)
5409	    arg0 = op;
5410	  else if (i == 1)
5411	    arg1 = op;
5412	}
5413    }
5414
5415  /* If this is a commutative operation, and ARG0 is a constant, move it
5416     to ARG1 to reduce the number of tests below.  */
5417  if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5418       || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5419       || code == BIT_AND_EXPR)
5420      && tree_swap_operands_p (arg0, arg1, true))
5421    return fold (build (code, type, TREE_OPERAND (t, 1),
5422			TREE_OPERAND (t, 0)));
5423
5424  /* Now WINS is set as described above,
5425     ARG0 is the first operand of EXPR,
5426     and ARG1 is the second operand (if it has more than one operand).
5427
5428     First check for cases where an arithmetic operation is applied to a
5429     compound, conditional, or comparison operation.  Push the arithmetic
5430     operation inside the compound or conditional to see if any folding
5431     can then be done.  Convert comparison to conditional for this purpose.
5432     This also optimizes non-constant cases that used to be done in
5433     expand_expr.
5434
5435     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
5436     where one of the operands is a comparison and the other is a comparison,
5437     a BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
5438     code below would make the expression more complex.  Change it to a
5439     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
5440     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
5441
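  /* For example, `(a < b) & (c < d)' is rewritten as a TRUTH_AND_EXPR
     of the two comparisons, `(a < b) != (c < d)' as a TRUTH_XOR_EXPR,
     and `(a < b) == (c < d)' as the inverted TRUTH_XOR_EXPR.  */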
5442  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5443       || code == EQ_EXPR || code == NE_EXPR)
5444      && ((truth_value_p (TREE_CODE (arg0))
5445	   && (truth_value_p (TREE_CODE (arg1))
5446	       || (TREE_CODE (arg1) == BIT_AND_EXPR
5447		   && integer_onep (TREE_OPERAND (arg1, 1)))))
5448	  || (truth_value_p (TREE_CODE (arg1))
5449	      && (truth_value_p (TREE_CODE (arg0))
5450		  || (TREE_CODE (arg0) == BIT_AND_EXPR
5451		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
5452    {
5453      t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5454		       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5455		       : TRUTH_XOR_EXPR,
5456		       type, arg0, arg1));
5457
5458      if (code == EQ_EXPR)
5459	t = invert_truthvalue (t);
5460
5461      return t;
5462    }
5463
5464  if (TREE_CODE_CLASS (code) == '1')
5465    {
5466      if (TREE_CODE (arg0) == COMPOUND_EXPR)
5467	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5468		      fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5469      else if (TREE_CODE (arg0) == COND_EXPR)
5470	{
5471	  tree arg01 = TREE_OPERAND (arg0, 1);
5472	  tree arg02 = TREE_OPERAND (arg0, 2);
5473	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5474	    arg01 = fold (build1 (code, type, arg01));
5475	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5476	    arg02 = fold (build1 (code, type, arg02));
5477	  t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5478			   arg01, arg02));
5479
5480	  /* If this was a conversion, and all we did was to move it
5481	     inside the COND_EXPR, bring it back out.  But leave it if
5482	     it is a conversion from integer to integer and the
5483	     result precision is no wider than a word since such a
5484	     conversion is cheap and may be optimized away by combine,
5485	     while it couldn't if it were outside the COND_EXPR.  Then return
5486	     so we don't get into an infinite recursion loop taking the
5487	     conversion out and then back in.  */
5488
5489	  if ((code == NOP_EXPR || code == CONVERT_EXPR
5490	       || code == NON_LVALUE_EXPR)
5491	      && TREE_CODE (t) == COND_EXPR
5492	      && TREE_CODE (TREE_OPERAND (t, 1)) == code
5493	      && TREE_CODE (TREE_OPERAND (t, 2)) == code
5494	      && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5495	      && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5496	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5497		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5498	      && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5499		    && (INTEGRAL_TYPE_P
5500			(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5501		    && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5502	    t = build1 (code, type,
5503			build (COND_EXPR,
5504			       TREE_TYPE (TREE_OPERAND
5505					  (TREE_OPERAND (t, 1), 0)),
5506			       TREE_OPERAND (t, 0),
5507			       TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5508			       TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5509	  return t;
5510	}
5511      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5512	return fold (build (COND_EXPR, type, arg0,
5513			    fold (build1 (code, type, integer_one_node)),
5514			    fold (build1 (code, type, integer_zero_node))));
5515   }
5516  else if (TREE_CODE_CLASS (code) == '<'
5517	   && TREE_CODE (arg0) == COMPOUND_EXPR)
5518    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5519		  fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5520  else if (TREE_CODE_CLASS (code) == '<'
5521	   && TREE_CODE (arg1) == COMPOUND_EXPR)
5522    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5523		  fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5524  else if (TREE_CODE_CLASS (code) == '2'
5525	   || TREE_CODE_CLASS (code) == '<')
5526    {
5527      if (TREE_CODE (arg1) == COMPOUND_EXPR
5528	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5529	  && ! TREE_SIDE_EFFECTS (arg0))
5530	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5531		      fold (build (code, type,
5532				   arg0, TREE_OPERAND (arg1, 1))));
5533      else if ((TREE_CODE (arg1) == COND_EXPR
5534		|| (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5535		    && TREE_CODE_CLASS (code) != '<'))
5536	       && (TREE_CODE (arg0) != COND_EXPR
5537		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5538	       && (! TREE_SIDE_EFFECTS (arg0)
5539		   || ((*lang_hooks.decls.global_bindings_p) () == 0
5540		       && ! CONTAINS_PLACEHOLDER_P (arg0))))
5541	return
5542	  fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5543					       /*cond_first_p=*/0);
5544      else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5545	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5546		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5547      else if ((TREE_CODE (arg0) == COND_EXPR
5548		|| (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5549		    && TREE_CODE_CLASS (code) != '<'))
5550	       && (TREE_CODE (arg1) != COND_EXPR
5551		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5552	       && (! TREE_SIDE_EFFECTS (arg1)
5553		   || ((*lang_hooks.decls.global_bindings_p) () == 0
5554		       && ! CONTAINS_PLACEHOLDER_P (arg1))))
5555	return
5556	  fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5557					       /*cond_first_p=*/1);
5558    }
5559
5560  switch (code)
5561    {
5562    case INTEGER_CST:
5563    case REAL_CST:
5564    case VECTOR_CST:
5565    case STRING_CST:
5566    case COMPLEX_CST:
5567    case CONSTRUCTOR:
5568      return t;
5569
5570    case CONST_DECL:
5571      return fold (DECL_INITIAL (t));
5572
5573    case NOP_EXPR:
5574    case FLOAT_EXPR:
5575    case CONVERT_EXPR:
5576    case FIX_TRUNC_EXPR:
5577      /* Other kinds of FIX are not handled properly by fold_convert.  */
5578
5579      if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5580	return TREE_OPERAND (t, 0);
5581
5582      /* Handle cases of two conversions in a row.  */
5583      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5584	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5585	{
5586	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5587	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5588	  tree final_type = TREE_TYPE (t);
5589	  int inside_int = INTEGRAL_TYPE_P (inside_type);
5590	  int inside_ptr = POINTER_TYPE_P (inside_type);
5591	  int inside_float = FLOAT_TYPE_P (inside_type);
5592	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
5593	  int inside_unsignedp = TREE_UNSIGNED (inside_type);
5594	  int inter_int = INTEGRAL_TYPE_P (inter_type);
5595	  int inter_ptr = POINTER_TYPE_P (inter_type);
5596	  int inter_float = FLOAT_TYPE_P (inter_type);
5597	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
5598	  int inter_unsignedp = TREE_UNSIGNED (inter_type);
5599	  int final_int = INTEGRAL_TYPE_P (final_type);
5600	  int final_ptr = POINTER_TYPE_P (final_type);
5601	  int final_float = FLOAT_TYPE_P (final_type);
5602	  unsigned int final_prec = TYPE_PRECISION (final_type);
5603	  int final_unsignedp = TREE_UNSIGNED (final_type);
5604
5605	  /* In addition to the cases of two conversions in a row
5606	     handled below, if we are converting something to its own
5607	     type via an object of identical or wider precision, neither
5608	     conversion is needed.  */
5609	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5610	      && ((inter_int && final_int) || (inter_float && final_float))
5611	      && inter_prec >= final_prec)
5612	    return fold (build1 (code, final_type,
5613				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5614
5615	  /* Likewise, if the intermediate and final types are either both
5616	     float or both integer, we don't need the middle conversion if
5617	     it is wider than the final type and doesn't change the signedness
5618	     (for integers).  Avoid this if the final type is a pointer
5619	     since then we sometimes need the inner conversion.  Likewise if
5620	     the outer has a precision not equal to the size of its mode.  */
5621	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5622	       || (inter_float && inside_float))
5623	      && inter_prec >= inside_prec
5624	      && (inter_float || inter_unsignedp == inside_unsignedp)
5625	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5626		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5627	      && ! final_ptr)
5628	    return fold (build1 (code, final_type,
5629				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5630
5631	  /* If we have a sign-extension of a zero-extended value, we can
5632	     replace that by a single zero-extension.  */
5633	  if (inside_int && inter_int && final_int
5634	      && inside_prec < inter_prec && inter_prec < final_prec
5635	      && inside_unsignedp && !inter_unsignedp)
5636	    return fold (build1 (code, final_type,
5637				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5638
5639	  /* Two conversions in a row are not needed unless:
5640	     - some conversion is floating-point (overstrict for now), or
5641	     - the intermediate type is narrower than both initial and
5642	       final, or
5643	     - the intermediate type and innermost type differ in signedness,
5644	       and the outermost type is wider than the intermediate, or
5645	     - the initial type is a pointer type and the precisions of the
5646	       intermediate and final types differ, or
5647	     - the final type is a pointer type and the precisions of the
5648	       initial and intermediate types differ.  */
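	  /* For example, (int) (short) x, where x is an int, must keep
	     the intermediate truncation, whereas (int) (long) x can fold
	     to plain x when int and long have the same precision.  */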
5649	  if (! inside_float && ! inter_float && ! final_float
5650	      && (inter_prec > inside_prec || inter_prec > final_prec)
5651	      && ! (inside_int && inter_int
5652		    && inter_unsignedp != inside_unsignedp
5653		    && inter_prec < final_prec)
5654	      && ((inter_unsignedp && inter_prec > inside_prec)
5655		  == (final_unsignedp && final_prec > inter_prec))
5656	      && ! (inside_ptr && inter_prec != final_prec)
5657	      && ! (final_ptr && inside_prec != inter_prec)
5658	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5659		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5660	      && ! final_ptr)
5661	    return fold (build1 (code, final_type,
5662				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5663	}
5664
5665      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5666	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5667	  /* Detect assigning a bitfield.  */
5668	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5669	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5670	{
5671	  /* Don't leave an assignment inside a conversion
5672	     unless assigning a bitfield.  */
5673	  tree prev = TREE_OPERAND (t, 0);
5674	  if (t == orig_t)
5675	    t = copy_node (t);
5676	  TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5677	  /* First do the assignment, then return converted constant.  */
5678	  t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5679	  TREE_NO_UNUSED_WARNING (t) = 1;
5680	  TREE_USED (t) = 1;
5681	  return t;
5682	}
5683
5684      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5685	 constant (if x has signed type, the sign bit cannot be set
5686	 in c).  This folds extension into the BIT_AND_EXPR.  */
5687      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5688	  && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5689	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5690	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5691	{
5692	  tree and = TREE_OPERAND (t, 0);
5693	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5694	  int change = 0;
5695
5696	  if (TREE_UNSIGNED (TREE_TYPE (and))
5697	      || (TYPE_PRECISION (TREE_TYPE (t))
5698		  <= TYPE_PRECISION (TREE_TYPE (and))))
5699	    change = 1;
5700	  else if (TYPE_PRECISION (TREE_TYPE (and1))
5701		   <= HOST_BITS_PER_WIDE_INT
5702		   && host_integerp (and1, 1))
5703	    {
5704	      unsigned HOST_WIDE_INT cst;
5705
5706	      cst = tree_low_cst (and1, 1);
5707	      cst &= (HOST_WIDE_INT) -1
5708		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5709	      change = (cst == 0);
5710#ifdef LOAD_EXTEND_OP
5711	      if (change
5712		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5713		      == ZERO_EXTEND))
5714		{
5715		  tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5716		  and0 = fold_convert (uns, and0);
5717		  and1 = fold_convert (uns, and1);
5718		}
5719#endif
5720	    }
5721	  if (change)
5722	    return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5723				fold_convert (TREE_TYPE (t), and0),
5724				fold_convert (TREE_TYPE (t), and1)));
5725	}
5726
5727      tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5728      return tem ? tem : t;
5729
5730    case VIEW_CONVERT_EXPR:
5731      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5732	return build1 (VIEW_CONVERT_EXPR, type,
5733		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5734      return t;
5735
5736    case COMPONENT_REF:
5737      if (TREE_CODE (arg0) == CONSTRUCTOR
5738	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5739	{
5740	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5741	  if (m)
5742	    t = TREE_VALUE (m);
5743	}
5744      return t;
5745
5746    case RANGE_EXPR:
5747      if (TREE_CONSTANT (t) != wins)
5748	{
5749	  if (t == orig_t)
5750	    t = copy_node (t);
5751	  TREE_CONSTANT (t) = wins;
5752	}
5753      return t;
5754
5755    case NEGATE_EXPR:
5756      if (negate_expr_p (arg0))
5757	return fold_convert (type, negate_expr (arg0));
5758      return t;
5759
5760    case ABS_EXPR:
5761      if (wins)
5762	{
5763	  if (TREE_CODE (arg0) == INTEGER_CST)
5764	    {
5765	      /* If the value is unsigned, then the absolute value is
5766		 the same as the ordinary value.  */
5767	      if (TREE_UNSIGNED (type))
5768		return arg0;
5769	      /* Similarly, if the value is non-negative.  */
5770	      else if (INT_CST_LT (integer_minus_one_node, arg0))
5771		return arg0;
5772	      /* If the value is negative, then the absolute value is
5773		 its negation.  */
5774	      else
5775		{
5776		  unsigned HOST_WIDE_INT low;
5777		  HOST_WIDE_INT high;
5778		  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5779					     TREE_INT_CST_HIGH (arg0),
5780					     &low, &high);
5781		  t = build_int_2 (low, high);
5782		  TREE_TYPE (t) = type;
5783		  TREE_OVERFLOW (t)
5784		    = (TREE_OVERFLOW (arg0)
5785		       | force_fit_type (t, overflow));
5786		  TREE_CONSTANT_OVERFLOW (t)
5787		    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5788		}
5789	    }
5790	  else if (TREE_CODE (arg0) == REAL_CST)
5791	    {
5792	      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5793		t = build_real (type,
5794				REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5795	    }
5796	}
5797      else if (TREE_CODE (arg0) == NEGATE_EXPR)
5798	return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5799      /* Convert fabs((double)float) into (double)fabsf(float).  */
5800      else if (TREE_CODE (arg0) == NOP_EXPR
5801	       && TREE_CODE (type) == REAL_TYPE)
5802	{
5803	  tree targ0 = strip_float_extensions (arg0);
5804	  if (targ0 != arg0)
5805	    return fold_convert (type, fold (build1 (ABS_EXPR,
5806						     TREE_TYPE (targ0),
5807						     targ0)));
5808	}
5809      else if (tree_expr_nonnegative_p (arg0))
5810	return arg0;
5811      return t;
5812
5813    case CONJ_EXPR:
5814      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5815	return fold_convert (type, arg0);
5816      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5817	return build (COMPLEX_EXPR, type,
5818		      TREE_OPERAND (arg0, 0),
5819		      negate_expr (TREE_OPERAND (arg0, 1)));
5820      else if (TREE_CODE (arg0) == COMPLEX_CST)
5821	return build_complex (type, TREE_REALPART (arg0),
5822			      negate_expr (TREE_IMAGPART (arg0)));
5823      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5824	return fold (build (TREE_CODE (arg0), type,
5825			    fold (build1 (CONJ_EXPR, type,
5826					  TREE_OPERAND (arg0, 0))),
5827			    fold (build1 (CONJ_EXPR,
5828					  type, TREE_OPERAND (arg0, 1)))));
5829      else if (TREE_CODE (arg0) == CONJ_EXPR)
5830	return TREE_OPERAND (arg0, 0);
5831      return t;
5832
5833    case BIT_NOT_EXPR:
5834      if (wins)
5835	{
5836	  t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5837			   ~ TREE_INT_CST_HIGH (arg0));
5838	  TREE_TYPE (t) = type;
5839	  force_fit_type (t, 0);
5840	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5841	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5842	}
5843      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5844	return TREE_OPERAND (arg0, 0);
5845      return t;
5846
5847    case PLUS_EXPR:
5848      /* A + (-B) -> A - B */
5849      if (TREE_CODE (arg1) == NEGATE_EXPR)
5850	return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5851      /* (-A) + B -> B - A */
5852      if (TREE_CODE (arg0) == NEGATE_EXPR)
5853	return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5854      else if (! FLOAT_TYPE_P (type))
5855	{
5856	  if (integer_zerop (arg1))
5857	    return non_lvalue (fold_convert (type, arg0));
5858
5859	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5860	     with a constant, and the two constants have no bits in common,
5861	     we should treat this as a BIT_IOR_EXPR since this may produce more
5862	     simplifications.  */
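	  /* For example, (x & 0xF0) + (y & 0x0F) can produce no carries
	     between the two masked parts, so it is equivalent to
	     (x & 0xF0) | (y & 0x0F).  */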
5863	  if (TREE_CODE (arg0) == BIT_AND_EXPR
5864	      && TREE_CODE (arg1) == BIT_AND_EXPR
5865	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5866	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5867	      && integer_zerop (const_binop (BIT_AND_EXPR,
5868					     TREE_OPERAND (arg0, 1),
5869					     TREE_OPERAND (arg1, 1), 0)))
5870	    {
5871	      code = BIT_IOR_EXPR;
5872	      goto bit_ior;
5873	    }
5874
5875	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5876	     (plus (plus (mult) (mult)) (foo)) so that we can
5877	     take advantage of the factoring cases below.  */
5878	  if ((TREE_CODE (arg0) == PLUS_EXPR
5879	       && TREE_CODE (arg1) == MULT_EXPR)
5880	      || (TREE_CODE (arg1) == PLUS_EXPR
5881		  && TREE_CODE (arg0) == MULT_EXPR))
5882	    {
5883	      tree parg0, parg1, parg, marg;
5884
5885	      if (TREE_CODE (arg0) == PLUS_EXPR)
5886		parg = arg0, marg = arg1;
5887	      else
5888		parg = arg1, marg = arg0;
5889	      parg0 = TREE_OPERAND (parg, 0);
5890	      parg1 = TREE_OPERAND (parg, 1);
5891	      STRIP_NOPS (parg0);
5892	      STRIP_NOPS (parg1);
5893
5894	      if (TREE_CODE (parg0) == MULT_EXPR
5895		  && TREE_CODE (parg1) != MULT_EXPR)
5896		return fold (build (PLUS_EXPR, type,
5897				    fold (build (PLUS_EXPR, type,
5898						 fold_convert (type, parg0),
5899						 fold_convert (type, marg))),
5900				    fold_convert (type, parg1)));
5901	      if (TREE_CODE (parg0) != MULT_EXPR
5902		  && TREE_CODE (parg1) == MULT_EXPR)
5903		return fold (build (PLUS_EXPR, type,
5904				    fold (build (PLUS_EXPR, type,
5905						 fold_convert (type, parg1),
5906						 fold_convert (type, marg))),
5907				    fold_convert (type, parg0)));
5908	    }
5909
5910	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5911	    {
5912	      tree arg00, arg01, arg10, arg11;
5913	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5914
5915	      /* (A * C) + (B * C) -> (A+B) * C.
5916		 We are most concerned about the case where C is a constant,
5917		 but other combinations show up during loop reduction.  Since
5918		 it is not difficult, try all four possibilities.  */
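	      /* For example, i*4 + j*4 becomes (i + j)*4, a form that
		 helps address computations for arrays of 4-byte
		 elements.  */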
5919
5920	      arg00 = TREE_OPERAND (arg0, 0);
5921	      arg01 = TREE_OPERAND (arg0, 1);
5922	      arg10 = TREE_OPERAND (arg1, 0);
5923	      arg11 = TREE_OPERAND (arg1, 1);
5924	      same = NULL_TREE;
5925
5926	      if (operand_equal_p (arg01, arg11, 0))
5927		same = arg01, alt0 = arg00, alt1 = arg10;
5928	      else if (operand_equal_p (arg00, arg10, 0))
5929		same = arg00, alt0 = arg01, alt1 = arg11;
5930	      else if (operand_equal_p (arg00, arg11, 0))
5931		same = arg00, alt0 = arg01, alt1 = arg10;
5932	      else if (operand_equal_p (arg01, arg10, 0))
5933		same = arg01, alt0 = arg00, alt1 = arg11;
5934
5935	      /* No identical multiplicands; see if we can find a common
5936		 power-of-two factor in non-power-of-two multiplies.  This
5937		 can help in multi-dimensional array access.  */
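	      /* For example, a*12 + b*4 shares the power-of-two factor
		 4 and becomes (a*3 + b)*4 below.  */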
5938	      else if (TREE_CODE (arg01) == INTEGER_CST
5939		       && TREE_CODE (arg11) == INTEGER_CST
5940		       && TREE_INT_CST_HIGH (arg01) == 0
5941		       && TREE_INT_CST_HIGH (arg11) == 0)
5942		{
5943		  HOST_WIDE_INT int01, int11, tmp;
5944		  int01 = TREE_INT_CST_LOW (arg01);
5945		  int11 = TREE_INT_CST_LOW (arg11);
5946
5947		  /* Move min of absolute values to int11.  */
5948		  if ((int01 >= 0 ? int01 : -int01)
5949		      < (int11 >= 0 ? int11 : -int11))
5950		    {
5951		      tmp = int01, int01 = int11, int11 = tmp;
5952		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
5953		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
5954		    }
5955
5956		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5957		    {
5958		      alt0 = fold (build (MULT_EXPR, type, arg00,
5959					  build_int_2 (int01 / int11, 0)));
5960		      alt1 = arg10;
5961		      same = arg11;
5962		    }
5963		}
5964
5965	      if (same)
5966		return fold (build (MULT_EXPR, type,
5967				    fold (build (PLUS_EXPR, type, alt0, alt1)),
5968				    same));
5969	    }
5970	}
5971      else
5972	{
5973	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
5974	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5975	    return non_lvalue (fold_convert (type, arg0));
5976
5977	  /* Likewise if the operands are reversed.  */
5978	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5979	    return non_lvalue (fold_convert (type, arg1));
5980
5981	  /* Convert x+x into x*2.0.  */
5982	  if (operand_equal_p (arg0, arg1, 0)
5983	      && SCALAR_FLOAT_TYPE_P (type))
5984	    return fold (build (MULT_EXPR, type, arg0,
5985				build_real (type, dconst2)));
5986
5987	  /* Convert x*c+x into x*(c+1).  */
5988	  if (flag_unsafe_math_optimizations
5989	      && TREE_CODE (arg0) == MULT_EXPR
5990	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5991	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5992	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5993	    {
5994	      REAL_VALUE_TYPE c;
5995
5996	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5997	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5998	      return fold (build (MULT_EXPR, type, arg1,
5999				  build_real (type, c)));
6000	    }
6001
6002	  /* Convert x+x*c into x*(c+1).  */
6003	  if (flag_unsafe_math_optimizations
6004	      && TREE_CODE (arg1) == MULT_EXPR
6005	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6006	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6007	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6008	    {
6009	      REAL_VALUE_TYPE c;
6010
6011	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6012	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6013	      return fold (build (MULT_EXPR, type, arg0,
6014				  build_real (type, c)));
6015	    }
6016
6017	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
6018	  if (flag_unsafe_math_optimizations
6019	      && TREE_CODE (arg0) == MULT_EXPR
6020	      && TREE_CODE (arg1) == MULT_EXPR
6021	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6022	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6023	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6024	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6025	      && operand_equal_p (TREE_OPERAND (arg0, 0),
6026				  TREE_OPERAND (arg1, 0), 0))
6027	    {
6028	      REAL_VALUE_TYPE c1, c2;
6029
6030	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6031	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6032	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6033	      return fold (build (MULT_EXPR, type,
6034				  TREE_OPERAND (arg0, 0),
6035				  build_real (type, c1)));
6036	    }
6037	}
6038
6039     bit_rotate:
6040      /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
6041	 of A, is a rotate of A by C1 bits.  */
6042      /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
6043	 of A, is a rotate of A by B bits.  */
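      /* For example, with a 32-bit unsigned A, (A << 3) + (A >> 29)
	 becomes A rotated left by 3 bits.  */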
6044      {
6045	enum tree_code code0, code1;
6046	code0 = TREE_CODE (arg0);
6047	code1 = TREE_CODE (arg1);
6048	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6049	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6050	    && operand_equal_p (TREE_OPERAND (arg0, 0),
6051			        TREE_OPERAND (arg1, 0), 0)
6052	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6053	  {
6054	    tree tree01, tree11;
6055	    enum tree_code code01, code11;
6056
6057	    tree01 = TREE_OPERAND (arg0, 1);
6058	    tree11 = TREE_OPERAND (arg1, 1);
6059	    STRIP_NOPS (tree01);
6060	    STRIP_NOPS (tree11);
6061	    code01 = TREE_CODE (tree01);
6062	    code11 = TREE_CODE (tree11);
6063	    if (code01 == INTEGER_CST
6064		&& code11 == INTEGER_CST
6065		&& TREE_INT_CST_HIGH (tree01) == 0
6066		&& TREE_INT_CST_HIGH (tree11) == 0
6067		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6068		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6069	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6070			    code0 == LSHIFT_EXPR ? tree01 : tree11);
6071	    else if (code11 == MINUS_EXPR)
6072	      {
6073		tree tree110, tree111;
6074		tree110 = TREE_OPERAND (tree11, 0);
6075		tree111 = TREE_OPERAND (tree11, 1);
6076		STRIP_NOPS (tree110);
6077		STRIP_NOPS (tree111);
6078		if (TREE_CODE (tree110) == INTEGER_CST
6079		    && 0 == compare_tree_int (tree110,
6080					      TYPE_PRECISION
6081					      (TREE_TYPE (TREE_OPERAND
6082							  (arg0, 0))))
6083		    && operand_equal_p (tree01, tree111, 0))
6084		  return build ((code0 == LSHIFT_EXPR
6085				 ? LROTATE_EXPR
6086				 : RROTATE_EXPR),
6087				type, TREE_OPERAND (arg0, 0), tree01);
6088	      }
6089	    else if (code01 == MINUS_EXPR)
6090	      {
6091		tree tree010, tree011;
6092		tree010 = TREE_OPERAND (tree01, 0);
6093		tree011 = TREE_OPERAND (tree01, 1);
6094		STRIP_NOPS (tree010);
6095		STRIP_NOPS (tree011);
6096		if (TREE_CODE (tree010) == INTEGER_CST
6097		    && 0 == compare_tree_int (tree010,
6098					      TYPE_PRECISION
6099					      (TREE_TYPE (TREE_OPERAND
6100							  (arg0, 0))))
6101		    && operand_equal_p (tree11, tree011, 0))
6102		  return build ((code0 != LSHIFT_EXPR
6103				 ? LROTATE_EXPR
6104				 : RROTATE_EXPR),
6105				type, TREE_OPERAND (arg0, 0), tree11);
6106	      }
6107	  }
6108      }
6109
6110    associate:
6111      /* In most languages, we can't associate operations on floats through
6112	 parentheses.  Rather than remember where the parentheses were, we
6113	 don't associate floats at all, unless the user has specified
6114	 -funsafe-math-optimizations.  */
6115
6116      if (! wins
6117	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6118	{
6119	  tree var0, con0, lit0, minus_lit0;
6120	  tree var1, con1, lit1, minus_lit1;
6121
6122	  /* Split both trees into variables, constants, and literals.  Then
6123	     associate each group together, the constants with literals,
6124	     then the result with variables.  This increases the chances of
6125	     literals being recombined later and of generating relocatable
6126	     expressions for the sum of a constant and literal.  */
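	  /* For example, (x + 5) + (y + 7) splits into variables x, y
	     and literals 5, 7, and reassociates to (x + y) + 12.  */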
6127	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6128	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6129			     code == MINUS_EXPR);
6130
6131	  /* Only do something if we found more than two objects.  Otherwise,
6132	     nothing has changed and we risk infinite recursion.  */
6133	  if (2 < ((var0 != 0) + (var1 != 0)
6134		   + (con0 != 0) + (con1 != 0)
6135		   + (lit0 != 0) + (lit1 != 0)
6136		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
6137	    {
6138	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
6139	      if (code == MINUS_EXPR)
6140		code = PLUS_EXPR;
6141
6142	      var0 = associate_trees (var0, var1, code, type);
6143	      con0 = associate_trees (con0, con1, code, type);
6144	      lit0 = associate_trees (lit0, lit1, code, type);
6145	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6146
6147	      /* Preserve the MINUS_EXPR if the negative part of the literal is
6148		 greater than the positive part.  Otherwise, the multiplicative
6149		 folding code (i.e. extract_muldiv) may be fooled when
6150		 unsigned constants are subtracted, as in the following
6151		 example: ((X*2 + 4) - 8U)/2.  */
6152	      if (minus_lit0 && lit0)
6153		{
6154		  if (TREE_CODE (lit0) == INTEGER_CST
6155		      && TREE_CODE (minus_lit0) == INTEGER_CST
6156		      && tree_int_cst_lt (lit0, minus_lit0))
6157		    {
6158		      minus_lit0 = associate_trees (minus_lit0, lit0,
6159						    MINUS_EXPR, type);
6160		      lit0 = 0;
6161		    }
6162		  else
6163		    {
6164		      lit0 = associate_trees (lit0, minus_lit0,
6165					      MINUS_EXPR, type);
6166		      minus_lit0 = 0;
6167		    }
6168		}
6169	      if (minus_lit0)
6170		{
6171		  if (con0 == 0)
6172		    return fold_convert (type,
6173					 associate_trees (var0, minus_lit0,
6174							  MINUS_EXPR, type));
6175		  else
6176		    {
6177		      con0 = associate_trees (con0, minus_lit0,
6178					      MINUS_EXPR, type);
6179		      return fold_convert (type,
6180					   associate_trees (var0, con0,
6181							    PLUS_EXPR, type));
6182		    }
6183		}
6184
6185	      con0 = associate_trees (con0, lit0, code, type);
6186	      return fold_convert (type, associate_trees (var0, con0,
6187							  code, type));
6188	    }
6189	}
6190
6191    binary:
6192      if (wins)
6193	t1 = const_binop (code, arg0, arg1, 0);
6194      if (t1 != NULL_TREE)
6195	{
6196	  /* The return value should always have
6197	     the same type as the original expression.  */
6198	  if (TREE_TYPE (t1) != TREE_TYPE (t))
6199	    t1 = fold_convert (TREE_TYPE (t), t1);
6200
6201	  return t1;
6202	}
6203      return t;
6204
6205    case MINUS_EXPR:
6206      /* A - (-B) -> A + B */
6207      if (TREE_CODE (arg1) == NEGATE_EXPR)
6208	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6209      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
6210      if (TREE_CODE (arg0) == NEGATE_EXPR
6211	  && (FLOAT_TYPE_P (type)
6212	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6213	  && negate_expr_p (arg1)
6214	  && reorder_operands_p (arg0, arg1))
6215	return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6216			    TREE_OPERAND (arg0, 0)));
6217
6218      if (! FLOAT_TYPE_P (type))
6219	{
6220	  if (! wins && integer_zerop (arg0))
6221	    return negate_expr (fold_convert (type, arg1));
6222	  if (integer_zerop (arg1))
6223	    return non_lvalue (fold_convert (type, arg0));
6224
6225	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
6226	     about the case where C is a constant, just try one of the
6227	     four possibilities.  */
6228
6229	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
6230	      && operand_equal_p (TREE_OPERAND (arg0, 1),
6231				  TREE_OPERAND (arg1, 1), 0))
6232	    return fold (build (MULT_EXPR, type,
6233				fold (build (MINUS_EXPR, type,
6234					     TREE_OPERAND (arg0, 0),
6235					     TREE_OPERAND (arg1, 0))),
6236				TREE_OPERAND (arg0, 1)));
6237
6238	  /* Fold A - (A & B) into ~B & A.  */
6239	  if (!TREE_SIDE_EFFECTS (arg0)
6240	      && TREE_CODE (arg1) == BIT_AND_EXPR)
6241	    {
6242	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6243		return fold (build (BIT_AND_EXPR, type,
6244				    fold (build1 (BIT_NOT_EXPR, type,
6245						  TREE_OPERAND (arg1, 0))),
6246				    arg0));
6247	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6248		return fold (build (BIT_AND_EXPR, type,
6249				    fold (build1 (BIT_NOT_EXPR, type,
6250						  TREE_OPERAND (arg1, 1))),
6251				    arg0));
6252	    }
6253
6254	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6255	     any power of 2 minus 1.  */
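	  /* For example, (x & ~3) - (x & 3) becomes (x ^ 3) - 3, since
	     3 is a power of 2 minus 1.  */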
6256	  if (TREE_CODE (arg0) == BIT_AND_EXPR
6257	      && TREE_CODE (arg1) == BIT_AND_EXPR
6258	      && operand_equal_p (TREE_OPERAND (arg0, 0),
6259				  TREE_OPERAND (arg1, 0), 0))
6260	    {
6261	      tree mask0 = TREE_OPERAND (arg0, 1);
6262	      tree mask1 = TREE_OPERAND (arg1, 1);
6263	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6264
6265	      if (operand_equal_p (tem, mask1, 0))
6266		{
6267		  tem = fold (build (BIT_XOR_EXPR, type,
6268				     TREE_OPERAND (arg0, 0), mask1));
6269		  return fold (build (MINUS_EXPR, type, tem, mask1));
6270		}
6271	    }
6272	}
6273
6274      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
6275      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6276	return non_lvalue (fold_convert (type, arg0));
6277
6278      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
6279	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6280	 (-ARG1 + ARG0) reduces to -ARG1.  */
6281      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6282	return negate_expr (fold_convert (type, arg1));
6283
6284      /* Fold &x - &x.  This can happen from &x.foo - &x.
6285	 This is unsafe for certain floats even in non-IEEE formats.
6286	 In IEEE, it is unsafe because it does wrong for NaNs.
6287	 Also note that operand_equal_p is always false if an operand
6288	 is volatile.  */
6289
6290      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6291	  && operand_equal_p (arg0, arg1, 0))
6292	return fold_convert (type, integer_zero_node);
6293
6294      goto associate;
6295
6296    case MULT_EXPR:
6297      /* (-A) * (-B) -> A * B  */
6298      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6299	return fold (build (MULT_EXPR, type,
6300			    TREE_OPERAND (arg0, 0),
6301			    negate_expr (arg1)));
6302      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6303	return fold (build (MULT_EXPR, type,
6304			    negate_expr (arg0),
6305			    TREE_OPERAND (arg1, 0)));
6306
6307      if (! FLOAT_TYPE_P (type))
6308	{
6309	  if (integer_zerop (arg1))
6310	    return omit_one_operand (type, arg1, arg0);
6311	  if (integer_onep (arg1))
6312	    return non_lvalue (fold_convert (type, arg0));
6313
6314	  /* (a * (1 << b)) is (a << b)  */
6315	  if (TREE_CODE (arg1) == LSHIFT_EXPR
6316	      && integer_onep (TREE_OPERAND (arg1, 0)))
6317	    return fold (build (LSHIFT_EXPR, type, arg0,
6318				TREE_OPERAND (arg1, 1)));
6319	  if (TREE_CODE (arg0) == LSHIFT_EXPR
6320	      && integer_onep (TREE_OPERAND (arg0, 0)))
6321	    return fold (build (LSHIFT_EXPR, type, arg1,
6322				TREE_OPERAND (arg0, 1)));
6323
6324	  if (TREE_CODE (arg1) == INTEGER_CST
6325	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6326					     fold_convert (type, arg1),
6327					     code, NULL_TREE)))
6328	    return fold_convert (type, tem);
6329
6330	}
6331      else
6332	{
6333	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
6334	     when x is NaN, since x * 0 is also NaN.  Nor are they the
6335	     same in modes with signed zeros, since multiplying a
6336	     negative value by 0 gives -0, not +0.  */
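	  /* E.g. NaN * 0.0 is NaN and -1.0 * 0.0 is -0.0, so replacing
	     the product with the literal zero is only valid when neither
	     NaNs nor signed zeros are honored.  */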
6337	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6338	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6339	      && real_zerop (arg1))
6340	    return omit_one_operand (type, arg1, arg0);
6341	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
6342	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6343	      && real_onep (arg1))
6344	    return non_lvalue (fold_convert (type, arg0));
6345
6346	  /* Transform x * -1.0 into -x.  */
6347	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6348	      && real_minus_onep (arg1))
6349	    return fold (build1 (NEGATE_EXPR, type, arg0));
6350
6351	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
6352	  if (flag_unsafe_math_optimizations
6353	      && TREE_CODE (arg0) == RDIV_EXPR
6354	      && TREE_CODE (arg1) == REAL_CST
6355	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6356	    {
6357	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6358				      arg1, 0);
6359	      if (tem)
6360		return fold (build (RDIV_EXPR, type, tem,
6361				    TREE_OPERAND (arg0, 1)));
6362	    }
6363
6364	  if (flag_unsafe_math_optimizations)
6365	    {
6366	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6367	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6368
6369	      /* Optimizations of sqrt(...)*sqrt(...).  */
6370	      if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6371		  || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6372		  || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6373		{
6374		  tree sqrtfn, arg, arglist;
6375		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6376		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6377
6378		  /* Optimize sqrt(x)*sqrt(x) as x.  */
6379		  if (operand_equal_p (arg00, arg10, 0)
6380		      && ! HONOR_SNANS (TYPE_MODE (type)))
6381		    return arg00;
6382
6383	          /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y).  */
6384		  sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6385		  arg = fold (build (MULT_EXPR, type, arg00, arg10));
6386		  arglist = build_tree_list (NULL_TREE, arg);
6387		  return build_function_call_expr (sqrtfn, arglist);
6388		}
6389
6390	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
6391	      if (fcode0 == fcode1
6392		  && (fcode0 == BUILT_IN_EXP
6393		      || fcode0 == BUILT_IN_EXPF
6394		      || fcode0 == BUILT_IN_EXPL
6395		      || fcode0 == BUILT_IN_EXP2
6396		      || fcode0 == BUILT_IN_EXP2F
6397		      || fcode0 == BUILT_IN_EXP2L
6398		      || fcode0 == BUILT_IN_EXP10
6399		      || fcode0 == BUILT_IN_EXP10F
6400		      || fcode0 == BUILT_IN_EXP10L
6401		      || fcode0 == BUILT_IN_POW10
6402		      || fcode0 == BUILT_IN_POW10F
6403		      || fcode0 == BUILT_IN_POW10L))
6404		{
6405		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6406		  tree arg = build (PLUS_EXPR, type,
6407				    TREE_VALUE (TREE_OPERAND (arg0, 1)),
6408				    TREE_VALUE (TREE_OPERAND (arg1, 1)));
6409		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
6410		  return build_function_call_expr (expfn, arglist);
6411		}
6412
6413	      /* Optimizations of pow(...)*pow(...).  */
6414	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6415		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6416		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6417		{
6418		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6419		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6420								     1)));
6421		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6422		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6423								     1)));
6424
6425		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
6426		  if (operand_equal_p (arg01, arg11, 0))
6427		    {
6428		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6429		      tree arg = build (MULT_EXPR, type, arg00, arg10);
6430		      tree arglist = tree_cons (NULL_TREE, fold (arg),
6431						build_tree_list (NULL_TREE,
6432								 arg01));
6433		      return build_function_call_expr (powfn, arglist);
6434		    }
6435
6436		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
6437		  if (operand_equal_p (arg00, arg10, 0))
6438		    {
6439		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6440		      tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6441		      tree arglist = tree_cons (NULL_TREE, arg00,
6442						build_tree_list (NULL_TREE,
6443								 arg));
6444		      return build_function_call_expr (powfn, arglist);
6445		    }
6446		}
6447
6448	      /* Optimize tan(x)*cos(x) as sin(x).  */
6449	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6450		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6451		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6452		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6453		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6454		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6455		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6456				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6457		{
6458		  tree sinfn;
6459
6460		  switch (fcode0)
6461		    {
6462		    case BUILT_IN_TAN:
6463		    case BUILT_IN_COS:
6464		      sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6465		      break;
6466		    case BUILT_IN_TANF:
6467		    case BUILT_IN_COSF:
6468		      sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6469		      break;
6470		    case BUILT_IN_TANL:
6471		    case BUILT_IN_COSL:
6472		      sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6473		      break;
6474		    default:
6475		      sinfn = NULL_TREE;
6476		    }
6477
6478		  if (sinfn != NULL_TREE)
6479		    return build_function_call_expr (sinfn,
6480						     TREE_OPERAND (arg0, 1));
6481		}
6482
6483	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
6484	      if (fcode1 == BUILT_IN_POW
6485		  || fcode1 == BUILT_IN_POWF
6486		  || fcode1 == BUILT_IN_POWL)
6487		{
6488		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6489		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6490								     1)));
6491		  if (TREE_CODE (arg11) == REAL_CST
6492		      && ! TREE_CONSTANT_OVERFLOW (arg11)
6493		      && operand_equal_p (arg0, arg10, 0))
6494		    {
6495		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6496		      REAL_VALUE_TYPE c;
6497		      tree arg, arglist;
6498
6499		      c = TREE_REAL_CST (arg11);
6500		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6501		      arg = build_real (type, c);
6502		      arglist = build_tree_list (NULL_TREE, arg);
6503		      arglist = tree_cons (NULL_TREE, arg0, arglist);
6504		      return build_function_call_expr (powfn, arglist);
6505		    }
6506		}
6507
6508	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
6509	      if (fcode0 == BUILT_IN_POW
6510		  || fcode0 == BUILT_IN_POWF
6511		  || fcode0 == BUILT_IN_POWL)
6512		{
6513		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6514		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6515								     1)));
6516		  if (TREE_CODE (arg01) == REAL_CST
6517		      && ! TREE_CONSTANT_OVERFLOW (arg01)
6518		      && operand_equal_p (arg1, arg00, 0))
6519		    {
6520		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6521		      REAL_VALUE_TYPE c;
6522		      tree arg, arglist;
6523
6524		      c = TREE_REAL_CST (arg01);
6525		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6526		      arg = build_real (type, c);
6527		      arglist = build_tree_list (NULL_TREE, arg);
6528		      arglist = tree_cons (NULL_TREE, arg1, arglist);
6529		      return build_function_call_expr (powfn, arglist);
6530		    }
6531		}
6532
6533	      /* Optimize x*x as pow(x,2.0), which is later expanded back into x*x.  */
6534	      if (! optimize_size
6535		  && operand_equal_p (arg0, arg1, 0))
6536		{
6537		  tree powfn;
6538
6539		  if (type == double_type_node)
6540		    powfn = implicit_built_in_decls[BUILT_IN_POW];
6541		  else if (type == float_type_node)
6542		    powfn = implicit_built_in_decls[BUILT_IN_POWF];
6543		  else if (type == long_double_type_node)
6544		    powfn = implicit_built_in_decls[BUILT_IN_POWL];
6545		  else
6546		    powfn = NULL_TREE;
6547
6548		  if (powfn)
6549		    {
6550		      tree arg = build_real (type, dconst2);
6551		      tree arglist = build_tree_list (NULL_TREE, arg);
6552		      arglist = tree_cons (NULL_TREE, arg0, arglist);
6553		      return build_function_call_expr (powfn, arglist);
6554		    }
6555		}
6556	    }
6557	}
6558      goto associate;
6559
6560    case BIT_IOR_EXPR:
6561    bit_ior:
6562      if (integer_all_onesp (arg1))
6563	return omit_one_operand (type, arg1, arg0);
6564      if (integer_zerop (arg1))
6565	return non_lvalue (fold_convert (type, arg0));
6566      t1 = distribute_bit_expr (code, type, arg0, arg1);
6567      if (t1 != NULL_TREE)
6568	return t1;
6569
6570      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6571
6572	 This results in more efficient code for machines without a NAND
6573	 instruction.  Combine will canonicalize to the first form
6574	 which will allow use of NAND instructions provided by the
6575	 backend if they exist.  */
6576      if (TREE_CODE (arg0) == BIT_NOT_EXPR
6577	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
6578	{
6579	  return fold (build1 (BIT_NOT_EXPR, type,
6580			       build (BIT_AND_EXPR, type,
6581				      TREE_OPERAND (arg0, 0),
6582				      TREE_OPERAND (arg1, 0))));
6583	}
6584
6585      /* See if this can be simplified into a rotate first.  If that
6586	 is unsuccessful continue in the association code.  */
6587      goto bit_rotate;
6588
6589    case BIT_XOR_EXPR:
6590      if (integer_zerop (arg1))
6591	return non_lvalue (fold_convert (type, arg0));
6592      if (integer_all_onesp (arg1))
6593	return fold (build1 (BIT_NOT_EXPR, type, arg0));
6594
6595      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6596         with a constant, and the two constants have no bits in common,
6597	 we should treat this as a BIT_IOR_EXPR since this may produce more
6598	 simplifications.  */
6599      if (TREE_CODE (arg0) == BIT_AND_EXPR
6600	  && TREE_CODE (arg1) == BIT_AND_EXPR
6601	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6602	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6603	  && integer_zerop (const_binop (BIT_AND_EXPR,
6604					 TREE_OPERAND (arg0, 1),
6605					 TREE_OPERAND (arg1, 1), 0)))
6606	{
6607	  code = BIT_IOR_EXPR;
6608	  goto bit_ior;
6609	}
6610
6611      /* See if this can be simplified into a rotate first.  If that
6612	 is unsuccessful continue in the association code.  */
6613      goto bit_rotate;
6614
6615    case BIT_AND_EXPR:
6616      if (integer_all_onesp (arg1))
6617	return non_lvalue (fold_convert (type, arg0));
6618      if (integer_zerop (arg1))
6619	return omit_one_operand (type, arg1, arg0);
6620      t1 = distribute_bit_expr (code, type, arg0, arg1);
6621      if (t1 != NULL_TREE)
6622	return t1;
6623      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
6624      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6625	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6626	{
6627	  unsigned int prec
6628	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6629
6630	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6631	      && (~TREE_INT_CST_LOW (arg1)
6632		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6633	    return fold_convert (type, TREE_OPERAND (arg0, 0));
6634	}
6635
6636      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6637
6638	 This results in more efficient code for machines without a NOR
6639	 instruction.  Combine will canonicalize to the first form
6640	 which will allow use of NOR instructions provided by the
6641	 backend if they exist.  */
6642      if (TREE_CODE (arg0) == BIT_NOT_EXPR
6643	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
6644	{
6645	  return fold (build1 (BIT_NOT_EXPR, type,
6646			       build (BIT_IOR_EXPR, type,
6647				      TREE_OPERAND (arg0, 0),
6648				      TREE_OPERAND (arg1, 0))));
6649	}
6650
6651      goto associate;
6652
6653    case RDIV_EXPR:
6654      /* Don't touch a floating-point divide by zero unless the mode
6655	 of the constant can represent infinity.  */
6656      if (TREE_CODE (arg1) == REAL_CST
6657	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6658	  && real_zerop (arg1))
6659	return t;
6660
6661      /* (-A) / (-B) -> A / B  */
6662      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6663	return fold (build (RDIV_EXPR, type,
6664			    TREE_OPERAND (arg0, 0),
6665			    negate_expr (arg1)));
6666      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6667	return fold (build (RDIV_EXPR, type,
6668			    negate_expr (arg0),
6669			    TREE_OPERAND (arg1, 0)));
6670
6671      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
6672      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6673	  && real_onep (arg1))
6674	return non_lvalue (fold_convert (type, arg0));
6675
6676      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
6677      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6678	  && real_minus_onep (arg1))
6679	return non_lvalue (fold_convert (type, negate_expr (arg0)));
6680
6681      /* If ARG1 is a constant, we can convert this to a multiply by the
6682	 reciprocal.  This does not have the same rounding properties,
6683	 so only do this if -funsafe-math-optimizations.  We can actually
6684	 always safely do it if ARG1 is a power of two, but it's hard to
6685	 tell whether it is in a portable manner.  */
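      /* For instance, "x / 2.0" can become "x * 0.5" whenever we are
	 optimizing, since 0.5 is exactly representable, while "x / 3.0"
	 becomes "x * (1.0/3.0)" only under -funsafe-math-optimizations.  */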
6686      if (TREE_CODE (arg1) == REAL_CST)
6687	{
6688	  if (flag_unsafe_math_optimizations
6689	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
6690					  arg1, 0)))
6691	    return fold (build (MULT_EXPR, type, arg0, tem));
6692	  /* Find the reciprocal if optimizing and the result is exact.  */
6693	  if (optimize)
6694	    {
6695	      REAL_VALUE_TYPE r;
6696	      r = TREE_REAL_CST (arg1);
6697	      if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
6698		{
6699		  tem = build_real (type, r);
6700		  return fold (build (MULT_EXPR, type, arg0, tem));
6701		}
6702	    }
6703	}
6704      /* Convert A/B/C to A/(B*C).  */
6705      if (flag_unsafe_math_optimizations
6706	  && TREE_CODE (arg0) == RDIV_EXPR)
6707	return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6708			    fold (build (MULT_EXPR, type,
6709					 TREE_OPERAND (arg0, 1), arg1))));
6710
6711      /* Convert A/(B/C) to (A/B)*C.  */
6712      if (flag_unsafe_math_optimizations
6713	  && TREE_CODE (arg1) == RDIV_EXPR)
6714	return fold (build (MULT_EXPR, type,
6715			    fold (build (RDIV_EXPR, type, arg0,
6716					 TREE_OPERAND (arg1, 0))),
6717			    TREE_OPERAND (arg1, 1)));
6718
6719      /* Convert C1/(X*C2) into (C1/C2)/X.  */
6720      if (flag_unsafe_math_optimizations
6721	  && TREE_CODE (arg1) == MULT_EXPR
6722	  && TREE_CODE (arg0) == REAL_CST
6723	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6724	{
6725	  tree tem = const_binop (RDIV_EXPR, arg0,
6726				  TREE_OPERAND (arg1, 1), 0);
6727	  if (tem)
6728	    return fold (build (RDIV_EXPR, type, tem,
6729				TREE_OPERAND (arg1, 0)));
6730	}
6731
6732      if (flag_unsafe_math_optimizations)
6733	{
6734	  enum built_in_function fcode = builtin_mathfn_code (arg1);
6735	  /* Optimize x/expN(y) into x*expN(-y).  */
6736	  if (fcode == BUILT_IN_EXP
6737	      || fcode == BUILT_IN_EXPF
6738	      || fcode == BUILT_IN_EXPL
6739	      || fcode == BUILT_IN_EXP2
6740	      || fcode == BUILT_IN_EXP2F
6741	      || fcode == BUILT_IN_EXP2L
6742	      || fcode == BUILT_IN_EXP10
6743	      || fcode == BUILT_IN_EXP10F
6744	      || fcode == BUILT_IN_EXP10L
6745	      || fcode == BUILT_IN_POW10
6746	      || fcode == BUILT_IN_POW10F
6747	      || fcode == BUILT_IN_POW10L)
6748	    {
6749	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6750	      tree arg = build1 (NEGATE_EXPR, type,
6751				 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6752	      tree arglist = build_tree_list (NULL_TREE, fold (arg));
6753	      arg1 = build_function_call_expr (expfn, arglist);
6754	      return fold (build (MULT_EXPR, type, arg0, arg1));
6755	    }
6756
6757	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
6758	  if (fcode == BUILT_IN_POW
6759	      || fcode == BUILT_IN_POWF
6760	      || fcode == BUILT_IN_POWL)
6761	    {
6762	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6763	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6764	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6765	      tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6766	      tree arglist = tree_cons (NULL_TREE, arg10,
6767					build_tree_list (NULL_TREE, neg11));
6768	      arg1 = build_function_call_expr (powfn, arglist);
6769	      return fold (build (MULT_EXPR, type, arg0, arg1));
6770	    }
6771	}
6772
6773      if (flag_unsafe_math_optimizations)
6774	{
6775	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6776	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6777
6778	  /* Optimize sin(x)/cos(x) as tan(x).  */
6779	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6780	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6781	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6782	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6783				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6784	    {
6785	      tree tanfn;
6786
6787	      if (fcode0 == BUILT_IN_SIN)
6788		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6789	      else if (fcode0 == BUILT_IN_SINF)
6790		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6791	      else if (fcode0 == BUILT_IN_SINL)
6792		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6793	      else
6794		tanfn = NULL_TREE;
6795
6796	      if (tanfn != NULL_TREE)
6797		return build_function_call_expr (tanfn,
6798						 TREE_OPERAND (arg0, 1));
6799	    }
6800
6801	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
6802	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6803	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6804	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6805	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6806				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6807	    {
6808	      tree tanfn;
6809
6810	      if (fcode0 == BUILT_IN_COS)
6811		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6812	      else if (fcode0 == BUILT_IN_COSF)
6813		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6814	      else if (fcode0 == BUILT_IN_COSL)
6815		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6816	      else
6817		tanfn = NULL_TREE;
6818
6819	      if (tanfn != NULL_TREE)
6820		{
6821		  tree tmp = TREE_OPERAND (arg0, 1);
6822		  tmp = build_function_call_expr (tanfn, tmp);
6823		  return fold (build (RDIV_EXPR, type,
6824				      build_real (type, dconst1),
6825				      tmp));
6826		}
6827	    }
6828
6829	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
6830	  if (fcode0 == BUILT_IN_POW
6831	      || fcode0 == BUILT_IN_POWF
6832	      || fcode0 == BUILT_IN_POWL)
6833	    {
6834	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6835	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6836	      if (TREE_CODE (arg01) == REAL_CST
6837		  && ! TREE_CONSTANT_OVERFLOW (arg01)
6838		  && operand_equal_p (arg1, arg00, 0))
6839		{
6840		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6841		  REAL_VALUE_TYPE c;
6842		  tree arg, arglist;
6843
6844		  c = TREE_REAL_CST (arg01);
6845		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6846		  arg = build_real (type, c);
6847		  arglist = build_tree_list (NULL_TREE, arg);
6848		  arglist = tree_cons (NULL_TREE, arg1, arglist);
6849		  return build_function_call_expr (powfn, arglist);
6850		}
6851	    }
6852	}
6853      goto binary;
6854
6855    case TRUNC_DIV_EXPR:
6856    case ROUND_DIV_EXPR:
6857    case FLOOR_DIV_EXPR:
6858    case CEIL_DIV_EXPR:
6859    case EXACT_DIV_EXPR:
6860      if (integer_onep (arg1))
6861	return non_lvalue (fold_convert (type, arg0));
6862      if (integer_zerop (arg1))
6863	return t;
6864
6865      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6866	 operation, EXACT_DIV_EXPR.
6867
6868	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6869	 At one time the others generated faster code; it's not clear if they
6870	 still do after the last round of changes to the DIV code in expmed.c.  */
6871      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6872	  && multiple_of_p (type, arg0, arg1))
6873	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6874
6875      if (TREE_CODE (arg1) == INTEGER_CST
6876	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6877					 code, NULL_TREE)))
6878	return fold_convert (type, tem);
6879
6880      goto binary;
6881
6882    case CEIL_MOD_EXPR:
6883    case FLOOR_MOD_EXPR:
6884    case ROUND_MOD_EXPR:
6885    case TRUNC_MOD_EXPR:
6886      if (integer_onep (arg1))
6887	return omit_one_operand (type, integer_zero_node, arg0);
6888      if (integer_zerop (arg1))
6889	return t;
6890
6891      if (TREE_CODE (arg1) == INTEGER_CST
6892	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6893					 code, NULL_TREE)))
6894	return fold_convert (type, tem);
6895
6896      goto binary;
6897
6898    case LROTATE_EXPR:
6899    case RROTATE_EXPR:
6900      if (integer_all_onesp (arg0))
6901	return omit_one_operand (type, arg0, arg1);
6902      goto shift;
6903
6904    case RSHIFT_EXPR:
6905      /* Optimize -1 >> x for arithmetic right shifts.  */
6906      if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6907	return omit_one_operand (type, arg0, arg1);
6908      /* ... fall through ...  */
6909
6910    case LSHIFT_EXPR:
6911    shift:
6912      if (integer_zerop (arg1))
6913	return non_lvalue (fold_convert (type, arg0));
6914      if (integer_zerop (arg0))
6915	return omit_one_operand (type, arg0, arg1);
6916
6917	      /* Since a negative shift count is not well-defined,
6918	 don't try to compute it in the compiler.  */
6919      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6920	return t;
6921      /* Rewrite an LROTATE_EXPR by a constant into an
6922	 RROTATE_EXPR by a new constant.  */
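      /* For instance, in a 32-bit type a rotate left by 8 becomes a
	 rotate right by 32 - 8 == 24.  */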
6923      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6924	{
6925	  tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6926	  tem = fold_convert (TREE_TYPE (arg1), tem);
6927	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6928	  return fold (build (RROTATE_EXPR, type, arg0, tem));
6929	}
6930
6931      /* If we have a rotate of a bit operation with the rotate count and
6932	 the second operand of the bit operation both constant,
6933	 permute the two operations.  */
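      /* E.g. "(x & 0xff) rotated right by 8" becomes
	 "(x rotated right by 8) & (0xff rotated right by 8)".  */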
6934      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6935	  && (TREE_CODE (arg0) == BIT_AND_EXPR
6936	      || TREE_CODE (arg0) == BIT_IOR_EXPR
6937	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
6938	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6939	return fold (build (TREE_CODE (arg0), type,
6940			    fold (build (code, type,
6941					 TREE_OPERAND (arg0, 0), arg1)),
6942			    fold (build (code, type,
6943					 TREE_OPERAND (arg0, 1), arg1))));
6944
6945      /* Two consecutive rotates adding up to the width of the mode can
6946	 be ignored.  */
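      /* E.g. rotating a 32-bit value right by 10 and then by 22 brings
	 every bit back to its original position.  */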
6947      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6948	  && TREE_CODE (arg0) == RROTATE_EXPR
6949	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6950	  && TREE_INT_CST_HIGH (arg1) == 0
6951	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6952	  && ((TREE_INT_CST_LOW (arg1)
6953	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6954	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6955	return TREE_OPERAND (arg0, 0);
6956
6957      goto binary;
6958
6959    case MIN_EXPR:
6960      if (operand_equal_p (arg0, arg1, 0))
6961	return omit_one_operand (type, arg0, arg1);
6962      if (INTEGRAL_TYPE_P (type)
6963	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6964	return omit_one_operand (type, arg1, arg0);
6965      goto associate;
6966
6967    case MAX_EXPR:
6968      if (operand_equal_p (arg0, arg1, 0))
6969	return omit_one_operand (type, arg0, arg1);
6970      if (INTEGRAL_TYPE_P (type)
6971	  && TYPE_MAX_VALUE (type)
6972	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6973	return omit_one_operand (type, arg1, arg0);
6974      goto associate;
6975
6976    case TRUTH_NOT_EXPR:
6977      /* Note that the operand of this must be an int
6978	 and its values must be 0 or 1.
6979	 ("true" is a fixed value perhaps depending on the language,
6980	 but we don't handle values other than 1 correctly yet.)  */
6981      tem = invert_truthvalue (arg0);
6982      /* Avoid infinite recursion.  */
6983      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6984	{
6985	  tem = fold_single_bit_test (code, arg0, arg1, type);
6986	  if (tem)
6987	    return tem;
6988	  return t;
6989	}
6990      return fold_convert (type, tem);
6991
6992    case TRUTH_ANDIF_EXPR:
6993      /* Note that the operands of this must be ints
6994	 and their values must be 0 or 1.
6995	 ("true" is a fixed value perhaps depending on the language.)  */
6996      /* If first arg is constant zero, return it.  */
6997      if (integer_zerop (arg0))
6998	return fold_convert (type, arg0);
6999    case TRUTH_AND_EXPR:
7000      /* If either arg is constant true, drop it.  */
7001      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7002	return non_lvalue (fold_convert (type, arg1));
7003      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7004	  /* Preserve sequence points.  */
7005	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7006	return non_lvalue (fold_convert (type, arg0));
7007      /* If second arg is constant zero, result is zero, but first arg
7008	 must be evaluated.  */
7009      if (integer_zerop (arg1))
7010	return omit_one_operand (type, arg1, arg0);
7011      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7012	 case will be handled here.  */
7013      if (integer_zerop (arg0))
7014	return omit_one_operand (type, arg0, arg1);
7015
7016    truth_andor:
7017      /* We only do these simplifications if we are optimizing.  */
7018      if (!optimize)
7019	return t;
7020
7021      /* Check for things like (A || B) && (A || C).  We can convert this
7022	 to A || (B && C).  Note that either operator can be any of the four
7023	 truth and/or operations and the transformation will still be
7024	 valid.  Also note that we only care about order for the
7025	 ANDIF and ORIF operators.  If B contains side effects, this
7026	 might change the truth-value of A.  */
7027      if (TREE_CODE (arg0) == TREE_CODE (arg1)
7028	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7029	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7030	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
7031	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7032	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7033	{
7034	  tree a00 = TREE_OPERAND (arg0, 0);
7035	  tree a01 = TREE_OPERAND (arg0, 1);
7036	  tree a10 = TREE_OPERAND (arg1, 0);
7037	  tree a11 = TREE_OPERAND (arg1, 1);
7038	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7039			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7040			     && (code == TRUTH_AND_EXPR
7041				 || code == TRUTH_OR_EXPR));
7042
7043	  if (operand_equal_p (a00, a10, 0))
7044	    return fold (build (TREE_CODE (arg0), type, a00,
7045				fold (build (code, type, a01, a11))));
7046	  else if (commutative && operand_equal_p (a00, a11, 0))
7047	    return fold (build (TREE_CODE (arg0), type, a00,
7048				fold (build (code, type, a01, a10))));
7049	  else if (commutative && operand_equal_p (a01, a10, 0))
7050	    return fold (build (TREE_CODE (arg0), type, a01,
7051				fold (build (code, type, a00, a11))));
7052
7053	  /* This case is tricky because we must either have commutative
7054	     operators or else A10 must not have side-effects.  */
7055
7056	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7057		   && operand_equal_p (a01, a11, 0))
7058	    return fold (build (TREE_CODE (arg0), type,
7059				fold (build (code, type, a00, a10)),
7060				a01));
7061	}
7062
7063      /* See if we can build a range comparison.  */
7064      if (0 != (tem = fold_range_test (t)))
7065	return tem;
7066
7067      /* Check for the possibility of merging component references.  If our
7068	 lhs is another similar operation, try to merge its rhs with our
7069	 rhs.  Then try to merge our lhs and rhs.  */
7070      if (TREE_CODE (arg0) == code
7071	  && 0 != (tem = fold_truthop (code, type,
7072				       TREE_OPERAND (arg0, 1), arg1)))
7073	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7074
7075      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7076	return tem;
7077
7078      return t;
7079
7080    case TRUTH_ORIF_EXPR:
7081      /* Note that the operands of this must be ints
7082	 and their values must be 0 or true.
7083	 ("true" is a fixed value perhaps depending on the language.)  */
7084      /* If first arg is constant true, return it.  */
7085      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7086	return fold_convert (type, arg0);
7087    case TRUTH_OR_EXPR:
7088      /* If either arg is constant zero, drop it.  */
7089      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7090	return non_lvalue (fold_convert (type, arg1));
7091      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7092	  /* Preserve sequence points.  */
7093	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7094	return non_lvalue (fold_convert (type, arg0));
7095      /* If second arg is constant true, result is true, but we must
7096	 evaluate first arg.  */
7097      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7098	return omit_one_operand (type, arg1, arg0);
7099      /* Likewise for first arg, but note this only occurs here for
7100	 TRUTH_OR_EXPR.  */
7101      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7102	return omit_one_operand (type, arg0, arg1);
7103      goto truth_andor;
7104
7105    case TRUTH_XOR_EXPR:
7106      /* If either arg is constant zero, drop it.  */
7107      if (integer_zerop (arg0))
7108	return non_lvalue (fold_convert (type, arg1));
7109      if (integer_zerop (arg1))
7110	return non_lvalue (fold_convert (type, arg0));
7111      /* If either arg is constant true, this is a logical inversion.  */
7112      if (integer_onep (arg0))
7113	return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7114      if (integer_onep (arg1))
7115	return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7116      return t;
7117
7118    case EQ_EXPR:
7119    case NE_EXPR:
7120    case LT_EXPR:
7121    case GT_EXPR:
7122    case LE_EXPR:
7123    case GE_EXPR:
7124      /* If one arg is a real or integer constant, put it last.  */
7125      if (tree_swap_operands_p (arg0, arg1, true))
7126	return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7127
7128      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7129	{
7130	  tree targ0 = strip_float_extensions (arg0);
7131	  tree targ1 = strip_float_extensions (arg1);
7132	  tree newtype = TREE_TYPE (targ0);
7133
7134	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7135	    newtype = TREE_TYPE (targ1);
7136
7137	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
7138	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7139	    return fold (build (code, type, fold_convert (newtype, targ0),
7140				fold_convert (newtype, targ1)));
7141
7142	  /* (-a) CMP (-b) -> b CMP a  */
7143	  if (TREE_CODE (arg0) == NEGATE_EXPR
7144	      && TREE_CODE (arg1) == NEGATE_EXPR)
7145	    return fold (build (code, type, TREE_OPERAND (arg1, 0),
7146				TREE_OPERAND (arg0, 0)));
7147
7148	  if (TREE_CODE (arg1) == REAL_CST)
7149	  {
7150	    REAL_VALUE_TYPE cst;
7151	    cst = TREE_REAL_CST (arg1);
7152
7153	    /* (-a) CMP CST -> a swap(CMP) (-CST)  */
7154	    if (TREE_CODE (arg0) == NEGATE_EXPR)
7155	      return
7156		fold (build (swap_tree_comparison (code), type,
7157			     TREE_OPERAND (arg0, 0),
7158			     build_real (TREE_TYPE (arg1),
7159					 REAL_VALUE_NEGATE (cst))));
7160
7161	    /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
7162	    /* a CMP (-0) -> a CMP 0  */
7163	    if (REAL_VALUE_MINUS_ZERO (cst))
7164	      return fold (build (code, type, arg0,
7165				  build_real (TREE_TYPE (arg1), dconst0)));
7166
7167	    /* x != NaN is always true, other ops are always false.  */
7168	    if (REAL_VALUE_ISNAN (cst)
7169		&& ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7170	      {
7171		t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7172		return omit_one_operand (type, fold_convert (type, t), arg0);
7173	      }
7174
7175	    /* Fold comparisons against infinity.  */
7176	    if (REAL_VALUE_ISINF (cst))
7177	      {
7178		tem = fold_inf_compare (code, type, arg0, arg1);
7179		if (tem != NULL_TREE)
7180		  return tem;
7181	      }
7182	  }
7183
7184	  /* If this is a comparison of a real constant with a PLUS_EXPR
7185	     or a MINUS_EXPR of a real constant, we can convert it into a
7186	     comparison with a revised real constant, provided that
7187	     unsafe_math_optimizations are enabled and no overflow occurs.  */
7188	  if (flag_unsafe_math_optimizations
7189	      && TREE_CODE (arg1) == REAL_CST
7190	      && (TREE_CODE (arg0) == PLUS_EXPR
7191		  || TREE_CODE (arg0) == MINUS_EXPR)
7192	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7193	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7194					  ? MINUS_EXPR : PLUS_EXPR,
7195					  arg1, TREE_OPERAND (arg0, 1), 0))
7196	      && ! TREE_CONSTANT_OVERFLOW (tem))
7197	    return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7198
7199	  /* Likewise, we can simplify a comparison of a real constant with
7200	     a MINUS_EXPR whose first operand is also a real constant, i.e.
7201	     (c1 - x) < c2 becomes x > c1-c2.  */
7202	  if (flag_unsafe_math_optimizations
7203	      && TREE_CODE (arg1) == REAL_CST
7204	      && TREE_CODE (arg0) == MINUS_EXPR
7205	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7206	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7207					  arg1, 0))
7208	      && ! TREE_CONSTANT_OVERFLOW (tem))
7209	    return fold (build (swap_tree_comparison (code), type,
7210				TREE_OPERAND (arg0, 1), tem));
7211
7212	  /* Fold comparisons against built-in math functions.  */
7213	  if (TREE_CODE (arg1) == REAL_CST
7214	      && flag_unsafe_math_optimizations
7215	      && ! flag_errno_math)
7216	    {
7217	      enum built_in_function fcode = builtin_mathfn_code (arg0);
7218
7219	      if (fcode != END_BUILTINS)
7220		{
7221		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7222		  if (tem != NULL_TREE)
7223		    return tem;
7224		}
7225	    }
7226	}
7227
7228      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
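      /* For instance, "i++ == 5" becomes "++i == 6"; the pre-increment
	 form compares the updated value, so the constant must be
	 adjusted by the same increment.  */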
7229      if (TREE_CONSTANT (arg1)
7230	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7231	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7232	  /* This optimization is invalid for ordered comparisons
7233	     if CONST+INCR overflows or if foo+incr might overflow.
7234	     This optimization is invalid for floating point due to rounding.
7235	     For pointer types we assume overflow doesn't happen.  */
7236	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
7237	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7238		  && (code == EQ_EXPR || code == NE_EXPR))))
7239	{
7240	  tree varop, newconst;
7241
7242	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7243	    {
7244	      newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
7245				      arg1, TREE_OPERAND (arg0, 1)));
7246	      varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7247			     TREE_OPERAND (arg0, 0),
7248			     TREE_OPERAND (arg0, 1));
7249	    }
7250	  else
7251	    {
7252	      newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
7253				      arg1, TREE_OPERAND (arg0, 1)));
7254	      varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7255			     TREE_OPERAND (arg0, 0),
7256			     TREE_OPERAND (arg0, 1));
7257	    }
7258
7259
7260	  /* If VAROP is a reference to a bitfield, we must mask
7261	     the constant by the width of the field.  */
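	  /* E.g. for a 5-bit field in a 32-bit VAROP the constant is
	     shifted left and then back right by 32 - 5 == 27 bits,
	     discarding any bits that do not fit in the field.  */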
7262	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7263	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7264	    {
7265	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7266	      int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7267	      tree folded_compare, shift;
7268
7269	      /* First check whether the comparison would come out
7270		 always the same.  If we don't do that we would
7271		 change the meaning with the masking.  */
7272	      folded_compare = fold (build (code, type,
7273					    TREE_OPERAND (varop, 0),
7274					    arg1));
7275	      if (integer_zerop (folded_compare)
7276		  || integer_onep (folded_compare))
7277		return omit_one_operand (type, folded_compare, varop);
7278
7279	      shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7280				   0);
7281	      newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop),
7282				      newconst, shift));
7283	      newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop),
7284				      newconst, shift));
7285	    }
7286
7287	  return fold (build (code, type, varop, newconst));
7288	}
7289
7290      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7291	 This transformation affects the cases which are handled in later
7292	 optimizations involving comparisons with non-negative constants.  */
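      /* E.g. "x >= 5" becomes "x > 4" and "x < 5" becomes "x <= 4".  */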
7293      if (TREE_CODE (arg1) == INTEGER_CST
7294	  && TREE_CODE (arg0) != INTEGER_CST
7295	  && tree_int_cst_sgn (arg1) > 0)
7296	{
7297	  switch (code)
7298	    {
7299	    case GE_EXPR:
7300	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7301	      return fold (build (GT_EXPR, type, arg0, arg1));
7302
7303	    case LT_EXPR:
7304	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7305	      return fold (build (LE_EXPR, type, arg0, arg1));
7306
7307	    default:
7308	      break;
7309	    }
7310	}
7311
7312      /* Comparisons with the highest or lowest possible integer of
7313	 the specified size will have known values.  */
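      /* E.g. if X and the constant are both unsigned char, "x > 255"
	 is known to be false and "x <= 255" known to be true.  */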
7314      {
7315	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7316
7317	if (TREE_CODE (arg1) == INTEGER_CST
7318	    && ! TREE_CONSTANT_OVERFLOW (arg1)
7319	    && width <= HOST_BITS_PER_WIDE_INT
7320	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7321		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
7322	  {
7323	    unsigned HOST_WIDE_INT signed_max;
7324	    unsigned HOST_WIDE_INT max, min;
7325
7326	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7327
7328	    if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7329	      {
7330	        max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7331		min = 0;
7332	      }
7333	    else
7334	      {
7335	        max = signed_max;
7336		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7337	      }
7338
7339	    if (TREE_INT_CST_HIGH (arg1) == 0
7340		&& TREE_INT_CST_LOW (arg1) == max)
7341	      switch (code)
7342		{
7343		case GT_EXPR:
7344		  return omit_one_operand (type,
7345					   fold_convert (type,
7346							 integer_zero_node),
7347					   arg0);
7348		case GE_EXPR:
7349		  return fold (build (EQ_EXPR, type, arg0, arg1));
7350
7351		case LE_EXPR:
7352		  return omit_one_operand (type,
7353					   fold_convert (type,
7354							 integer_one_node),
7355					   arg0);
7356		case LT_EXPR:
7357		  return fold (build (NE_EXPR, type, arg0, arg1));
7358
7359		/* The GE_EXPR and LT_EXPR cases above are not normally
7360		   reached because of previous transformations.  */
7361
7362		default:
7363		  break;
7364		}
7365	    else if (TREE_INT_CST_HIGH (arg1) == 0
7366		     && TREE_INT_CST_LOW (arg1) == max - 1)
7367	      switch (code)
7368		{
7369		case GT_EXPR:
7370		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7371		  return fold (build (EQ_EXPR, type, arg0, arg1));
7372		case LE_EXPR:
7373		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7374		  return fold (build (NE_EXPR, type, arg0, arg1));
7375		default:
7376		  break;
7377		}
7378	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7379		     && TREE_INT_CST_LOW (arg1) == min)
7380	      switch (code)
7381		{
7382		case LT_EXPR:
7383		  return omit_one_operand (type,
7384					   fold_convert (type,
7385							 integer_zero_node),
7386					   arg0);
7387		case LE_EXPR:
7388		  return fold (build (EQ_EXPR, type, arg0, arg1));
7389
7390		case GE_EXPR:
7391		  return omit_one_operand (type,
7392					   fold_convert (type,
7393							 integer_one_node),
7394					   arg0);
7395		case GT_EXPR:
7396		  return fold (build (NE_EXPR, type, arg0, arg1));
7397
7398		default:
7399		  break;
7400		}
7401	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7402		     && TREE_INT_CST_LOW (arg1) == min + 1)
7403	      switch (code)
7404		{
7405		case GE_EXPR:
7406		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7407		  return fold (build (NE_EXPR, type, arg0, arg1));
7408		case LT_EXPR:
7409		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7410		  return fold (build (EQ_EXPR, type, arg0, arg1));
7411		default:
7412		  break;
7413		}
7414
7415	    else if (TREE_INT_CST_HIGH (arg1) == 0
7416		     && TREE_INT_CST_LOW (arg1) == signed_max
7417		     && TREE_UNSIGNED (TREE_TYPE (arg1))
7418		     /* signed_type does not work on pointer types.  */
7419		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7420	      {
7421		/* The following case also applies to X < signed_max+1
7422		   and X >= signed_max+1 because of previous transformations.  */
7423		if (code == LE_EXPR || code == GT_EXPR)
7424		  {
7425		    tree st0, st1;
7426		    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7427		    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7428		    return fold
7429		      (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7430			      type, fold_convert (st0, arg0),
7431			      fold_convert (st1, integer_zero_node)));
7432		  }
7433	      }
7434	  }
7435      }
7436
7437      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7438	 a MINUS_EXPR of a constant, we can convert it into a comparison with
7439	 a revised constant as long as no overflow occurs.  */
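      /* E.g. "x + 3 == 7" becomes "x == 4" and "x - 3 == 7" becomes
	 "x == 10", as long as computing the adjusted constant does not
	 overflow.  */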
7440      if ((code == EQ_EXPR || code == NE_EXPR)
7441	  && TREE_CODE (arg1) == INTEGER_CST
7442	  && (TREE_CODE (arg0) == PLUS_EXPR
7443	      || TREE_CODE (arg0) == MINUS_EXPR)
7444	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7445	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7446				      ? MINUS_EXPR : PLUS_EXPR,
7447				      arg1, TREE_OPERAND (arg0, 1), 0))
7448	  && ! TREE_CONSTANT_OVERFLOW (tem))
7449	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7450
7451      /* Similarly for a NEGATE_EXPR.  */
7452      else if ((code == EQ_EXPR || code == NE_EXPR)
7453	       && TREE_CODE (arg0) == NEGATE_EXPR
7454	       && TREE_CODE (arg1) == INTEGER_CST
7455	       && 0 != (tem = negate_expr (arg1))
7456	       && TREE_CODE (tem) == INTEGER_CST
7457	       && ! TREE_CONSTANT_OVERFLOW (tem))
7458	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7459
7460      /* If we have X - Y == 0, we can convert that to X == Y and similarly
7461	 for !=.  Don't do this for ordered comparisons due to overflow.  */
7462      else if ((code == NE_EXPR || code == EQ_EXPR)
7463	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7464	return fold (build (code, type,
7465			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7466
7467      /* If we are widening one operand of an integer comparison,
7468	 see if the other operand is similarly being widened.  Perhaps we
7469	 can do the comparison in the narrower type.  */
7470      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7471	       && TREE_CODE (arg0) == NOP_EXPR
7472	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7473	       && (code == EQ_EXPR || code == NE_EXPR
7474		   || TREE_UNSIGNED (TREE_TYPE (arg0))
7475		      == TREE_UNSIGNED (TREE_TYPE (tem)))
7476	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7477	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
7478		   || (TREE_CODE (t1) == INTEGER_CST
7479		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
7480	return fold (build (code, type, tem,
7481			    fold_convert (TREE_TYPE (tem), t1)));
7482
7483      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7484	 constant, we can simplify it.  */
7485      else if (TREE_CODE (arg1) == INTEGER_CST
7486	       && (TREE_CODE (arg0) == MIN_EXPR
7487		   || TREE_CODE (arg0) == MAX_EXPR)
7488	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7489	return optimize_minmax_comparison (t);
7490
7491      /* If we are comparing an ABS_EXPR with a constant, we can
7492	 convert all the cases into explicit comparisons, but they may
7493	 well not be faster than doing the ABS and one comparison.
7494	 But ABS (X) <= C is a range comparison, which becomes a subtraction
7495	 and a comparison, and is probably faster.  */
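      /* E.g. "abs (x) <= 5" becomes "x >= -5 && x <= 5".  */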
7496      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7497	       && TREE_CODE (arg0) == ABS_EXPR
7498	       && ! TREE_SIDE_EFFECTS (arg0)
7499	       && (0 != (tem = negate_expr (arg1)))
7500	       && TREE_CODE (tem) == INTEGER_CST
7501	       && ! TREE_CONSTANT_OVERFLOW (tem))
7502	return fold (build (TRUTH_ANDIF_EXPR, type,
7503			    build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7504			    build (LE_EXPR, type,
7505				   TREE_OPERAND (arg0, 0), arg1)));
7506
7507      /* If this is an EQ or NE comparison with zero and ARG0 is
7508	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
7509	 two operations, but the latter can be done in one less insn
7510	 on machines that have only two-operand insns or on which a
7511	 constant cannot be the first operand.  */
7512      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7513	  && TREE_CODE (arg0) == BIT_AND_EXPR)
7514	{
7515	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7516	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7517	    return
7518	      fold (build (code, type,
7519			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
7520				  build (RSHIFT_EXPR,
7521					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7522					 TREE_OPERAND (arg0, 1),
7523					 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7524				  fold_convert (TREE_TYPE (arg0),
7525						integer_one_node)),
7526			   arg1));
7527	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7528		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7529	    return
7530	      fold (build (code, type,
7531			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
7532				  build (RSHIFT_EXPR,
7533					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7534					 TREE_OPERAND (arg0, 0),
7535					 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7536				  fold_convert (TREE_TYPE (arg0),
7537						integer_one_node)),
7538			   arg1));
7539	}
7540
7541      /* If this is an NE or EQ comparison of zero against the result of a
7542	 signed MOD operation whose second operand is a power of 2, make
7543	 the MOD operation unsigned since it is simpler and equivalent.  */
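      /* E.g. a signed "x % 4 == 0" is computed as
	 "(unsigned) x % 4U == 0"; with a power-of-2 divisor the
	 remainder is zero for exactly the same values of X.  */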
7544      if ((code == NE_EXPR || code == EQ_EXPR)
7545	  && integer_zerop (arg1)
7546	  && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7547	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7548	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
7549	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7550	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7551	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
7552	{
7553	  tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7554	  tree newmod = build (TREE_CODE (arg0), newtype,
7555			       fold_convert (newtype,
7556					     TREE_OPERAND (arg0, 0)),
7557			       fold_convert (newtype,
7558					     TREE_OPERAND (arg0, 1)));
7559
7560	  return build (code, type, newmod, fold_convert (newtype, arg1));
7561	}
7562
7563      /* If this is an NE comparison of zero with an AND of one, remove the
7564	 comparison since the AND will give the correct value.  */
7565      if (code == NE_EXPR && integer_zerop (arg1)
7566	  && TREE_CODE (arg0) == BIT_AND_EXPR
7567	  && integer_onep (TREE_OPERAND (arg0, 1)))
7568	return fold_convert (type, arg0);
7569
7570      /* If we have (A & C) == C where C is a power of 2, convert this into
7571	 (A & C) != 0.  Similarly for NE_EXPR.  */
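      /* E.g. "(x & 8) == 8" becomes "(x & 8) != 0", a direct test of
	 the single bit.  */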
7572      if ((code == EQ_EXPR || code == NE_EXPR)
7573	  && TREE_CODE (arg0) == BIT_AND_EXPR
7574	  && integer_pow2p (TREE_OPERAND (arg0, 1))
7575	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7576	return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7577			    arg0, integer_zero_node));
7578
7579      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7580	 2, then fold the expression into shifts and logical operations.  */
7581      tem = fold_single_bit_test (code, arg0, arg1, type);
7582      if (tem)
7583	return tem;
7584
7585      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7586	 Similarly for NE_EXPR.  */
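      /* E.g. "(x & 7) == 8" can never hold, since bit 3 of the
	 right-hand side is always cleared by the mask on the left.  */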
7587      if ((code == EQ_EXPR || code == NE_EXPR)
7588	  && TREE_CODE (arg0) == BIT_AND_EXPR
7589	  && TREE_CODE (arg1) == INTEGER_CST
7590	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7591	{
7592	  tree dandnotc
7593	    = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7594			   arg1, build1 (BIT_NOT_EXPR,
7595					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7596					 TREE_OPERAND (arg0, 1))));
7597	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7598	  if (integer_nonzerop (dandnotc))
7599	    return omit_one_operand (type, rslt, arg0);
7600	}
7601
7602      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7603	 Similarly for NE_EXPR.  */
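      /* E.g. "(x | 4) == 3" can never hold, since bit 2 is always set
	 on the left but clear on the right.  */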
7604      if ((code == EQ_EXPR || code == NE_EXPR)
7605	  && TREE_CODE (arg0) == BIT_IOR_EXPR
7606	  && TREE_CODE (arg1) == INTEGER_CST
7607	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7608	{
7609	  tree candnotd
7610	    = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7611			   TREE_OPERAND (arg0, 1),
7612			   build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7613	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7614	  if (integer_nonzerop (candnotd))
7615	    return omit_one_operand (type, rslt, arg0);
7616	}
7617
7618      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7619	 and similarly for >= into !=.  */
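      /* E.g. for unsigned X, "x < (1 << y)" holds exactly when every
	 bit of X at position Y or above is clear, i.e. when
	 "x >> y == 0".  */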
7620      if ((code == LT_EXPR || code == GE_EXPR)
7621	  && TREE_UNSIGNED (TREE_TYPE (arg0))
7622	  && TREE_CODE (arg1) == LSHIFT_EXPR
7623	  && integer_onep (TREE_OPERAND (arg1, 0)))
7624	return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7625		      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7626			     TREE_OPERAND (arg1, 1)),
7627		      fold_convert (TREE_TYPE (arg0), integer_zero_node));
7628
7629      else if ((code == LT_EXPR || code == GE_EXPR)
7630	       && TREE_UNSIGNED (TREE_TYPE (arg0))
7631	       && (TREE_CODE (arg1) == NOP_EXPR
7632		   || TREE_CODE (arg1) == CONVERT_EXPR)
7633	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7634	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7635	return
7636	  build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7637		 fold_convert (TREE_TYPE (arg0),
7638			       build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7639				      TREE_OPERAND (TREE_OPERAND (arg1, 0),
7640						    1))),
7641		 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7642
7643      /* Simplify comparison of something with itself.  (For IEEE
7644	 floating-point, we can only do some of these simplifications.)  */
7645      if (operand_equal_p (arg0, arg1, 0))
7646	{
7647	  switch (code)
7648	    {
7649	    case EQ_EXPR:
7650	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7651		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7652		return constant_boolean_node (1, type);
7653	      break;
7654
7655	    case GE_EXPR:
7656	    case LE_EXPR:
7657	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7658		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7659		return constant_boolean_node (1, type);
7660	      return fold (build (EQ_EXPR, type, arg0, arg1));
7661
7662	    case NE_EXPR:
7663	      /* For NE, we can only do this simplification if the type
7664		 is integer or we don't honor IEEE floating point NaNs.  */
7665	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7666		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7667		break;
7668	      /* ... fall through ...  */
7669	    case GT_EXPR:
7670	    case LT_EXPR:
7671	      return constant_boolean_node (0, type);
7672	    default:
7673	      abort ();
7674	    }
7675	}
7676
7677      /* If we are comparing an expression that just has comparisons
7678	 of two integer values, arithmetic expressions of those comparisons,
7679	 and constants, we can simplify it.  There are only three cases
7680	 to check: the two values can either be equal, the first can be
7681	 greater, or the second can be greater.  Fold the expression for
7682	 those three values.  Since each value must be 0 or 1, we have
7683	 eight possibilities, each of which corresponds to the constant 0
7684	 or 1 or one of the six possible comparisons.
7685
7686	 This handles common cases like (a > b) == 0 but also handles
7687	 expressions like  ((x > y) - (y > x)) > 0, which supposedly
7688	 occur in macroized code.  */
7689
7690      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7691	{
7692	  tree cval1 = 0, cval2 = 0;
7693	  int save_p = 0;
7694
7695	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7696	      /* Don't handle degenerate cases here; they should already
7697		 have been handled anyway.  */
7698	      && cval1 != 0 && cval2 != 0
7699	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7700	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7701	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7702	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7703	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7704	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7705				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7706	    {
7707	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7708	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7709
7710	      /* We can't just pass T to eval_subst in case cval1 or cval2
7711		 was the same as ARG1.  */
7712
7713	      tree high_result
7714		= fold (build (code, type,
7715			       eval_subst (arg0, cval1, maxval, cval2, minval),
7716			       arg1));
7717	      tree equal_result
7718		= fold (build (code, type,
7719			       eval_subst (arg0, cval1, maxval, cval2, maxval),
7720			       arg1));
7721	      tree low_result
7722		= fold (build (code, type,
7723			       eval_subst (arg0, cval1, minval, cval2, maxval),
7724			       arg1));
7725
7726	      /* All three of these results should be 0 or 1.  Confirm they
7727		 are.  Then use those values to select the proper code
7728		 to use.  */
7729
7730	      if ((integer_zerop (high_result)
7731		   || integer_onep (high_result))
7732		  && (integer_zerop (equal_result)
7733		      || integer_onep (equal_result))
7734		  && (integer_zerop (low_result)
7735		      || integer_onep (low_result)))
7736		{
7737		  /* Make a 3-bit mask with the high-order bit being the
7738		     value for `>', the next for `=', and the low for `<'.  */
7739		  switch ((integer_onep (high_result) * 4)
7740			  + (integer_onep (equal_result) * 2)
7741			  + integer_onep (low_result))
7742		    {
7743		    case 0:
7744		      /* Always false.  */
7745		      return omit_one_operand (type, integer_zero_node, arg0);
7746		    case 1:
7747		      code = LT_EXPR;
7748		      break;
7749		    case 2:
7750		      code = EQ_EXPR;
7751		      break;
7752		    case 3:
7753		      code = LE_EXPR;
7754		      break;
7755		    case 4:
7756		      code = GT_EXPR;
7757		      break;
7758		    case 5:
7759		      code = NE_EXPR;
7760		      break;
7761		    case 6:
7762		      code = GE_EXPR;
7763		      break;
7764		    case 7:
7765		      /* Always true.  */
7766		      return omit_one_operand (type, integer_one_node, arg0);
7767		    }
7768
7769		  t = build (code, type, cval1, cval2);
7770		  if (save_p)
7771		    return save_expr (t);
7772		  else
7773		    return fold (t);
7774		}
7775	    }
7776	}
7777
7778      /* If this is a comparison of a field, we may be able to simplify it.  */
7779      if (((TREE_CODE (arg0) == COMPONENT_REF
7780	    && (*lang_hooks.can_use_bit_fields_p) ())
7781	   || TREE_CODE (arg0) == BIT_FIELD_REF)
7782	  && (code == EQ_EXPR || code == NE_EXPR)
7783	  /* Handle the constant case even without -O
7784	     to make sure the warnings are given.  */
7785	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7786	{
7787	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7788	  if (t1)
7789	    return t1;
7790	}
7791
7792      /* If this is a comparison of complex values and either or both sides
7793	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7794	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7795	 This may prevent needless evaluations.  */
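      /* For example (illustrative): z == w with complex operands becomes
	 REALPART (z) == REALPART (w) && IMAGPART (z) == IMAGPART (w),
	 where the TRUTH_ANDIF_EXPR ensures the imaginary parts are only
	 compared when the real parts are found equal.  */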
7796      if ((code == EQ_EXPR || code == NE_EXPR)
7797	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7798	  && (TREE_CODE (arg0) == COMPLEX_EXPR
7799	      || TREE_CODE (arg1) == COMPLEX_EXPR
7800	      || TREE_CODE (arg0) == COMPLEX_CST
7801	      || TREE_CODE (arg1) == COMPLEX_CST))
7802	{
7803	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7804	  tree real0, imag0, real1, imag1;
7805
7806	  arg0 = save_expr (arg0);
7807	  arg1 = save_expr (arg1);
7808	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7809	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7810	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7811	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7812
7813	  return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7814			       : TRUTH_ORIF_EXPR),
7815			      type,
7816			      fold (build (code, type, real0, real1)),
7817			      fold (build (code, type, imag0, imag1))));
7818	}
7819
7820      /* Optimize comparisons of strlen vs zero to a compare of the
7821	 first character of the string vs zero.  To wit,
7822		strlen(ptr) == 0   =>  *ptr == 0
7823		strlen(ptr) != 0   =>  *ptr != 0
7824	 Other cases should reduce to one of these two (or a constant)
7825	 due to the return value of strlen being unsigned.  */
7826      if ((code == EQ_EXPR || code == NE_EXPR)
7827	  && integer_zerop (arg1)
7828	  && TREE_CODE (arg0) == CALL_EXPR)
7829	{
7830	  tree fndecl = get_callee_fndecl (arg0);
7831	  tree arglist;
7832
7833	  if (fndecl
7834	      && DECL_BUILT_IN (fndecl)
7835	      && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7836	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7837	      && (arglist = TREE_OPERAND (arg0, 1))
7838	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7839	      && ! TREE_CHAIN (arglist))
7840	    return fold (build (code, type,
7841				build1 (INDIRECT_REF, char_type_node,
7842					TREE_VALUE (arglist)),
7843				integer_zero_node));
7844	}
7845
7846      /* From here on, the only cases we handle are when the result is
7847	 known to be a constant.
7848
7849	 To compute GT, swap the arguments and do LT.
7850	 To compute GE, do LT and invert the result.
7851	 To compute LE, swap the arguments, do LT and invert the result.
7852	 To compute NE, do EQ and invert the result.
7853
7854	 Therefore, the code below must handle only EQ and LT.  */
7855
7856      if (code == LE_EXPR || code == GT_EXPR)
7857	{
7858	  tem = arg0, arg0 = arg1, arg1 = tem;
7859	  code = swap_tree_comparison (code);
7860	}
7861
7862      /* Note that it is safe to invert for real values here because we
7863	 check below for the one case where it matters.  */
7864
7865      t1 = NULL_TREE;
7866      invert = 0;
7867      if (code == NE_EXPR || code == GE_EXPR)
7868	{
7869	  invert = 1;
7870	  code = invert_tree_comparison (code);
7871	}
7872
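      /* For example (illustrative): a != b is evaluated here as a == b
	 with INVERT set; the TREE_INT_CST_LOW (t1) ^= 1 near the end of
	 this case then flips the 0/1 result.  */
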
7873      /* Compute a result for LT or EQ if args permit;
7874	 otherwise return T.  */
7875      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7876	{
7877	  if (code == EQ_EXPR)
7878	    t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7879	  else
7880	    t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7881			       ? INT_CST_LT_UNSIGNED (arg0, arg1)
7882			       : INT_CST_LT (arg0, arg1)),
7883			      0);
7884	}
7885
7886#if 0 /* This is no longer useful, but breaks some real code.  */
7887      /* Assume a nonexplicit constant cannot equal an explicit one,
7888	 since such code would be undefined anyway.
7889	 Exception: on sysvr4, using #pragma weak,
7890	 a label can come out as 0.  */
7891      else if (TREE_CODE (arg1) == INTEGER_CST
7892	       && !integer_zerop (arg1)
7893	       && TREE_CONSTANT (arg0)
7894	       && TREE_CODE (arg0) == ADDR_EXPR
7895	       && code == EQ_EXPR)
7896	t1 = build_int_2 (0, 0);
7897#endif
7898      /* Two real constants can be compared explicitly.  */
7899      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7900	{
7901	  /* If either operand is a NaN, the result is false with two
7902	     exceptions: First, an NE_EXPR is true on NaNs, but that case
7903	     is already handled correctly since we will be inverting the
7904	     result for NE_EXPR.  Second, if we had inverted a LE_EXPR
7905	     or a GE_EXPR into a LT_EXPR, we must return true so that it
7906	     will be inverted into false.  */
7907
7908	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7909	      || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7910	    t1 = build_int_2 (invert && code == LT_EXPR, 0);
7911
7912	  else if (code == EQ_EXPR)
7913	    t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7914						 TREE_REAL_CST (arg1)),
7915			      0);
7916	  else
7917	    t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7918						TREE_REAL_CST (arg1)),
7919			      0);
7920	}
7921
7922      if (t1 == NULL_TREE)
7923	return t;
7924
7925      if (invert)
7926	TREE_INT_CST_LOW (t1) ^= 1;
7927
7928      TREE_TYPE (t1) = type;
7929      if (TREE_CODE (type) == BOOLEAN_TYPE)
7930	return (*lang_hooks.truthvalue_conversion) (t1);
7931      return t1;
7932
7933    case COND_EXPR:
7934      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7935	 so all simple results must be passed through pedantic_non_lvalue.  */
7936      if (TREE_CODE (arg0) == INTEGER_CST)
7937	{
7938	  tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7939	  /* Only optimize constant conditions when the selected branch
7940	     has the same type as the COND_EXPR.  This avoids optimizing
7941	     away "c ? x : throw", where the throw has a void type.  */
7942	  if (! VOID_TYPE_P (TREE_TYPE (tem))
7943	      || VOID_TYPE_P (TREE_TYPE (t)))
7944	    return pedantic_non_lvalue (tem);
7945	  return t;
7946	}
7947      if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7948	return pedantic_omit_one_operand (type, arg1, arg0);
7949
7950      /* If we have A op B ? A : C, we may be able to convert this to a
7951	 simpler expression, depending on the operation and the values
7952	 of B and C.  Signed zeros prevent all of these transformations,
7953	 for reasons given above each one.  */
7954
7955      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7956	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7957					     arg1, TREE_OPERAND (arg0, 1))
7958	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7959	{
7960	  tree arg2 = TREE_OPERAND (t, 2);
7961	  enum tree_code comp_code = TREE_CODE (arg0);
7962
7963	  STRIP_NOPS (arg2);
7964
7965	  /* If we have A op 0 ? A : -A, consider applying the following
7966	     transformations:
7967
7968	     A == 0? A : -A    same as -A
7969	     A != 0? A : -A    same as A
7970	     A >= 0? A : -A    same as abs (A)
7971	     A > 0?  A : -A    same as abs (A)
7972	     A <= 0? A : -A    same as -abs (A)
7973	     A < 0?  A : -A    same as -abs (A)
7974
7975	     None of these transformations work for modes with signed
7976	     zeros.  If A is +/-0, the first two transformations will
7977	     change the sign of the result (from +0 to -0, or vice
7978	     versa).  The last four will fix the sign of the result,
7979	     even though the original expressions could be positive or
7980	     negative, depending on the sign of A.
7981
7982	     Note that all these transformations are correct if A is
7983	     NaN, since the two alternatives (A and -A) are also NaNs.  */
7984	  if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7985	       ? real_zerop (TREE_OPERAND (arg0, 1))
7986	       : integer_zerop (TREE_OPERAND (arg0, 1)))
7987	      && TREE_CODE (arg2) == NEGATE_EXPR
7988	      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7989	    switch (comp_code)
7990	      {
7991	      case EQ_EXPR:
7992		tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
7993		tem = fold_convert (type, negate_expr (tem));
7994		return pedantic_non_lvalue (tem);
7995	      case NE_EXPR:
7996		return pedantic_non_lvalue (fold_convert (type, arg1));
7997	      case GE_EXPR:
7998	      case GT_EXPR:
7999		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8000		  arg1 = fold_convert ((*lang_hooks.types.signed_type)
8001				       (TREE_TYPE (arg1)), arg1);
8002		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8003		return pedantic_non_lvalue (fold_convert (type, arg1));
8004	      case LE_EXPR:
8005	      case LT_EXPR:
8006		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8007		  arg1 = fold_convert ((*lang_hooks.types.signed_type)
8008				       (TREE_TYPE (arg1)), arg1);
8009		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8010		arg1 = negate_expr (fold_convert (type, arg1));
8011		return pedantic_non_lvalue (arg1);
8012	      default:
8013		abort ();
8014	      }
8015
8016	  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
8017	     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
8018	     both transformations are correct when A is NaN: A != 0
8019	     is then true, and A == 0 is false.  */
8020
8021	  if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8022	    {
8023	      if (comp_code == NE_EXPR)
8024		return pedantic_non_lvalue (fold_convert (type, arg1));
8025	      else if (comp_code == EQ_EXPR)
8026		return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8027	    }
8028
8029	  /* Try some transformations of A op B ? A : B.
8030
8031	     A == B? A : B    same as B
8032	     A != B? A : B    same as A
8033	     A >= B? A : B    same as max (A, B)
8034	     A > B?  A : B    same as max (B, A)
8035	     A <= B? A : B    same as min (A, B)
8036	     A < B?  A : B    same as min (B, A)
8037
8038	     As above, these transformations don't work in the presence
8039	     of signed zeros.  For example, if A and B are zeros of
8040	     opposite sign, the first two transformations will change
8041	     the sign of the result.  In the last four, the original
8042	     expressions give different results for (A=+0, B=-0) and
8043	     (A=-0, B=+0), but the transformed expressions do not.
8044
8045	     The first two transformations are correct if either A or B
8046	     is a NaN.  In the first transformation, the condition will
8047	     be false, and B will indeed be chosen.  In the case of the
8048	     second transformation, the condition A != B will be true,
8049	     and A will be chosen.
8050
8051	     The conversions to max() and min() are not correct if B is
8052	     a number and A is not.  The conditions in the original
8053	     expressions will be false, so all four give B.  The min()
8054	     and max() versions would give a NaN instead.  */
8055	  if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8056					      arg2, TREE_OPERAND (arg0, 0)))
8057	    {
8058	      tree comp_op0 = TREE_OPERAND (arg0, 0);
8059	      tree comp_op1 = TREE_OPERAND (arg0, 1);
8060	      tree comp_type = TREE_TYPE (comp_op0);
8061
8062	      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
8063	      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8064		{
8065		  comp_type = type;
8066		  comp_op0 = arg1;
8067		  comp_op1 = arg2;
8068		}
8069
8070	      switch (comp_code)
8071		{
8072		case EQ_EXPR:
8073		  return pedantic_non_lvalue (fold_convert (type, arg2));
8074		case NE_EXPR:
8075		  return pedantic_non_lvalue (fold_convert (type, arg1));
8076		case LE_EXPR:
8077		case LT_EXPR:
8078		  /* In C++ a ?: expression can be an lvalue, so put the
8079		     operand which will be used if they are equal first
8080		     so that we can convert this back to the
8081		     corresponding COND_EXPR.  */
8082		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8083		    return pedantic_non_lvalue (fold_convert
8084		      (type, fold (build (MIN_EXPR, comp_type,
8085					  (comp_code == LE_EXPR
8086					   ? comp_op0 : comp_op1),
8087					  (comp_code == LE_EXPR
8088					   ? comp_op1 : comp_op0)))));
8089		  break;
8090		case GE_EXPR:
8091		case GT_EXPR:
8092		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8093		    return pedantic_non_lvalue (fold_convert
8094		      (type, fold (build (MAX_EXPR, comp_type,
8095					  (comp_code == GE_EXPR
8096					   ? comp_op0 : comp_op1),
8097					  (comp_code == GE_EXPR
8098					   ? comp_op1 : comp_op0)))));
8099		  break;
8100		default:
8101		  abort ();
8102		}
8103	    }
8104
8105	  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8106	     we might still be able to simplify this.  For example,
8107	     if C1 is one less or one more than C2, this might have started
8108	     out as a MIN or MAX and been transformed by this function.
8109	     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
8110
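	  /* For instance (illustrative), A < 4 ? A : 3 has C1 == C2 + 1,
	     so the LT_EXPR case below rewrites it as min (A, 3); likewise
	     A > 2 ? A : 3 has C1 == C2 - 1 and becomes max (A, 3).  */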
8111	  if (INTEGRAL_TYPE_P (type)
8112	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8113	      && TREE_CODE (arg2) == INTEGER_CST)
8114	    switch (comp_code)
8115	      {
8116	      case EQ_EXPR:
8117		/* We can replace A with C1 in this case.  */
8118		arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8119		return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8120				    TREE_OPERAND (t, 2)));
8121
8122	      case LT_EXPR:
8123		/* If C1 is C2 + 1, this is min(A, C2).  */
8124		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8125		    && operand_equal_p (TREE_OPERAND (arg0, 1),
8126					const_binop (PLUS_EXPR, arg2,
8127						     integer_one_node, 0), 1))
8128		  return pedantic_non_lvalue
8129		    (fold (build (MIN_EXPR, type, arg1, arg2)));
8130		break;
8131
8132	      case LE_EXPR:
8133		/* If C1 is C2 - 1, this is min(A, C2).  */
8134		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8135		    && operand_equal_p (TREE_OPERAND (arg0, 1),
8136					const_binop (MINUS_EXPR, arg2,
8137						     integer_one_node, 0), 1))
8138		  return pedantic_non_lvalue
8139		    (fold (build (MIN_EXPR, type, arg1, arg2)));
8140		break;
8141
8142	      case GT_EXPR:
8143		/* If C1 is C2 - 1, this is max(A, C2).  */
8144		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8145		    && operand_equal_p (TREE_OPERAND (arg0, 1),
8146					const_binop (MINUS_EXPR, arg2,
8147						     integer_one_node, 0), 1))
8148		  return pedantic_non_lvalue
8149		    (fold (build (MAX_EXPR, type, arg1, arg2)));
8150		break;
8151
8152	      case GE_EXPR:
8153		/* If C1 is C2 + 1, this is max(A, C2).  */
8154		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8155		    && operand_equal_p (TREE_OPERAND (arg0, 1),
8156					const_binop (PLUS_EXPR, arg2,
8157						     integer_one_node, 0), 1))
8158		  return pedantic_non_lvalue
8159		    (fold (build (MAX_EXPR, type, arg1, arg2)));
8160		break;
8161	      case NE_EXPR:
8162		break;
8163	      default:
8164		abort ();
8165	      }
8166	}
8167
8168      /* If the second operand is simpler than the third, swap them
8169	 since that produces better jump optimization results.  */
8170      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8171				TREE_OPERAND (t, 2), false))
8172	{
8173	  /* See if this can be inverted.  If it can't, possibly because
8174	     it was a floating-point inequality comparison, don't do
8175	     anything.  */
8176	  tem = invert_truthvalue (arg0);
8177
8178	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8179	    return fold (build (code, type, tem,
8180			 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8181	}
8182
8183      /* Convert A ? 1 : 0 to simply A.  */
8184      if (integer_onep (TREE_OPERAND (t, 1))
8185	  && integer_zerop (TREE_OPERAND (t, 2))
8186	  /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8187	     call to fold will try to move the conversion inside
8188	     a COND, which will recurse.  In that case, the COND_EXPR
8189	     is probably the best choice, so leave it alone.  */
8190	  && type == TREE_TYPE (arg0))
8191	return pedantic_non_lvalue (arg0);
8192
8193      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
8194	 over COND_EXPR in cases such as floating point comparisons.  */
8195      if (integer_zerop (TREE_OPERAND (t, 1))
8196	  && integer_onep (TREE_OPERAND (t, 2))
8197	  && truth_value_p (TREE_CODE (arg0)))
8198	return pedantic_non_lvalue (fold_convert (type,
8199						  invert_truthvalue (arg0)));
8200
8201      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
8202	 operation is simply A & 2.  */
8203
8204      if (integer_zerop (TREE_OPERAND (t, 2))
8205	  && TREE_CODE (arg0) == NE_EXPR
8206	  && integer_zerop (TREE_OPERAND (arg0, 1))
8207	  && integer_pow2p (arg1)
8208	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8209	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8210			      arg1, 1))
8211	return pedantic_non_lvalue (fold_convert (type,
8212						  TREE_OPERAND (arg0, 0)));
8213
8214      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
8215      if (integer_zerop (TREE_OPERAND (t, 2))
8216	  && truth_value_p (TREE_CODE (arg0))
8217	  && truth_value_p (TREE_CODE (arg1)))
8218	return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8219						 arg0, arg1)));
8220
8221      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
8222      if (integer_onep (TREE_OPERAND (t, 2))
8223	  && truth_value_p (TREE_CODE (arg0))
8224	  && truth_value_p (TREE_CODE (arg1)))
8225	{
8226	  /* Only perform transformation if ARG0 is easily inverted.  */
8227	  tem = invert_truthvalue (arg0);
8228	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8229	    return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8230						     tem, arg1)));
8231	}
8232
8233      return t;
8234
8235    case COMPOUND_EXPR:
8236      /* When pedantic, a compound expression can be neither an lvalue
8237	 nor an integer constant expression.  */
8238      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8239	return t;
8240      /* Don't let (0, 0) be null pointer constant.  */
8241      if (integer_zerop (arg1))
8242	return build1 (NOP_EXPR, type, arg1);
8243      return fold_convert (type, arg1);
8244
8245    case COMPLEX_EXPR:
8246      if (wins)
8247	return build_complex (type, arg0, arg1);
8248      return t;
8249
8250    case REALPART_EXPR:
8251      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8252	return t;
8253      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8254	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8255				 TREE_OPERAND (arg0, 1));
8256      else if (TREE_CODE (arg0) == COMPLEX_CST)
8257	return TREE_REALPART (arg0);
8258      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8259	return fold (build (TREE_CODE (arg0), type,
8260			    fold (build1 (REALPART_EXPR, type,
8261					  TREE_OPERAND (arg0, 0))),
8262			    fold (build1 (REALPART_EXPR,
8263					  type, TREE_OPERAND (arg0, 1)))));
8264      return t;
8265
8266    case IMAGPART_EXPR:
8267      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8268	return fold_convert (type, integer_zero_node);
8269      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8270	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8271				 TREE_OPERAND (arg0, 0));
8272      else if (TREE_CODE (arg0) == COMPLEX_CST)
8273	return TREE_IMAGPART (arg0);
8274      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8275	return fold (build (TREE_CODE (arg0), type,
8276			    fold (build1 (IMAGPART_EXPR, type,
8277					  TREE_OPERAND (arg0, 0))),
8278			    fold (build1 (IMAGPART_EXPR, type,
8279					  TREE_OPERAND (arg0, 1)))));
8280      return t;
8281
8282      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8283         appropriate.  */
8284    case CLEANUP_POINT_EXPR:
8285      if (! has_cleanups (arg0))
8286	return TREE_OPERAND (t, 0);
8287
8288      {
8289	enum tree_code code0 = TREE_CODE (arg0);
8290	int kind0 = TREE_CODE_CLASS (code0);
8291	tree arg00 = TREE_OPERAND (arg0, 0);
8292	tree arg01;
8293
8294	if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8295	  return fold (build1 (code0, type,
8296			       fold (build1 (CLEANUP_POINT_EXPR,
8297					     TREE_TYPE (arg00), arg00))));
8298
8299	if (kind0 == '<' || kind0 == '2'
8300	    || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8301	    || code0 == TRUTH_AND_EXPR   || code0 == TRUTH_OR_EXPR
8302	    || code0 == TRUTH_XOR_EXPR)
8303	  {
8304	    arg01 = TREE_OPERAND (arg0, 1);
8305
8306	    if (TREE_CONSTANT (arg00)
8307		|| ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8308		    && ! has_cleanups (arg00)))
8309	      return fold (build (code0, type, arg00,
8310				  fold (build1 (CLEANUP_POINT_EXPR,
8311						TREE_TYPE (arg01), arg01))));
8312
8313	    if (TREE_CONSTANT (arg01))
8314	      return fold (build (code0, type,
8315				  fold (build1 (CLEANUP_POINT_EXPR,
8316						TREE_TYPE (arg00), arg00)),
8317				  arg01));
8318	  }
8319
8320	return t;
8321      }
8322
8323    case CALL_EXPR:
8324      /* Check for a built-in function.  */
8325      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8326	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8327	      == FUNCTION_DECL)
8328	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8329	{
8330	  tree tmp = fold_builtin (expr);
8331	  if (tmp)
8332	    return tmp;
8333	}
8334      return t;
8335
8336    default:
8337      return t;
8338    } /* switch (code) */
8339}
8340
8341#ifdef ENABLE_FOLD_CHECKING
8342#undef fold
8343
8344static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8345static void fold_check_failed (tree, tree);
8346void print_fold_checksum (tree);
8347
8348/* When --enable-checking=fold, compute a digest of EXPR before and
8349   after the actual call to fold, to verify that fold did not
8350   accidentally change the original expression.  */
8351
8352tree
8353fold (tree expr)
8354{
8355  tree ret;
8356  struct md5_ctx ctx;
8357  unsigned char checksum_before[16], checksum_after[16];
8358  htab_t ht;
8359
8360  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8361  md5_init_ctx (&ctx);
8362  fold_checksum_tree (expr, &ctx, ht);
8363  md5_finish_ctx (&ctx, checksum_before);
8364  htab_empty (ht);
8365
8366  ret = fold_1 (expr);
8367
8368  md5_init_ctx (&ctx);
8369  fold_checksum_tree (expr, &ctx, ht);
8370  md5_finish_ctx (&ctx, checksum_after);
8371  htab_delete (ht);
8372
8373  if (memcmp (checksum_before, checksum_after, 16))
8374    fold_check_failed (expr, ret);
8375
8376  return ret;
8377}
8378
8379void
8380print_fold_checksum (tree expr)
8381{
8382  struct md5_ctx ctx;
8383  unsigned char checksum[16], cnt;
8384  htab_t ht;
8385
8386  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8387  md5_init_ctx (&ctx);
8388  fold_checksum_tree (expr, &ctx, ht);
8389  md5_finish_ctx (&ctx, checksum);
8390  htab_delete (ht);
8391  for (cnt = 0; cnt < 16; ++cnt)
8392    fprintf (stderr, "%02x", checksum[cnt]);
8393  putc ('\n', stderr);
8394}
8395
8396static void
8397fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8398{
8399  internal_error ("fold check: original tree changed by fold");
8400}
8401
8402static void
8403fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8404{
8405  void **slot;
8406  enum tree_code code;
8407  char buf[sizeof (struct tree_decl)];
8408  int i, len;
8409
8410  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8411      > sizeof (struct tree_decl)
8412      || sizeof (struct tree_type) > sizeof (struct tree_decl))
8413    abort ();
8414  if (expr == NULL)
8415    return;
8416  slot = htab_find_slot (ht, expr, INSERT);
8417  if (*slot != NULL)
8418    return;
8419  *slot = expr;
8420  code = TREE_CODE (expr);
8421  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8422    {
8423      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
8424      memcpy (buf, expr, tree_size (expr));
8425      expr = (tree) buf;
8426      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8427    }
8428  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8429    {
8430      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
8431      memcpy (buf, expr, tree_size (expr));
8432      expr = (tree) buf;
8433      SET_DECL_ASSEMBLER_NAME (expr, NULL);
8434    }
8435  else if (TREE_CODE_CLASS (code) == 't'
8436	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8437    {
8438      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
8439      memcpy (buf, expr, tree_size (expr));
8440      expr = (tree) buf;
8441      TYPE_POINTER_TO (expr) = NULL;
8442      TYPE_REFERENCE_TO (expr) = NULL;
8443    }
8444  md5_process_bytes (expr, tree_size (expr), ctx);
8445  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8446  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8447    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8448  len = TREE_CODE_LENGTH (code);
8449  switch (TREE_CODE_CLASS (code))
8450    {
8451    case 'c':
8452      switch (code)
8453	{
8454	case STRING_CST:
8455	  md5_process_bytes (TREE_STRING_POINTER (expr),
8456			     TREE_STRING_LENGTH (expr), ctx);
8457	  break;
8458	case COMPLEX_CST:
8459	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8460	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8461	  break;
8462	case VECTOR_CST:
8463	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8464	  break;
8465	default:
8466	  break;
8467	}
8468      break;
8469    case 'x':
8470      switch (code)
8471	{
8472	case TREE_LIST:
8473	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8474	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8475	  break;
8476	case TREE_VEC:
8477	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8478	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8479	  break;
8480	default:
8481	  break;
8482	}
8483      break;
8484    case 'e':
8485      switch (code)
8486	{
8487	case SAVE_EXPR: len = 2; break;
8488	case GOTO_SUBROUTINE_EXPR: len = 0; break;
8489	case RTL_EXPR: len = 0; break;
8490	case WITH_CLEANUP_EXPR: len = 2; break;
8491	default: break;
8492	}
8493      /* Fall through.  */
8494    case 'r':
8495    case '<':
8496    case '1':
8497    case '2':
8498    case 's':
8499      for (i = 0; i < len; ++i)
8500	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8501      break;
8502    case 'd':
8503      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8504      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8505      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8506      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8507      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8508      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8509      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8510      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8511      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8512      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8513      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8514      break;
8515    case 't':
8516      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8517      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8518      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8519      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8520      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8521      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8522      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8523      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8524      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8525      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8526      break;
8527    default:
8528      break;
8529    }
8530}
8531
8532#endif
8533
8534/* Perform constant folding and related simplification of initializer
8535   expression EXPR.  This behaves identically to "fold" but ignores
8536   potential run-time traps and exceptions that fold must preserve.  */
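
/* For instance (illustrative): when flag_trapping_math is set, fold
   will not reduce 1.0 / 0.0, since the division could raise an exception
   at run time; no such trap can occur in a static initializer, so with
   the flags cleared the expression folds (to +Inf on IEEE targets).  */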
8537
8538tree
8539fold_initializer (tree expr)
8540{
8541  int saved_signaling_nans = flag_signaling_nans;
8542  int saved_trapping_math = flag_trapping_math;
8543  int saved_trapv = flag_trapv;
8544  tree result;
8545
8546  flag_signaling_nans = 0;
8547  flag_trapping_math = 0;
8548  flag_trapv = 0;
8549
8550  result = fold (expr);
8551
8552  flag_signaling_nans = saved_signaling_nans;
8553  flag_trapping_math = saved_trapping_math;
8554  flag_trapv = saved_trapv;
8555
8556  return result;
8557}
8558
8559/* Determine if first argument is a multiple of second argument.  Return 0 if
8560   it is not, or if we cannot easily determine it to be.
8561
8562   An example of the sort of thing we care about (at this point; this routine
8563   could surely be made more general, and expanded to do what the *_DIV_EXPR's
8564   fold cases do now) is discovering that
8565
8566     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8567
8568   is a multiple of
8569
8570     SAVE_EXPR (J * 8)
8571
8572   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8573
8574   This code also handles discovering that
8575
8576     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8577
8578   is a multiple of 8 so we don't have to worry about dealing with a
8579   possible remainder.
8580
8581   Note that we *look* inside a SAVE_EXPR only to determine how it was
8582   calculated; it is not safe for fold to do much of anything else with the
8583   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8584   at run time.  For example, the latter example above *cannot* be implemented
8585   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8586   evaluation time of the original SAVE_EXPR is not necessarily the same at
8587   the time the new expression is evaluated.  The only optimization of this
8588   sort that would be valid is changing
8589
8590     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8591
8592   divided by 8 to
8593
8594     SAVE_EXPR (I) * SAVE_EXPR (J)
8595
8596   (where the same SAVE_EXPR (J) is used in the original and the
8597   transformed version).  */
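
/* A simpler illustrative case: I * 4 + 8 is a multiple of 4, because the
   PLUS_EXPR case below requires both operands to be multiples, the
   MULT_EXPR case is satisfied by the constant operand 4, and the
   INTEGER_CST case checks 8 % 4 == 0.  */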
8598
8599static int
8600multiple_of_p (tree type, tree top, tree bottom)
8601{
8602  if (operand_equal_p (top, bottom, 0))
8603    return 1;
8604
8605  if (TREE_CODE (type) != INTEGER_TYPE)
8606    return 0;
8607
8608  switch (TREE_CODE (top))
8609    {
8610    case MULT_EXPR:
8611      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8612	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8613
8614    case PLUS_EXPR:
8615    case MINUS_EXPR:
8616      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8617	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8618
8619    case LSHIFT_EXPR:
8620      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8621	{
8622	  tree op1, t1;
8623
8624	  op1 = TREE_OPERAND (top, 1);
8625	  /* const_binop may not detect overflow correctly,
8626	     so check for it explicitly here.  */
8627	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8628	      > TREE_INT_CST_LOW (op1)
8629	      && TREE_INT_CST_HIGH (op1) == 0
8630	      && 0 != (t1 = fold_convert (type,
8631					  const_binop (LSHIFT_EXPR,
8632						       size_one_node,
8633						       op1, 0)))
8634	      && ! TREE_OVERFLOW (t1))
8635	    return multiple_of_p (type, t1, bottom);
8636	}
8637      return 0;
8638
8639    case NOP_EXPR:
8640      /* Can't handle conversions from non-integral or wider integral type.  */
8641      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8642	  || (TYPE_PRECISION (type)
8643	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8644	return 0;
8645
8646      /* ... fall through ...  */
8647
8648    case SAVE_EXPR:
8649      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8650
8651    case INTEGER_CST:
8652      if (TREE_CODE (bottom) != INTEGER_CST
8653	  || (TREE_UNSIGNED (type)
8654	      && (tree_int_cst_sgn (top) < 0
8655		  || tree_int_cst_sgn (bottom) < 0)))
8656	return 0;
8657      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8658					 top, bottom, 0));
8659
8660    default:
8661      return 0;
8662    }
8663}
8664
8665/* Return true if `t' is known to be non-negative.  */
8666
8667int
8668tree_expr_nonnegative_p (tree t)
8669{
8670  switch (TREE_CODE (t))
8671    {
8672    case ABS_EXPR:
8673      return 1;
8674
8675    case INTEGER_CST:
8676      return tree_int_cst_sgn (t) >= 0;
8677
8678    case REAL_CST:
8679      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8680
8681    case PLUS_EXPR:
8682      if (FLOAT_TYPE_P (TREE_TYPE (t)))
8683	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8684	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8685
8686      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8687	 both unsigned and at least 2 bits shorter than the result.  */
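      /* Illustrative case: two unsigned 16-bit values zero-extended to a
	 32-bit int sum to at most 2 * 65535 = 131070, which fits in 17
	 bits, so the sign bit of the result can never be set.  */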
8688      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8689	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8690	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8691	{
8692	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8693	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8694	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8695	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8696	    {
8697	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
8698				       TYPE_PRECISION (inner2)) + 1;
8699	      return prec < TYPE_PRECISION (TREE_TYPE (t));
8700	    }
8701	}
8702      break;
8703
8704    case MULT_EXPR:
8705      if (FLOAT_TYPE_P (TREE_TYPE (t)))
8706	{
8707	  /* x * x for floating point x is always non-negative.  */
8708	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8709	    return 1;
8710	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8711		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8712	}
8713
8714      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8715	 both unsigned and the total of their bits is less than the result's.  */
8716      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8717	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8718	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8719	{
8720	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8721	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8722	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8723	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8724	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8725		   < TYPE_PRECISION (TREE_TYPE (t));
8726	}
8727      return 0;
8728
8729    case TRUNC_DIV_EXPR:
8730    case CEIL_DIV_EXPR:
8731    case FLOOR_DIV_EXPR:
8732    case ROUND_DIV_EXPR:
8733      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8734	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8735
8736    case TRUNC_MOD_EXPR:
8737    case CEIL_MOD_EXPR:
8738    case FLOOR_MOD_EXPR:
8739    case ROUND_MOD_EXPR:
8740      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8741
8742    case RDIV_EXPR:
8743      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8744	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8745
8746    case NOP_EXPR:
8747      {
8748	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8749	tree outer_type = TREE_TYPE (t);
8750
8751	if (TREE_CODE (outer_type) == REAL_TYPE)
8752	  {
8753	    if (TREE_CODE (inner_type) == REAL_TYPE)
8754	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8755	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
8756	      {
8757		if (TREE_UNSIGNED (inner_type))
8758		  return 1;
8759		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8760	      }
8761	  }
8762	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8763	  {
8764	    if (TREE_CODE (inner_type) == REAL_TYPE)
8765	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8766	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
8767	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8768		      && TREE_UNSIGNED (inner_type);
8769	  }
8770      }
8771      break;
8772
8773    case COND_EXPR:
8774      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8775	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8776    case COMPOUND_EXPR:
8777      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8778    case MIN_EXPR:
8779      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8780	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8781    case MAX_EXPR:
8782      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8783	|| tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8784    case MODIFY_EXPR:
8785      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8786    case BIND_EXPR:
8787      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8788    case SAVE_EXPR:
8789      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8790    case NON_LVALUE_EXPR:
8791      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8792    case FLOAT_EXPR:
8793      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8794    case RTL_EXPR:
8795      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8796
8797    case CALL_EXPR:
8798      {
8799	tree fndecl = get_callee_fndecl (t);
8800	tree arglist = TREE_OPERAND (t, 1);
8801	if (fndecl
8802	    && DECL_BUILT_IN (fndecl)
8803	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8804	  switch (DECL_FUNCTION_CODE (fndecl))
8805	    {
8806	    case BUILT_IN_CABS:
8807	    case BUILT_IN_CABSL:
8808	    case BUILT_IN_CABSF:
8809	    case BUILT_IN_EXP:
8810	    case BUILT_IN_EXPF:
8811	    case BUILT_IN_EXPL:
8812	    case BUILT_IN_EXP2:
8813	    case BUILT_IN_EXP2F:
8814	    case BUILT_IN_EXP2L:
8815	    case BUILT_IN_EXP10:
8816	    case BUILT_IN_EXP10F:
8817	    case BUILT_IN_EXP10L:
8818	    case BUILT_IN_FABS:
8819	    case BUILT_IN_FABSF:
8820	    case BUILT_IN_FABSL:
8821	    case BUILT_IN_FFS:
8822	    case BUILT_IN_FFSL:
8823	    case BUILT_IN_FFSLL:
8824	    case BUILT_IN_PARITY:
8825	    case BUILT_IN_PARITYL:
8826	    case BUILT_IN_PARITYLL:
8827	    case BUILT_IN_POPCOUNT:
8828	    case BUILT_IN_POPCOUNTL:
8829	    case BUILT_IN_POPCOUNTLL:
8830	    case BUILT_IN_POW10:
8831	    case BUILT_IN_POW10F:
8832	    case BUILT_IN_POW10L:
8833	    case BUILT_IN_SQRT:
8834	    case BUILT_IN_SQRTF:
8835	    case BUILT_IN_SQRTL:
8836	      return 1;
8837
8838	    case BUILT_IN_ATAN:
8839	    case BUILT_IN_ATANF:
8840	    case BUILT_IN_ATANL:
8841	    case BUILT_IN_CEIL:
8842	    case BUILT_IN_CEILF:
8843	    case BUILT_IN_CEILL:
8844	    case BUILT_IN_FLOOR:
8845	    case BUILT_IN_FLOORF:
8846	    case BUILT_IN_FLOORL:
8847	    case BUILT_IN_NEARBYINT:
8848	    case BUILT_IN_NEARBYINTF:
8849	    case BUILT_IN_NEARBYINTL:
8850	    case BUILT_IN_ROUND:
8851	    case BUILT_IN_ROUNDF:
8852	    case BUILT_IN_ROUNDL:
8853	    case BUILT_IN_TRUNC:
8854	    case BUILT_IN_TRUNCF:
8855	    case BUILT_IN_TRUNCL:
8856	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8857
8858	    case BUILT_IN_POW:
8859	    case BUILT_IN_POWF:
8860	    case BUILT_IN_POWL:
8861	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8862
8863	    default:
8864	      break;
8865	    }
8866      }
8867
8868      /* ... fall through ...  */
8869
8870    default:
8871      if (truth_value_p (TREE_CODE (t)))
8872	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
8873	return 1;
8874    }
8875
8876  /* We don't know the sign of `t', so be conservative and return false.  */
8877  return 0;
8878}
8879
8880/* Return true if `r' is known to be non-negative.
8881   Only handles constants at the moment.  */
8882
8883int
8884rtl_expr_nonnegative_p (rtx r)
8885{
8886  switch (GET_CODE (r))
8887    {
8888    case CONST_INT:
8889      return INTVAL (r) >= 0;
8890
8891    case CONST_DOUBLE:
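      /* A VOIDmode CONST_DOUBLE is a double-word integer constant whose
	 sign is that of the high word; CONST_DOUBLEs carrying a
	 floating-point mode are conservatively rejected.  */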
8892      if (GET_MODE (r) == VOIDmode)
8893	return CONST_DOUBLE_HIGH (r) >= 0;
8894      return 0;
8895
8896    case CONST_VECTOR:
8897      {
8898	int units, i;
8899	rtx elt;
8900
8901	units = CONST_VECTOR_NUNITS (r);
8902
8903	for (i = 0; i < units; ++i)
8904	  {
8905	    elt = CONST_VECTOR_ELT (r, i);
8906	    if (!rtl_expr_nonnegative_p (elt))
8907	      return 0;
8908	  }
8909
8910	return 1;
8911      }
8912
8913    case SYMBOL_REF:
8914    case LABEL_REF:
8915      /* These are always nonnegative.  */
8916      return 1;
8917
8918    default:
8919      return 0;
8920    }
8921}
8922
8923#include "gt-fold-const.h"
8924