fold-const.c revision 146895
1/* Fold a constant sub-tree into a single node for C-compiler
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22/*@@ This file should be rewritten to use an arbitrary precision
23  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25  @@ The routines that translate from the ap rep should
26  @@ warn if precision et. al. is lost.
27  @@ This would also make life easier when this technology is used
28  @@ for cross-compilers.  */
29
30/* The entry points in this file are fold, size_int_wide, size_binop
31   and force_fit_type.
32
33   fold takes a tree as argument and returns a simplified tree.
34
35   size_binop takes a tree code for an arithmetic operation
36   and two operands that are trees, and produces a tree for the
37   result, assuming the type comes from `sizetype'.
38
39   size_int takes an integer value, and creates a tree constant
40   with type from `sizetype'.
41
42   force_fit_type takes a constant and prior overflow indicator, and
43   forces the value to fit the type.  It returns an overflow indicator.  */
44
45#include "config.h"
46#include "system.h"
47#include "coretypes.h"
48#include "tm.h"
49#include "flags.h"
50#include "tree.h"
51#include "real.h"
52#include "rtl.h"
53#include "expr.h"
54#include "tm_p.h"
55#include "toplev.h"
56#include "ggc.h"
57#include "hashtab.h"
58#include "langhooks.h"
59#include "md5.h"
60
61static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
62static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
63static bool negate_mathfn_p (enum built_in_function);
64static bool negate_expr_p (tree);
65static tree negate_expr (tree);
66static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
67static tree associate_trees (tree, tree, enum tree_code, tree);
68static tree int_const_binop (enum tree_code, tree, tree, int);
69static tree const_binop (enum tree_code, tree, tree, int);
70static hashval_t size_htab_hash (const void *);
71static int size_htab_eq (const void *, const void *);
72static tree fold_convert_const (enum tree_code, tree, tree);
73static tree fold_convert (tree, tree);
74static enum tree_code invert_tree_comparison (enum tree_code);
75static enum tree_code swap_tree_comparison (enum tree_code);
76static int comparison_to_compcode (enum tree_code);
77static enum tree_code compcode_to_comparison (int);
78static int truth_value_p (enum tree_code);
79static int operand_equal_for_comparison_p (tree, tree, tree);
80static int twoval_comparison_p (tree, tree *, tree *, int *);
81static tree eval_subst (tree, tree, tree, tree, tree);
82static tree pedantic_omit_one_operand (tree, tree, tree);
83static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
84static tree make_bit_field_ref (tree, tree, int, int, int);
85static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
86static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
87				    enum machine_mode *, int *, int *,
88				    tree *, tree *);
89static int all_ones_mask_p (tree, int);
90static tree sign_bit_p (tree, tree);
91static int simple_operand_p (tree);
92static tree range_binop (enum tree_code, tree, tree, int, tree, int);
93static tree make_range (tree, int *, tree *, tree *);
94static tree build_range_check (tree, tree, int, tree, tree);
95static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
96			 tree);
97static tree fold_range_test (tree);
98static tree unextend (tree, int, int, tree);
99static tree fold_truthop (enum tree_code, tree, tree, tree);
100static tree optimize_minmax_comparison (tree);
101static tree extract_muldiv (tree, tree, enum tree_code, tree);
102static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
103static tree strip_compound_expr (tree, tree);
104static int multiple_of_p (tree, tree, tree);
105static tree constant_boolean_node (int, tree);
106static int count_cond (tree, int);
107static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
108						 tree, int);
109static bool fold_real_zero_addition_p (tree, tree, int);
110static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
111				 tree, tree, tree);
112static tree fold_inf_compare (enum tree_code, tree, tree, tree);
113static bool reorder_operands_p (tree, tree);
114static bool tree_swap_operands_p (tree, tree, bool);
115
116/* The following constants represent a bit based encoding of GCC's
117   comparison operators.  This encoding simplifies transformations
118   on relational comparison operators, such as AND and OR.  */
119#define COMPCODE_FALSE   0
120#define COMPCODE_LT      1
121#define COMPCODE_EQ      2
122#define COMPCODE_LE      3
123#define COMPCODE_GT      4
124#define COMPCODE_NE      5
125#define COMPCODE_GE      6
126#define COMPCODE_TRUE    7
127
128/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
129   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
130   and SUM1.  Then this yields nonzero if overflow occurred during the
131   addition.
132
133   Overflow occurs if A and B have the same sign, but A and SUM differ in
134   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
135   sign.  */
136#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
137
138/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
139   We do that by representing the two-word integer in 4 words, with only
140   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
141   number.  The value of the word is LOWPART + HIGHPART * BASE.  */
142
143#define LOWPART(x) \
144  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
145#define HIGHPART(x) \
146  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
147#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
148
149/* Unpack a two-word integer into 4 words.
150   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
151   WORDS points to the array of HOST_WIDE_INTs.  */
152
153static void
154encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
155{
156  words[0] = LOWPART (low);
157  words[1] = HIGHPART (low);
158  words[2] = LOWPART (hi);
159  words[3] = HIGHPART (hi);
160}
161
162/* Pack an array of 4 words into a two-word integer.
163   WORDS points to the array of words.
164   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */
165
166static void
167decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
168	HOST_WIDE_INT *hi)
169{
170  *low = words[0] + words[1] * BASE;
171  *hi = words[2] + words[3] * BASE;
172}
173
174/* Make the integer constant T valid for its type by setting to 0 or 1 all
175   the bits in the constant that don't belong in the type.
176
177   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
178   nonzero, a signed overflow has already occurred in calculating T, so
179   propagate it.  */
180
181int
182force_fit_type (tree t, int overflow)
183{
184  unsigned HOST_WIDE_INT low;
185  HOST_WIDE_INT high;
186  unsigned int prec;
187
188  if (TREE_CODE (t) == REAL_CST)
189    {
190      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
191	 Consider doing it via real_convert now.  */
192      return overflow;
193    }
194
195  else if (TREE_CODE (t) != INTEGER_CST)
196    return overflow;
197
198  low = TREE_INT_CST_LOW (t);
199  high = TREE_INT_CST_HIGH (t);
200
201  if (POINTER_TYPE_P (TREE_TYPE (t))
202      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
203    prec = POINTER_SIZE;
204  else
205    prec = TYPE_PRECISION (TREE_TYPE (t));
206
207  /* First clear all bits that are beyond the type's precision.  */
208
209  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
210    ;
211  else if (prec > HOST_BITS_PER_WIDE_INT)
212    TREE_INT_CST_HIGH (t)
213      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
214  else
215    {
216      TREE_INT_CST_HIGH (t) = 0;
217      if (prec < HOST_BITS_PER_WIDE_INT)
218	TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
219    }
220
221  /* Unsigned types do not suffer sign extension or overflow unless they
222     are a sizetype.  */
223  if (TREE_UNSIGNED (TREE_TYPE (t))
224      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
225	    && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
226    return overflow;
227
228  /* If the value's sign bit is set, extend the sign.  */
229  if (prec != 2 * HOST_BITS_PER_WIDE_INT
230      && (prec > HOST_BITS_PER_WIDE_INT
231	  ? 0 != (TREE_INT_CST_HIGH (t)
232		  & ((HOST_WIDE_INT) 1
233		     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
234	  : 0 != (TREE_INT_CST_LOW (t)
235		  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
236    {
237      /* Value is negative:
238	 set to 1 all the bits that are outside this type's precision.  */
239      if (prec > HOST_BITS_PER_WIDE_INT)
240	TREE_INT_CST_HIGH (t)
241	  |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
242      else
243	{
244	  TREE_INT_CST_HIGH (t) = -1;
245	  if (prec < HOST_BITS_PER_WIDE_INT)
246	    TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
247	}
248    }
249
250  /* Return nonzero if signed overflow occurred.  */
251  return
252    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
253     != 0);
254}
255
256/* Add two doubleword integers with doubleword result.
257   Each argument is given as two `HOST_WIDE_INT' pieces.
258   One argument is L1 and H1; the other, L2 and H2.
259   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
260
261int
262add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
263	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
264	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
265{
266  unsigned HOST_WIDE_INT l;
267  HOST_WIDE_INT h;
268
269  l = l1 + l2;
270  h = h1 + h2 + (l < l1);
271
272  *lv = l;
273  *hv = h;
274  return OVERFLOW_SUM_SIGN (h1, h2, h);
275}
276
277/* Negate a doubleword integer with doubleword result.
278   Return nonzero if the operation overflows, assuming it's signed.
279   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
280   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
281
282int
283neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
284	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
285{
286  if (l1 == 0)
287    {
288      *lv = 0;
289      *hv = - h1;
290      return (*hv & h1) < 0;
291    }
292  else
293    {
294      *lv = -l1;
295      *hv = ~h1;
296      return 0;
297    }
298}
299
300/* Multiply two doubleword integers with doubleword result.
301   Return nonzero if the operation overflows, assuming it's signed.
302   Each argument is given as two `HOST_WIDE_INT' pieces.
303   One argument is L1 and H1; the other, L2 and H2.
304   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
305
306int
307mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
308	    unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
309	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
310{
311  HOST_WIDE_INT arg1[4];
312  HOST_WIDE_INT arg2[4];
313  HOST_WIDE_INT prod[4 * 2];
314  unsigned HOST_WIDE_INT carry;
315  int i, j, k;
316  unsigned HOST_WIDE_INT toplow, neglow;
317  HOST_WIDE_INT tophigh, neghigh;
318
319  encode (arg1, l1, h1);
320  encode (arg2, l2, h2);
321
322  memset (prod, 0, sizeof prod);
323
324  for (i = 0; i < 4; i++)
325    {
326      carry = 0;
327      for (j = 0; j < 4; j++)
328	{
329	  k = i + j;
330	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
331	  carry += arg1[i] * arg2[j];
332	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
333	  carry += prod[k];
334	  prod[k] = LOWPART (carry);
335	  carry = HIGHPART (carry);
336	}
337      prod[i + 4] = carry;
338    }
339
340  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */
341
342  /* Check for overflow by calculating the top half of the answer in full;
343     it should agree with the low half's sign bit.  */
344  decode (prod + 4, &toplow, &tophigh);
345  if (h1 < 0)
346    {
347      neg_double (l2, h2, &neglow, &neghigh);
348      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
349    }
350  if (h2 < 0)
351    {
352      neg_double (l1, h1, &neglow, &neghigh);
353      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
354    }
355  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
356}
357
358/* Shift the doubleword integer in L1, H1 left by COUNT places
359   keeping only PREC bits of result.
360   Shift right if COUNT is negative.
361   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
362   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
363
364void
365lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
366	       HOST_WIDE_INT count, unsigned int prec,
367	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
368{
369  unsigned HOST_WIDE_INT signmask;
370
371  if (count < 0)
372    {
373      rshift_double (l1, h1, -count, prec, lv, hv, arith);
374      return;
375    }
376
377#ifdef SHIFT_COUNT_TRUNCATED
378  if (SHIFT_COUNT_TRUNCATED)
379    count %= prec;
380#endif
381
382  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
383    {
384      /* Shifting by the host word size is undefined according to the
385	 ANSI standard, so we must handle this as a special case.  */
386      *hv = 0;
387      *lv = 0;
388    }
389  else if (count >= HOST_BITS_PER_WIDE_INT)
390    {
391      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
392      *lv = 0;
393    }
394  else
395    {
396      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
397	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
398      *lv = l1 << count;
399    }
400
401  /* Sign extend all bits that are beyond the precision.  */
402
403  signmask = -((prec > HOST_BITS_PER_WIDE_INT
404		? ((unsigned HOST_WIDE_INT) *hv
405		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
406		: (*lv >> (prec - 1))) & 1);
407
408  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
409    ;
410  else if (prec >= HOST_BITS_PER_WIDE_INT)
411    {
412      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
413      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
414    }
415  else
416    {
417      *hv = signmask;
418      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
419      *lv |= signmask << prec;
420    }
421}
422
423/* Shift the doubleword integer in L1, H1 right by COUNT places
424   keeping only PREC bits of result.  COUNT must be positive.
425   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
426   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
427
428void
429rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
430	       HOST_WIDE_INT count, unsigned int prec,
431	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
432	       int arith)
433{
434  unsigned HOST_WIDE_INT signmask;
435
436  signmask = (arith
437	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
438	      : 0);
439
440#ifdef SHIFT_COUNT_TRUNCATED
441  if (SHIFT_COUNT_TRUNCATED)
442    count %= prec;
443#endif
444
445  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
446    {
447      /* Shifting by the host word size is undefined according to the
448	 ANSI standard, so we must handle this as a special case.  */
449      *hv = 0;
450      *lv = 0;
451    }
452  else if (count >= HOST_BITS_PER_WIDE_INT)
453    {
454      *hv = 0;
455      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
456    }
457  else
458    {
459      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
460      *lv = ((l1 >> count)
461	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
462    }
463
464  /* Zero / sign extend all bits that are beyond the precision.  */
465
466  if (count >= (HOST_WIDE_INT)prec)
467    {
468      *hv = signmask;
469      *lv = signmask;
470    }
471  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
472    ;
473  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
474    {
475      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
476      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
477    }
478  else
479    {
480      *hv = signmask;
481      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
482      *lv |= signmask << (prec - count);
483    }
484}
485
486/* Rotate the doubleword integer in L1, H1 left by COUNT places
487   keeping only PREC bits of result.
488   Rotate right if COUNT is negative.
489   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
490
491void
492lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
493		HOST_WIDE_INT count, unsigned int prec,
494		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
495{
496  unsigned HOST_WIDE_INT s1l, s2l;
497  HOST_WIDE_INT s1h, s2h;
498
499  count %= prec;
500  if (count < 0)
501    count += prec;
502
503  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
504  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
505  *lv = s1l | s2l;
506  *hv = s1h | s2h;
507}
508
509/* Rotate the doubleword integer in L1, H1 left by COUNT places
510   keeping only PREC bits of result.  COUNT must be positive.
511   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
512
513void
514rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
515		HOST_WIDE_INT count, unsigned int prec,
516		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
517{
518  unsigned HOST_WIDE_INT s1l, s2l;
519  HOST_WIDE_INT s1h, s2h;
520
521  count %= prec;
522  if (count < 0)
523    count += prec;
524
525  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
526  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
527  *lv = s1l | s2l;
528  *hv = s1h | s2h;
529}
530
531/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
532   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
533   CODE is a tree code for a kind of division, one of
534   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
535   or EXACT_DIV_EXPR
536   It controls how the quotient is rounded to an integer.
537   Return nonzero if the operation overflows.
538   UNS nonzero says do unsigned division.  */
539
540int
541div_and_round_double (enum tree_code code, int uns,
542		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
543		      HOST_WIDE_INT hnum_orig,
544		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
545		      HOST_WIDE_INT hden_orig,
546		      unsigned HOST_WIDE_INT *lquo,
547		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
548		      HOST_WIDE_INT *hrem)
549{
550  int quo_neg = 0;
551  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
552  HOST_WIDE_INT den[4], quo[4];
553  int i, j;
554  unsigned HOST_WIDE_INT work;
555  unsigned HOST_WIDE_INT carry = 0;
556  unsigned HOST_WIDE_INT lnum = lnum_orig;
557  HOST_WIDE_INT hnum = hnum_orig;
558  unsigned HOST_WIDE_INT lden = lden_orig;
559  HOST_WIDE_INT hden = hden_orig;
560  int overflow = 0;
561
562  if (hden == 0 && lden == 0)
563    overflow = 1, lden = 1;
564
565  /* Calculate quotient sign and convert operands to unsigned.  */
566  if (!uns)
567    {
568      if (hnum < 0)
569	{
570	  quo_neg = ~ quo_neg;
571	  /* (minimum integer) / (-1) is the only overflow case.  */
572	  if (neg_double (lnum, hnum, &lnum, &hnum)
573	      && ((HOST_WIDE_INT) lden & hden) == -1)
574	    overflow = 1;
575	}
576      if (hden < 0)
577	{
578	  quo_neg = ~ quo_neg;
579	  neg_double (lden, hden, &lden, &hden);
580	}
581    }
582
583  if (hnum == 0 && hden == 0)
584    {				/* single precision */
585      *hquo = *hrem = 0;
586      /* This unsigned division rounds toward zero.  */
587      *lquo = lnum / lden;
588      goto finish_up;
589    }
590
591  if (hnum == 0)
592    {				/* trivial case: dividend < divisor */
593      /* hden != 0 already checked.  */
594      *hquo = *lquo = 0;
595      *hrem = hnum;
596      *lrem = lnum;
597      goto finish_up;
598    }
599
600  memset (quo, 0, sizeof quo);
601
602  memset (num, 0, sizeof num);	/* to zero 9th element */
603  memset (den, 0, sizeof den);
604
605  encode (num, lnum, hnum);
606  encode (den, lden, hden);
607
608  /* Special code for when the divisor < BASE.  */
609  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
610    {
611      /* hnum != 0 already checked.  */
612      for (i = 4 - 1; i >= 0; i--)
613	{
614	  work = num[i] + carry * BASE;
615	  quo[i] = work / lden;
616	  carry = work % lden;
617	}
618    }
619  else
620    {
621      /* Full double precision division,
622	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
623      int num_hi_sig, den_hi_sig;
624      unsigned HOST_WIDE_INT quo_est, scale;
625
626      /* Find the highest nonzero divisor digit.  */
627      for (i = 4 - 1;; i--)
628	if (den[i] != 0)
629	  {
630	    den_hi_sig = i;
631	    break;
632	  }
633
634      /* Insure that the first digit of the divisor is at least BASE/2.
635	 This is required by the quotient digit estimation algorithm.  */
636
637      scale = BASE / (den[den_hi_sig] + 1);
638      if (scale > 1)
639	{		/* scale divisor and dividend */
640	  carry = 0;
641	  for (i = 0; i <= 4 - 1; i++)
642	    {
643	      work = (num[i] * scale) + carry;
644	      num[i] = LOWPART (work);
645	      carry = HIGHPART (work);
646	    }
647
648	  num[4] = carry;
649	  carry = 0;
650	  for (i = 0; i <= 4 - 1; i++)
651	    {
652	      work = (den[i] * scale) + carry;
653	      den[i] = LOWPART (work);
654	      carry = HIGHPART (work);
655	      if (den[i] != 0) den_hi_sig = i;
656	    }
657	}
658
659      num_hi_sig = 4;
660
661      /* Main loop */
662      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
663	{
664	  /* Guess the next quotient digit, quo_est, by dividing the first
665	     two remaining dividend digits by the high order quotient digit.
666	     quo_est is never low and is at most 2 high.  */
667	  unsigned HOST_WIDE_INT tmp;
668
669	  num_hi_sig = i + den_hi_sig + 1;
670	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
671	  if (num[num_hi_sig] != den[den_hi_sig])
672	    quo_est = work / den[den_hi_sig];
673	  else
674	    quo_est = BASE - 1;
675
676	  /* Refine quo_est so it's usually correct, and at most one high.  */
677	  tmp = work - quo_est * den[den_hi_sig];
678	  if (tmp < BASE
679	      && (den[den_hi_sig - 1] * quo_est
680		  > (tmp * BASE + num[num_hi_sig - 2])))
681	    quo_est--;
682
683	  /* Try QUO_EST as the quotient digit, by multiplying the
684	     divisor by QUO_EST and subtracting from the remaining dividend.
685	     Keep in mind that QUO_EST is the I - 1st digit.  */
686
687	  carry = 0;
688	  for (j = 0; j <= den_hi_sig; j++)
689	    {
690	      work = quo_est * den[j] + carry;
691	      carry = HIGHPART (work);
692	      work = num[i + j] - LOWPART (work);
693	      num[i + j] = LOWPART (work);
694	      carry += HIGHPART (work) != 0;
695	    }
696
697	  /* If quo_est was high by one, then num[i] went negative and
698	     we need to correct things.  */
699	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
700	    {
701	      quo_est--;
702	      carry = 0;		/* add divisor back in */
703	      for (j = 0; j <= den_hi_sig; j++)
704		{
705		  work = num[i + j] + den[j] + carry;
706		  carry = HIGHPART (work);
707		  num[i + j] = LOWPART (work);
708		}
709
710	      num [num_hi_sig] += carry;
711	    }
712
713	  /* Store the quotient digit.  */
714	  quo[i] = quo_est;
715	}
716    }
717
718  decode (quo, lquo, hquo);
719
720 finish_up:
721  /* If result is negative, make it so.  */
722  if (quo_neg)
723    neg_double (*lquo, *hquo, lquo, hquo);
724
725  /* compute trial remainder:  rem = num - (quo * den)  */
726  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
727  neg_double (*lrem, *hrem, lrem, hrem);
728  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
729
730  switch (code)
731    {
732    case TRUNC_DIV_EXPR:
733    case TRUNC_MOD_EXPR:	/* round toward zero */
734    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
735      return overflow;
736
737    case FLOOR_DIV_EXPR:
738    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
739      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
740	{
741	  /* quo = quo - 1;  */
742	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
743		      lquo, hquo);
744	}
745      else
746	return overflow;
747      break;
748
749    case CEIL_DIV_EXPR:
750    case CEIL_MOD_EXPR:		/* round toward positive infinity */
751      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
752	{
753	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
754		      lquo, hquo);
755	}
756      else
757	return overflow;
758      break;
759
760    case ROUND_DIV_EXPR:
761    case ROUND_MOD_EXPR:	/* round to closest integer */
762      {
763	unsigned HOST_WIDE_INT labs_rem = *lrem;
764	HOST_WIDE_INT habs_rem = *hrem;
765	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
766	HOST_WIDE_INT habs_den = hden, htwice;
767
768	/* Get absolute values.  */
769	if (*hrem < 0)
770	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
771	if (hden < 0)
772	  neg_double (lden, hden, &labs_den, &habs_den);
773
774	/* If (2 * abs (lrem) >= abs (lden)) */
775	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
776		    labs_rem, habs_rem, &ltwice, &htwice);
777
778	if (((unsigned HOST_WIDE_INT) habs_den
779	     < (unsigned HOST_WIDE_INT) htwice)
780	    || (((unsigned HOST_WIDE_INT) habs_den
781		 == (unsigned HOST_WIDE_INT) htwice)
782		&& (labs_den < ltwice)))
783	  {
784	    if (*hquo < 0)
785	      /* quo = quo - 1;  */
786	      add_double (*lquo, *hquo,
787			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
788	    else
789	      /* quo = quo + 1; */
790	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
791			  lquo, hquo);
792	  }
793	else
794	  return overflow;
795      }
796      break;
797
798    default:
799      abort ();
800    }
801
802  /* Compute true remainder:  rem = num - (quo * den)  */
803  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
804  neg_double (*lrem, *hrem, lrem, hrem);
805  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
806  return overflow;
807}
808
809/* Return true if built-in mathematical function specified by CODE
810   preserves the sign of it argument, i.e. -f(x) == f(-x).  */
811
812static bool
813negate_mathfn_p (enum built_in_function code)
814{
815  switch (code)
816    {
817    case BUILT_IN_ASIN:
818    case BUILT_IN_ASINF:
819    case BUILT_IN_ASINL:
820    case BUILT_IN_ATAN:
821    case BUILT_IN_ATANF:
822    case BUILT_IN_ATANL:
823    case BUILT_IN_SIN:
824    case BUILT_IN_SINF:
825    case BUILT_IN_SINL:
826    case BUILT_IN_TAN:
827    case BUILT_IN_TANF:
828    case BUILT_IN_TANL:
829      return true;
830
831    default:
832      break;
833    }
834  return false;
835}
836
837
838/* Determine whether an expression T can be cheaply negated using
839   the function negate_expr.  */
840
841static bool
842negate_expr_p (tree t)
843{
844  unsigned HOST_WIDE_INT val;
845  unsigned int prec;
846  tree type;
847
848  if (t == 0)
849    return false;
850
851  type = TREE_TYPE (t);
852
853  STRIP_SIGN_NOPS (t);
854  switch (TREE_CODE (t))
855    {
856    case INTEGER_CST:
857      if (TREE_UNSIGNED (type) || ! flag_trapv)
858	return true;
859
860      /* Check that -CST will not overflow type.  */
861      prec = TYPE_PRECISION (type);
862      if (prec > HOST_BITS_PER_WIDE_INT)
863	{
864	  if (TREE_INT_CST_LOW (t) != 0)
865	    return true;
866	  prec -= HOST_BITS_PER_WIDE_INT;
867	  val = TREE_INT_CST_HIGH (t);
868	}
869      else
870	val = TREE_INT_CST_LOW (t);
871      if (prec < HOST_BITS_PER_WIDE_INT)
872	val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
873      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
874
875    case REAL_CST:
876    case NEGATE_EXPR:
877      return true;
878
879    case COMPLEX_CST:
880      return negate_expr_p (TREE_REALPART (t))
881	     && negate_expr_p (TREE_IMAGPART (t));
882
883    case MINUS_EXPR:
884      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
885      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
886	     && reorder_operands_p (TREE_OPERAND (t, 0),
887				    TREE_OPERAND (t, 1));
888
889    case MULT_EXPR:
890      if (TREE_UNSIGNED (TREE_TYPE (t)))
891        break;
892
893      /* Fall through.  */
894
895    case RDIV_EXPR:
896      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
897	return negate_expr_p (TREE_OPERAND (t, 1))
898	       || negate_expr_p (TREE_OPERAND (t, 0));
899      break;
900
901    case NOP_EXPR:
902      /* Negate -((double)float) as (double)(-float).  */
903      if (TREE_CODE (type) == REAL_TYPE)
904	{
905	  tree tem = strip_float_extensions (t);
906	  if (tem != t)
907	    return negate_expr_p (tem);
908	}
909      break;
910
911    case CALL_EXPR:
912      /* Negate -f(x) as f(-x).  */
913      if (negate_mathfn_p (builtin_mathfn_code (t)))
914	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
915      break;
916
917    default:
918      break;
919    }
920  return false;
921}
922
923/* Given T, an expression, return the negation of T.  Allow for T to be
924   null, in which case return null.  */
925
926static tree
927negate_expr (tree t)
928{
929  tree type;
930  tree tem;
931
932  if (t == 0)
933    return 0;
934
935  type = TREE_TYPE (t);
936  STRIP_SIGN_NOPS (t);
937
938  switch (TREE_CODE (t))
939    {
940    case INTEGER_CST:
941      {
942	unsigned HOST_WIDE_INT low;
943	HOST_WIDE_INT high;
944	int overflow = neg_double (TREE_INT_CST_LOW (t),
945				   TREE_INT_CST_HIGH (t),
946				   &low, &high);
947	tem = build_int_2 (low, high);
948	TREE_TYPE (tem) = type;
949	TREE_OVERFLOW (tem)
950	  = (TREE_OVERFLOW (t)
951	     | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
952	TREE_CONSTANT_OVERFLOW (tem)
953	  = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
954      }
955      if (! TREE_OVERFLOW (tem)
956	  || TREE_UNSIGNED (type)
957	  || ! flag_trapv)
958	return tem;
959      break;
960
961    case REAL_CST:
962      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
963      /* Two's complement FP formats, such as c4x, may overflow.  */
964      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
965	return fold_convert (type, tem);
966      break;
967
968    case COMPLEX_CST:
969      {
970	tree rpart = negate_expr (TREE_REALPART (t));
971	tree ipart = negate_expr (TREE_IMAGPART (t));
972
973	if ((TREE_CODE (rpart) == REAL_CST
974	     && TREE_CODE (ipart) == REAL_CST)
975	    || (TREE_CODE (rpart) == INTEGER_CST
976		&& TREE_CODE (ipart) == INTEGER_CST))
977	  return build_complex (type, rpart, ipart);
978      }
979      break;
980
981    case NEGATE_EXPR:
982      return fold_convert (type, TREE_OPERAND (t, 0));
983
984    case MINUS_EXPR:
985      /* - (A - B) -> B - A  */
986      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
987	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
988	return fold_convert (type,
989			     fold (build (MINUS_EXPR, TREE_TYPE (t),
990					  TREE_OPERAND (t, 1),
991					  TREE_OPERAND (t, 0))));
992      break;
993
994    case MULT_EXPR:
995      if (TREE_UNSIGNED (TREE_TYPE (t)))
996        break;
997
998      /* Fall through.  */
999
1000    case RDIV_EXPR:
1001      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1002	{
1003	  tem = TREE_OPERAND (t, 1);
1004	  if (negate_expr_p (tem))
1005	    return fold_convert (type,
1006				 fold (build (TREE_CODE (t), TREE_TYPE (t),
1007					      TREE_OPERAND (t, 0),
1008					      negate_expr (tem))));
1009	  tem = TREE_OPERAND (t, 0);
1010	  if (negate_expr_p (tem))
1011	    return fold_convert (type,
1012				 fold (build (TREE_CODE (t), TREE_TYPE (t),
1013					      negate_expr (tem),
1014					      TREE_OPERAND (t, 1))));
1015	}
1016      break;
1017
1018    case NOP_EXPR:
1019      /* Convert -((double)float) into (double)(-float).  */
1020      if (TREE_CODE (type) == REAL_TYPE)
1021	{
1022	  tem = strip_float_extensions (t);
1023	  if (tem != t && negate_expr_p (tem))
1024	    return fold_convert (type, negate_expr (tem));
1025	}
1026      break;
1027
1028    case CALL_EXPR:
1029      /* Negate -f(x) as f(-x).  */
1030      if (negate_mathfn_p (builtin_mathfn_code (t))
1031	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1032	{
1033	  tree fndecl, arg, arglist;
1034
1035	  fndecl = get_callee_fndecl (t);
1036	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1037	  arglist = build_tree_list (NULL_TREE, arg);
1038	  return build_function_call_expr (fndecl, arglist);
1039	}
1040      break;
1041
1042    default:
1043      break;
1044    }
1045
1046  tem = fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t));
1047  return fold_convert (type, tem);
1048}
1049
1050/* Split a tree IN into a constant, literal and variable parts that could be
1051   combined with CODE to make IN.  "constant" means an expression with
1052   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
1053   commutative arithmetic operation.  Store the constant part into *CONP,
1054   the literal in *LITP and return the variable part.  If a part isn't
1055   present, set it to null.  If the tree does not decompose in this way,
1056   return the entire tree as the variable part and the other parts as null.
1057
1058   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
1059   case, we negate an operand that was subtracted.  Except if it is a
1060   literal for which we use *MINUS_LITP instead.
1061
1062   If NEGATE_P is true, we are negating all of IN, again except a literal
1063   for which we use *MINUS_LITP instead.
1064
1065   If IN is itself a literal or constant, return it as appropriate.
1066
1067   Note that we do not guarantee that any of the three values will be the
1068   same type as IN, but they will have the same signedness and mode.  */
1069
1070static tree
1071split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1072	    tree *minus_litp, int negate_p)
1073{
1074  tree var = 0;
1075
1076  *conp = 0;
1077  *litp = 0;
1078  *minus_litp = 0;
1079
1080  /* Strip any conversions that don't change the machine mode or signedness.  */
1081  STRIP_SIGN_NOPS (in);
1082
1083  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1084    *litp = in;
1085  else if (TREE_CODE (in) == code
1086	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
1087	       /* We can associate addition and subtraction together (even
1088		  though the C standard doesn't say so) for integers because
1089		  the value is not affected.  For reals, the value might be
1090		  affected, so we can't.  */
1091	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1092		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1093    {
1094      tree op0 = TREE_OPERAND (in, 0);
1095      tree op1 = TREE_OPERAND (in, 1);
1096      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1097      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1098
1099      /* First see if either of the operands is a literal, then a constant.  */
1100      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1101	*litp = op0, op0 = 0;
1102      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1103	*litp = op1, neg_litp_p = neg1_p, op1 = 0;
1104
1105      if (op0 != 0 && TREE_CONSTANT (op0))
1106	*conp = op0, op0 = 0;
1107      else if (op1 != 0 && TREE_CONSTANT (op1))
1108	*conp = op1, neg_conp_p = neg1_p, op1 = 0;
1109
1110      /* If we haven't dealt with either operand, this is not a case we can
1111	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
1112      if (op0 != 0 && op1 != 0)
1113	var = in;
1114      else if (op0 != 0)
1115	var = op0;
1116      else
1117	var = op1, neg_var_p = neg1_p;
1118
1119      /* Now do any needed negations.  */
1120      if (neg_litp_p)
1121	*minus_litp = *litp, *litp = 0;
1122      if (neg_conp_p)
1123	*conp = negate_expr (*conp);
1124      if (neg_var_p)
1125	var = negate_expr (var);
1126    }
1127  else if (TREE_CONSTANT (in))
1128    *conp = in;
1129  else
1130    var = in;
1131
1132  if (negate_p)
1133    {
1134      if (*litp)
1135	*minus_litp = *litp, *litp = 0;
1136      else if (*minus_litp)
1137	*litp = *minus_litp, *minus_litp = 0;
1138      *conp = negate_expr (*conp);
1139      var = negate_expr (var);
1140    }
1141
1142  return var;
1143}
1144
1145/* Re-associate trees split by the above function.  T1 and T2 are either
1146   expressions to associate or null.  Return the new expression, if any.  If
1147   we build an operation, do it in TYPE and with CODE.  */
1148
1149static tree
1150associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1151{
1152  if (t1 == 0)
1153    return t2;
1154  else if (t2 == 0)
1155    return t1;
1156
1157  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1158     try to fold this since we will have infinite recursion.  But do
1159     deal with any NEGATE_EXPRs.  */
1160  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1161      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1162    {
1163      if (code == PLUS_EXPR)
1164	{
1165	  if (TREE_CODE (t1) == NEGATE_EXPR)
1166	    return build (MINUS_EXPR, type, fold_convert (type, t2),
1167			  fold_convert (type, TREE_OPERAND (t1, 0)));
1168	  else if (TREE_CODE (t2) == NEGATE_EXPR)
1169	    return build (MINUS_EXPR, type, fold_convert (type, t1),
1170			  fold_convert (type, TREE_OPERAND (t2, 0)));
1171	}
1172      return build (code, type, fold_convert (type, t1),
1173		    fold_convert (type, t2));
1174    }
1175
1176  return fold (build (code, type, fold_convert (type, t1),
1177		      fold_convert (type, t2)));
1178}
1179
1180/* Combine two integer constants ARG1 and ARG2 under operation CODE
1181   to produce a new constant.
1182
1183   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1184
1185static tree
1186int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1187{
1188  unsigned HOST_WIDE_INT int1l, int2l;
1189  HOST_WIDE_INT int1h, int2h;
1190  unsigned HOST_WIDE_INT low;
1191  HOST_WIDE_INT hi;
1192  unsigned HOST_WIDE_INT garbagel;
1193  HOST_WIDE_INT garbageh;
1194  tree t;
1195  tree type = TREE_TYPE (arg1);
1196  int uns = TREE_UNSIGNED (type);
1197  int is_sizetype
1198    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1199  int overflow = 0;
1200  int no_overflow = 0;
1201
1202  int1l = TREE_INT_CST_LOW (arg1);
1203  int1h = TREE_INT_CST_HIGH (arg1);
1204  int2l = TREE_INT_CST_LOW (arg2);
1205  int2h = TREE_INT_CST_HIGH (arg2);
1206
1207  switch (code)
1208    {
1209    case BIT_IOR_EXPR:
1210      low = int1l | int2l, hi = int1h | int2h;
1211      break;
1212
1213    case BIT_XOR_EXPR:
1214      low = int1l ^ int2l, hi = int1h ^ int2h;
1215      break;
1216
1217    case BIT_AND_EXPR:
1218      low = int1l & int2l, hi = int1h & int2h;
1219      break;
1220
1221    case RSHIFT_EXPR:
1222      int2l = -int2l;
1223    case LSHIFT_EXPR:
1224      /* It's unclear from the C standard whether shifts can overflow.
1225	 The following code ignores overflow; perhaps a C standard
1226	 interpretation ruling is needed.  */
1227      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1228		     &low, &hi, !uns);
1229      no_overflow = 1;
1230      break;
1231
1232    case RROTATE_EXPR:
1233      int2l = - int2l;
1234    case LROTATE_EXPR:
1235      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1236		      &low, &hi);
1237      break;
1238
1239    case PLUS_EXPR:
1240      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1241      break;
1242
1243    case MINUS_EXPR:
1244      neg_double (int2l, int2h, &low, &hi);
1245      add_double (int1l, int1h, low, hi, &low, &hi);
1246      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1247      break;
1248
1249    case MULT_EXPR:
1250      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1251      break;
1252
1253    case TRUNC_DIV_EXPR:
1254    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1255    case EXACT_DIV_EXPR:
1256      /* This is a shortcut for a common special case.  */
1257      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1258	  && ! TREE_CONSTANT_OVERFLOW (arg1)
1259	  && ! TREE_CONSTANT_OVERFLOW (arg2)
1260	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1261	{
1262	  if (code == CEIL_DIV_EXPR)
1263	    int1l += int2l - 1;
1264
1265	  low = int1l / int2l, hi = 0;
1266	  break;
1267	}
1268
1269      /* ... fall through ...  */
1270
1271    case ROUND_DIV_EXPR:
1272      if (int2h == 0 && int2l == 1)
1273	{
1274	  low = int1l, hi = int1h;
1275	  break;
1276	}
1277      if (int1l == int2l && int1h == int2h
1278	  && ! (int1l == 0 && int1h == 0))
1279	{
1280	  low = 1, hi = 0;
1281	  break;
1282	}
1283      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1284				       &low, &hi, &garbagel, &garbageh);
1285      break;
1286
1287    case TRUNC_MOD_EXPR:
1288    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1289      /* This is a shortcut for a common special case.  */
1290      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1291	  && ! TREE_CONSTANT_OVERFLOW (arg1)
1292	  && ! TREE_CONSTANT_OVERFLOW (arg2)
1293	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1294	{
1295	  if (code == CEIL_MOD_EXPR)
1296	    int1l += int2l - 1;
1297	  low = int1l % int2l, hi = 0;
1298	  break;
1299	}
1300
1301      /* ... fall through ...  */
1302
1303    case ROUND_MOD_EXPR:
1304      overflow = div_and_round_double (code, uns,
1305				       int1l, int1h, int2l, int2h,
1306				       &garbagel, &garbageh, &low, &hi);
1307      break;
1308
1309    case MIN_EXPR:
1310    case MAX_EXPR:
1311      if (uns)
1312	low = (((unsigned HOST_WIDE_INT) int1h
1313		< (unsigned HOST_WIDE_INT) int2h)
1314	       || (((unsigned HOST_WIDE_INT) int1h
1315		    == (unsigned HOST_WIDE_INT) int2h)
1316		   && int1l < int2l));
1317      else
1318	low = (int1h < int2h
1319	       || (int1h == int2h && int1l < int2l));
1320
1321      if (low == (code == MIN_EXPR))
1322	low = int1l, hi = int1h;
1323      else
1324	low = int2l, hi = int2h;
1325      break;
1326
1327    default:
1328      abort ();
1329    }
1330
1331  /* If this is for a sizetype, can be represented as one (signed)
1332     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
1333     constants.  */
1334  if (is_sizetype
1335      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
1336	  || (hi == -1 && (HOST_WIDE_INT) low < 0))
1337      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
1338    return size_int_type_wide (low, type);
1339  else
1340    {
1341      t = build_int_2 (low, hi);
1342      TREE_TYPE (t) = TREE_TYPE (arg1);
1343    }
1344
1345  TREE_OVERFLOW (t)
1346    = ((notrunc
1347	? (!uns || is_sizetype) && overflow
1348	: (force_fit_type (t, (!uns || is_sizetype) && overflow)
1349	   && ! no_overflow))
1350       | TREE_OVERFLOW (arg1)
1351       | TREE_OVERFLOW (arg2));
1352
1353  /* If we're doing a size calculation, unsigned arithmetic does overflow.
1354     So check if force_fit_type truncated the value.  */
1355  if (is_sizetype
1356      && ! TREE_OVERFLOW (t)
1357      && (TREE_INT_CST_HIGH (t) != hi
1358	  || TREE_INT_CST_LOW (t) != low))
1359    TREE_OVERFLOW (t) = 1;
1360
1361  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1362				| TREE_CONSTANT_OVERFLOW (arg1)
1363				| TREE_CONSTANT_OVERFLOW (arg2));
1364  return t;
1365}
1366
1367/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1368   constant.  We assume ARG1 and ARG2 have the same data type, or at least
1369   are the same kind of constant and the same machine mode.
1370
1371   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1372
1373static tree
1374const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1375{
1376  STRIP_NOPS (arg1);
1377  STRIP_NOPS (arg2);
1378
1379  if (TREE_CODE (arg1) == INTEGER_CST)
1380    return int_const_binop (code, arg1, arg2, notrunc);
1381
1382  if (TREE_CODE (arg1) == REAL_CST)
1383    {
1384      enum machine_mode mode;
1385      REAL_VALUE_TYPE d1;
1386      REAL_VALUE_TYPE d2;
1387      REAL_VALUE_TYPE value;
1388      tree t, type;
1389
1390      d1 = TREE_REAL_CST (arg1);
1391      d2 = TREE_REAL_CST (arg2);
1392
1393      type = TREE_TYPE (arg1);
1394      mode = TYPE_MODE (type);
1395
1396      /* Don't perform operation if we honor signaling NaNs and
1397	 either operand is a NaN.  */
1398      if (HONOR_SNANS (mode)
1399	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1400	return NULL_TREE;
1401
1402      /* Don't perform operation if it would raise a division
1403	 by zero exception.  */
1404      if (code == RDIV_EXPR
1405	  && REAL_VALUES_EQUAL (d2, dconst0)
1406	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1407	return NULL_TREE;
1408
1409      /* If either operand is a NaN, just return it.  Otherwise, set up
1410	 for floating-point trap; we return an overflow.  */
1411      if (REAL_VALUE_ISNAN (d1))
1412	return arg1;
1413      else if (REAL_VALUE_ISNAN (d2))
1414	return arg2;
1415
1416      REAL_ARITHMETIC (value, code, d1, d2);
1417
1418      t = build_real (type, real_value_truncate (mode, value));
1419
1420      TREE_OVERFLOW (t)
1421	= (force_fit_type (t, 0)
1422	   | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1423      TREE_CONSTANT_OVERFLOW (t)
1424	= TREE_OVERFLOW (t)
1425	  | TREE_CONSTANT_OVERFLOW (arg1)
1426	  | TREE_CONSTANT_OVERFLOW (arg2);
1427      return t;
1428    }
1429  if (TREE_CODE (arg1) == COMPLEX_CST)
1430    {
1431      tree type = TREE_TYPE (arg1);
1432      tree r1 = TREE_REALPART (arg1);
1433      tree i1 = TREE_IMAGPART (arg1);
1434      tree r2 = TREE_REALPART (arg2);
1435      tree i2 = TREE_IMAGPART (arg2);
1436      tree t;
1437
1438      switch (code)
1439	{
1440	case PLUS_EXPR:
1441	  t = build_complex (type,
1442			     const_binop (PLUS_EXPR, r1, r2, notrunc),
1443			     const_binop (PLUS_EXPR, i1, i2, notrunc));
1444	  break;
1445
1446	case MINUS_EXPR:
1447	  t = build_complex (type,
1448			     const_binop (MINUS_EXPR, r1, r2, notrunc),
1449			     const_binop (MINUS_EXPR, i1, i2, notrunc));
1450	  break;
1451
1452	case MULT_EXPR:
1453	  t = build_complex (type,
1454			     const_binop (MINUS_EXPR,
1455					  const_binop (MULT_EXPR,
1456						       r1, r2, notrunc),
1457					  const_binop (MULT_EXPR,
1458						       i1, i2, notrunc),
1459					  notrunc),
1460			     const_binop (PLUS_EXPR,
1461					  const_binop (MULT_EXPR,
1462						       r1, i2, notrunc),
1463					  const_binop (MULT_EXPR,
1464						       i1, r2, notrunc),
1465					  notrunc));
1466	  break;
1467
1468	case RDIV_EXPR:
1469	  {
1470	    tree magsquared
1471	      = const_binop (PLUS_EXPR,
1472			     const_binop (MULT_EXPR, r2, r2, notrunc),
1473			     const_binop (MULT_EXPR, i2, i2, notrunc),
1474			     notrunc);
1475
1476	    t = build_complex (type,
1477			       const_binop
1478			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1479				? TRUNC_DIV_EXPR : RDIV_EXPR,
1480				const_binop (PLUS_EXPR,
1481					     const_binop (MULT_EXPR, r1, r2,
1482							  notrunc),
1483					     const_binop (MULT_EXPR, i1, i2,
1484							  notrunc),
1485					     notrunc),
1486				magsquared, notrunc),
1487			       const_binop
1488			       (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1489				? TRUNC_DIV_EXPR : RDIV_EXPR,
1490				const_binop (MINUS_EXPR,
1491					     const_binop (MULT_EXPR, i1, r2,
1492							  notrunc),
1493					     const_binop (MULT_EXPR, r1, i2,
1494							  notrunc),
1495					     notrunc),
1496				magsquared, notrunc));
1497	  }
1498	  break;
1499
1500	default:
1501	  abort ();
1502	}
1503      return t;
1504    }
1505  return 0;
1506}
1507
1508/* These are the hash table functions for the hash table of INTEGER_CST
1509   nodes of a sizetype.  */
1510
1511/* Return the hash code code X, an INTEGER_CST.  */
1512
1513static hashval_t
1514size_htab_hash (const void *x)
1515{
1516  tree t = (tree) x;
1517
1518  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
1519	  ^ htab_hash_pointer (TREE_TYPE (t))
1520	  ^ (TREE_OVERFLOW (t) << 20));
1521}
1522
1523/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
1524   is the same as that given by *Y, which is the same.  */
1525
1526static int
1527size_htab_eq (const void *x, const void *y)
1528{
1529  tree xt = (tree) x;
1530  tree yt = (tree) y;
1531
1532  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
1533	  && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
1534	  && TREE_TYPE (xt) == TREE_TYPE (yt)
1535	  && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
1536}
1537
1538/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
1539   bits are given by NUMBER and of the sizetype represented by KIND.  */
1540
1541tree
1542size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
1543{
1544  return size_int_type_wide (number, sizetype_tab[(int) kind]);
1545}
1546
1547/* Likewise, but the desired type is specified explicitly.  */
1548
1549static GTY (()) tree new_const;
1550static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
1551     htab_t size_htab;
1552
1553tree
1554size_int_type_wide (HOST_WIDE_INT number, tree type)
1555{
1556  void **slot;
1557
1558  if (size_htab == 0)
1559    {
1560      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
1561      new_const = make_node (INTEGER_CST);
1562    }
1563
1564  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
1565     hash table, we return the value from the hash table.  Otherwise, we
1566     place that in the hash table and make a new node for the next time.  */
1567  TREE_INT_CST_LOW (new_const) = number;
1568  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
1569  TREE_TYPE (new_const) = type;
1570  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
1571    = force_fit_type (new_const, 0);
1572
1573  slot = htab_find_slot (size_htab, new_const, INSERT);
1574  if (*slot == 0)
1575    {
1576      tree t = new_const;
1577
1578      *slot = new_const;
1579      new_const = make_node (INTEGER_CST);
1580      return t;
1581    }
1582  else
1583    return (tree) *slot;
1584}
1585
1586/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1587   is a tree code.  The type of the result is taken from the operands.
1588   Both must be the same type integer type and it must be a size type.
1589   If the operands are constant, so is the result.  */
1590
1591tree
1592size_binop (enum tree_code code, tree arg0, tree arg1)
1593{
1594  tree type = TREE_TYPE (arg0);
1595
1596  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1597      || type != TREE_TYPE (arg1))
1598    abort ();
1599
1600  /* Handle the special case of two integer constants faster.  */
1601  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1602    {
1603      /* And some specific cases even faster than that.  */
1604      if (code == PLUS_EXPR && integer_zerop (arg0))
1605	return arg1;
1606      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1607	       && integer_zerop (arg1))
1608	return arg0;
1609      else if (code == MULT_EXPR && integer_onep (arg0))
1610	return arg1;
1611
1612      /* Handle general case of two integer constants.  */
1613      return int_const_binop (code, arg0, arg1, 0);
1614    }
1615
1616  if (arg0 == error_mark_node || arg1 == error_mark_node)
1617    return error_mark_node;
1618
1619  return fold (build (code, type, arg0, arg1));
1620}
1621
1622/* Given two values, either both of sizetype or both of bitsizetype,
1623   compute the difference between the two values.  Return the value
1624   in signed type corresponding to the type of the operands.  */
1625
1626tree
1627size_diffop (tree arg0, tree arg1)
1628{
1629  tree type = TREE_TYPE (arg0);
1630  tree ctype;
1631
1632  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
1633      || type != TREE_TYPE (arg1))
1634    abort ();
1635
1636  /* If the type is already signed, just do the simple thing.  */
1637  if (! TREE_UNSIGNED (type))
1638    return size_binop (MINUS_EXPR, arg0, arg1);
1639
1640  ctype = (type == bitsizetype || type == ubitsizetype
1641	   ? sbitsizetype : ssizetype);
1642
1643  /* If either operand is not a constant, do the conversions to the signed
1644     type and subtract.  The hardware will do the right thing with any
1645     overflow in the subtraction.  */
1646  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1647    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1648		       fold_convert (ctype, arg1));
1649
1650  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1651     Otherwise, subtract the other way, convert to CTYPE (we know that can't
1652     overflow) and negate (which can't either).  Special-case a result
1653     of zero while we're here.  */
1654  if (tree_int_cst_equal (arg0, arg1))
1655    return fold_convert (ctype, integer_zero_node);
1656  else if (tree_int_cst_lt (arg1, arg0))
1657    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1658  else
1659    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1660		       fold_convert (ctype, size_binop (MINUS_EXPR,
1661							arg1, arg0)));
1662}
1663
1664
1665/* Attempt to fold type conversion operation CODE of expression ARG1 to
1666   type TYPE.  If no simplification can be done return NULL_TREE.  */
1667
1668static tree
1669fold_convert_const (enum tree_code code ATTRIBUTE_UNUSED, tree type,
1670		    tree arg1)
1671{
1672  int overflow = 0;
1673  tree t;
1674
1675  if (TREE_TYPE (arg1) == type)
1676    return arg1;
1677
1678  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1679    {
1680      if (TREE_CODE (arg1) == INTEGER_CST)
1681	{
1682	  /* If we would build a constant wider than GCC supports,
1683	     leave the conversion unfolded.  */
1684	  if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1685	    return NULL_TREE;
1686
1687	  /* If we are trying to make a sizetype for a small integer, use
1688	     size_int to pick up cached types to reduce duplicate nodes.  */
1689	  if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1690	      && !TREE_CONSTANT_OVERFLOW (arg1)
1691	      && compare_tree_int (arg1, 10000) < 0)
1692	    return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);
1693
1694	  /* Given an integer constant, make new constant with new type,
1695	     appropriately sign-extended or truncated.  */
1696	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
1697			   TREE_INT_CST_HIGH (arg1));
1698	  TREE_TYPE (t) = type;
1699	  /* Indicate an overflow if (1) ARG1 already overflowed,
1700	     or (2) force_fit_type indicates an overflow.
1701	     Tell force_fit_type that an overflow has already occurred
1702	     if ARG1 is a too-large unsigned value and T is signed.
1703	     But don't indicate an overflow if converting a pointer.  */
1704	  TREE_OVERFLOW (t)
1705	    = ((force_fit_type (t,
1706				(TREE_INT_CST_HIGH (arg1) < 0
1707				 && (TREE_UNSIGNED (type)
1708				    < TREE_UNSIGNED (TREE_TYPE (arg1)))))
1709		&& ! POINTER_TYPE_P (TREE_TYPE (arg1)))
1710	       || TREE_OVERFLOW (arg1));
1711	  TREE_CONSTANT_OVERFLOW (t)
1712	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1713	  return t;
1714	}
1715      else if (TREE_CODE (arg1) == REAL_CST)
1716	{
1717	  /* The following code implements the floating point to integer
1718	     conversion rules required by the Java Language Specification,
1719	     that IEEE NaNs are mapped to zero and values that overflow
1720	     the target precision saturate, i.e. values greater than
1721	     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1722	     are mapped to INT_MIN.  These semantics are allowed by the
1723	     C and C++ standards that simply state that the behavior of
1724	     FP-to-integer conversion is unspecified upon overflow.  */
1725
1726	  HOST_WIDE_INT high, low;
1727
1728	  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1729	  /* If x is NaN, return zero and show we have an overflow.  */
1730	  if (REAL_VALUE_ISNAN (x))
1731	    {
1732	      overflow = 1;
1733	      high = 0;
1734	      low = 0;
1735	    }
1736
1737	  /* See if X will be in range after truncation towards 0.
1738	     To compensate for truncation, move the bounds away from 0,
1739	     but reject if X exactly equals the adjusted bounds.  */
1740
1741	  if (! overflow)
1742	    {
1743	      tree lt = TYPE_MIN_VALUE (type);
1744	      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1745	      REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1746	      if (! REAL_VALUES_LESS (l, x))
1747		{
1748		  overflow = 1;
1749		  high = TREE_INT_CST_HIGH (lt);
1750		  low = TREE_INT_CST_LOW (lt);
1751		}
1752	    }
1753
1754	  if (! overflow)
1755	    {
1756	      tree ut = TYPE_MAX_VALUE (type);
1757	      if (ut)
1758		{
1759		  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1760		  REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1761		  if (! REAL_VALUES_LESS (x, u))
1762		    {
1763		      overflow = 1;
1764		      high = TREE_INT_CST_HIGH (ut);
1765		      low = TREE_INT_CST_LOW (ut);
1766		    }
1767		}
1768	    }
1769
1770	  if (! overflow)
1771	    REAL_VALUE_TO_INT (&low, &high, x);
1772
1773	  t = build_int_2 (low, high);
1774	  TREE_TYPE (t) = type;
1775	  TREE_OVERFLOW (t)
1776	    = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1777	  TREE_CONSTANT_OVERFLOW (t)
1778	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1779	  return t;
1780	}
1781    }
1782  else if (TREE_CODE (type) == REAL_TYPE)
1783    {
1784      if (TREE_CODE (arg1) == INTEGER_CST)
1785	return build_real_from_int_cst (type, arg1);
1786      if (TREE_CODE (arg1) == REAL_CST)
1787	{
1788	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1789	    {
1790	      /* We make a copy of ARG1 so that we don't modify an
1791		 existing constant tree.  */
1792	      t = copy_node (arg1);
1793	      TREE_TYPE (t) = type;
1794	      return t;
1795	    }
1796
1797	  t = build_real (type,
1798			  real_value_truncate (TYPE_MODE (type),
1799					       TREE_REAL_CST (arg1)));
1800
1801	  TREE_OVERFLOW (t)
1802	    = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
1803	  TREE_CONSTANT_OVERFLOW (t)
1804	    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1805	  return t;
1806	}
1807    }
1808  return NULL_TREE;
1809}
1810
1811/* Convert expression ARG to type TYPE.  Used by the middle-end for
1812   simple conversions in preference to calling the front-end's convert.  */
1813
1814static tree
1815fold_convert (tree type, tree arg)
1816{
1817  tree orig = TREE_TYPE (arg);
1818  tree tem;
1819
1820  if (type == orig)
1821    return arg;
1822
1823  if (TREE_CODE (arg) == ERROR_MARK
1824      || TREE_CODE (type) == ERROR_MARK
1825      || TREE_CODE (orig) == ERROR_MARK)
1826    return error_mark_node;
1827
1828  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1829    return fold (build1 (NOP_EXPR, type, arg));
1830
1831  if (INTEGRAL_TYPE_P (type) || POINTER_TYPE_P (type)
1832      || TREE_CODE (type) == OFFSET_TYPE)
1833    {
1834      if (TREE_CODE (arg) == INTEGER_CST)
1835	{
1836	  tem = fold_convert_const (NOP_EXPR, type, arg);
1837	  if (tem != NULL_TREE)
1838	    return tem;
1839	}
1840      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1841	  || TREE_CODE (orig) == OFFSET_TYPE)
1842        return fold (build1 (NOP_EXPR, type, arg));
1843      if (TREE_CODE (orig) == COMPLEX_TYPE)
1844	{
1845	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1846	  return fold_convert (type, tem);
1847	}
1848      if (TREE_CODE (orig) == VECTOR_TYPE
1849	  && GET_MODE_SIZE (TYPE_MODE (type))
1850	     == GET_MODE_SIZE (TYPE_MODE (orig)))
1851	return fold (build1 (NOP_EXPR, type, arg));
1852    }
1853  else if (TREE_CODE (type) == REAL_TYPE)
1854    {
1855      if (TREE_CODE (arg) == INTEGER_CST)
1856	{
1857	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
1858	  if (tem != NULL_TREE)
1859	    return tem;
1860	}
1861      else if (TREE_CODE (arg) == REAL_CST)
1862	{
1863	  tem = fold_convert_const (NOP_EXPR, type, arg);
1864	  if (tem != NULL_TREE)
1865	    return tem;
1866	}
1867
1868      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1869        return fold (build1 (FLOAT_EXPR, type, arg));
1870      if (TREE_CODE (orig) == REAL_TYPE)
1871	return fold (build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1872			     type, arg));
1873      if (TREE_CODE (orig) == COMPLEX_TYPE)
1874	{
1875	  tem = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1876	  return fold_convert (type, tem);
1877	}
1878    }
1879  else if (TREE_CODE (type) == COMPLEX_TYPE)
1880    {
1881      if (INTEGRAL_TYPE_P (orig)
1882	  || POINTER_TYPE_P (orig)
1883	  || TREE_CODE (orig) == REAL_TYPE)
1884	return build (COMPLEX_EXPR, type,
1885		      fold_convert (TREE_TYPE (type), arg),
1886		      fold_convert (TREE_TYPE (type), integer_zero_node));
1887      if (TREE_CODE (orig) == COMPLEX_TYPE)
1888	{
1889	  tree rpart, ipart;
1890
1891	  if (TREE_CODE (arg) == COMPLEX_EXPR)
1892	    {
1893	      rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
1894	      ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
1895	      return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1896	    }
1897
1898	  arg = save_expr (arg);
1899	  rpart = fold (build1 (REALPART_EXPR, TREE_TYPE (orig), arg));
1900	  ipart = fold (build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg));
1901	  rpart = fold_convert (TREE_TYPE (type), rpart);
1902	  ipart = fold_convert (TREE_TYPE (type), ipart);
1903	  return fold (build (COMPLEX_EXPR, type, rpart, ipart));
1904	}
1905    }
1906  else if (TREE_CODE (type) == VECTOR_TYPE)
1907    {
1908      if ((INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig))
1909	  && GET_MODE_SIZE (TYPE_MODE (type))
1910	     == GET_MODE_SIZE (TYPE_MODE (orig)))
1911	return fold (build1 (NOP_EXPR, type, arg));
1912      if (TREE_CODE (orig) == VECTOR_TYPE
1913	  && GET_MODE_SIZE (TYPE_MODE (type))
1914	     == GET_MODE_SIZE (TYPE_MODE (orig)))
1915	return fold (build1 (NOP_EXPR, type, arg));
1916    }
1917  else if (VOID_TYPE_P (type))
1918    return fold (build1 (CONVERT_EXPR, type, arg));
1919  abort ();
1920}
1921
1922/* Return an expr equal to X but certainly not valid as an lvalue.  */
1923
1924tree
1925non_lvalue (tree x)
1926{
1927  tree result;
1928
1929  /* These things are certainly not lvalues.  */
1930  if (TREE_CODE (x) == NON_LVALUE_EXPR
1931      || TREE_CODE (x) == INTEGER_CST
1932      || TREE_CODE (x) == REAL_CST
1933      || TREE_CODE (x) == STRING_CST
1934      || TREE_CODE (x) == ADDR_EXPR)
1935    return x;
1936
1937  result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1938  TREE_CONSTANT (result) = TREE_CONSTANT (x);
1939  return result;
1940}
1941
1942/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1943   Zero means allow extended lvalues.  */
1944
1945int pedantic_lvalues;
1946
1947/* When pedantic, return an expr equal to X but certainly not valid as a
1948   pedantic lvalue.  Otherwise, return X.  */
1949
1950tree
1951pedantic_non_lvalue (tree x)
1952{
1953  if (pedantic_lvalues)
1954    return non_lvalue (x);
1955  else
1956    return x;
1957}
1958
1959/* Given a tree comparison code, return the code that is the logical inverse
1960   of the given code.  It is not safe to do this for floating-point
1961   comparisons, except for NE_EXPR and EQ_EXPR.  */
1962
1963static enum tree_code
1964invert_tree_comparison (enum tree_code code)
1965{
1966  switch (code)
1967    {
1968    case EQ_EXPR:
1969      return NE_EXPR;
1970    case NE_EXPR:
1971      return EQ_EXPR;
1972    case GT_EXPR:
1973      return LE_EXPR;
1974    case GE_EXPR:
1975      return LT_EXPR;
1976    case LT_EXPR:
1977      return GE_EXPR;
1978    case LE_EXPR:
1979      return GT_EXPR;
1980    default:
1981      abort ();
1982    }
1983}
1984
1985/* Similar, but return the comparison that results if the operands are
1986   swapped.  This is safe for floating-point.  */
1987
1988static enum tree_code
1989swap_tree_comparison (enum tree_code code)
1990{
1991  switch (code)
1992    {
1993    case EQ_EXPR:
1994    case NE_EXPR:
1995      return code;
1996    case GT_EXPR:
1997      return LT_EXPR;
1998    case GE_EXPR:
1999      return LE_EXPR;
2000    case LT_EXPR:
2001      return GT_EXPR;
2002    case LE_EXPR:
2003      return GE_EXPR;
2004    default:
2005      abort ();
2006    }
2007}
2008
2009
2010/* Convert a comparison tree code from an enum tree_code representation
2011   into a compcode bit-based encoding.  This function is the inverse of
2012   compcode_to_comparison.  */
2013
2014static int
2015comparison_to_compcode (enum tree_code code)
2016{
2017  switch (code)
2018    {
2019    case LT_EXPR:
2020      return COMPCODE_LT;
2021    case EQ_EXPR:
2022      return COMPCODE_EQ;
2023    case LE_EXPR:
2024      return COMPCODE_LE;
2025    case GT_EXPR:
2026      return COMPCODE_GT;
2027    case NE_EXPR:
2028      return COMPCODE_NE;
2029    case GE_EXPR:
2030      return COMPCODE_GE;
2031    default:
2032      abort ();
2033    }
2034}
2035
2036/* Convert a compcode bit-based encoding of a comparison operator back
2037   to GCC's enum tree_code representation.  This function is the
2038   inverse of comparison_to_compcode.  */
2039
2040static enum tree_code
2041compcode_to_comparison (int code)
2042{
2043  switch (code)
2044    {
2045    case COMPCODE_LT:
2046      return LT_EXPR;
2047    case COMPCODE_EQ:
2048      return EQ_EXPR;
2049    case COMPCODE_LE:
2050      return LE_EXPR;
2051    case COMPCODE_GT:
2052      return GT_EXPR;
2053    case COMPCODE_NE:
2054      return NE_EXPR;
2055    case COMPCODE_GE:
2056      return GE_EXPR;
2057    default:
2058      abort ();
2059    }
2060}
2061
2062/* Return nonzero if CODE is a tree code that represents a truth value.  */
2063
2064static int
2065truth_value_p (enum tree_code code)
2066{
2067  return (TREE_CODE_CLASS (code) == '<'
2068	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2069	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2070	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2071}
2072
2073/* Return nonzero if two operands (typically of the same tree node)
2074   are necessarily equal.  If either argument has side-effects this
2075   function returns zero.
2076
2077   If ONLY_CONST is nonzero, only return nonzero for constants.
2078   This function tests whether the operands are indistinguishable;
2079   it does not test whether they are equal using C's == operation.
2080   The distinction is important for IEEE floating point, because
2081   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2082   (2) two NaNs may be indistinguishable, but NaN!=NaN.
2083
2084   If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2085   even though it may hold multiple values during a function.
2086   This is because a GCC tree node guarantees that nothing else is
2087   executed between the evaluation of its "operands" (which may often
2088   be evaluated in arbitrary order).  Hence if the operands themselves
2089   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2090   same value in each operand/subexpression.  Hence a zero value for
2091   ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2092   If comparing arbitrary expression trees, such as from different
2093   statements, ONLY_CONST must usually be nonzero.  */
2094
2095int
2096operand_equal_p (tree arg0, tree arg1, int only_const)
2097{
2098  tree fndecl;
2099
2100  /* If both types don't have the same signedness, then we can't consider
2101     them equal.  We must check this before the STRIP_NOPS calls
2102     because they may change the signedness of the arguments.  */
2103  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2104    return 0;
2105
2106  STRIP_NOPS (arg0);
2107  STRIP_NOPS (arg1);
2108
2109  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2110      /* This is needed for conversions and for COMPONENT_REF.
2111	 Might as well play it safe and always test this.  */
2112      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2113      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2114      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2115    return 0;
2116
2117  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2118     We don't care about side effects in that case because the SAVE_EXPR
2119     takes care of that for us. In all other cases, two expressions are
2120     equal if they have no side effects.  If we have two identical
2121     expressions with side effects that should be treated the same due
2122     to the only side effects being identical SAVE_EXPR's, that will
2123     be detected in the recursive calls below.  */
2124  if (arg0 == arg1 && ! only_const
2125      && (TREE_CODE (arg0) == SAVE_EXPR
2126	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2127    return 1;
2128
2129  /* Next handle constant cases, those for which we can return 1 even
2130     if ONLY_CONST is set.  */
2131  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2132    switch (TREE_CODE (arg0))
2133      {
2134      case INTEGER_CST:
2135	return (! TREE_CONSTANT_OVERFLOW (arg0)
2136		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2137		&& tree_int_cst_equal (arg0, arg1));
2138
2139      case REAL_CST:
2140	return (! TREE_CONSTANT_OVERFLOW (arg0)
2141		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2142		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2143					  TREE_REAL_CST (arg1)));
2144
2145      case VECTOR_CST:
2146	{
2147	  tree v1, v2;
2148
2149	  if (TREE_CONSTANT_OVERFLOW (arg0)
2150	      || TREE_CONSTANT_OVERFLOW (arg1))
2151	    return 0;
2152
2153	  v1 = TREE_VECTOR_CST_ELTS (arg0);
2154	  v2 = TREE_VECTOR_CST_ELTS (arg1);
2155	  while (v1 && v2)
2156	    {
2157	      if (!operand_equal_p (v1, v2, only_const))
2158		return 0;
2159	      v1 = TREE_CHAIN (v1);
2160	      v2 = TREE_CHAIN (v2);
2161	    }
2162
2163	  return 1;
2164	}
2165
2166      case COMPLEX_CST:
2167	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2168				 only_const)
2169		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2170				    only_const));
2171
2172      case STRING_CST:
2173	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2174		&& ! memcmp (TREE_STRING_POINTER (arg0),
2175			      TREE_STRING_POINTER (arg1),
2176			      TREE_STRING_LENGTH (arg0)));
2177
2178      case ADDR_EXPR:
2179	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2180				0);
2181      default:
2182	break;
2183      }
2184
2185  if (only_const)
2186    return 0;
2187
2188  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2189    {
2190    case '1':
2191      /* Two conversions are equal only if signedness and modes match.  */
2192      switch (TREE_CODE (arg0))
2193        {
2194        case NOP_EXPR:
2195        case CONVERT_EXPR:
2196        case FIX_CEIL_EXPR:
2197        case FIX_TRUNC_EXPR:
2198        case FIX_FLOOR_EXPR:
2199        case FIX_ROUND_EXPR:
2200	  if (TREE_UNSIGNED (TREE_TYPE (arg0))
2201	      != TREE_UNSIGNED (TREE_TYPE (arg1)))
2202	    return 0;
2203	  break;
2204	default:
2205	  break;
2206	}
2207
2208      return operand_equal_p (TREE_OPERAND (arg0, 0),
2209			      TREE_OPERAND (arg1, 0), 0);
2210
2211    case '<':
2212    case '2':
2213      if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2214	  && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2215			      0))
2216	return 1;
2217
2218      /* For commutative ops, allow the other order.  */
2219      return ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MULT_EXPR
2220	       || TREE_CODE (arg0) == MIN_EXPR || TREE_CODE (arg0) == MAX_EXPR
2221	       || TREE_CODE (arg0) == BIT_IOR_EXPR
2222	       || TREE_CODE (arg0) == BIT_XOR_EXPR
2223	       || TREE_CODE (arg0) == BIT_AND_EXPR
2224	       || TREE_CODE (arg0) == NE_EXPR || TREE_CODE (arg0) == EQ_EXPR)
2225	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2226				  TREE_OPERAND (arg1, 1), 0)
2227	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2228				  TREE_OPERAND (arg1, 0), 0));
2229
2230    case 'r':
2231      /* If either of the pointer (or reference) expressions we are
2232	 dereferencing contain a side effect, these cannot be equal.  */
2233      if (TREE_SIDE_EFFECTS (arg0)
2234	  || TREE_SIDE_EFFECTS (arg1))
2235	return 0;
2236
2237      switch (TREE_CODE (arg0))
2238	{
2239	case INDIRECT_REF:
2240	  return operand_equal_p (TREE_OPERAND (arg0, 0),
2241				  TREE_OPERAND (arg1, 0), 0);
2242
2243	case COMPONENT_REF:
2244	case ARRAY_REF:
2245	case ARRAY_RANGE_REF:
2246	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2247				   TREE_OPERAND (arg1, 0), 0)
2248		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2249				      TREE_OPERAND (arg1, 1), 0));
2250
2251	case BIT_FIELD_REF:
2252	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2253				   TREE_OPERAND (arg1, 0), 0)
2254		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2255				      TREE_OPERAND (arg1, 1), 0)
2256		  && operand_equal_p (TREE_OPERAND (arg0, 2),
2257				      TREE_OPERAND (arg1, 2), 0));
2258	default:
2259	  return 0;
2260	}
2261
2262    case 'e':
2263      switch (TREE_CODE (arg0))
2264	{
2265	case ADDR_EXPR:
2266	case TRUTH_NOT_EXPR:
2267	  return operand_equal_p (TREE_OPERAND (arg0, 0),
2268				  TREE_OPERAND (arg1, 0), 0);
2269
2270	case RTL_EXPR:
2271	  return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2272
2273	case CALL_EXPR:
2274	  /* If the CALL_EXPRs call different functions, then they
2275	     clearly can not be equal.  */
2276	  if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2277				 TREE_OPERAND (arg1, 0), 0))
2278	    return 0;
2279
2280	  /* Only consider const functions equivalent.  */
2281	  fndecl = get_callee_fndecl (arg0);
2282	  if (fndecl == NULL_TREE
2283	      || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2284	    return 0;
2285
2286	  /* Now see if all the arguments are the same.  operand_equal_p
2287	     does not handle TREE_LIST, so we walk the operands here
2288	     feeding them to operand_equal_p.  */
2289	  arg0 = TREE_OPERAND (arg0, 1);
2290	  arg1 = TREE_OPERAND (arg1, 1);
2291	  while (arg0 && arg1)
2292	    {
2293	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2294		return 0;
2295
2296	      arg0 = TREE_CHAIN (arg0);
2297	      arg1 = TREE_CHAIN (arg1);
2298	    }
2299
2300	  /* If we get here and both argument lists are exhausted
2301	     then the CALL_EXPRs are equal.  */
2302	  return ! (arg0 || arg1);
2303
2304	default:
2305	  return 0;
2306	}
2307
2308    case 'd':
2309	/* Consider __builtin_sqrt equal to sqrt.  */
2310	return TREE_CODE (arg0) == FUNCTION_DECL
2311	       && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2312	       && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2313	       && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2314
2315    default:
2316      return 0;
2317    }
2318}
2319
2320/* Similar to operand_equal_p, but see if ARG0 might have been made by
2321   shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2322
2323   When in doubt, return 0.  */
2324
2325static int
2326operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2327{
2328  int unsignedp1, unsignedpo;
2329  tree primarg0, primarg1, primother;
2330  unsigned int correct_width;
2331
2332  if (operand_equal_p (arg0, arg1, 0))
2333    return 1;
2334
2335  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2336      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2337    return 0;
2338
2339  /* Discard any conversions that don't change the modes of ARG0 and ARG1
2340     and see if the inner values are the same.  This removes any
2341     signedness comparison, which doesn't matter here.  */
2342  primarg0 = arg0, primarg1 = arg1;
2343  STRIP_NOPS (primarg0);
2344  STRIP_NOPS (primarg1);
2345  if (operand_equal_p (primarg0, primarg1, 0))
2346    return 1;
2347
2348  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2349     actual comparison operand, ARG0.
2350
2351     First throw away any conversions to wider types
2352     already present in the operands.  */
2353
2354  primarg1 = get_narrower (arg1, &unsignedp1);
2355  primother = get_narrower (other, &unsignedpo);
2356
2357  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2358  if (unsignedp1 == unsignedpo
2359      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2360      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2361    {
2362      tree type = TREE_TYPE (arg0);
2363
2364      /* Make sure shorter operand is extended the right way
2365	 to match the longer operand.  */
2366      primarg1 = fold_convert ((*lang_hooks.types.signed_or_unsigned_type)
2367			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2368
2369      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2370	return 1;
2371    }
2372
2373  return 0;
2374}
2375
2376/* See if ARG is an expression that is either a comparison or is performing
2377   arithmetic on comparisons.  The comparisons must only be comparing
2378   two different values, which will be stored in *CVAL1 and *CVAL2; if
2379   they are nonzero it means that some operands have already been found.
2380   No variables may be used anywhere else in the expression except in the
2381   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2382   the expression and save_expr needs to be called with CVAL1 and CVAL2.
2383
2384   If this is true, return 1.  Otherwise, return zero.  */
2385
2386static int
2387twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2388{
2389  enum tree_code code = TREE_CODE (arg);
2390  char class = TREE_CODE_CLASS (code);
2391
2392  /* We can handle some of the 'e' cases here.  */
2393  if (class == 'e' && code == TRUTH_NOT_EXPR)
2394    class = '1';
2395  else if (class == 'e'
2396	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2397	       || code == COMPOUND_EXPR))
2398    class = '2';
2399
2400  else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2401	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2402    {
2403      /* If we've already found a CVAL1 or CVAL2, this expression is
2404	 two complex to handle.  */
2405      if (*cval1 || *cval2)
2406	return 0;
2407
2408      class = '1';
2409      *save_p = 1;
2410    }
2411
2412  switch (class)
2413    {
2414    case '1':
2415      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2416
2417    case '2':
2418      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2419	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
2420				      cval1, cval2, save_p));
2421
2422    case 'c':
2423      return 1;
2424
2425    case 'e':
2426      if (code == COND_EXPR)
2427	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2428				     cval1, cval2, save_p)
2429		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
2430					cval1, cval2, save_p)
2431		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
2432					cval1, cval2, save_p));
2433      return 0;
2434
2435    case '<':
2436      /* First see if we can handle the first operand, then the second.  For
2437	 the second operand, we know *CVAL1 can't be zero.  It must be that
2438	 one side of the comparison is each of the values; test for the
2439	 case where this isn't true by failing if the two operands
2440	 are the same.  */
2441
2442      if (operand_equal_p (TREE_OPERAND (arg, 0),
2443			   TREE_OPERAND (arg, 1), 0))
2444	return 0;
2445
2446      if (*cval1 == 0)
2447	*cval1 = TREE_OPERAND (arg, 0);
2448      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2449	;
2450      else if (*cval2 == 0)
2451	*cval2 = TREE_OPERAND (arg, 0);
2452      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2453	;
2454      else
2455	return 0;
2456
2457      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2458	;
2459      else if (*cval2 == 0)
2460	*cval2 = TREE_OPERAND (arg, 1);
2461      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2462	;
2463      else
2464	return 0;
2465
2466      return 1;
2467
2468    default:
2469      return 0;
2470    }
2471}
2472
2473/* ARG is a tree that is known to contain just arithmetic operations and
2474   comparisons.  Evaluate the operations in the tree substituting NEW0 for
2475   any occurrence of OLD0 as an operand of a comparison and likewise for
2476   NEW1 and OLD1.  */
2477
2478static tree
2479eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2480{
2481  tree type = TREE_TYPE (arg);
2482  enum tree_code code = TREE_CODE (arg);
2483  char class = TREE_CODE_CLASS (code);
2484
2485  /* We can handle some of the 'e' cases here.  */
2486  if (class == 'e' && code == TRUTH_NOT_EXPR)
2487    class = '1';
2488  else if (class == 'e'
2489	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2490    class = '2';
2491
2492  switch (class)
2493    {
2494    case '1':
2495      return fold (build1 (code, type,
2496			   eval_subst (TREE_OPERAND (arg, 0),
2497				       old0, new0, old1, new1)));
2498
2499    case '2':
2500      return fold (build (code, type,
2501			  eval_subst (TREE_OPERAND (arg, 0),
2502				      old0, new0, old1, new1),
2503			  eval_subst (TREE_OPERAND (arg, 1),
2504				      old0, new0, old1, new1)));
2505
2506    case 'e':
2507      switch (code)
2508	{
2509	case SAVE_EXPR:
2510	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2511
2512	case COMPOUND_EXPR:
2513	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2514
2515	case COND_EXPR:
2516	  return fold (build (code, type,
2517			      eval_subst (TREE_OPERAND (arg, 0),
2518					  old0, new0, old1, new1),
2519			      eval_subst (TREE_OPERAND (arg, 1),
2520					  old0, new0, old1, new1),
2521			      eval_subst (TREE_OPERAND (arg, 2),
2522					  old0, new0, old1, new1)));
2523	default:
2524	  break;
2525	}
2526      /* Fall through - ???  */
2527
2528    case '<':
2529      {
2530	tree arg0 = TREE_OPERAND (arg, 0);
2531	tree arg1 = TREE_OPERAND (arg, 1);
2532
2533	/* We need to check both for exact equality and tree equality.  The
2534	   former will be true if the operand has a side-effect.  In that
2535	   case, we know the operand occurred exactly once.  */
2536
2537	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2538	  arg0 = new0;
2539	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2540	  arg0 = new1;
2541
2542	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2543	  arg1 = new0;
2544	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2545	  arg1 = new1;
2546
2547	return fold (build (code, type, arg0, arg1));
2548      }
2549
2550    default:
2551      return arg;
2552    }
2553}
2554
2555/* Return a tree for the case when the result of an expression is RESULT
2556   converted to TYPE and OMITTED was previously an operand of the expression
2557   but is now not needed (e.g., we folded OMITTED * 0).
2558
2559   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
2560   the conversion of RESULT to TYPE.  */
2561
2562tree
2563omit_one_operand (tree type, tree result, tree omitted)
2564{
2565  tree t = fold_convert (type, result);
2566
2567  if (TREE_SIDE_EFFECTS (omitted))
2568    return build (COMPOUND_EXPR, type, omitted, t);
2569
2570  return non_lvalue (t);
2571}
2572
2573/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
2574
2575static tree
2576pedantic_omit_one_operand (tree type, tree result, tree omitted)
2577{
2578  tree t = fold_convert (type, result);
2579
2580  if (TREE_SIDE_EFFECTS (omitted))
2581    return build (COMPOUND_EXPR, type, omitted, t);
2582
2583  return pedantic_non_lvalue (t);
2584}
2585
2586/* Return a simplified tree node for the truth-negation of ARG.  This
2587   never alters ARG itself.  We assume that ARG is an operation that
2588   returns a truth value (0 or 1).  */
2589
2590tree
2591invert_truthvalue (tree arg)
2592{
2593  tree type = TREE_TYPE (arg);
2594  enum tree_code code = TREE_CODE (arg);
2595
2596  if (code == ERROR_MARK)
2597    return arg;
2598
2599  /* If this is a comparison, we can simply invert it, except for
2600     floating-point non-equality comparisons, in which case we just
2601     enclose a TRUTH_NOT_EXPR around what we have.  */
2602
2603  if (TREE_CODE_CLASS (code) == '<')
2604    {
2605      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2606	  && !flag_unsafe_math_optimizations
2607	  && code != NE_EXPR
2608	  && code != EQ_EXPR)
2609	return build1 (TRUTH_NOT_EXPR, type, arg);
2610      else
2611	return build (invert_tree_comparison (code), type,
2612		      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2613    }
2614
2615  switch (code)
2616    {
2617    case INTEGER_CST:
2618      return fold_convert (type, build_int_2 (integer_zerop (arg), 0));
2619
2620    case TRUTH_AND_EXPR:
2621      return build (TRUTH_OR_EXPR, type,
2622		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2623		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2624
2625    case TRUTH_OR_EXPR:
2626      return build (TRUTH_AND_EXPR, type,
2627		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2628		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2629
2630    case TRUTH_XOR_EXPR:
2631      /* Here we can invert either operand.  We invert the first operand
2632	 unless the second operand is a TRUTH_NOT_EXPR in which case our
2633	 result is the XOR of the first operand with the inside of the
2634	 negation of the second operand.  */
2635
2636      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2637	return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2638		      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2639      else
2640	return build (TRUTH_XOR_EXPR, type,
2641		      invert_truthvalue (TREE_OPERAND (arg, 0)),
2642		      TREE_OPERAND (arg, 1));
2643
2644    case TRUTH_ANDIF_EXPR:
2645      return build (TRUTH_ORIF_EXPR, type,
2646		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2647		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2648
2649    case TRUTH_ORIF_EXPR:
2650      return build (TRUTH_ANDIF_EXPR, type,
2651		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2652		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2653
2654    case TRUTH_NOT_EXPR:
2655      return TREE_OPERAND (arg, 0);
2656
2657    case COND_EXPR:
2658      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2659		    invert_truthvalue (TREE_OPERAND (arg, 1)),
2660		    invert_truthvalue (TREE_OPERAND (arg, 2)));
2661
2662    case COMPOUND_EXPR:
2663      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2664		    invert_truthvalue (TREE_OPERAND (arg, 1)));
2665
2666    case WITH_RECORD_EXPR:
2667      return build (WITH_RECORD_EXPR, type,
2668		    invert_truthvalue (TREE_OPERAND (arg, 0)),
2669		    TREE_OPERAND (arg, 1));
2670
2671    case NON_LVALUE_EXPR:
2672      return invert_truthvalue (TREE_OPERAND (arg, 0));
2673
2674    case NOP_EXPR:
2675    case CONVERT_EXPR:
2676    case FLOAT_EXPR:
2677      return build1 (TREE_CODE (arg), type,
2678		     invert_truthvalue (TREE_OPERAND (arg, 0)));
2679
2680    case BIT_AND_EXPR:
2681      if (!integer_onep (TREE_OPERAND (arg, 1)))
2682	break;
2683      return build (EQ_EXPR, type, arg,
2684		    fold_convert (type, integer_zero_node));
2685
2686    case SAVE_EXPR:
2687      return build1 (TRUTH_NOT_EXPR, type, arg);
2688
2689    case CLEANUP_POINT_EXPR:
2690      return build1 (CLEANUP_POINT_EXPR, type,
2691		     invert_truthvalue (TREE_OPERAND (arg, 0)));
2692
2693    default:
2694      break;
2695    }
2696  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2697    abort ();
2698  return build1 (TRUTH_NOT_EXPR, type, arg);
2699}
2700
2701/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2702   operands are another bit-wise operation with a common input.  If so,
2703   distribute the bit operations to save an operation and possibly two if
2704   constants are involved.  For example, convert
2705	(A | B) & (A | C) into A | (B & C)
2706   Further simplification will occur if B and C are constants.
2707
2708   If this optimization cannot be done, 0 will be returned.  */
2709
2710static tree
2711distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2712{
2713  tree common;
2714  tree left, right;
2715
2716  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2717      || TREE_CODE (arg0) == code
2718      || (TREE_CODE (arg0) != BIT_AND_EXPR
2719	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
2720    return 0;
2721
2722  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2723    {
2724      common = TREE_OPERAND (arg0, 0);
2725      left = TREE_OPERAND (arg0, 1);
2726      right = TREE_OPERAND (arg1, 1);
2727    }
2728  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2729    {
2730      common = TREE_OPERAND (arg0, 0);
2731      left = TREE_OPERAND (arg0, 1);
2732      right = TREE_OPERAND (arg1, 0);
2733    }
2734  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2735    {
2736      common = TREE_OPERAND (arg0, 1);
2737      left = TREE_OPERAND (arg0, 0);
2738      right = TREE_OPERAND (arg1, 1);
2739    }
2740  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2741    {
2742      common = TREE_OPERAND (arg0, 1);
2743      left = TREE_OPERAND (arg0, 0);
2744      right = TREE_OPERAND (arg1, 0);
2745    }
2746  else
2747    return 0;
2748
2749  return fold (build (TREE_CODE (arg0), type, common,
2750		      fold (build (code, type, left, right))));
2751}
2752
2753/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2754   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
2755
2756static tree
2757make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2758		    int unsignedp)
2759{
2760  tree result = build (BIT_FIELD_REF, type, inner,
2761		       size_int (bitsize), bitsize_int (bitpos));
2762
2763  TREE_UNSIGNED (result) = unsignedp;
2764
2765  return result;
2766}
2767
2768/* Optimize a bit-field compare.
2769
2770   There are two cases:  First is a compare against a constant and the
2771   second is a comparison of two items where the fields are at the same
2772   bit position relative to the start of a chunk (byte, halfword, word)
2773   large enough to contain it.  In these cases we can avoid the shift
2774   implicit in bitfield extractions.
2775
2776   For constants, we emit a compare of the shifted constant with the
2777   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2778   compared.  For two fields at the same position, we do the ANDs with the
2779   similar mask and compare the result of the ANDs.
2780
2781   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2782   COMPARE_TYPE is the type of the comparison, and LHS and RHS
2783   are the left and right operands of the comparison, respectively.
2784
2785   If the optimization described above can be done, we return the resulting
2786   tree.  Otherwise we return zero.  */
2787
2788static tree
2789optimize_bit_field_compare (enum tree_code code, tree compare_type,
2790			    tree lhs, tree rhs)
2791{
2792  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2793  tree type = TREE_TYPE (lhs);
2794  tree signed_type, unsigned_type;
2795  int const_p = TREE_CODE (rhs) == INTEGER_CST;
2796  enum machine_mode lmode, rmode, nmode;
2797  int lunsignedp, runsignedp;
2798  int lvolatilep = 0, rvolatilep = 0;
2799  tree linner, rinner = NULL_TREE;
2800  tree mask;
2801  tree offset;
2802
2803  /* Get all the information about the extractions being done.  If the bit size
2804     if the same as the size of the underlying object, we aren't doing an
2805     extraction at all and so can do nothing.  We also don't want to
2806     do anything if the inner expression is a PLACEHOLDER_EXPR since we
2807     then will no longer be able to replace it.  */
2808  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2809				&lunsignedp, &lvolatilep);
2810  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2811      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2812    return 0;
2813
2814 if (!const_p)
2815   {
2816     /* If this is not a constant, we can only do something if bit positions,
2817	sizes, and signedness are the same.  */
2818     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2819				   &runsignedp, &rvolatilep);
2820
2821     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2822	 || lunsignedp != runsignedp || offset != 0
2823	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2824       return 0;
2825   }
2826
2827  /* See if we can find a mode to refer to this field.  We should be able to,
2828     but fail if we can't.  */
2829  nmode = get_best_mode (lbitsize, lbitpos,
2830			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2831			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2832				TYPE_ALIGN (TREE_TYPE (rinner))),
2833			 word_mode, lvolatilep || rvolatilep);
2834  if (nmode == VOIDmode)
2835    return 0;
2836
2837  /* Set signed and unsigned types of the precision of this mode for the
2838     shifts below.  */
2839  signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2840  unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2841
2842  /* Compute the bit position and size for the new reference and our offset
2843     within it. If the new reference is the same size as the original, we
2844     won't optimize anything, so return zero.  */
2845  nbitsize = GET_MODE_BITSIZE (nmode);
2846  nbitpos = lbitpos & ~ (nbitsize - 1);
2847  lbitpos -= nbitpos;
2848  if (nbitsize == lbitsize)
2849    return 0;
2850
2851  if (BYTES_BIG_ENDIAN)
2852    lbitpos = nbitsize - lbitsize - lbitpos;
2853
2854  /* Make the mask to be used against the extracted field.  */
2855  mask = build_int_2 (~0, ~0);
2856  TREE_TYPE (mask) = unsigned_type;
2857  force_fit_type (mask, 0);
2858  mask = fold_convert (unsigned_type, mask);
2859  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2860  mask = const_binop (RSHIFT_EXPR, mask,
2861		      size_int (nbitsize - lbitsize - lbitpos), 0);
2862
2863  if (! const_p)
2864    /* If not comparing with constant, just rework the comparison
2865       and return.  */
2866    return build (code, compare_type,
2867		  build (BIT_AND_EXPR, unsigned_type,
2868			 make_bit_field_ref (linner, unsigned_type,
2869					     nbitsize, nbitpos, 1),
2870			 mask),
2871		  build (BIT_AND_EXPR, unsigned_type,
2872			 make_bit_field_ref (rinner, unsigned_type,
2873					     nbitsize, nbitpos, 1),
2874			 mask));
2875
2876  /* Otherwise, we are handling the constant case. See if the constant is too
2877     big for the field.  Warn and return a tree of for 0 (false) if so.  We do
2878     this not only for its own sake, but to avoid having to test for this
2879     error case below.  If we didn't, we might generate wrong code.
2880
2881     For unsigned fields, the constant shifted right by the field length should
2882     be all zero.  For signed fields, the high-order bits should agree with
2883     the sign bit.  */
2884
2885  if (lunsignedp)
2886    {
2887      if (! integer_zerop (const_binop (RSHIFT_EXPR,
2888					fold_convert (unsigned_type, rhs),
2889					size_int (lbitsize), 0)))
2890	{
2891	  warning ("comparison is always %d due to width of bit-field",
2892		   code == NE_EXPR);
2893	  return fold_convert (compare_type,
2894			       (code == NE_EXPR
2895				? integer_one_node : integer_zero_node));
2896	}
2897    }
2898  else
2899    {
2900      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
2901			      size_int (lbitsize - 1), 0);
2902      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2903	{
2904	  warning ("comparison is always %d due to width of bit-field",
2905		   code == NE_EXPR);
2906	  return fold_convert (compare_type,
2907			       (code == NE_EXPR
2908				? integer_one_node : integer_zero_node));
2909	}
2910    }
2911
2912  /* Single-bit compares should always be against zero.  */
2913  if (lbitsize == 1 && ! integer_zerop (rhs))
2914    {
2915      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2916      rhs = fold_convert (type, integer_zero_node);
2917    }
2918
2919  /* Make a new bitfield reference, shift the constant over the
2920     appropriate number of bits and mask it with the computed mask
2921     (in case this was a signed field).  If we changed it, make a new one.  */
2922  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2923  if (lvolatilep)
2924    {
2925      TREE_SIDE_EFFECTS (lhs) = 1;
2926      TREE_THIS_VOLATILE (lhs) = 1;
2927    }
2928
2929  rhs = fold (const_binop (BIT_AND_EXPR,
2930			   const_binop (LSHIFT_EXPR,
2931					fold_convert (unsigned_type, rhs),
2932					size_int (lbitpos), 0),
2933			   mask, 0));
2934
2935  return build (code, compare_type,
2936		build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2937		rhs);
2938}
2939
2940/* Subroutine for fold_truthop: decode a field reference.
2941
2942   If EXP is a comparison reference, we return the innermost reference.
2943
2944   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2945   set to the starting bit number.
2946
2947   If the innermost field can be completely contained in a mode-sized
2948   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
2949
2950   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2951   otherwise it is not changed.
2952
2953   *PUNSIGNEDP is set to the signedness of the field.
2954
2955   *PMASK is set to the mask used.  This is either contained in a
2956   BIT_AND_EXPR or derived from the width of the field.
2957
2958   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2959
2960   Return 0 if this is not a component reference or is one that we can't
2961   do anything with.  */
2962
2963static tree
2964decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2965			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2966			int *punsignedp, int *pvolatilep,
2967			tree *pmask, tree *pand_mask)
2968{
2969  tree outer_type = 0;
2970  tree and_mask = 0;
2971  tree mask, inner, offset;
2972  tree unsigned_type;
2973  unsigned int precision;
2974
2975  /* All the optimizations using this function assume integer fields.
2976     There are problems with FP fields since the type_for_size call
2977     below can fail for, e.g., XFmode.  */
2978  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2979    return 0;
2980
2981  /* We are interested in the bare arrangement of bits, so strip everything
2982     that doesn't affect the machine mode.  However, record the type of the
2983     outermost expression if it may matter below.  */
2984  if (TREE_CODE (exp) == NOP_EXPR
2985      || TREE_CODE (exp) == CONVERT_EXPR
2986      || TREE_CODE (exp) == NON_LVALUE_EXPR)
2987    outer_type = TREE_TYPE (exp);
2988  STRIP_NOPS (exp);
2989
2990  if (TREE_CODE (exp) == BIT_AND_EXPR)
2991    {
2992      and_mask = TREE_OPERAND (exp, 1);
2993      exp = TREE_OPERAND (exp, 0);
2994      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2995      if (TREE_CODE (and_mask) != INTEGER_CST)
2996	return 0;
2997    }
2998
2999  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3000			       punsignedp, pvolatilep);
3001  if ((inner == exp && and_mask == 0)
3002      || *pbitsize < 0 || offset != 0
3003      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3004    return 0;
3005
3006  /* If the number of bits in the reference is the same as the bitsize of
3007     the outer type, then the outer type gives the signedness. Otherwise
3008     (in case of a small bitfield) the signedness is unchanged.  */
3009  if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
3010    *punsignedp = TREE_UNSIGNED (outer_type);
3011
3012  /* Compute the mask to access the bitfield.  */
3013  unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
3014  precision = TYPE_PRECISION (unsigned_type);
3015
3016  mask = build_int_2 (~0, ~0);
3017  TREE_TYPE (mask) = unsigned_type;
3018  force_fit_type (mask, 0);
3019  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3020  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3021
3022  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3023  if (and_mask != 0)
3024    mask = fold (build (BIT_AND_EXPR, unsigned_type,
3025			fold_convert (unsigned_type, and_mask), mask));
3026
3027  *pmask = mask;
3028  *pand_mask = and_mask;
3029  return inner;
3030}
3031
3032/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3033   bit positions.  */
3034
3035static int
3036all_ones_mask_p (tree mask, int size)
3037{
3038  tree type = TREE_TYPE (mask);
3039  unsigned int precision = TYPE_PRECISION (type);
3040  tree tmask;
3041
3042  tmask = build_int_2 (~0, ~0);
3043  TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
3044  force_fit_type (tmask, 0);
3045  return
3046    tree_int_cst_equal (mask,
3047			const_binop (RSHIFT_EXPR,
3048				     const_binop (LSHIFT_EXPR, tmask,
3049						  size_int (precision - size),
3050						  0),
3051				     size_int (precision - size), 0));
3052}
3053
3054/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3055   represents the sign bit of EXP's type.  If EXP represents a sign
3056   or zero extension, also test VAL against the unextended type.
3057   The return value is the (sub)expression whose sign bit is VAL,
3058   or NULL_TREE otherwise.  */
3059
3060static tree
3061sign_bit_p (tree exp, tree val)
3062{
3063  unsigned HOST_WIDE_INT mask_lo, lo;
3064  HOST_WIDE_INT mask_hi, hi;
3065  int width;
3066  tree t;
3067
3068  /* Tree EXP must have an integral type.  */
3069  t = TREE_TYPE (exp);
3070  if (! INTEGRAL_TYPE_P (t))
3071    return NULL_TREE;
3072
3073  /* Tree VAL must be an integer constant.  */
3074  if (TREE_CODE (val) != INTEGER_CST
3075      || TREE_CONSTANT_OVERFLOW (val))
3076    return NULL_TREE;
3077
3078  width = TYPE_PRECISION (t);
3079  if (width > HOST_BITS_PER_WIDE_INT)
3080    {
3081      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3082      lo = 0;
3083
3084      mask_hi = ((unsigned HOST_WIDE_INT) -1
3085		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3086      mask_lo = -1;
3087    }
3088  else
3089    {
3090      hi = 0;
3091      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3092
3093      mask_hi = 0;
3094      mask_lo = ((unsigned HOST_WIDE_INT) -1
3095		 >> (HOST_BITS_PER_WIDE_INT - width));
3096    }
3097
3098  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3099     treat VAL as if it were unsigned.  */
3100  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3101      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3102    return exp;
3103
3104  /* Handle extension from a narrower type.  */
3105  if (TREE_CODE (exp) == NOP_EXPR
3106      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3107    return sign_bit_p (TREE_OPERAND (exp, 0), val);
3108
3109  return NULL_TREE;
3110}
3111
3112/* Subroutine for fold_truthop: determine if an operand is simple enough
3113   to be evaluated unconditionally.  */
3114
3115static int
3116simple_operand_p (tree exp)
3117{
3118  /* Strip any conversions that don't change the machine mode.  */
3119  while ((TREE_CODE (exp) == NOP_EXPR
3120	  || TREE_CODE (exp) == CONVERT_EXPR)
3121	 && (TYPE_MODE (TREE_TYPE (exp))
3122	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3123    exp = TREE_OPERAND (exp, 0);
3124
3125  return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3126	  || (DECL_P (exp)
3127	      && ! TREE_ADDRESSABLE (exp)
3128	      && ! TREE_THIS_VOLATILE (exp)
3129	      && ! DECL_NONLOCAL (exp)
3130	      /* Don't regard global variables as simple.  They may be
3131		 allocated in ways unknown to the compiler (shared memory,
3132		 #pragma weak, etc).  */
3133	      && ! TREE_PUBLIC (exp)
3134	      && ! DECL_EXTERNAL (exp)
3135	      /* Loading a static variable is unduly expensive, but global
3136		 registers aren't expensive.  */
3137	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3138}
3139
3140/* The following functions are subroutines to fold_range_test and allow it to
3141   try to change a logical combination of comparisons into a range test.
3142
3143   For example, both
3144	X == 2 || X == 3 || X == 4 || X == 5
3145   and
3146	X >= 2 && X <= 5
3147   are converted to
3148	(unsigned) (X - 2) <= 3
3149
3150   We describe each set of comparisons as being either inside or outside
3151   a range, using a variable named like IN_P, and then describe the
3152   range with a lower and upper bound.  If one of the bounds is omitted,
3153   it represents either the highest or lowest value of the type.
3154
3155   In the comments below, we represent a range by two numbers in brackets
3156   preceded by a "+" to designate being inside that range, or a "-" to
3157   designate being outside that range, so the condition can be inverted by
3158   flipping the prefix.  An omitted bound is represented by a "-".  For
3159   example, "- [-, 10]" means being outside the range starting at the lowest
3160   possible value and ending at 10, in other words, being greater than 10.
3161   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3162   always false.
3163
3164   We set up things so that the missing bounds are handled in a consistent
3165   manner so neither a missing bound nor "true" and "false" need to be
3166   handled using a special case.  */
3167
3168/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3169   of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3170   and UPPER1_P are nonzero if the respective argument is an upper bound
3171   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3172   must be specified for a comparison.  ARG1 will be converted to ARG0's
3173   type if both are specified.  */
3174
3175static tree
3176range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3177	     tree arg1, int upper1_p)
3178{
3179  tree tem;
3180  int result;
3181  int sgn0, sgn1;
3182
3183  /* If neither arg represents infinity, do the normal operation.
3184     Else, if not a comparison, return infinity.  Else handle the special
3185     comparison rules. Note that most of the cases below won't occur, but
3186     are handled for consistency.  */
3187
3188  if (arg0 != 0 && arg1 != 0)
3189    {
3190      tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3191			 arg0, fold_convert (TREE_TYPE (arg0), arg1)));
3192      STRIP_NOPS (tem);
3193      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3194    }
3195
3196  if (TREE_CODE_CLASS (code) != '<')
3197    return 0;
3198
3199  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3200     for neither.  In real maths, we cannot assume open ended ranges are
3201     the same. But, this is computer arithmetic, where numbers are finite.
3202     We can therefore make the transformation of any unbounded range with
3203     the value Z, Z being greater than any representable number. This permits
3204     us to treat unbounded ranges as equal.  */
3205  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3206  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3207  switch (code)
3208    {
3209    case EQ_EXPR:
3210      result = sgn0 == sgn1;
3211      break;
3212    case NE_EXPR:
3213      result = sgn0 != sgn1;
3214      break;
3215    case LT_EXPR:
3216      result = sgn0 < sgn1;
3217      break;
3218    case LE_EXPR:
3219      result = sgn0 <= sgn1;
3220      break;
3221    case GT_EXPR:
3222      result = sgn0 > sgn1;
3223      break;
3224    case GE_EXPR:
3225      result = sgn0 >= sgn1;
3226      break;
3227    default:
3228      abort ();
3229    }
3230
3231  return fold_convert (type, result ? integer_one_node : integer_zero_node);
3232}
3233
3234/* Given EXP, a logical expression, set the range it is testing into
3235   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
3236   actually being tested.  *PLOW and *PHIGH will be made of the same type
3237   as the returned expression.  If EXP is not a comparison, we will most
3238   likely not be returning a useful value and range.  */
3239
3240static tree
3241make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3242{
3243  enum tree_code code;
3244  tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3245  tree orig_type = NULL_TREE;
3246  int in_p, n_in_p;
3247  tree low, high, n_low, n_high;
3248
3249  /* Start with simply saying "EXP != 0" and then look at the code of EXP
3250     and see if we can refine the range.  Some of the cases below may not
3251     happen, but it doesn't seem worth worrying about this.  We "continue"
3252     the outer loop when we've changed something; otherwise we "break"
3253     the switch, which will "break" the while.  */
3254
3255  in_p = 0;
3256  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3257
3258  while (1)
3259    {
3260      code = TREE_CODE (exp);
3261
3262      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3263	{
3264	  if (first_rtl_op (code) > 0)
3265	    arg0 = TREE_OPERAND (exp, 0);
3266	  if (TREE_CODE_CLASS (code) == '<'
3267	      || TREE_CODE_CLASS (code) == '1'
3268	      || TREE_CODE_CLASS (code) == '2')
3269	    type = TREE_TYPE (arg0);
3270	  if (TREE_CODE_CLASS (code) == '2'
3271	      || TREE_CODE_CLASS (code) == '<'
3272	      || (TREE_CODE_CLASS (code) == 'e'
3273		  && TREE_CODE_LENGTH (code) > 1))
3274	    arg1 = TREE_OPERAND (exp, 1);
3275	}
3276
3277      /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3278	 lose a cast by accident.  */
3279      if (type != NULL_TREE && orig_type == NULL_TREE)
3280	orig_type = type;
3281
3282      switch (code)
3283	{
3284	case TRUTH_NOT_EXPR:
3285	  in_p = ! in_p, exp = arg0;
3286	  continue;
3287
3288	case EQ_EXPR: case NE_EXPR:
3289	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3290	  /* We can only do something if the range is testing for zero
3291	     and if the second operand is an integer constant.  Note that
3292	     saying something is "in" the range we make is done by
3293	     complementing IN_P since it will set in the initial case of
3294	     being not equal to zero; "out" is leaving it alone.  */
3295	  if (low == 0 || high == 0
3296	      || ! integer_zerop (low) || ! integer_zerop (high)
3297	      || TREE_CODE (arg1) != INTEGER_CST)
3298	    break;
3299
3300	  switch (code)
3301	    {
3302	    case NE_EXPR:  /* - [c, c]  */
3303	      low = high = arg1;
3304	      break;
3305	    case EQ_EXPR:  /* + [c, c]  */
3306	      in_p = ! in_p, low = high = arg1;
3307	      break;
3308	    case GT_EXPR:  /* - [-, c] */
3309	      low = 0, high = arg1;
3310	      break;
3311	    case GE_EXPR:  /* + [c, -] */
3312	      in_p = ! in_p, low = arg1, high = 0;
3313	      break;
3314	    case LT_EXPR:  /* - [c, -] */
3315	      low = arg1, high = 0;
3316	      break;
3317	    case LE_EXPR:  /* + [-, c] */
3318	      in_p = ! in_p, low = 0, high = arg1;
3319	      break;
3320	    default:
3321	      abort ();
3322	    }
3323
3324	  exp = arg0;
3325
3326	  /* If this is an unsigned comparison, we also know that EXP is
3327	     greater than or equal to zero.  We base the range tests we make
3328	     on that fact, so we record it here so we can parse existing
3329	     range tests.  */
3330	  if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3331	    {
3332	      if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3333				  1, fold_convert (type, integer_zero_node),
3334				  NULL_TREE))
3335		break;
3336
3337	      in_p = n_in_p, low = n_low, high = n_high;
3338
3339	      /* If the high bound is missing, but we have a nonzero low
3340		 bound, reverse the range so it goes from zero to the low bound
3341		 minus 1.  */
3342	      if (high == 0 && low && ! integer_zerop (low))
3343		{
3344		  in_p = ! in_p;
3345		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3346				      integer_one_node, 0);
3347		  low = fold_convert (type, integer_zero_node);
3348		}
3349	    }
3350	  continue;
3351
3352	case NEGATE_EXPR:
3353	  /* (-x) IN [a,b] -> x in [-b, -a]  */
3354	  n_low = range_binop (MINUS_EXPR, type,
3355			       fold_convert (type, integer_zero_node),
3356			       0, high, 1);
3357	  n_high = range_binop (MINUS_EXPR, type,
3358				fold_convert (type, integer_zero_node),
3359				0, low, 0);
3360	  low = n_low, high = n_high;
3361	  exp = arg0;
3362	  continue;
3363
3364	case BIT_NOT_EXPR:
3365	  /* ~ X -> -X - 1  */
3366	  exp = build (MINUS_EXPR, type, negate_expr (arg0),
3367		       fold_convert (type, integer_one_node));
3368	  continue;
3369
3370	case PLUS_EXPR:  case MINUS_EXPR:
3371	  if (TREE_CODE (arg1) != INTEGER_CST)
3372	    break;
3373
3374	  /* If EXP is signed, any overflow in the computation is undefined,
3375	     so we don't worry about it so long as our computations on
3376	     the bounds don't overflow.  For unsigned, overflow is defined
3377	     and this is exactly the right thing.  */
3378	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3379			       type, low, 0, arg1, 0);
3380	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3381				type, high, 1, arg1, 0);
3382	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
3383	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
3384	    break;
3385
3386	  /* Check for an unsigned range which has wrapped around the maximum
3387	     value thus making n_high < n_low, and normalize it.  */
3388	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3389	    {
3390	      low = range_binop (PLUS_EXPR, type, n_high, 0,
3391				 integer_one_node, 0);
3392	      high = range_binop (MINUS_EXPR, type, n_low, 0,
3393				  integer_one_node, 0);
3394
3395	      /* If the range is of the form +/- [ x+1, x ], we won't
3396		 be able to normalize it.  But then, it represents the
3397		 whole range or the empty set, so make it
3398		 +/- [ -, - ].  */
3399	      if (tree_int_cst_equal (n_low, low)
3400		  && tree_int_cst_equal (n_high, high))
3401		low = high = 0;
3402	      else
3403		in_p = ! in_p;
3404	    }
3405	  else
3406	    low = n_low, high = n_high;
3407
3408	  exp = arg0;
3409	  continue;
3410
3411	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
3412	  if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3413	    break;
3414
3415	  if (! INTEGRAL_TYPE_P (type)
3416	      || (low != 0 && ! int_fits_type_p (low, type))
3417	      || (high != 0 && ! int_fits_type_p (high, type)))
3418	    break;
3419
3420	  n_low = low, n_high = high;
3421
3422	  if (n_low != 0)
3423	    n_low = fold_convert (type, n_low);
3424
3425	  if (n_high != 0)
3426	    n_high = fold_convert (type, n_high);
3427
3428	  /* If we're converting from an unsigned to a signed type,
3429	     we will be doing the comparison as unsigned.  The tests above
3430	     have already verified that LOW and HIGH are both positive.
3431
3432	     So we have to make sure that the original unsigned value will
3433	     be interpreted as positive.  */
3434	  if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3435	    {
3436	      tree equiv_type = (*lang_hooks.types.type_for_mode)
3437		(TYPE_MODE (type), 1);
3438	      tree high_positive;
3439
3440	      /* A range without an upper bound is, naturally, unbounded.
3441		 Since convert would have cropped a very large value, use
3442		 the max value for the destination type.  */
3443	      high_positive
3444		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3445		  : TYPE_MAX_VALUE (type);
3446
3447	      if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3448	        high_positive = fold (build (RSHIFT_EXPR, type,
3449					     fold_convert (type,
3450							   high_positive),
3451					     fold_convert (type,
3452							   integer_one_node)));
3453
3454	      /* If the low bound is specified, "and" the range with the
3455		 range for which the original unsigned value will be
3456		 positive.  */
3457	      if (low != 0)
3458		{
3459		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3460				      1, n_low, n_high, 1,
3461				      fold_convert (type, integer_zero_node),
3462				      high_positive))
3463		    break;
3464
3465		  in_p = (n_in_p == in_p);
3466		}
3467	      else
3468		{
3469		  /* Otherwise, "or" the range with the range of the input
3470		     that will be interpreted as negative.  */
3471		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3472				      0, n_low, n_high, 1,
3473				      fold_convert (type, integer_zero_node),
3474				      high_positive))
3475		    break;
3476
3477		  in_p = (in_p != n_in_p);
3478		}
3479	    }
3480
3481	  exp = arg0;
3482	  low = n_low, high = n_high;
3483	  continue;
3484
3485	default:
3486	  break;
3487	}
3488
3489      break;
3490    }
3491
3492  /* If EXP is a constant, we can evaluate whether this is true or false.  */
3493  if (TREE_CODE (exp) == INTEGER_CST)
3494    {
3495      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3496						 exp, 0, low, 0))
3497		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
3498						    exp, 1, high, 1)));
3499      low = high = 0;
3500      exp = 0;
3501    }
3502
3503  *pin_p = in_p, *plow = low, *phigh = high;
3504  return exp;
3505}
3506
3507/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3508   type, TYPE, return an expression to test if EXP is in (or out of, depending
3509   on IN_P) the range.  */
3510
3511static tree
3512build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3513{
3514  tree etype = TREE_TYPE (exp);
3515  tree value;
3516
3517  if (! in_p
3518      && (0 != (value = build_range_check (type, exp, 1, low, high))))
3519    return invert_truthvalue (value);
3520
3521  if (low == 0 && high == 0)
3522    return fold_convert (type, integer_one_node);
3523
3524  if (low == 0)
3525    return fold (build (LE_EXPR, type, exp, high));
3526
3527  if (high == 0)
3528    return fold (build (GE_EXPR, type, exp, low));
3529
3530  if (operand_equal_p (low, high, 0))
3531    return fold (build (EQ_EXPR, type, exp, low));
3532
3533  if (integer_zerop (low))
3534    {
3535      if (! TREE_UNSIGNED (etype))
3536	{
3537	  etype = (*lang_hooks.types.unsigned_type) (etype);
3538	  high = fold_convert (etype, high);
3539	  exp = fold_convert (etype, exp);
3540	}
3541      return build_range_check (type, exp, 1, 0, high);
3542    }
3543
3544  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
3545  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3546    {
3547      unsigned HOST_WIDE_INT lo;
3548      HOST_WIDE_INT hi;
3549      int prec;
3550
3551      /* For enums the comparison will be done in the underlying type,
3552	 so using enum's precision is wrong here.
3553	 Consider e.g. enum { A, B, C, D, E }, low == B and high == D.  */
3554      if (TREE_CODE (etype) == ENUMERAL_TYPE)
3555	prec = GET_MODE_BITSIZE (TYPE_MODE (etype));
3556      else
3557	prec = TYPE_PRECISION (etype);
3558      if (prec <= HOST_BITS_PER_WIDE_INT)
3559	{
3560	  hi = 0;
3561	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3562	}
3563      else
3564	{
3565	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3566	  lo = (unsigned HOST_WIDE_INT) -1;
3567	}
3568
3569      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3570	{
3571	  if (TREE_UNSIGNED (etype))
3572	    {
3573	      etype = (*lang_hooks.types.signed_type) (etype);
3574	      exp = fold_convert (etype, exp);
3575	    }
3576	  return fold (build (GT_EXPR, type, exp,
3577			      fold_convert (etype, integer_zero_node)));
3578	}
3579    }
3580
3581  if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3582      && ! TREE_OVERFLOW (value))
3583    return build_range_check (type,
3584			      fold (build (MINUS_EXPR, etype, exp, low)),
3585			      1, fold_convert (etype, integer_zero_node),
3586			      value);
3587
3588  return 0;
3589}
3590
3591/* Given two ranges, see if we can merge them into one.  Return 1 if we
3592   can, 0 if we can't.  Set the output range into the specified parameters.  */
3593
3594static int
3595merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3596	      tree high0, int in1_p, tree low1, tree high1)
3597{
3598  int no_overlap;
3599  int subset;
3600  int temp;
3601  tree tem;
3602  int in_p;
3603  tree low, high;
3604  int lowequal = ((low0 == 0 && low1 == 0)
3605		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3606						low0, 0, low1, 0)));
3607  int highequal = ((high0 == 0 && high1 == 0)
3608		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3609						 high0, 1, high1, 1)));
3610
3611  /* Make range 0 be the range that starts first, or ends last if they
3612     start at the same value.  Swap them if it isn't.  */
3613  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3614				 low0, 0, low1, 0))
3615      || (lowequal
3616	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
3617					high1, 1, high0, 1))))
3618    {
3619      temp = in0_p, in0_p = in1_p, in1_p = temp;
3620      tem = low0, low0 = low1, low1 = tem;
3621      tem = high0, high0 = high1, high1 = tem;
3622    }
3623
3624  /* Now flag two cases, whether the ranges are disjoint or whether the
3625     second range is totally subsumed in the first.  Note that the tests
3626     below are simplified by the ones above.  */
3627  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3628					  high0, 1, low1, 0));
3629  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3630				      high1, 1, high0, 1));
3631
3632  /* We now have four cases, depending on whether we are including or
3633     excluding the two ranges.  */
3634  if (in0_p && in1_p)
3635    {
3636      /* If they don't overlap, the result is false.  If the second range
3637	 is a subset it is the result.  Otherwise, the range is from the start
3638	 of the second to the end of the first.  */
3639      if (no_overlap)
3640	in_p = 0, low = high = 0;
3641      else if (subset)
3642	in_p = 1, low = low1, high = high1;
3643      else
3644	in_p = 1, low = low1, high = high0;
3645    }
3646
3647  else if (in0_p && ! in1_p)
3648    {
3649      /* If they don't overlap, the result is the first range.  If they are
3650	 equal, the result is false.  If the second range is a subset of the
3651	 first, and the ranges begin at the same place, we go from just after
3652	 the end of the first range to the end of the second.  If the second
3653	 range is not a subset of the first, or if it is a subset and both
3654	 ranges end at the same place, the range starts at the start of the
3655	 first range and ends just before the second range.
3656	 Otherwise, we can't describe this as a single range.  */
3657      if (no_overlap)
3658	in_p = 1, low = low0, high = high0;
3659      else if (lowequal && highequal)
3660	in_p = 0, low = high = 0;
3661      else if (subset && lowequal)
3662	{
3663	  in_p = 1, high = high0;
3664	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3665			     integer_one_node, 0);
3666	}
3667      else if (! subset || highequal)
3668	{
3669	  in_p = 1, low = low0;
3670	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3671			      integer_one_node, 0);
3672	}
3673      else
3674	return 0;
3675    }
3676
3677  else if (! in0_p && in1_p)
3678    {
3679      /* If they don't overlap, the result is the second range.  If the second
3680	 is a subset of the first, the result is false.  Otherwise,
3681	 the range starts just after the first range and ends at the
3682	 end of the second.  */
3683      if (no_overlap)
3684	in_p = 1, low = low1, high = high1;
3685      else if (subset || highequal)
3686	in_p = 0, low = high = 0;
3687      else
3688	{
3689	  in_p = 1, high = high1;
3690	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3691			     integer_one_node, 0);
3692	}
3693    }
3694
3695  else
3696    {
3697      /* The case where we are excluding both ranges.  Here the complex case
3698	 is if they don't overlap.  In that case, the only time we have a
3699	 range is if they are adjacent.  If the second is a subset of the
3700	 first, the result is the first.  Otherwise, the range to exclude
3701	 starts at the beginning of the first range and ends at the end of the
3702	 second.  */
3703      if (no_overlap)
3704	{
3705	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3706					 range_binop (PLUS_EXPR, NULL_TREE,
3707						      high0, 1,
3708						      integer_one_node, 1),
3709					 1, low1, 0)))
3710	    in_p = 0, low = low0, high = high1;
3711	  else
3712	    return 0;
3713	}
3714      else if (subset)
3715	in_p = 0, low = low0, high = high0;
3716      else
3717	in_p = 0, low = low0, high = high1;
3718    }
3719
3720  *pin_p = in_p, *plow = low, *phigh = high;
3721  return 1;
3722}
3723
3724#ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3725#define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3726#endif
3727
3728/* EXP is some logical combination of boolean tests.  See if we can
3729   merge it into some range test.  Return the new tree if so.  */
3730
3731static tree
3732fold_range_test (tree exp)
3733{
3734  int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3735	       || TREE_CODE (exp) == TRUTH_OR_EXPR);
3736  int in0_p, in1_p, in_p;
3737  tree low0, low1, low, high0, high1, high;
3738  tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3739  tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3740  tree tem;
3741
3742  /* If this is an OR operation, invert both sides; we will invert
3743     again at the end.  */
3744  if (or_op)
3745    in0_p = ! in0_p, in1_p = ! in1_p;
3746
3747  /* If both expressions are the same, if we can merge the ranges, and we
3748     can build the range test, return it or it inverted.  If one of the
3749     ranges is always true or always false, consider it to be the same
3750     expression as the other.  */
3751  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3752      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3753		       in1_p, low1, high1)
3754      && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3755					 lhs != 0 ? lhs
3756					 : rhs != 0 ? rhs : integer_zero_node,
3757					 in_p, low, high))))
3758    return or_op ? invert_truthvalue (tem) : tem;
3759
3760  /* On machines where the branch cost is expensive, if this is a
3761     short-circuited branch and the underlying object on both sides
3762     is the same, make a non-short-circuit operation.  */
3763  else if (RANGE_TEST_NON_SHORT_CIRCUIT
3764	   && lhs != 0 && rhs != 0
3765	   && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3766	       || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3767	   && operand_equal_p (lhs, rhs, 0))
3768    {
3769      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
3770	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3771	 which cases we can't do this.  */
3772      if (simple_operand_p (lhs))
3773	return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3774		      ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3775		      TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3776		      TREE_OPERAND (exp, 1));
3777
3778      else if ((*lang_hooks.decls.global_bindings_p) () == 0
3779	       && ! CONTAINS_PLACEHOLDER_P (lhs))
3780	{
3781	  tree common = save_expr (lhs);
3782
3783	  if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3784					     or_op ? ! in0_p : in0_p,
3785					     low0, high0))
3786	      && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3787						 or_op ? ! in1_p : in1_p,
3788						 low1, high1))))
3789	    return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3790			  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3791			  TREE_TYPE (exp), lhs, rhs);
3792	}
3793    }
3794
3795  return 0;
3796}
3797
3798/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3799   bit value.  Arrange things so the extra bits will be set to zero if and
3800   only if C is signed-extended to its full width.  If MASK is nonzero,
3801   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
3802
3803static tree
3804unextend (tree c, int p, int unsignedp, tree mask)
3805{
3806  tree type = TREE_TYPE (c);
3807  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3808  tree temp;
3809
3810  if (p == modesize || unsignedp)
3811    return c;
3812
3813  /* We work by getting just the sign bit into the low-order bit, then
3814     into the high-order bit, then sign-extend.  We then XOR that value
3815     with C.  */
3816  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3817  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3818
3819  /* We must use a signed type in order to get an arithmetic right shift.
3820     However, we must also avoid introducing accidental overflows, so that
3821     a subsequent call to integer_zerop will work.  Hence we must
3822     do the type conversion here.  At this point, the constant is either
3823     zero or one, and the conversion to a signed type can never overflow.
3824     We could get an overflow if this conversion is done anywhere else.  */
3825  if (TREE_UNSIGNED (type))
3826    temp = fold_convert ((*lang_hooks.types.signed_type) (type), temp);
3827
3828  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3829  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3830  if (mask != 0)
3831    temp = const_binop (BIT_AND_EXPR, temp,
3832			fold_convert (TREE_TYPE (c), mask), 0);
3833  /* If necessary, convert the type back to match the type of C.  */
3834  if (TREE_UNSIGNED (type))
3835    temp = fold_convert (type, temp);
3836
3837  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3838}
3839
3840/* Find ways of folding logical expressions of LHS and RHS:
3841   Try to merge two comparisons to the same innermost item.
3842   Look for range tests like "ch >= '0' && ch <= '9'".
3843   Look for combinations of simple terms on machines with expensive branches
3844   and evaluate the RHS unconditionally.
3845
3846   For example, if we have p->a == 2 && p->b == 4 and we can make an
3847   object large enough to span both A and B, we can do this with a comparison
3848   against the object ANDed with the a mask.
3849
3850   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3851   operations to do this with one comparison.
3852
3853   We check for both normal comparisons and the BIT_AND_EXPRs made this by
3854   function and the one above.
3855
3856   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
3857   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3858
3859   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
3860   two operands.
3861
3862   We return the simplified tree or 0 if no optimization is possible.  */
3863
3864static tree
3865fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3866{
3867  /* If this is the "or" of two comparisons, we can do something if
3868     the comparisons are NE_EXPR.  If this is the "and", we can do something
3869     if the comparisons are EQ_EXPR.  I.e.,
3870	(a->b == 2 && a->c == 4) can become (a->new == NEW).
3871
3872     WANTED_CODE is this operation code.  For single bit fields, we can
3873     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3874     comparison for one-bit fields.  */
3875
3876  enum tree_code wanted_code;
3877  enum tree_code lcode, rcode;
3878  tree ll_arg, lr_arg, rl_arg, rr_arg;
3879  tree ll_inner, lr_inner, rl_inner, rr_inner;
3880  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3881  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3882  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3883  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3884  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3885  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3886  enum machine_mode lnmode, rnmode;
3887  tree ll_mask, lr_mask, rl_mask, rr_mask;
3888  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3889  tree l_const, r_const;
3890  tree lntype, rntype, result;
3891  int first_bit, end_bit;
3892  int volatilep;
3893
3894  /* Start by getting the comparison codes.  Fail if anything is volatile.
3895     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3896     it were surrounded with a NE_EXPR.  */
3897
3898  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3899    return 0;
3900
3901  lcode = TREE_CODE (lhs);
3902  rcode = TREE_CODE (rhs);
3903
3904  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3905    lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3906
3907  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3908    rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3909
3910  if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3911    return 0;
3912
3913  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3914	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3915
3916  ll_arg = TREE_OPERAND (lhs, 0);
3917  lr_arg = TREE_OPERAND (lhs, 1);
3918  rl_arg = TREE_OPERAND (rhs, 0);
3919  rr_arg = TREE_OPERAND (rhs, 1);
3920
3921  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
3922  if (simple_operand_p (ll_arg)
3923      && simple_operand_p (lr_arg)
3924      && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3925    {
3926      int compcode;
3927
3928      if (operand_equal_p (ll_arg, rl_arg, 0)
3929          && operand_equal_p (lr_arg, rr_arg, 0))
3930        {
3931          int lcompcode, rcompcode;
3932
3933          lcompcode = comparison_to_compcode (lcode);
3934          rcompcode = comparison_to_compcode (rcode);
3935          compcode = (code == TRUTH_AND_EXPR)
3936                     ? lcompcode & rcompcode
3937                     : lcompcode | rcompcode;
3938        }
3939      else if (operand_equal_p (ll_arg, rr_arg, 0)
3940               && operand_equal_p (lr_arg, rl_arg, 0))
3941        {
3942          int lcompcode, rcompcode;
3943
3944          rcode = swap_tree_comparison (rcode);
3945          lcompcode = comparison_to_compcode (lcode);
3946          rcompcode = comparison_to_compcode (rcode);
3947          compcode = (code == TRUTH_AND_EXPR)
3948                     ? lcompcode & rcompcode
3949                     : lcompcode | rcompcode;
3950        }
3951      else
3952	compcode = -1;
3953
3954      if (compcode == COMPCODE_TRUE)
3955	return fold_convert (truth_type, integer_one_node);
3956      else if (compcode == COMPCODE_FALSE)
3957	return fold_convert (truth_type, integer_zero_node);
3958      else if (compcode != -1)
3959	return build (compcode_to_comparison (compcode),
3960		      truth_type, ll_arg, lr_arg);
3961    }
3962
3963  /* If the RHS can be evaluated unconditionally and its operands are
3964     simple, it wins to evaluate the RHS unconditionally on machines
3965     with expensive branches.  In this case, this isn't a comparison
3966     that can be merged.  Avoid doing this if the RHS is a floating-point
3967     comparison since those can trap.  */
3968
3969  if (BRANCH_COST >= 2
3970      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3971      && simple_operand_p (rl_arg)
3972      && simple_operand_p (rr_arg))
3973    {
3974      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
3975      if (code == TRUTH_OR_EXPR
3976	  && lcode == NE_EXPR && integer_zerop (lr_arg)
3977	  && rcode == NE_EXPR && integer_zerop (rr_arg)
3978	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3979	return build (NE_EXPR, truth_type,
3980		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3981			     ll_arg, rl_arg),
3982		      integer_zero_node);
3983
3984      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
3985      if (code == TRUTH_AND_EXPR
3986	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
3987	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
3988	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3989	return build (EQ_EXPR, truth_type,
3990		      build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3991			     ll_arg, rl_arg),
3992		      integer_zero_node);
3993
3994      return build (code, truth_type, lhs, rhs);
3995    }
3996
3997  /* See if the comparisons can be merged.  Then get all the parameters for
3998     each side.  */
3999
4000  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4001      || (rcode != EQ_EXPR && rcode != NE_EXPR))
4002    return 0;
4003
4004  volatilep = 0;
4005  ll_inner = decode_field_reference (ll_arg,
4006				     &ll_bitsize, &ll_bitpos, &ll_mode,
4007				     &ll_unsignedp, &volatilep, &ll_mask,
4008				     &ll_and_mask);
4009  lr_inner = decode_field_reference (lr_arg,
4010				     &lr_bitsize, &lr_bitpos, &lr_mode,
4011				     &lr_unsignedp, &volatilep, &lr_mask,
4012				     &lr_and_mask);
4013  rl_inner = decode_field_reference (rl_arg,
4014				     &rl_bitsize, &rl_bitpos, &rl_mode,
4015				     &rl_unsignedp, &volatilep, &rl_mask,
4016				     &rl_and_mask);
4017  rr_inner = decode_field_reference (rr_arg,
4018				     &rr_bitsize, &rr_bitpos, &rr_mode,
4019				     &rr_unsignedp, &volatilep, &rr_mask,
4020				     &rr_and_mask);
4021
4022  /* It must be true that the inner operation on the lhs of each
4023     comparison must be the same if we are to be able to do anything.
4024     Then see if we have constants.  If not, the same must be true for
4025     the rhs's.  */
4026  if (volatilep || ll_inner == 0 || rl_inner == 0
4027      || ! operand_equal_p (ll_inner, rl_inner, 0))
4028    return 0;
4029
4030  if (TREE_CODE (lr_arg) == INTEGER_CST
4031      && TREE_CODE (rr_arg) == INTEGER_CST)
4032    l_const = lr_arg, r_const = rr_arg;
4033  else if (lr_inner == 0 || rr_inner == 0
4034	   || ! operand_equal_p (lr_inner, rr_inner, 0))
4035    return 0;
4036  else
4037    l_const = r_const = 0;
4038
4039  /* If either comparison code is not correct for our logical operation,
4040     fail.  However, we can convert a one-bit comparison against zero into
4041     the opposite comparison against that bit being set in the field.  */
4042
4043  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4044  if (lcode != wanted_code)
4045    {
4046      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4047	{
4048	  /* Make the left operand unsigned, since we are only interested
4049	     in the value of one bit.  Otherwise we are doing the wrong
4050	     thing below.  */
4051	  ll_unsignedp = 1;
4052	  l_const = ll_mask;
4053	}
4054      else
4055	return 0;
4056    }
4057
4058  /* This is analogous to the code for l_const above.  */
4059  if (rcode != wanted_code)
4060    {
4061      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4062	{
4063	  rl_unsignedp = 1;
4064	  r_const = rl_mask;
4065	}
4066      else
4067	return 0;
4068    }
4069
4070  /* After this point all optimizations will generate bit-field
4071     references, which we might not want.  */
4072  if (! (*lang_hooks.can_use_bit_fields_p) ())
4073    return 0;
4074
4075  /* See if we can find a mode that contains both fields being compared on
4076     the left.  If we can't, fail.  Otherwise, update all constants and masks
4077     to be relative to a field of that size.  */
4078  first_bit = MIN (ll_bitpos, rl_bitpos);
4079  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4080  lnmode = get_best_mode (end_bit - first_bit, first_bit,
4081			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4082			  volatilep);
4083  if (lnmode == VOIDmode)
4084    return 0;
4085
4086  lnbitsize = GET_MODE_BITSIZE (lnmode);
4087  lnbitpos = first_bit & ~ (lnbitsize - 1);
4088  lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
4089  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4090
4091  if (BYTES_BIG_ENDIAN)
4092    {
4093      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4094      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4095    }
4096
4097  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4098			 size_int (xll_bitpos), 0);
4099  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4100			 size_int (xrl_bitpos), 0);
4101
4102  if (l_const)
4103    {
4104      l_const = fold_convert (lntype, l_const);
4105      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4106      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4107      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4108					fold (build1 (BIT_NOT_EXPR,
4109						      lntype, ll_mask)),
4110					0)))
4111	{
4112	  warning ("comparison is always %d", wanted_code == NE_EXPR);
4113
4114	  return fold_convert (truth_type,
4115			       wanted_code == NE_EXPR
4116			       ? integer_one_node : integer_zero_node);
4117	}
4118    }
4119  if (r_const)
4120    {
4121      r_const = fold_convert (lntype, r_const);
4122      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4123      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4124      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4125					fold (build1 (BIT_NOT_EXPR,
4126						      lntype, rl_mask)),
4127					0)))
4128	{
4129	  warning ("comparison is always %d", wanted_code == NE_EXPR);
4130
4131	  return fold_convert (truth_type,
4132			       wanted_code == NE_EXPR
4133			       ? integer_one_node : integer_zero_node);
4134	}
4135    }
4136
4137  /* If the right sides are not constant, do the same for it.  Also,
4138     disallow this optimization if a size or signedness mismatch occurs
4139     between the left and right sides.  */
4140  if (l_const == 0)
4141    {
4142      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4143	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4144	  /* Make sure the two fields on the right
4145	     correspond to the left without being swapped.  */
4146	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4147	return 0;
4148
4149      first_bit = MIN (lr_bitpos, rr_bitpos);
4150      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4151      rnmode = get_best_mode (end_bit - first_bit, first_bit,
4152			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4153			      volatilep);
4154      if (rnmode == VOIDmode)
4155	return 0;
4156
4157      rnbitsize = GET_MODE_BITSIZE (rnmode);
4158      rnbitpos = first_bit & ~ (rnbitsize - 1);
4159      rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4160      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4161
4162      if (BYTES_BIG_ENDIAN)
4163	{
4164	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4165	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4166	}
4167
4168      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
4169			     size_int (xlr_bitpos), 0);
4170      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
4171			     size_int (xrr_bitpos), 0);
4172
4173      /* Make a mask that corresponds to both fields being compared.
4174	 Do this for both items being compared.  If the operands are the
4175	 same size and the bits being compared are in the same position
4176	 then we can do this by masking both and comparing the masked
4177	 results.  */
4178      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4179      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4180      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4181	{
4182	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4183				    ll_unsignedp || rl_unsignedp);
4184	  if (! all_ones_mask_p (ll_mask, lnbitsize))
4185	    lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4186
4187	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4188				    lr_unsignedp || rr_unsignedp);
4189	  if (! all_ones_mask_p (lr_mask, rnbitsize))
4190	    rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4191
4192	  return build (wanted_code, truth_type, lhs, rhs);
4193	}
4194
4195      /* There is still another way we can do something:  If both pairs of
4196	 fields being compared are adjacent, we may be able to make a wider
4197	 field containing them both.
4198
4199	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
4200	 the mask must be shifted to account for the shift done by
4201	 make_bit_field_ref.  */
4202      if ((ll_bitsize + ll_bitpos == rl_bitpos
4203	   && lr_bitsize + lr_bitpos == rr_bitpos)
4204	  || (ll_bitpos == rl_bitpos + rl_bitsize
4205	      && lr_bitpos == rr_bitpos + rr_bitsize))
4206	{
4207	  tree type;
4208
4209	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4210				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4211	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4212				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4213
4214	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4215				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4216	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4217				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4218
4219	  /* Convert to the smaller type before masking out unwanted bits.  */
4220	  type = lntype;
4221	  if (lntype != rntype)
4222	    {
4223	      if (lnbitsize > rnbitsize)
4224		{
4225		  lhs = fold_convert (rntype, lhs);
4226		  ll_mask = fold_convert (rntype, ll_mask);
4227		  type = rntype;
4228		}
4229	      else if (lnbitsize < rnbitsize)
4230		{
4231		  rhs = fold_convert (lntype, rhs);
4232		  lr_mask = fold_convert (lntype, lr_mask);
4233		  type = lntype;
4234		}
4235	    }
4236
4237	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4238	    lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4239
4240	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4241	    rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4242
4243	  return build (wanted_code, truth_type, lhs, rhs);
4244	}
4245
4246      return 0;
4247    }
4248
4249  /* Handle the case of comparisons with constants.  If there is something in
4250     common between the masks, those bits of the constants must be the same.
4251     If not, the condition is always false.  Test for this to avoid generating
4252     incorrect code below.  */
4253  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4254  if (! integer_zerop (result)
4255      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4256			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4257    {
4258      if (wanted_code == NE_EXPR)
4259	{
4260	  warning ("`or' of unmatched not-equal tests is always 1");
4261	  return fold_convert (truth_type, integer_one_node);
4262	}
4263      else
4264	{
4265	  warning ("`and' of mutually exclusive equal-tests is always 0");
4266	  return fold_convert (truth_type, integer_zero_node);
4267	}
4268    }
4269
4270  /* Construct the expression we will return.  First get the component
4271     reference we will make.  Unless the mask is all ones the width of
4272     that field, perform the mask operation.  Then compare with the
4273     merged constant.  */
4274  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4275			       ll_unsignedp || rl_unsignedp);
4276
4277  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4278  if (! all_ones_mask_p (ll_mask, lnbitsize))
4279    result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4280
4281  return build (wanted_code, truth_type, result,
4282		const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4283}
4284
4285/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4286   constant.  */
4287
4288static tree
4289optimize_minmax_comparison (tree t)
4290{
4291  tree type = TREE_TYPE (t);
4292  tree arg0 = TREE_OPERAND (t, 0);
4293  enum tree_code op_code;
4294  tree comp_const = TREE_OPERAND (t, 1);
4295  tree minmax_const;
4296  int consts_equal, consts_lt;
4297  tree inner;
4298
4299  STRIP_SIGN_NOPS (arg0);
4300
4301  op_code = TREE_CODE (arg0);
4302  minmax_const = TREE_OPERAND (arg0, 1);
4303  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4304  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4305  inner = TREE_OPERAND (arg0, 0);
4306
4307  /* If something does not permit us to optimize, return the original tree.  */
4308  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4309      || TREE_CODE (comp_const) != INTEGER_CST
4310      || TREE_CONSTANT_OVERFLOW (comp_const)
4311      || TREE_CODE (minmax_const) != INTEGER_CST
4312      || TREE_CONSTANT_OVERFLOW (minmax_const))
4313    return t;
4314
4315  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
4316     and GT_EXPR, doing the rest with recursive calls using logical
4317     simplifications.  */
4318  switch (TREE_CODE (t))
4319    {
4320    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
4321      return
4322	invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4323
4324    case GE_EXPR:
4325      return
4326	fold (build (TRUTH_ORIF_EXPR, type,
4327		     optimize_minmax_comparison
4328		     (build (EQ_EXPR, type, arg0, comp_const)),
4329		     optimize_minmax_comparison
4330		     (build (GT_EXPR, type, arg0, comp_const))));
4331
4332    case EQ_EXPR:
4333      if (op_code == MAX_EXPR && consts_equal)
4334	/* MAX (X, 0) == 0  ->  X <= 0  */
4335	return fold (build (LE_EXPR, type, inner, comp_const));
4336
4337      else if (op_code == MAX_EXPR && consts_lt)
4338	/* MAX (X, 0) == 5  ->  X == 5   */
4339	return fold (build (EQ_EXPR, type, inner, comp_const));
4340
4341      else if (op_code == MAX_EXPR)
4342	/* MAX (X, 0) == -1  ->  false  */
4343	return omit_one_operand (type, integer_zero_node, inner);
4344
4345      else if (consts_equal)
4346	/* MIN (X, 0) == 0  ->  X >= 0  */
4347	return fold (build (GE_EXPR, type, inner, comp_const));
4348
4349      else if (consts_lt)
4350	/* MIN (X, 0) == 5  ->  false  */
4351	return omit_one_operand (type, integer_zero_node, inner);
4352
4353      else
4354	/* MIN (X, 0) == -1  ->  X == -1  */
4355	return fold (build (EQ_EXPR, type, inner, comp_const));
4356
4357    case GT_EXPR:
4358      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4359	/* MAX (X, 0) > 0  ->  X > 0
4360	   MAX (X, 0) > 5  ->  X > 5  */
4361	return fold (build (GT_EXPR, type, inner, comp_const));
4362
4363      else if (op_code == MAX_EXPR)
4364	/* MAX (X, 0) > -1  ->  true  */
4365	return omit_one_operand (type, integer_one_node, inner);
4366
4367      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4368	/* MIN (X, 0) > 0  ->  false
4369	   MIN (X, 0) > 5  ->  false  */
4370	return omit_one_operand (type, integer_zero_node, inner);
4371
4372      else
4373	/* MIN (X, 0) > -1  ->  X > -1  */
4374	return fold (build (GT_EXPR, type, inner, comp_const));
4375
4376    default:
4377      return t;
4378    }
4379}
4380
4381/* T is an integer expression that is being multiplied, divided, or taken a
4382   modulus (CODE says which and what kind of divide or modulus) by a
4383   constant C.  See if we can eliminate that operation by folding it with
4384   other operations already in T.  WIDE_TYPE, if non-null, is a type that
4385   should be used for the computation if wider than our type.
4386
4387   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4388   (X * 2) + (Y * 4).  We must, however, be assured that either the original
4389   expression would not overflow or that overflow is undefined for the type
4390   in the language in question.
4391
4392   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4393   the machine has a multiply-accumulate insn or that this is part of an
4394   addressing calculation.
4395
4396   If we return a non-null expression, it is an equivalent form of the
4397   original computation, but need not be in the original type.  */
4398
4399static tree
4400extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4401{
4402  /* To avoid exponential search depth, refuse to allow recursion past
4403     three levels.  Beyond that (1) it's highly unlikely that we'll find
4404     something interesting and (2) we've probably processed it before
4405     when we built the inner expression.  */
4406
4407  static int depth;
4408  tree ret;
4409
4410  if (depth > 3)
4411    return NULL;
4412
4413  depth++;
4414  ret = extract_muldiv_1 (t, c, code, wide_type);
4415  depth--;
4416
4417  return ret;
4418}
4419
4420static tree
4421extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4422{
4423  tree type = TREE_TYPE (t);
4424  enum tree_code tcode = TREE_CODE (t);
4425  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4426				   > GET_MODE_SIZE (TYPE_MODE (type)))
4427		? wide_type : type);
4428  tree t1, t2;
4429  int same_p = tcode == code;
4430  tree op0 = NULL_TREE, op1 = NULL_TREE;
4431
4432  /* Don't deal with constants of zero here; they confuse the code below.  */
4433  if (integer_zerop (c))
4434    return NULL_TREE;
4435
4436  if (TREE_CODE_CLASS (tcode) == '1')
4437    op0 = TREE_OPERAND (t, 0);
4438
4439  if (TREE_CODE_CLASS (tcode) == '2')
4440    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4441
4442  /* Note that we need not handle conditional operations here since fold
4443     already handles those cases.  So just do arithmetic here.  */
4444  switch (tcode)
4445    {
4446    case INTEGER_CST:
4447      /* For a constant, we can always simplify if we are a multiply
4448	 or (for divide and modulus) if it is a multiple of our constant.  */
4449      if (code == MULT_EXPR
4450	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4451	return const_binop (code, fold_convert (ctype, t),
4452			    fold_convert (ctype, c), 0);
4453      break;
4454
4455    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
4456      /* If op0 is an expression ...  */
4457      if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4458	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4459	   || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4460	   || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4461	  /* ... and is unsigned, and its type is smaller than ctype,
4462	     then we cannot pass through as widening.  */
4463	  && ((TREE_UNSIGNED (TREE_TYPE (op0))
4464	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4465		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4466	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
4467	           > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4468	      /* ... or this is a truncation (t is narrower than op0),
4469		 then we cannot pass through this narrowing.  */
4470	      || (GET_MODE_SIZE (TYPE_MODE (type))
4471		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4472	      /* ... or signedness changes for division or modulus,
4473		 then we cannot pass through this conversion.  */
4474	      || (code != MULT_EXPR
4475		  && (TREE_UNSIGNED (ctype)
4476		      != TREE_UNSIGNED (TREE_TYPE (op0))))))
4477	break;
4478
4479      /* Pass the constant down and see if we can make a simplification.  If
4480	 we can, replace this expression with the inner simplification for
4481	 possible later conversion to our or some other type.  */
4482      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
4483	  && TREE_CODE (t2) == INTEGER_CST
4484	  && ! TREE_CONSTANT_OVERFLOW (t2)
4485	  && (0 != (t1 = extract_muldiv (op0, t2, code,
4486					 code == MULT_EXPR
4487					 ? ctype : NULL_TREE))))
4488	return t1;
4489      break;
4490
4491    case ABS_EXPR:
4492      /* If widening the type changes it from signed to unsigned, then we
4493         must avoid building ABS_EXPR itself as unsigned.  */
4494      if (TREE_UNSIGNED (ctype) && !TREE_UNSIGNED (type))
4495        {
4496          tree cstype = (*lang_hooks.types.signed_type) (ctype);
4497          if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
4498            {
4499              t1 = fold (build1 (tcode, cstype, fold_convert (cstype, t1)));
4500              return fold_convert (ctype, t1);
4501            }
4502          break;
4503        }
4504      /* FALLTHROUGH */
4505    case NEGATE_EXPR:
4506      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4507	return fold (build1 (tcode, ctype, fold_convert (ctype, t1)));
4508      break;
4509
4510    case MIN_EXPR:  case MAX_EXPR:
4511      /* If widening the type changes the signedness, then we can't perform
4512	 this optimization as that changes the result.  */
4513      if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4514	break;
4515
4516      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
4517      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4518	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4519	{
4520	  if (tree_int_cst_sgn (c) < 0)
4521	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4522
4523	  return fold (build (tcode, ctype, fold_convert (ctype, t1),
4524			      fold_convert (ctype, t2)));
4525	}
4526      break;
4527
4528    case WITH_RECORD_EXPR:
4529      if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4530	return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4531		      TREE_OPERAND (t, 1));
4532      break;
4533
4534    case LSHIFT_EXPR:  case RSHIFT_EXPR:
4535      /* If the second operand is constant, this is a multiplication
4536	 or floor division, by a power of two, so we can treat it that
4537	 way unless the multiplier or divisor overflows.  */
4538      if (TREE_CODE (op1) == INTEGER_CST
4539	  /* const_binop may not detect overflow correctly,
4540	     so check for it explicitly here.  */
4541	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4542	  && TREE_INT_CST_HIGH (op1) == 0
4543	  && 0 != (t1 = fold_convert (ctype,
4544				      const_binop (LSHIFT_EXPR,
4545						   size_one_node,
4546						   op1, 0)))
4547	  && ! TREE_OVERFLOW (t1))
4548	return extract_muldiv (build (tcode == LSHIFT_EXPR
4549				      ? MULT_EXPR : FLOOR_DIV_EXPR,
4550				      ctype, fold_convert (ctype, op0), t1),
4551			       c, code, wide_type);
4552      break;
4553
4554    case PLUS_EXPR:  case MINUS_EXPR:
4555      /* See if we can eliminate the operation on both sides.  If we can, we
4556	 can return a new PLUS or MINUS.  If we can't, the only remaining
4557	 cases where we can do anything are if the second operand is a
4558	 constant.  */
4559      t1 = extract_muldiv (op0, c, code, wide_type);
4560      t2 = extract_muldiv (op1, c, code, wide_type);
4561      if (t1 != 0 && t2 != 0
4562	  && (code == MULT_EXPR
4563	      /* If not multiplication, we can only do this if both operands
4564		 are divisible by c.  */
4565	      || (multiple_of_p (ctype, op0, c)
4566	          && multiple_of_p (ctype, op1, c))))
4567	return fold (build (tcode, ctype, fold_convert (ctype, t1),
4568			    fold_convert (ctype, t2)));
4569
4570      /* If this was a subtraction, negate OP1 and set it to be an addition.
4571	 This simplifies the logic below.  */
4572      if (tcode == MINUS_EXPR)
4573	tcode = PLUS_EXPR, op1 = negate_expr (op1);
4574
4575      if (TREE_CODE (op1) != INTEGER_CST)
4576	break;
4577
4578      /* If either OP1 or C are negative, this optimization is not safe for
4579	 some of the division and remainder types while for others we need
4580	 to change the code.  */
4581      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4582	{
4583	  if (code == CEIL_DIV_EXPR)
4584	    code = FLOOR_DIV_EXPR;
4585	  else if (code == FLOOR_DIV_EXPR)
4586	    code = CEIL_DIV_EXPR;
4587	  else if (code != MULT_EXPR
4588		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4589	    break;
4590	}
4591
4592      /* If it's a multiply or a division/modulus operation of a multiple
4593         of our constant, do the operation and verify it doesn't overflow.  */
4594      if (code == MULT_EXPR
4595	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4596	{
4597	  op1 = const_binop (code, fold_convert (ctype, op1),
4598			     fold_convert (ctype, c), 0);
4599	  /* We allow the constant to overflow with wrapping semantics.  */
4600	  if (op1 == 0
4601	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4602	    break;
4603	}
4604      else
4605	break;
4606
4607      /* If we have an unsigned type is not a sizetype, we cannot widen
4608	 the operation since it will change the result if the original
4609	 computation overflowed.  */
4610      if (TREE_UNSIGNED (ctype)
4611	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4612	  && ctype != type)
4613	break;
4614
4615      /* If we were able to eliminate our operation from the first side,
4616	 apply our operation to the second side and reform the PLUS.  */
4617      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4618	return fold (build (tcode, ctype, fold_convert (ctype, t1), op1));
4619
4620      /* The last case is if we are a multiply.  In that case, we can
4621	 apply the distributive law to commute the multiply and addition
4622	 if the multiplication of the constants doesn't overflow.  */
4623      if (code == MULT_EXPR)
4624	return fold (build (tcode, ctype,
4625			    fold (build (code, ctype,
4626					 fold_convert (ctype, op0),
4627					 fold_convert (ctype, c))),
4628			    op1));
4629
4630      break;
4631
4632    case MULT_EXPR:
4633      /* We have a special case here if we are doing something like
4634	 (C * 8) % 4 since we know that's zero.  */
4635      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4636	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4637	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4638	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4639	return omit_one_operand (type, integer_zero_node, op0);
4640
4641      /* ... fall through ...  */
4642
4643    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
4644    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
4645      /* If we can extract our operation from the LHS, do so and return a
4646	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
4647	 do something only if the second operand is a constant.  */
4648      if (same_p
4649	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4650	return fold (build (tcode, ctype, fold_convert (ctype, t1),
4651			    fold_convert (ctype, op1)));
4652      else if (tcode == MULT_EXPR && code == MULT_EXPR
4653	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4654	return fold (build (tcode, ctype, fold_convert (ctype, op0),
4655			    fold_convert (ctype, t1)));
4656      else if (TREE_CODE (op1) != INTEGER_CST)
4657	return 0;
4658
4659      /* If these are the same operation types, we can associate them
4660	 assuming no overflow.  */
4661      if (tcode == code
4662	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
4663				     fold_convert (ctype, c), 0))
4664	  && ! TREE_OVERFLOW (t1))
4665	return fold (build (tcode, ctype, fold_convert (ctype, op0), t1));
4666
4667      /* If these operations "cancel" each other, we have the main
4668	 optimizations of this pass, which occur when either constant is a
4669	 multiple of the other, in which case we replace this with either an
4670	 operation or CODE or TCODE.
4671
4672	 If we have an unsigned type that is not a sizetype, we cannot do
4673	 this since it will change the result if the original computation
4674	 overflowed.  */
4675      if ((! TREE_UNSIGNED (ctype)
4676	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4677	  && ! flag_wrapv
4678	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4679	      || (tcode == MULT_EXPR
4680		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4681		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4682	{
4683	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4684	    return fold (build (tcode, ctype, fold_convert (ctype, op0),
4685				fold_convert (ctype,
4686					      const_binop (TRUNC_DIV_EXPR,
4687							   op1, c, 0))));
4688	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4689	    return fold (build (code, ctype, fold_convert (ctype, op0),
4690				fold_convert (ctype,
4691					      const_binop (TRUNC_DIV_EXPR,
4692							   c, op1, 0))));
4693	}
4694      break;
4695
4696    default:
4697      break;
4698    }
4699
4700  return 0;
4701}
4702
4703/* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4704   S, a SAVE_EXPR, return the expression actually being evaluated.   Note
4705   that we may sometimes modify the tree.  */
4706
4707static tree
4708strip_compound_expr (tree t, tree s)
4709{
4710  enum tree_code code = TREE_CODE (t);
4711
4712  /* See if this is the COMPOUND_EXPR we want to eliminate.  */
4713  if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4714      && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4715    return TREE_OPERAND (t, 1);
4716
4717  /* See if this is a COND_EXPR or a simple arithmetic operator.   We
4718     don't bother handling any other types.  */
4719  else if (code == COND_EXPR)
4720    {
4721      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4722      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4723      TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4724    }
4725  else if (TREE_CODE_CLASS (code) == '1')
4726    TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4727  else if (TREE_CODE_CLASS (code) == '<'
4728	   || TREE_CODE_CLASS (code) == '2')
4729    {
4730      TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4731      TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4732    }
4733
4734  return t;
4735}
4736
4737/* Return a node which has the indicated constant VALUE (either 0 or
4738   1), and is of the indicated TYPE.  */
4739
4740static tree
4741constant_boolean_node (int value, tree type)
4742{
4743  if (type == integer_type_node)
4744    return value ? integer_one_node : integer_zero_node;
4745  else if (TREE_CODE (type) == BOOLEAN_TYPE)
4746    return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4747						integer_zero_node);
4748  else
4749    {
4750      tree t = build_int_2 (value, 0);
4751
4752      TREE_TYPE (t) = type;
4753      return t;
4754    }
4755}
4756
4757/* Utility function for the following routine, to see how complex a nesting of
4758   COND_EXPRs can be.  EXPR is the expression and LIMIT is a count beyond which
4759   we don't care (to avoid spending too much time on complex expressions.).  */
4760
4761static int
4762count_cond (tree expr, int lim)
4763{
4764  int ctrue, cfalse;
4765
4766  if (TREE_CODE (expr) != COND_EXPR)
4767    return 0;
4768  else if (lim <= 0)
4769    return 0;
4770
4771  ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4772  cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4773  return MIN (lim, 1 + ctrue + cfalse);
4774}
4775
4776/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4777   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
4778   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4779   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
4780   COND is the first argument to CODE; otherwise (as in the example
4781   given here), it is the second argument.  TYPE is the type of the
4782   original expression.  */
4783
4784static tree
4785fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4786				     tree cond, tree arg, int cond_first_p)
4787{
4788  tree test, true_value, false_value;
4789  tree lhs = NULL_TREE;
4790  tree rhs = NULL_TREE;
4791  /* In the end, we'll produce a COND_EXPR.  Both arms of the
4792     conditional expression will be binary operations.  The left-hand
4793     side of the expression to be executed if the condition is true
4794     will be pointed to by TRUE_LHS.  Similarly, the right-hand side
4795     of the expression to be executed if the condition is true will be
4796     pointed to by TRUE_RHS.  FALSE_LHS and FALSE_RHS are analogous --
4797     but apply to the expression to be executed if the conditional is
4798     false.  */
4799  tree *true_lhs;
4800  tree *true_rhs;
4801  tree *false_lhs;
4802  tree *false_rhs;
4803  /* These are the codes to use for the left-hand side and right-hand
4804     side of the COND_EXPR.  Normally, they are the same as CODE.  */
4805  enum tree_code lhs_code = code;
4806  enum tree_code rhs_code = code;
4807  /* And these are the types of the expressions.  */
4808  tree lhs_type = type;
4809  tree rhs_type = type;
4810  int save = 0;
4811
4812  if (cond_first_p)
4813    {
4814      true_rhs = false_rhs = &arg;
4815      true_lhs = &true_value;
4816      false_lhs = &false_value;
4817    }
4818  else
4819    {
4820      true_lhs = false_lhs = &arg;
4821      true_rhs = &true_value;
4822      false_rhs = &false_value;
4823    }
4824
4825  if (TREE_CODE (cond) == COND_EXPR)
4826    {
4827      test = TREE_OPERAND (cond, 0);
4828      true_value = TREE_OPERAND (cond, 1);
4829      false_value = TREE_OPERAND (cond, 2);
4830      /* If this operand throws an expression, then it does not make
4831	 sense to try to perform a logical or arithmetic operation
4832	 involving it.  Instead of building `a + throw 3' for example,
4833	 we simply build `a, throw 3'.  */
4834      if (VOID_TYPE_P (TREE_TYPE (true_value)))
4835	{
4836	  if (! cond_first_p)
4837	    {
4838	      lhs_code = COMPOUND_EXPR;
4839	      lhs_type = void_type_node;
4840	    }
4841	  else
4842	    lhs = true_value;
4843	}
4844      if (VOID_TYPE_P (TREE_TYPE (false_value)))
4845	{
4846	  if (! cond_first_p)
4847	    {
4848	      rhs_code = COMPOUND_EXPR;
4849	      rhs_type = void_type_node;
4850	    }
4851	  else
4852	    rhs = false_value;
4853	}
4854    }
4855  else
4856    {
4857      tree testtype = TREE_TYPE (cond);
4858      test = cond;
4859      true_value = fold_convert (testtype, integer_one_node);
4860      false_value = fold_convert (testtype, integer_zero_node);
4861    }
4862
4863  /* If ARG is complex we want to make sure we only evaluate it once.  Though
4864     this is only required if it is volatile, it might be more efficient even
4865     if it is not.  However, if we succeed in folding one part to a constant,
4866     we do not need to make this SAVE_EXPR.  Since we do this optimization
4867     primarily to see if we do end up with constant and this SAVE_EXPR
4868     interferes with later optimizations, suppressing it when we can is
4869     important.
4870
4871     If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4872     do so.  Don't try to see if the result is a constant if an arm is a
4873     COND_EXPR since we get exponential behavior in that case.  */
4874
4875  if (saved_expr_p (arg))
4876    save = 1;
4877  else if (lhs == 0 && rhs == 0
4878	   && !TREE_CONSTANT (arg)
4879	   && (*lang_hooks.decls.global_bindings_p) () == 0
4880	   && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4881	       || TREE_SIDE_EFFECTS (arg)))
4882    {
4883      if (TREE_CODE (true_value) != COND_EXPR)
4884	lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4885
4886      if (TREE_CODE (false_value) != COND_EXPR)
4887	rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4888
4889      if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4890	  && (rhs == 0 || !TREE_CONSTANT (rhs)))
4891	{
4892	  arg = save_expr (arg);
4893	  lhs = rhs = 0;
4894	  save = saved_expr_p (arg);
4895	}
4896    }
4897
4898  if (lhs == 0)
4899    lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4900  if (rhs == 0)
4901    rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4902
4903  test = fold (build (COND_EXPR, type, test, lhs, rhs));
4904
4905  /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4906     ahead of the COND_EXPR we made.  Otherwise we would have it only
4907     evaluated in one branch, with the other branch using the result
4908     but missing the evaluation code.  Beware that the save_expr call
4909     above might not return a SAVE_EXPR, so testing the TREE_CODE
4910     of ARG is not enough to decide here. �*/
4911  if (save)
4912    return build (COMPOUND_EXPR, type,
4913		  fold_convert (void_type_node, arg),
4914		  strip_compound_expr (test, arg));
4915  else
4916    return fold_convert (type, test);
4917}
4918
4919
4920/* Subroutine of fold() that checks for the addition of +/- 0.0.
4921
4922   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4923   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
4924   ADDEND is the same as X.
4925
4926   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4927   and finite.  The problematic cases are when X is zero, and its mode
4928   has signed zeros.  In the case of rounding towards -infinity,
4929   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
4930   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
4931
4932static bool
4933fold_real_zero_addition_p (tree type, tree addend, int negate)
4934{
4935  if (!real_zerop (addend))
4936    return false;
4937
4938  /* Don't allow the fold with -fsignaling-nans.  */
4939  if (HONOR_SNANS (TYPE_MODE (type)))
4940    return false;
4941
4942  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
4943  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4944    return true;
4945
4946  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
4947  if (TREE_CODE (addend) == REAL_CST
4948      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4949    negate = !negate;
4950
4951  /* The mode has signed zeros, and we have to honor their sign.
4952     In this situation, there is only one case we can return true for.
4953     X - 0 is the same as X unless rounding towards -infinity is
4954     supported.  */
4955  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4956}
4957
4958/* Subroutine of fold() that checks comparisons of built-in math
4959   functions against real constants.
4960
4961   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4962   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
4963   is the type of the result and ARG0 and ARG1 are the operands of the
4964   comparison.  ARG1 must be a TREE_REAL_CST.
4965
4966   The function returns the constant folded tree if a simplification
4967   can be made, and NULL_TREE otherwise.  */
4968
4969static tree
4970fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4971		     tree type, tree arg0, tree arg1)
4972{
4973  REAL_VALUE_TYPE c;
4974
4975  if (fcode == BUILT_IN_SQRT
4976      || fcode == BUILT_IN_SQRTF
4977      || fcode == BUILT_IN_SQRTL)
4978    {
4979      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4980      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4981
4982      c = TREE_REAL_CST (arg1);
4983      if (REAL_VALUE_NEGATIVE (c))
4984	{
4985	  /* sqrt(x) < y is always false, if y is negative.  */
4986	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4987	    return omit_one_operand (type,
4988				     fold_convert (type, integer_zero_node),
4989				     arg);
4990
4991	  /* sqrt(x) > y is always true, if y is negative and we
4992	     don't care about NaNs, i.e. negative values of x.  */
4993	  if (code == NE_EXPR || !HONOR_NANS (mode))
4994	    return omit_one_operand (type,
4995				     fold_convert (type, integer_one_node),
4996				     arg);
4997
4998	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
4999	  return fold (build (GE_EXPR, type, arg,
5000			      build_real (TREE_TYPE (arg), dconst0)));
5001	}
5002      else if (code == GT_EXPR || code == GE_EXPR)
5003	{
5004	  REAL_VALUE_TYPE c2;
5005
5006	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5007	  real_convert (&c2, mode, &c2);
5008
5009	  if (REAL_VALUE_ISINF (c2))
5010	    {
5011	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
5012	      if (HONOR_INFINITIES (mode))
5013		return fold (build (EQ_EXPR, type, arg,
5014				    build_real (TREE_TYPE (arg), c2)));
5015
5016	      /* sqrt(x) > y is always false, when y is very large
5017		 and we don't care about infinities.  */
5018	      return omit_one_operand (type,
5019				       fold_convert (type, integer_zero_node),
5020				       arg);
5021	    }
5022
5023	  /* sqrt(x) > c is the same as x > c*c.  */
5024	  return fold (build (code, type, arg,
5025			      build_real (TREE_TYPE (arg), c2)));
5026	}
5027      else if (code == LT_EXPR || code == LE_EXPR)
5028	{
5029	  REAL_VALUE_TYPE c2;
5030
5031	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5032	  real_convert (&c2, mode, &c2);
5033
5034	  if (REAL_VALUE_ISINF (c2))
5035	    {
5036	      /* sqrt(x) < y is always true, when y is a very large
5037		 value and we don't care about NaNs or Infinities.  */
5038	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5039		return omit_one_operand (type,
5040					 fold_convert (type, integer_one_node),
5041					 arg);
5042
5043	      /* sqrt(x) < y is x != +Inf when y is very large and we
5044		 don't care about NaNs.  */
5045	      if (! HONOR_NANS (mode))
5046		return fold (build (NE_EXPR, type, arg,
5047				    build_real (TREE_TYPE (arg), c2)));
5048
5049	      /* sqrt(x) < y is x >= 0 when y is very large and we
5050		 don't care about Infinities.  */
5051	      if (! HONOR_INFINITIES (mode))
5052		return fold (build (GE_EXPR, type, arg,
5053				    build_real (TREE_TYPE (arg), dconst0)));
5054
5055	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
5056	      if ((*lang_hooks.decls.global_bindings_p) () != 0
5057		  || CONTAINS_PLACEHOLDER_P (arg))
5058		return NULL_TREE;
5059
5060	      arg = save_expr (arg);
5061	      return fold (build (TRUTH_ANDIF_EXPR, type,
5062				  fold (build (GE_EXPR, type, arg,
5063					       build_real (TREE_TYPE (arg),
5064							   dconst0))),
5065				  fold (build (NE_EXPR, type, arg,
5066					       build_real (TREE_TYPE (arg),
5067							   c2)))));
5068	    }
5069
5070	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
5071	  if (! HONOR_NANS (mode))
5072	    return fold (build (code, type, arg,
5073				build_real (TREE_TYPE (arg), c2)));
5074
5075	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
5076	  if ((*lang_hooks.decls.global_bindings_p) () == 0
5077	      && ! CONTAINS_PLACEHOLDER_P (arg))
5078	    {
5079	      arg = save_expr (arg);
5080	      return fold (build (TRUTH_ANDIF_EXPR, type,
5081				  fold (build (GE_EXPR, type, arg,
5082					       build_real (TREE_TYPE (arg),
5083							   dconst0))),
5084				  fold (build (code, type, arg,
5085					       build_real (TREE_TYPE (arg),
5086							   c2)))));
5087	    }
5088	}
5089    }
5090
5091  return NULL_TREE;
5092}
5093
5094/* Subroutine of fold() that optimizes comparisons against Infinities,
5095   either +Inf or -Inf.
5096
5097   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5098   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
5099   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
5100
5101   The function returns the constant folded tree if a simplification
5102   can be made, and NULL_TREE otherwise.  */
5103
5104static tree
5105fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5106{
5107  enum machine_mode mode;
5108  REAL_VALUE_TYPE max;
5109  tree temp;
5110  bool neg;
5111
5112  mode = TYPE_MODE (TREE_TYPE (arg0));
5113
5114  /* For negative infinity swap the sense of the comparison.  */
5115  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5116  if (neg)
5117    code = swap_tree_comparison (code);
5118
5119  switch (code)
5120    {
5121    case GT_EXPR:
5122      /* x > +Inf is always false, if with ignore sNANs.  */
5123      if (HONOR_SNANS (mode))
5124        return NULL_TREE;
5125      return omit_one_operand (type,
5126			       fold_convert (type, integer_zero_node),
5127			       arg0);
5128
5129    case LE_EXPR:
5130      /* x <= +Inf is always true, if we don't case about NaNs.  */
5131      if (! HONOR_NANS (mode))
5132	return omit_one_operand (type,
5133				 fold_convert (type, integer_one_node),
5134				 arg0);
5135
5136      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
5137      if ((*lang_hooks.decls.global_bindings_p) () == 0
5138	  && ! CONTAINS_PLACEHOLDER_P (arg0))
5139	{
5140	  arg0 = save_expr (arg0);
5141	  return fold (build (EQ_EXPR, type, arg0, arg0));
5142	}
5143      break;
5144
5145    case EQ_EXPR:
5146    case GE_EXPR:
5147      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
5148      real_maxval (&max, neg, mode);
5149      return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5150			  arg0, build_real (TREE_TYPE (arg0), max)));
5151
5152    case LT_EXPR:
5153      /* x < +Inf is always equal to x <= DBL_MAX.  */
5154      real_maxval (&max, neg, mode);
5155      return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5156			  arg0, build_real (TREE_TYPE (arg0), max)));
5157
5158    case NE_EXPR:
5159      /* x != +Inf is always equal to !(x > DBL_MAX).  */
5160      real_maxval (&max, neg, mode);
5161      if (! HONOR_NANS (mode))
5162	return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5163			    arg0, build_real (TREE_TYPE (arg0), max)));
5164      temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5165			  arg0, build_real (TREE_TYPE (arg0), max)));
5166      return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5167
5168    default:
5169      break;
5170    }
5171
5172  return NULL_TREE;
5173}
5174
5175/* If CODE with arguments ARG0 and ARG1 represents a single bit
5176   equality/inequality test, then return a simplified form of
5177   the test using shifts and logical operations.  Otherwise return
5178   NULL.  TYPE is the desired result type.  */
5179
5180tree
5181fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5182		      tree result_type)
5183{
5184  /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5185     operand 0.  */
5186  if (code == TRUTH_NOT_EXPR)
5187    {
5188      code = TREE_CODE (arg0);
5189      if (code != NE_EXPR && code != EQ_EXPR)
5190	return NULL_TREE;
5191
5192      /* Extract the arguments of the EQ/NE.  */
5193      arg1 = TREE_OPERAND (arg0, 1);
5194      arg0 = TREE_OPERAND (arg0, 0);
5195
5196      /* This requires us to invert the code.  */
5197      code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5198    }
5199
5200  /* If this is testing a single bit, we can optimize the test.  */
5201  if ((code == NE_EXPR || code == EQ_EXPR)
5202      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5203      && integer_pow2p (TREE_OPERAND (arg0, 1)))
5204    {
5205      tree inner = TREE_OPERAND (arg0, 0);
5206      tree type = TREE_TYPE (arg0);
5207      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5208      enum machine_mode operand_mode = TYPE_MODE (type);
5209      int ops_unsigned;
5210      tree signed_type, unsigned_type, intermediate_type;
5211      tree arg00;
5212
5213      /* If we have (A & C) != 0 where C is the sign bit of A, convert
5214	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
5215      arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5216      if (arg00 != NULL_TREE
5217	  /* This is only a win if casting to a signed type is cheap,
5218	     i.e. when arg00's type is not a partial mode.  */
5219	  && TYPE_PRECISION (TREE_TYPE (arg00))
5220	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
5221	{
5222	  tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5223	  return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5224			      fold_convert (stype, arg00),
5225			      fold_convert (stype, integer_zero_node)));
5226	}
5227
5228      /* Otherwise we have (A & C) != 0 where C is a single bit,
5229	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
5230	 Similarly for (A & C) == 0.  */
5231
5232      /* If INNER is a right shift of a constant and it plus BITNUM does
5233	 not overflow, adjust BITNUM and INNER.  */
5234      if (TREE_CODE (inner) == RSHIFT_EXPR
5235	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5236	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5237	  && bitnum < TYPE_PRECISION (type)
5238	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5239				   bitnum - TYPE_PRECISION (type)))
5240	{
5241	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5242	  inner = TREE_OPERAND (inner, 0);
5243	}
5244
5245      /* If we are going to be able to omit the AND below, we must do our
5246	 operations as unsigned.  If we must use the AND, we have a choice.
5247	 Normally unsigned is faster, but for some machines signed is.  */
5248#ifdef LOAD_EXTEND_OP
5249      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5250#else
5251      ops_unsigned = 1;
5252#endif
5253
5254      signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5255      unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5256      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5257      inner = fold_convert (intermediate_type, inner);
5258
5259      if (bitnum != 0)
5260	inner = build (RSHIFT_EXPR, intermediate_type,
5261		       inner, size_int (bitnum));
5262
5263      if (code == EQ_EXPR)
5264	inner = build (BIT_XOR_EXPR, intermediate_type,
5265		       inner, integer_one_node);
5266
5267      /* Put the AND last so it can combine with more things.  */
5268      inner = build (BIT_AND_EXPR, intermediate_type,
5269		     inner, integer_one_node);
5270
5271      /* Make sure to return the proper type.  */
5272      inner = fold_convert (result_type, inner);
5273
5274      return inner;
5275    }
5276  return NULL_TREE;
5277}
5278
5279/* Check whether we are allowed to reorder operands arg0 and arg1,
5280   such that the evaluation of arg1 occurs before arg0.  */
5281
5282static bool
5283reorder_operands_p (tree arg0, tree arg1)
5284{
5285  if (! flag_evaluation_order)
5286    return true;
5287  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5288    return true;
5289  return ! TREE_SIDE_EFFECTS (arg0)
5290	 && ! TREE_SIDE_EFFECTS (arg1);
5291}
5292
5293/* Test whether it is preferable two swap two operands, ARG0 and
5294   ARG1, for example because ARG0 is an integer constant and ARG1
5295   isn't.  If REORDER is true, only recommend swapping if we can
5296   evaluate the operands in reverse order.  */
5297
5298static bool
5299tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5300{
5301  STRIP_SIGN_NOPS (arg0);
5302  STRIP_SIGN_NOPS (arg1);
5303
5304  if (TREE_CODE (arg1) == INTEGER_CST)
5305    return 0;
5306  if (TREE_CODE (arg0) == INTEGER_CST)
5307    return 1;
5308
5309  if (TREE_CODE (arg1) == REAL_CST)
5310    return 0;
5311  if (TREE_CODE (arg0) == REAL_CST)
5312    return 1;
5313
5314  if (TREE_CODE (arg1) == COMPLEX_CST)
5315    return 0;
5316  if (TREE_CODE (arg0) == COMPLEX_CST)
5317    return 1;
5318
5319  if (TREE_CONSTANT (arg1))
5320    return 0;
5321  if (TREE_CONSTANT (arg0))
5322    return 1;
5323
5324  if (optimize_size)
5325    return 0;
5326
5327  if (reorder && flag_evaluation_order
5328      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5329    return 0;
5330
5331  if (DECL_P (arg1))
5332    return 0;
5333  if (DECL_P (arg0))
5334    return 1;
5335
5336  return 0;
5337}
5338
5339/* Perform constant folding and related simplification of EXPR.
5340   The related simplifications include x*1 => x, x*0 => 0, etc.,
5341   and application of the associative law.
5342   NOP_EXPR conversions may be removed freely (as long as we
5343   are careful not to change the C type of the overall expression)
5344   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5345   but we can constant-fold them if they have constant operands.  */
5346
5347#ifdef ENABLE_FOLD_CHECKING
5348# define fold(x) fold_1 (x)
5349static tree fold_1 (tree);
5350static
5351#endif
5352tree
5353fold (tree expr)
5354{
5355  tree t = expr, orig_t;
5356  tree t1 = NULL_TREE;
5357  tree tem;
5358  tree type = TREE_TYPE (expr);
5359  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5360  enum tree_code code = TREE_CODE (t);
5361  int kind = TREE_CODE_CLASS (code);
5362  int invert;
5363  /* WINS will be nonzero when the switch is done
5364     if all operands are constant.  */
5365  int wins = 1;
5366
5367  /* Don't try to process an RTL_EXPR since its operands aren't trees.
5368     Likewise for a SAVE_EXPR that's already been evaluated.  */
5369  if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5370    return t;
5371
5372  /* Return right away if a constant.  */
5373  if (kind == 'c')
5374    return t;
5375
5376  orig_t = t;
5377
5378  if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5379    {
5380      tree subop;
5381
5382      /* Special case for conversion ops that can have fixed point args.  */
5383      arg0 = TREE_OPERAND (t, 0);
5384
5385      /* Don't use STRIP_NOPS, because signedness of argument type matters.  */
5386      if (arg0 != 0)
5387	STRIP_SIGN_NOPS (arg0);
5388
5389      if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5390	subop = TREE_REALPART (arg0);
5391      else
5392	subop = arg0;
5393
5394      if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5395	  && TREE_CODE (subop) != REAL_CST)
5396	/* Note that TREE_CONSTANT isn't enough:
5397	   static var addresses are constant but we can't
5398	   do arithmetic on them.  */
5399	wins = 0;
5400    }
5401  else if (IS_EXPR_CODE_CLASS (kind))
5402    {
5403      int len = first_rtl_op (code);
5404      int i;
5405      for (i = 0; i < len; i++)
5406	{
5407	  tree op = TREE_OPERAND (t, i);
5408	  tree subop;
5409
5410	  if (op == 0)
5411	    continue;		/* Valid for CALL_EXPR, at least.  */
5412
5413	  if (kind == '<' || code == RSHIFT_EXPR)
5414	    {
5415	      /* Signedness matters here.  Perhaps we can refine this
5416		 later.  */
5417	      STRIP_SIGN_NOPS (op);
5418	    }
5419	  else
5420	    /* Strip any conversions that don't change the mode.  */
5421	    STRIP_NOPS (op);
5422
5423	  if (TREE_CODE (op) == COMPLEX_CST)
5424	    subop = TREE_REALPART (op);
5425	  else
5426	    subop = op;
5427
5428	  if (TREE_CODE (subop) != INTEGER_CST
5429	      && TREE_CODE (subop) != REAL_CST)
5430	    /* Note that TREE_CONSTANT isn't enough:
5431	       static var addresses are constant but we can't
5432	       do arithmetic on them.  */
5433	    wins = 0;
5434
5435	  if (i == 0)
5436	    arg0 = op;
5437	  else if (i == 1)
5438	    arg1 = op;
5439	}
5440    }
5441
5442  /* If this is a commutative operation, and ARG0 is a constant, move it
5443     to ARG1 to reduce the number of tests below.  */
5444  if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
5445       || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
5446       || code == BIT_AND_EXPR)
5447      && tree_swap_operands_p (arg0, arg1, true))
5448    return fold (build (code, type, TREE_OPERAND (t, 1),
5449			TREE_OPERAND (t, 0)));
5450
5451  /* Now WINS is set as described above,
5452     ARG0 is the first operand of EXPR,
5453     and ARG1 is the second operand (if it has more than one operand).
5454
5455     First check for cases where an arithmetic operation is applied to a
5456     compound, conditional, or comparison operation.  Push the arithmetic
5457     operation inside the compound or conditional to see if any folding
5458     can then be done.  Convert comparison to conditional for this purpose.
5459     The also optimizes non-constant cases that used to be done in
5460     expand_expr.
5461
5462     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
5463     one of the operands is a comparison and the other is a comparison, a
5464     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
5465     code below would make the expression more complex.  Change it to a
5466     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
5467     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
5468
5469  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5470       || code == EQ_EXPR || code == NE_EXPR)
5471      && ((truth_value_p (TREE_CODE (arg0))
5472	   && (truth_value_p (TREE_CODE (arg1))
5473	       || (TREE_CODE (arg1) == BIT_AND_EXPR
5474		   && integer_onep (TREE_OPERAND (arg1, 1)))))
5475	  || (truth_value_p (TREE_CODE (arg1))
5476	      && (truth_value_p (TREE_CODE (arg0))
5477		  || (TREE_CODE (arg0) == BIT_AND_EXPR
5478		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
5479    {
5480      t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5481		       : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5482		       : TRUTH_XOR_EXPR,
5483		       type, arg0, arg1));
5484
5485      if (code == EQ_EXPR)
5486	t = invert_truthvalue (t);
5487
5488      return t;
5489    }
5490
5491  if (TREE_CODE_CLASS (code) == '1')
5492    {
5493      if (TREE_CODE (arg0) == COMPOUND_EXPR)
5494	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5495		      fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5496      else if (TREE_CODE (arg0) == COND_EXPR)
5497	{
5498	  tree arg01 = TREE_OPERAND (arg0, 1);
5499	  tree arg02 = TREE_OPERAND (arg0, 2);
5500	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5501	    arg01 = fold (build1 (code, type, arg01));
5502	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5503	    arg02 = fold (build1 (code, type, arg02));
5504	  t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5505			   arg01, arg02));
5506
5507	  /* If this was a conversion, and all we did was to move into
5508	     inside the COND_EXPR, bring it back out.  But leave it if
5509	     it is a conversion from integer to integer and the
5510	     result precision is no wider than a word since such a
5511	     conversion is cheap and may be optimized away by combine,
5512	     while it couldn't if it were outside the COND_EXPR.  Then return
5513	     so we don't get into an infinite recursion loop taking the
5514	     conversion out and then back in.  */
5515
5516	  if ((code == NOP_EXPR || code == CONVERT_EXPR
5517	       || code == NON_LVALUE_EXPR)
5518	      && TREE_CODE (t) == COND_EXPR
5519	      && TREE_CODE (TREE_OPERAND (t, 1)) == code
5520	      && TREE_CODE (TREE_OPERAND (t, 2)) == code
5521	      && ! VOID_TYPE_P (TREE_OPERAND (t, 1))
5522	      && ! VOID_TYPE_P (TREE_OPERAND (t, 2))
5523	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5524		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5525	      && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5526		    && (INTEGRAL_TYPE_P
5527			(TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5528		    && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5529	    t = build1 (code, type,
5530			build (COND_EXPR,
5531			       TREE_TYPE (TREE_OPERAND
5532					  (TREE_OPERAND (t, 1), 0)),
5533			       TREE_OPERAND (t, 0),
5534			       TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5535			       TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5536	  return t;
5537	}
5538      else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5539	return fold (build (COND_EXPR, type, arg0,
5540			    fold (build1 (code, type, integer_one_node)),
5541			    fold (build1 (code, type, integer_zero_node))));
5542   }
5543  else if (TREE_CODE_CLASS (code) == '<'
5544	   && TREE_CODE (arg0) == COMPOUND_EXPR)
5545    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5546		  fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5547  else if (TREE_CODE_CLASS (code) == '<'
5548	   && TREE_CODE (arg1) == COMPOUND_EXPR)
5549    return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5550		  fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5551  else if (TREE_CODE_CLASS (code) == '2'
5552	   || TREE_CODE_CLASS (code) == '<')
5553    {
5554      if (TREE_CODE (arg1) == COMPOUND_EXPR
5555	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5556	  && ! TREE_SIDE_EFFECTS (arg0))
5557	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5558		      fold (build (code, type,
5559				   arg0, TREE_OPERAND (arg1, 1))));
5560      else if ((TREE_CODE (arg1) == COND_EXPR
5561		|| (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5562		    && TREE_CODE_CLASS (code) != '<'))
5563	       && (TREE_CODE (arg0) != COND_EXPR
5564		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5565	       && (! TREE_SIDE_EFFECTS (arg0)
5566		   || ((*lang_hooks.decls.global_bindings_p) () == 0
5567		       && ! CONTAINS_PLACEHOLDER_P (arg0))))
5568	return
5569	  fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5570					       /*cond_first_p=*/0);
5571      else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5572	return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5573		      fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5574      else if ((TREE_CODE (arg0) == COND_EXPR
5575		|| (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5576		    && TREE_CODE_CLASS (code) != '<'))
5577	       && (TREE_CODE (arg1) != COND_EXPR
5578		   || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5579	       && (! TREE_SIDE_EFFECTS (arg1)
5580		   || ((*lang_hooks.decls.global_bindings_p) () == 0
5581		       && ! CONTAINS_PLACEHOLDER_P (arg1))))
5582	return
5583	  fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5584					       /*cond_first_p=*/1);
5585    }
5586
5587  switch (code)
5588    {
5589    case INTEGER_CST:
5590    case REAL_CST:
5591    case VECTOR_CST:
5592    case STRING_CST:
5593    case COMPLEX_CST:
5594    case CONSTRUCTOR:
5595      return t;
5596
5597    case CONST_DECL:
5598      return fold (DECL_INITIAL (t));
5599
5600    case NOP_EXPR:
5601    case FLOAT_EXPR:
5602    case CONVERT_EXPR:
5603    case FIX_TRUNC_EXPR:
5604      /* Other kinds of FIX are not handled properly by fold_convert.  */
5605
5606      if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5607	return TREE_OPERAND (t, 0);
5608
5609      /* Handle cases of two conversions in a row.  */
5610      if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5611	  || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5612	{
5613	  tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5614	  tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5615	  tree final_type = TREE_TYPE (t);
5616	  int inside_int = INTEGRAL_TYPE_P (inside_type);
5617	  int inside_ptr = POINTER_TYPE_P (inside_type);
5618	  int inside_float = FLOAT_TYPE_P (inside_type);
5619	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
5620	  int inside_unsignedp = TREE_UNSIGNED (inside_type);
5621	  int inter_int = INTEGRAL_TYPE_P (inter_type);
5622	  int inter_ptr = POINTER_TYPE_P (inter_type);
5623	  int inter_float = FLOAT_TYPE_P (inter_type);
5624	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
5625	  int inter_unsignedp = TREE_UNSIGNED (inter_type);
5626	  int final_int = INTEGRAL_TYPE_P (final_type);
5627	  int final_ptr = POINTER_TYPE_P (final_type);
5628	  int final_float = FLOAT_TYPE_P (final_type);
5629	  unsigned int final_prec = TYPE_PRECISION (final_type);
5630	  int final_unsignedp = TREE_UNSIGNED (final_type);
5631
5632	  /* In addition to the cases of two conversions in a row
5633	     handled below, if we are converting something to its own
5634	     type via an object of identical or wider precision, neither
5635	     conversion is needed.  */
5636	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5637	      && ((inter_int && final_int) || (inter_float && final_float))
5638	      && inter_prec >= final_prec)
5639	    return fold (build1 (code, final_type,
5640				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5641
5642	  /* Likewise, if the intermediate and final types are either both
5643	     float or both integer, we don't need the middle conversion if
5644	     it is wider than the final type and doesn't change the signedness
5645	     (for integers).  Avoid this if the final type is a pointer
5646	     since then we sometimes need the inner conversion.  Likewise if
5647	     the outer has a precision not equal to the size of its mode.  */
5648	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5649	       || (inter_float && inside_float))
5650	      && inter_prec >= inside_prec
5651	      && (inter_float || inter_unsignedp == inside_unsignedp)
5652	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5653		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5654	      && ! final_ptr)
5655	    return fold (build1 (code, final_type,
5656				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5657
5658	  /* If we have a sign-extension of a zero-extended value, we can
5659	     replace that by a single zero-extension.  */
5660	  if (inside_int && inter_int && final_int
5661	      && inside_prec < inter_prec && inter_prec < final_prec
5662	      && inside_unsignedp && !inter_unsignedp)
5663	    return fold (build1 (code, final_type,
5664				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5665
5666	  /* Two conversions in a row are not needed unless:
5667	     - some conversion is floating-point (overstrict for now), or
5668	     - the intermediate type is narrower than both initial and
5669	       final, or
5670	     - the intermediate type and innermost type differ in signedness,
5671	       and the outermost type is wider than the intermediate, or
5672	     - the initial type is a pointer type and the precisions of the
5673	       intermediate and final types differ, or
5674	     - the final type is a pointer type and the precisions of the
5675	       initial and intermediate types differ.  */
5676	  if (! inside_float && ! inter_float && ! final_float
5677	      && (inter_prec > inside_prec || inter_prec > final_prec)
5678	      && ! (inside_int && inter_int
5679		    && inter_unsignedp != inside_unsignedp
5680		    && inter_prec < final_prec)
5681	      && ((inter_unsignedp && inter_prec > inside_prec)
5682		  == (final_unsignedp && final_prec > inter_prec))
5683	      && ! (inside_ptr && inter_prec != final_prec)
5684	      && ! (final_ptr && inside_prec != inter_prec)
5685	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5686		    && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5687	      && ! final_ptr)
5688	    return fold (build1 (code, final_type,
5689				 TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
5690	}
5691
5692      if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5693	  && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5694	  /* Detect assigning a bitfield.  */
5695	  && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5696	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5697	{
5698	  /* Don't leave an assignment inside a conversion
5699	     unless assigning a bitfield.  */
5700	  tree prev = TREE_OPERAND (t, 0);
5701	  if (t == orig_t)
5702	    t = copy_node (t);
5703	  TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5704	  /* First do the assignment, then return converted constant.  */
5705	  t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5706	  TREE_NO_UNUSED_WARNING (t) = 1;
5707	  TREE_USED (t) = 1;
5708	  return t;
5709	}
5710
5711      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5712	 constants (if x has signed type, the sign bit cannot be set
5713	 in c).  This folds extension into the BIT_AND_EXPR.  */
5714      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5715	  && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5716	  && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5717	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5718	{
5719	  tree and = TREE_OPERAND (t, 0);
5720	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5721	  int change = 0;
5722
5723	  if (TREE_UNSIGNED (TREE_TYPE (and))
5724	      || (TYPE_PRECISION (TREE_TYPE (t))
5725		  <= TYPE_PRECISION (TREE_TYPE (and))))
5726	    change = 1;
5727	  else if (TYPE_PRECISION (TREE_TYPE (and1))
5728		   <= HOST_BITS_PER_WIDE_INT
5729		   && host_integerp (and1, 1))
5730	    {
5731	      unsigned HOST_WIDE_INT cst;
5732
5733	      cst = tree_low_cst (and1, 1);
5734	      cst &= (HOST_WIDE_INT) -1
5735		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5736	      change = (cst == 0);
5737#ifdef LOAD_EXTEND_OP
5738	      if (change
5739		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5740		      == ZERO_EXTEND))
5741		{
5742		  tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5743		  and0 = fold_convert (uns, and0);
5744		  and1 = fold_convert (uns, and1);
5745		}
5746#endif
5747	    }
5748	  if (change)
5749	    return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5750				fold_convert (TREE_TYPE (t), and0),
5751				fold_convert (TREE_TYPE (t), and1)));
5752	}
5753
5754      tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5755      return tem ? tem : t;
5756
5757    case VIEW_CONVERT_EXPR:
5758      if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5759	return build1 (VIEW_CONVERT_EXPR, type,
5760		       TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5761      return t;
5762
5763    case COMPONENT_REF:
5764      if (TREE_CODE (arg0) == CONSTRUCTOR
5765	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5766	{
5767	  tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5768	  if (m)
5769	    t = TREE_VALUE (m);
5770	}
5771      return t;
5772
5773    case RANGE_EXPR:
5774      if (TREE_CONSTANT (t) != wins)
5775	{
5776	  if (t == orig_t)
5777	    t = copy_node (t);
5778	  TREE_CONSTANT (t) = wins;
5779	}
5780      return t;
5781
5782    case NEGATE_EXPR:
5783      if (negate_expr_p (arg0))
5784	return fold_convert (type, negate_expr (arg0));
5785      return t;
5786
5787    case ABS_EXPR:
5788      if (wins)
5789	{
5790	  if (TREE_CODE (arg0) == INTEGER_CST)
5791	    {
5792	      /* If the value is unsigned, then the absolute value is
5793		 the same as the ordinary value.  */
5794	      if (TREE_UNSIGNED (type))
5795		return arg0;
5796	      /* Similarly, if the value is non-negative.  */
5797	      else if (INT_CST_LT (integer_minus_one_node, arg0))
5798		return arg0;
5799	      /* If the value is negative, then the absolute value is
5800		 its negation.  */
5801	      else
5802		{
5803		  unsigned HOST_WIDE_INT low;
5804		  HOST_WIDE_INT high;
5805		  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5806					     TREE_INT_CST_HIGH (arg0),
5807					     &low, &high);
5808		  t = build_int_2 (low, high);
5809		  TREE_TYPE (t) = type;
5810		  TREE_OVERFLOW (t)
5811		    = (TREE_OVERFLOW (arg0)
5812		       | force_fit_type (t, overflow));
5813		  TREE_CONSTANT_OVERFLOW (t)
5814		    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5815		}
5816	    }
5817	  else if (TREE_CODE (arg0) == REAL_CST)
5818	    {
5819	      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5820		t = build_real (type,
5821				REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5822	    }
5823	}
5824      else if (TREE_CODE (arg0) == NEGATE_EXPR)
5825	return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5826      /* Convert fabs((double)float) into (double)fabsf(float).  */
5827      else if (TREE_CODE (arg0) == NOP_EXPR
5828	       && TREE_CODE (type) == REAL_TYPE)
5829	{
5830	  tree targ0 = strip_float_extensions (arg0);
5831	  if (targ0 != arg0)
5832	    return fold_convert (type, fold (build1 (ABS_EXPR,
5833						     TREE_TYPE (targ0),
5834						     targ0)));
5835	}
5836      else if (tree_expr_nonnegative_p (arg0))
5837	return arg0;
5838      return t;
5839
5840    case CONJ_EXPR:
5841      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5842	return fold_convert (type, arg0);
5843      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5844	return build (COMPLEX_EXPR, type,
5845		      TREE_OPERAND (arg0, 0),
5846		      negate_expr (TREE_OPERAND (arg0, 1)));
5847      else if (TREE_CODE (arg0) == COMPLEX_CST)
5848	return build_complex (type, TREE_REALPART (arg0),
5849			      negate_expr (TREE_IMAGPART (arg0)));
5850      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5851	return fold (build (TREE_CODE (arg0), type,
5852			    fold (build1 (CONJ_EXPR, type,
5853					  TREE_OPERAND (arg0, 0))),
5854			    fold (build1 (CONJ_EXPR,
5855					  type, TREE_OPERAND (arg0, 1)))));
5856      else if (TREE_CODE (arg0) == CONJ_EXPR)
5857	return TREE_OPERAND (arg0, 0);
5858      return t;
5859
5860    case BIT_NOT_EXPR:
5861      if (wins)
5862	{
5863	  t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5864			   ~ TREE_INT_CST_HIGH (arg0));
5865	  TREE_TYPE (t) = type;
5866	  force_fit_type (t, 0);
5867	  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5868	  TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5869	}
5870      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5871	return TREE_OPERAND (arg0, 0);
5872      return t;
5873
5874    case PLUS_EXPR:
5875      /* A + (-B) -> A - B */
5876      if (TREE_CODE (arg1) == NEGATE_EXPR)
5877	return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5878      /* (-A) + B -> B - A */
5879      if (TREE_CODE (arg0) == NEGATE_EXPR)
5880	return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5881      else if (! FLOAT_TYPE_P (type))
5882	{
5883	  if (integer_zerop (arg1))
5884	    return non_lvalue (fold_convert (type, arg0));
5885
5886	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5887	     with a constant, and the two constants have no bits in common,
5888	     we should treat this as a BIT_IOR_EXPR since this may produce more
5889	     simplifications.  */
5890	  if (TREE_CODE (arg0) == BIT_AND_EXPR
5891	      && TREE_CODE (arg1) == BIT_AND_EXPR
5892	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5893	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5894	      && integer_zerop (const_binop (BIT_AND_EXPR,
5895					     TREE_OPERAND (arg0, 1),
5896					     TREE_OPERAND (arg1, 1), 0)))
5897	    {
5898	      code = BIT_IOR_EXPR;
5899	      goto bit_ior;
5900	    }
5901
5902	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5903	     (plus (plus (mult) (mult)) (foo)) so that we can
5904	     take advantage of the factoring cases below.  */
5905	  if ((TREE_CODE (arg0) == PLUS_EXPR
5906	       && TREE_CODE (arg1) == MULT_EXPR)
5907	      || (TREE_CODE (arg1) == PLUS_EXPR
5908		  && TREE_CODE (arg0) == MULT_EXPR))
5909	    {
5910	      tree parg0, parg1, parg, marg;
5911
5912	      if (TREE_CODE (arg0) == PLUS_EXPR)
5913		parg = arg0, marg = arg1;
5914	      else
5915		parg = arg1, marg = arg0;
5916	      parg0 = TREE_OPERAND (parg, 0);
5917	      parg1 = TREE_OPERAND (parg, 1);
5918	      STRIP_NOPS (parg0);
5919	      STRIP_NOPS (parg1);
5920
5921	      if (TREE_CODE (parg0) == MULT_EXPR
5922		  && TREE_CODE (parg1) != MULT_EXPR)
5923		return fold (build (PLUS_EXPR, type,
5924				    fold (build (PLUS_EXPR, type,
5925						 fold_convert (type, parg0),
5926						 fold_convert (type, marg))),
5927				    fold_convert (type, parg1)));
5928	      if (TREE_CODE (parg0) != MULT_EXPR
5929		  && TREE_CODE (parg1) == MULT_EXPR)
5930		return fold (build (PLUS_EXPR, type,
5931				    fold (build (PLUS_EXPR, type,
5932						 fold_convert (type, parg1),
5933						 fold_convert (type, marg))),
5934				    fold_convert (type, parg0)));
5935	    }
5936
5937	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5938	    {
5939	      tree arg00, arg01, arg10, arg11;
5940	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5941
5942	      /* (A * C) + (B * C) -> (A+B) * C.
5943		 We are most concerned about the case where C is a constant,
5944		 but other combinations show up during loop reduction.  Since
5945		 it is not difficult, try all four possibilities.  */
5946
5947	      arg00 = TREE_OPERAND (arg0, 0);
5948	      arg01 = TREE_OPERAND (arg0, 1);
5949	      arg10 = TREE_OPERAND (arg1, 0);
5950	      arg11 = TREE_OPERAND (arg1, 1);
5951	      same = NULL_TREE;
5952
5953	      if (operand_equal_p (arg01, arg11, 0))
5954		same = arg01, alt0 = arg00, alt1 = arg10;
5955	      else if (operand_equal_p (arg00, arg10, 0))
5956		same = arg00, alt0 = arg01, alt1 = arg11;
5957	      else if (operand_equal_p (arg00, arg11, 0))
5958		same = arg00, alt0 = arg01, alt1 = arg10;
5959	      else if (operand_equal_p (arg01, arg10, 0))
5960		same = arg01, alt0 = arg00, alt1 = arg11;
5961
5962	      /* No identical multiplicands; see if we can find a common
5963		 power-of-two factor in non-power-of-two multiplies.  This
5964		 can help in multi-dimensional array access.  */
5965	      else if (TREE_CODE (arg01) == INTEGER_CST
5966		       && TREE_CODE (arg11) == INTEGER_CST
5967		       && TREE_INT_CST_HIGH (arg01) == 0
5968		       && TREE_INT_CST_HIGH (arg11) == 0)
5969		{
5970		  HOST_WIDE_INT int01, int11, tmp;
5971		  int01 = TREE_INT_CST_LOW (arg01);
5972		  int11 = TREE_INT_CST_LOW (arg11);
5973
5974		  /* Move min of absolute values to int11.  */
5975		  if ((int01 >= 0 ? int01 : -int01)
5976		      < (int11 >= 0 ? int11 : -int11))
5977		    {
5978		      tmp = int01, int01 = int11, int11 = tmp;
5979		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
5980		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
5981		    }
5982
5983		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5984		    {
5985		      alt0 = fold (build (MULT_EXPR, type, arg00,
5986					  build_int_2 (int01 / int11, 0)));
5987		      alt1 = arg10;
5988		      same = arg11;
5989		    }
5990		}
5991
5992	      if (same)
5993		return fold (build (MULT_EXPR, type,
5994				    fold (build (PLUS_EXPR, type, alt0, alt1)),
5995				    same));
5996	    }
5997	}
5998      else
5999	{
6000	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
6001	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
6002	    return non_lvalue (fold_convert (type, arg0));
6003
6004	  /* Likewise if the operands are reversed.  */
6005	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6006	    return non_lvalue (fold_convert (type, arg1));
6007
6008	  /* Convert x+x into x*2.0.  */
6009	  if (operand_equal_p (arg0, arg1, 0)
6010	      && SCALAR_FLOAT_TYPE_P (type))
6011	    return fold (build (MULT_EXPR, type, arg0,
6012				build_real (type, dconst2)));
6013
6014	  /* Convert x*c+x into x*(c+1).  */
6015	  if (flag_unsafe_math_optimizations
6016	      && TREE_CODE (arg0) == MULT_EXPR
6017	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6018	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6019	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
6020	    {
6021	      REAL_VALUE_TYPE c;
6022
6023	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6024	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6025	      return fold (build (MULT_EXPR, type, arg1,
6026				  build_real (type, c)));
6027	    }
6028
6029	  /* Convert x+x*c into x*(c+1).  */
6030	  if (flag_unsafe_math_optimizations
6031	      && TREE_CODE (arg1) == MULT_EXPR
6032	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6033	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6034	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
6035	    {
6036	      REAL_VALUE_TYPE c;
6037
6038	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6039	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6040	      return fold (build (MULT_EXPR, type, arg0,
6041				  build_real (type, c)));
6042	    }
6043
6044	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
6045	  if (flag_unsafe_math_optimizations
6046	      && TREE_CODE (arg0) == MULT_EXPR
6047	      && TREE_CODE (arg1) == MULT_EXPR
6048	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
6049	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
6050	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
6051	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
6052	      && operand_equal_p (TREE_OPERAND (arg0, 0),
6053				  TREE_OPERAND (arg1, 0), 0))
6054	    {
6055	      REAL_VALUE_TYPE c1, c2;
6056
6057	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
6058	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
6059	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
6060	      return fold (build (MULT_EXPR, type,
6061				  TREE_OPERAND (arg0, 0),
6062				  build_real (type, c1)));
6063	    }
6064	}
6065
6066     bit_rotate:
6067      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
6068	 is a rotate of A by C1 bits.  */
6069      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
6070	 is a rotate of A by B bits.  */
6071      {
6072	enum tree_code code0, code1;
6073	code0 = TREE_CODE (arg0);
6074	code1 = TREE_CODE (arg1);
6075	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
6076	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
6077	    && operand_equal_p (TREE_OPERAND (arg0, 0),
6078			        TREE_OPERAND (arg1, 0), 0)
6079	    && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6080	  {
6081	    tree tree01, tree11;
6082	    enum tree_code code01, code11;
6083
6084	    tree01 = TREE_OPERAND (arg0, 1);
6085	    tree11 = TREE_OPERAND (arg1, 1);
6086	    STRIP_NOPS (tree01);
6087	    STRIP_NOPS (tree11);
6088	    code01 = TREE_CODE (tree01);
6089	    code11 = TREE_CODE (tree11);
6090	    if (code01 == INTEGER_CST
6091		&& code11 == INTEGER_CST
6092		&& TREE_INT_CST_HIGH (tree01) == 0
6093		&& TREE_INT_CST_HIGH (tree11) == 0
6094		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
6095		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
6096	      return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
6097			    code0 == LSHIFT_EXPR ? tree01 : tree11);
6098	    else if (code11 == MINUS_EXPR)
6099	      {
6100		tree tree110, tree111;
6101		tree110 = TREE_OPERAND (tree11, 0);
6102		tree111 = TREE_OPERAND (tree11, 1);
6103		STRIP_NOPS (tree110);
6104		STRIP_NOPS (tree111);
6105		if (TREE_CODE (tree110) == INTEGER_CST
6106		    && 0 == compare_tree_int (tree110,
6107					      TYPE_PRECISION
6108					      (TREE_TYPE (TREE_OPERAND
6109							  (arg0, 0))))
6110		    && operand_equal_p (tree01, tree111, 0))
6111		  return build ((code0 == LSHIFT_EXPR
6112				 ? LROTATE_EXPR
6113				 : RROTATE_EXPR),
6114				type, TREE_OPERAND (arg0, 0), tree01);
6115	      }
6116	    else if (code01 == MINUS_EXPR)
6117	      {
6118		tree tree010, tree011;
6119		tree010 = TREE_OPERAND (tree01, 0);
6120		tree011 = TREE_OPERAND (tree01, 1);
6121		STRIP_NOPS (tree010);
6122		STRIP_NOPS (tree011);
6123		if (TREE_CODE (tree010) == INTEGER_CST
6124		    && 0 == compare_tree_int (tree010,
6125					      TYPE_PRECISION
6126					      (TREE_TYPE (TREE_OPERAND
6127							  (arg0, 0))))
6128		    && operand_equal_p (tree11, tree011, 0))
6129		  return build ((code0 != LSHIFT_EXPR
6130				 ? LROTATE_EXPR
6131				 : RROTATE_EXPR),
6132				type, TREE_OPERAND (arg0, 0), tree11);
6133	      }
6134	  }
6135      }
6136
6137    associate:
6138      /* In most languages, can't associate operations on floats through
6139	 parentheses.  Rather than remember where the parentheses were, we
6140	 don't associate floats at all, unless the user has specified
6141	 -funsafe-math-optimizations.  */
6142
6143      if (! wins
6144	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6145	{
6146	  tree var0, con0, lit0, minus_lit0;
6147	  tree var1, con1, lit1, minus_lit1;
6148
6149	  /* Split both trees into variables, constants, and literals.  Then
6150	     associate each group together, the constants with literals,
6151	     then the result with variables.  This increases the chances of
6152	     literals being recombined later and of generating relocatable
6153	     expressions for the sum of a constant and literal.  */
6154	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6155	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6156			     code == MINUS_EXPR);
6157
6158	  /* Only do something if we found more than two objects.  Otherwise,
6159	     nothing has changed and we risk infinite recursion.  */
6160	  if (2 < ((var0 != 0) + (var1 != 0)
6161		   + (con0 != 0) + (con1 != 0)
6162		   + (lit0 != 0) + (lit1 != 0)
6163		   + (minus_lit0 != 0) + (minus_lit1 != 0)))
6164	    {
6165	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
6166	      if (code == MINUS_EXPR)
6167		code = PLUS_EXPR;
6168
6169	      var0 = associate_trees (var0, var1, code, type);
6170	      con0 = associate_trees (con0, con1, code, type);
6171	      lit0 = associate_trees (lit0, lit1, code, type);
6172	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6173
6174	      /* Preserve the MINUS_EXPR if the negative part of the literal is
6175		 greater than the positive part.  Otherwise, the multiplicative
6176		 folding code (i.e extract_muldiv) may be fooled in case
6177		 unsigned constants are subtracted, like in the following
6178		 example: ((X*2 + 4) - 8U)/2.  */
6179	      if (minus_lit0 && lit0)
6180		{
6181		  if (TREE_CODE (lit0) == INTEGER_CST
6182		      && TREE_CODE (minus_lit0) == INTEGER_CST
6183		      && tree_int_cst_lt (lit0, minus_lit0))
6184		    {
6185		      minus_lit0 = associate_trees (minus_lit0, lit0,
6186						    MINUS_EXPR, type);
6187		      lit0 = 0;
6188		    }
6189		  else
6190		    {
6191		      lit0 = associate_trees (lit0, minus_lit0,
6192					      MINUS_EXPR, type);
6193		      minus_lit0 = 0;
6194		    }
6195		}
6196	      if (minus_lit0)
6197		{
6198		  if (con0 == 0)
6199		    return fold_convert (type,
6200					 associate_trees (var0, minus_lit0,
6201							  MINUS_EXPR, type));
6202		  else
6203		    {
6204		      con0 = associate_trees (con0, minus_lit0,
6205					      MINUS_EXPR, type);
6206		      return fold_convert (type,
6207					   associate_trees (var0, con0,
6208							    PLUS_EXPR, type));
6209		    }
6210		}
6211
6212	      con0 = associate_trees (con0, lit0, code, type);
6213	      return fold_convert (type, associate_trees (var0, con0,
6214							  code, type));
6215	    }
6216	}
6217
6218    binary:
6219      if (wins)
6220	t1 = const_binop (code, arg0, arg1, 0);
6221      if (t1 != NULL_TREE)
6222	{
6223	  /* The return value should always have
6224	     the same type as the original expression.  */
6225	  if (TREE_TYPE (t1) != TREE_TYPE (t))
6226	    t1 = fold_convert (TREE_TYPE (t), t1);
6227
6228	  return t1;
6229	}
6230      return t;
6231
6232    case MINUS_EXPR:
6233      /* A - (-B) -> A + B */
6234      if (TREE_CODE (arg1) == NEGATE_EXPR)
6235	return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6236      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
6237      if (TREE_CODE (arg0) == NEGATE_EXPR
6238	  && (FLOAT_TYPE_P (type)
6239	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6240	  && negate_expr_p (arg1)
6241	  && reorder_operands_p (arg0, arg1))
6242	return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6243			    TREE_OPERAND (arg0, 0)));
6244
6245      if (! FLOAT_TYPE_P (type))
6246	{
6247	  if (! wins && integer_zerop (arg0))
6248	    return negate_expr (fold_convert (type, arg1));
6249	  if (integer_zerop (arg1))
6250	    return non_lvalue (fold_convert (type, arg0));
6251
6252	  /* (A * C) - (B * C) -> (A-B) * C.  Since we are most concerned
6253	     about the case where C is a constant, just try one of the
6254	     four possibilities.  */
6255
6256	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
6257	      && operand_equal_p (TREE_OPERAND (arg0, 1),
6258				  TREE_OPERAND (arg1, 1), 0))
6259	    return fold (build (MULT_EXPR, type,
6260				fold (build (MINUS_EXPR, type,
6261					     TREE_OPERAND (arg0, 0),
6262					     TREE_OPERAND (arg1, 0))),
6263				TREE_OPERAND (arg0, 1)));
6264
6265	  /* Fold A - (A & B) into ~B & A.  */
6266	  if (!TREE_SIDE_EFFECTS (arg0)
6267	      && TREE_CODE (arg1) == BIT_AND_EXPR)
6268	    {
6269	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6270		return fold (build (BIT_AND_EXPR, type,
6271				    fold (build1 (BIT_NOT_EXPR, type,
6272						  TREE_OPERAND (arg1, 0))),
6273				    arg0));
6274	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6275		return fold (build (BIT_AND_EXPR, type,
6276				    fold (build1 (BIT_NOT_EXPR, type,
6277						  TREE_OPERAND (arg1, 1))),
6278				    arg0));
6279	    }
6280
6281	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6282	     any power of 2 minus 1.  */
6283	  if (TREE_CODE (arg0) == BIT_AND_EXPR
6284	      && TREE_CODE (arg1) == BIT_AND_EXPR
6285	      && operand_equal_p (TREE_OPERAND (arg0, 0),
6286				  TREE_OPERAND (arg1, 0), 0))
6287	    {
6288	      tree mask0 = TREE_OPERAND (arg0, 1);
6289	      tree mask1 = TREE_OPERAND (arg1, 1);
6290	      tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6291
6292	      if (operand_equal_p (tem, mask1, 0))
6293		{
6294		  tem = fold (build (BIT_XOR_EXPR, type,
6295				     TREE_OPERAND (arg0, 0), mask1));
6296		  return fold (build (MINUS_EXPR, type, tem, mask1));
6297		}
6298	    }
6299	}
6300
6301      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
6302      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6303	return non_lvalue (fold_convert (type, arg0));
6304
6305      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
6306	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6307	 (-ARG1 + ARG0) reduces to -ARG1.  */
6308      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6309	return negate_expr (fold_convert (type, arg1));
6310
6311      /* Fold &x - &x.  This can happen from &x.foo - &x.
6312	 This is unsafe for certain floats even in non-IEEE formats.
6313	 In IEEE, it is unsafe because it does wrong for NaNs.
6314	 Also note that operand_equal_p is always false if an operand
6315	 is volatile.  */
6316
6317      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6318	  && operand_equal_p (arg0, arg1, 0))
6319	return fold_convert (type, integer_zero_node);
6320
6321      goto associate;
6322
6323    case MULT_EXPR:
6324      /* (-A) * (-B) -> A * B  */
6325      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6326	return fold (build (MULT_EXPR, type,
6327			    TREE_OPERAND (arg0, 0),
6328			    negate_expr (arg1)));
6329      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6330	return fold (build (MULT_EXPR, type,
6331			    negate_expr (arg0),
6332			    TREE_OPERAND (arg1, 0)));
6333
6334      if (! FLOAT_TYPE_P (type))
6335	{
6336	  if (integer_zerop (arg1))
6337	    return omit_one_operand (type, arg1, arg0);
6338	  if (integer_onep (arg1))
6339	    return non_lvalue (fold_convert (type, arg0));
6340
6341	  /* (a * (1 << b)) is (a << b)  */
6342	  if (TREE_CODE (arg1) == LSHIFT_EXPR
6343	      && integer_onep (TREE_OPERAND (arg1, 0)))
6344	    return fold (build (LSHIFT_EXPR, type, arg0,
6345				TREE_OPERAND (arg1, 1)));
6346	  if (TREE_CODE (arg0) == LSHIFT_EXPR
6347	      && integer_onep (TREE_OPERAND (arg0, 0)))
6348	    return fold (build (LSHIFT_EXPR, type, arg1,
6349				TREE_OPERAND (arg0, 1)));
6350
6351	  if (TREE_CODE (arg1) == INTEGER_CST
6352	      && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6353					     fold_convert (type, arg1),
6354					     code, NULL_TREE)))
6355	    return fold_convert (type, tem);
6356
6357	}
6358      else
6359	{
6360	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
6361	     when x is NaN, since x * 0 is also NaN.  Nor are they the
6362	     same in modes with signed zeros, since multiplying a
6363	     negative value by 0 gives -0, not +0.  */
6364	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6365	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6366	      && real_zerop (arg1))
6367	    return omit_one_operand (type, arg1, arg0);
6368	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
6369	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6370	      && real_onep (arg1))
6371	    return non_lvalue (fold_convert (type, arg0));
6372
6373	  /* Transform x * -1.0 into -x.  */
6374	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6375	      && real_minus_onep (arg1))
6376	    return fold (build1 (NEGATE_EXPR, type, arg0));
6377
6378	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
6379	  if (flag_unsafe_math_optimizations
6380	      && TREE_CODE (arg0) == RDIV_EXPR
6381	      && TREE_CODE (arg1) == REAL_CST
6382	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6383	    {
6384	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6385				      arg1, 0);
6386	      if (tem)
6387		return fold (build (RDIV_EXPR, type, tem,
6388				    TREE_OPERAND (arg0, 1)));
6389	    }
6390
6391	  if (flag_unsafe_math_optimizations)
6392	    {
6393	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6394	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6395
6396	      /* Optimizations of sqrt(...)*sqrt(...).  */
6397	      if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6398		  || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6399		  || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6400		{
6401		  tree sqrtfn, arg, arglist;
6402		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6403		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6404
6405		  /* Optimize sqrt(x)*sqrt(x) as x.  */
6406		  if (operand_equal_p (arg00, arg10, 0)
6407		      && ! HONOR_SNANS (TYPE_MODE (type)))
6408		    return arg00;
6409
6410	          /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y).  */
6411		  sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6412		  arg = fold (build (MULT_EXPR, type, arg00, arg10));
6413		  arglist = build_tree_list (NULL_TREE, arg);
6414		  return build_function_call_expr (sqrtfn, arglist);
6415		}
6416
6417	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
6418	      if (fcode0 == fcode1
6419		  && (fcode0 == BUILT_IN_EXP
6420		      || fcode0 == BUILT_IN_EXPF
6421		      || fcode0 == BUILT_IN_EXPL
6422		      || fcode0 == BUILT_IN_EXP2
6423		      || fcode0 == BUILT_IN_EXP2F
6424		      || fcode0 == BUILT_IN_EXP2L
6425		      || fcode0 == BUILT_IN_EXP10
6426		      || fcode0 == BUILT_IN_EXP10F
6427		      || fcode0 == BUILT_IN_EXP10L
6428		      || fcode0 == BUILT_IN_POW10
6429		      || fcode0 == BUILT_IN_POW10F
6430		      || fcode0 == BUILT_IN_POW10L))
6431		{
6432		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6433		  tree arg = build (PLUS_EXPR, type,
6434				    TREE_VALUE (TREE_OPERAND (arg0, 1)),
6435				    TREE_VALUE (TREE_OPERAND (arg1, 1)));
6436		  tree arglist = build_tree_list (NULL_TREE, fold (arg));
6437		  return build_function_call_expr (expfn, arglist);
6438		}
6439
6440	      /* Optimizations of pow(...)*pow(...).  */
6441	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6442		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6443		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6444		{
6445		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6446		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6447								     1)));
6448		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6449		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6450								     1)));
6451
6452		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
6453		  if (operand_equal_p (arg01, arg11, 0))
6454		    {
6455		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6456		      tree arg = build (MULT_EXPR, type, arg00, arg10);
6457		      tree arglist = tree_cons (NULL_TREE, fold (arg),
6458						build_tree_list (NULL_TREE,
6459								 arg01));
6460		      return build_function_call_expr (powfn, arglist);
6461		    }
6462
6463		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
6464		  if (operand_equal_p (arg00, arg10, 0))
6465		    {
6466		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6467		      tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6468		      tree arglist = tree_cons (NULL_TREE, arg00,
6469						build_tree_list (NULL_TREE,
6470								 arg));
6471		      return build_function_call_expr (powfn, arglist);
6472		    }
6473		}
6474
6475	      /* Optimize tan(x)*cos(x) as sin(x).  */
6476	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6477		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6478		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6479		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6480		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6481		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6482		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6483				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6484		{
6485		  tree sinfn;
6486
6487		  switch (fcode0)
6488		    {
6489		    case BUILT_IN_TAN:
6490		    case BUILT_IN_COS:
6491		      sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6492		      break;
6493		    case BUILT_IN_TANF:
6494		    case BUILT_IN_COSF:
6495		      sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6496		      break;
6497		    case BUILT_IN_TANL:
6498		    case BUILT_IN_COSL:
6499		      sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6500		      break;
6501		    default:
6502		      sinfn = NULL_TREE;
6503		    }
6504
6505		  if (sinfn != NULL_TREE)
6506		    return build_function_call_expr (sinfn,
6507						     TREE_OPERAND (arg0, 1));
6508		}
6509
6510	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
6511	      if (fcode1 == BUILT_IN_POW
6512		  || fcode1 == BUILT_IN_POWF
6513		  || fcode1 == BUILT_IN_POWL)
6514		{
6515		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6516		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6517								     1)));
6518		  if (TREE_CODE (arg11) == REAL_CST
6519		      && ! TREE_CONSTANT_OVERFLOW (arg11)
6520		      && operand_equal_p (arg0, arg10, 0))
6521		    {
6522		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6523		      REAL_VALUE_TYPE c;
6524		      tree arg, arglist;
6525
6526		      c = TREE_REAL_CST (arg11);
6527		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6528		      arg = build_real (type, c);
6529		      arglist = build_tree_list (NULL_TREE, arg);
6530		      arglist = tree_cons (NULL_TREE, arg0, arglist);
6531		      return build_function_call_expr (powfn, arglist);
6532		    }
6533		}
6534
6535	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
6536	      if (fcode0 == BUILT_IN_POW
6537		  || fcode0 == BUILT_IN_POWF
6538		  || fcode0 == BUILT_IN_POWL)
6539		{
6540		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6541		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6542								     1)));
6543		  if (TREE_CODE (arg01) == REAL_CST
6544		      && ! TREE_CONSTANT_OVERFLOW (arg01)
6545		      && operand_equal_p (arg1, arg00, 0))
6546		    {
6547		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6548		      REAL_VALUE_TYPE c;
6549		      tree arg, arglist;
6550
6551		      c = TREE_REAL_CST (arg01);
6552		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6553		      arg = build_real (type, c);
6554		      arglist = build_tree_list (NULL_TREE, arg);
6555		      arglist = tree_cons (NULL_TREE, arg1, arglist);
6556		      return build_function_call_expr (powfn, arglist);
6557		    }
6558		}
6559
6560	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
6561	      if (! optimize_size
6562		  && operand_equal_p (arg0, arg1, 0))
6563		{
6564		  tree powfn;
6565
6566		  if (type == double_type_node)
6567		    powfn = implicit_built_in_decls[BUILT_IN_POW];
6568		  else if (type == float_type_node)
6569		    powfn = implicit_built_in_decls[BUILT_IN_POWF];
6570		  else if (type == long_double_type_node)
6571		    powfn = implicit_built_in_decls[BUILT_IN_POWL];
6572		  else
6573		    powfn = NULL_TREE;
6574
6575		  if (powfn)
6576		    {
6577		      tree arg = build_real (type, dconst2);
6578		      tree arglist = build_tree_list (NULL_TREE, arg);
6579		      arglist = tree_cons (NULL_TREE, arg0, arglist);
6580		      return build_function_call_expr (powfn, arglist);
6581		    }
6582		}
6583	    }
6584	}
6585      goto associate;
6586
6587    case BIT_IOR_EXPR:
6588    bit_ior:
6589      if (integer_all_onesp (arg1))
6590	return omit_one_operand (type, arg1, arg0);
6591      if (integer_zerop (arg1))
6592	return non_lvalue (fold_convert (type, arg0));
6593      t1 = distribute_bit_expr (code, type, arg0, arg1);
6594      if (t1 != NULL_TREE)
6595	return t1;
6596
6597      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6598
6599	 This results in more efficient code for machines without a NAND
6600	 instruction.  Combine will canonicalize to the first form
6601	 which will allow use of NAND instructions provided by the
6602	 backend if they exist.  */
6603      if (TREE_CODE (arg0) == BIT_NOT_EXPR
6604	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
6605	{
6606	  return fold (build1 (BIT_NOT_EXPR, type,
6607			       build (BIT_AND_EXPR, type,
6608				      TREE_OPERAND (arg0, 0),
6609				      TREE_OPERAND (arg1, 0))));
6610	}
6611
6612      /* See if this can be simplified into a rotate first.  If that
6613	 is unsuccessful continue in the association code.  */
6614      goto bit_rotate;
6615
6616    case BIT_XOR_EXPR:
6617      if (integer_zerop (arg1))
6618	return non_lvalue (fold_convert (type, arg0));
6619      if (integer_all_onesp (arg1))
6620	return fold (build1 (BIT_NOT_EXPR, type, arg0));
6621
6622      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6623         with a constant, and the two constants have no bits in common,
6624	 we should treat this as a BIT_IOR_EXPR since this may produce more
6625	 simplifications.  */
6626      if (TREE_CODE (arg0) == BIT_AND_EXPR
6627	  && TREE_CODE (arg1) == BIT_AND_EXPR
6628	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6629	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6630	  && integer_zerop (const_binop (BIT_AND_EXPR,
6631					 TREE_OPERAND (arg0, 1),
6632					 TREE_OPERAND (arg1, 1), 0)))
6633	{
6634	  code = BIT_IOR_EXPR;
6635	  goto bit_ior;
6636	}
6637
6638      /* See if this can be simplified into a rotate first.  If that
6639	 is unsuccessful continue in the association code.  */
6640      goto bit_rotate;
6641
6642    case BIT_AND_EXPR:
6643      if (integer_all_onesp (arg1))
6644	return non_lvalue (fold_convert (type, arg0));
6645      if (integer_zerop (arg1))
6646	return omit_one_operand (type, arg1, arg0);
6647      t1 = distribute_bit_expr (code, type, arg0, arg1);
6648      if (t1 != NULL_TREE)
6649	return t1;
6650      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
6651      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6652	  && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6653	{
6654	  unsigned int prec
6655	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6656
6657	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6658	      && (~TREE_INT_CST_LOW (arg1)
6659		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6660	    return fold_convert (type, TREE_OPERAND (arg0, 0));
6661	}
6662
6663      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6664
6665	 This results in more efficient code for machines without a NOR
6666	 instruction.  Combine will canonicalize to the first form
6667	 which will allow use of NOR instructions provided by the
6668	 backend if they exist.  */
6669      if (TREE_CODE (arg0) == BIT_NOT_EXPR
6670	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
6671	{
6672	  return fold (build1 (BIT_NOT_EXPR, type,
6673			       build (BIT_IOR_EXPR, type,
6674				      TREE_OPERAND (arg0, 0),
6675				      TREE_OPERAND (arg1, 0))));
6676	}
6677
6678      goto associate;
6679
6680    case RDIV_EXPR:
6681      /* Don't touch a floating-point divide by zero unless the mode
6682	 of the constant can represent infinity.  */
6683      if (TREE_CODE (arg1) == REAL_CST
6684	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6685	  && real_zerop (arg1))
6686	return t;
6687
6688      /* (-A) / (-B) -> A / B  */
6689      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6690	return fold (build (RDIV_EXPR, type,
6691			    TREE_OPERAND (arg0, 0),
6692			    negate_expr (arg1)));
6693      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6694	return fold (build (RDIV_EXPR, type,
6695			    negate_expr (arg0),
6696			    TREE_OPERAND (arg1, 0)));
6697
6698      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
6699      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6700	  && real_onep (arg1))
6701	return non_lvalue (fold_convert (type, arg0));
6702
6703      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
6704      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6705	  && real_minus_onep (arg1))
6706	return non_lvalue (fold_convert (type, negate_expr (arg0)));
6707
6708      /* If ARG1 is a constant, we can convert this to a multiply by the
6709	 reciprocal.  This does not have the same rounding properties,
6710	 so only do this if -funsafe-math-optimizations.  We can actually
6711	 always safely do it if ARG1 is a power of two, but it's hard to
6712	 tell if it is or not in a portable manner.  */
6713      if (TREE_CODE (arg1) == REAL_CST)
6714	{
6715	  if (flag_unsafe_math_optimizations
6716	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
6717					  arg1, 0)))
6718	    return fold (build (MULT_EXPR, type, arg0, tem));
6719	  /* Find the reciprocal if optimizing and the result is exact.  */
6720	  if (optimize)
6721	    {
6722	      REAL_VALUE_TYPE r;
6723	      r = TREE_REAL_CST (arg1);
6724	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6725		{
6726		  tem = build_real (type, r);
6727		  return fold (build (MULT_EXPR, type, arg0, tem));
6728		}
6729	    }
6730	}
6731      /* Convert A/B/C to A/(B*C).  */
6732      if (flag_unsafe_math_optimizations
6733	  && TREE_CODE (arg0) == RDIV_EXPR)
6734	return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6735			    fold (build (MULT_EXPR, type,
6736					 TREE_OPERAND (arg0, 1), arg1))));
6737
6738      /* Convert A/(B/C) to (A/B)*C.  */
6739      if (flag_unsafe_math_optimizations
6740	  && TREE_CODE (arg1) == RDIV_EXPR)
6741	return fold (build (MULT_EXPR, type,
6742			    fold (build (RDIV_EXPR, type, arg0,
6743					 TREE_OPERAND (arg1, 0))),
6744			    TREE_OPERAND (arg1, 1)));
6745
6746      /* Convert C1/(X*C2) into (C1/C2)/X.  */
6747      if (flag_unsafe_math_optimizations
6748	  && TREE_CODE (arg1) == MULT_EXPR
6749	  && TREE_CODE (arg0) == REAL_CST
6750	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6751	{
6752	  tree tem = const_binop (RDIV_EXPR, arg0,
6753				  TREE_OPERAND (arg1, 1), 0);
6754	  if (tem)
6755	    return fold (build (RDIV_EXPR, type, tem,
6756				TREE_OPERAND (arg1, 0)));
6757	}
6758
6759      if (flag_unsafe_math_optimizations)
6760	{
6761	  enum built_in_function fcode = builtin_mathfn_code (arg1);
6762	  /* Optimize x/expN(y) into x*expN(-y).  */
6763	  if (fcode == BUILT_IN_EXP
6764	      || fcode == BUILT_IN_EXPF
6765	      || fcode == BUILT_IN_EXPL
6766	      || fcode == BUILT_IN_EXP2
6767	      || fcode == BUILT_IN_EXP2F
6768	      || fcode == BUILT_IN_EXP2L
6769	      || fcode == BUILT_IN_EXP10
6770	      || fcode == BUILT_IN_EXP10F
6771	      || fcode == BUILT_IN_EXP10L
6772	      || fcode == BUILT_IN_POW10
6773	      || fcode == BUILT_IN_POW10F
6774	      || fcode == BUILT_IN_POW10L)
6775	    {
6776	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6777	      tree arg = build1 (NEGATE_EXPR, type,
6778				 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6779	      tree arglist = build_tree_list (NULL_TREE, fold (arg));
6780	      arg1 = build_function_call_expr (expfn, arglist);
6781	      return fold (build (MULT_EXPR, type, arg0, arg1));
6782	    }
6783
6784	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
6785	  if (fcode == BUILT_IN_POW
6786	      || fcode == BUILT_IN_POWF
6787	      || fcode == BUILT_IN_POWL)
6788	    {
6789	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6790	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6791	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6792	      tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6793	      tree arglist = tree_cons(NULL_TREE, arg10,
6794				       build_tree_list (NULL_TREE, neg11));
6795	      arg1 = build_function_call_expr (powfn, arglist);
6796	      return fold (build (MULT_EXPR, type, arg0, arg1));
6797	    }
6798	}
6799
6800      if (flag_unsafe_math_optimizations)
6801	{
6802	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6803	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6804
6805	  /* Optimize sin(x)/cos(x) as tan(x).  */
6806	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6807	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6808	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6809	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6810				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6811	    {
6812	      tree tanfn;
6813
6814	      if (fcode0 == BUILT_IN_SIN)
6815		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6816	      else if (fcode0 == BUILT_IN_SINF)
6817		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6818	      else if (fcode0 == BUILT_IN_SINL)
6819		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6820	      else
6821		tanfn = NULL_TREE;
6822
6823	      if (tanfn != NULL_TREE)
6824		return build_function_call_expr (tanfn,
6825						 TREE_OPERAND (arg0, 1));
6826	    }
6827
6828	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
6829	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6830	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6831	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6832	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6833				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6834	    {
6835	      tree tanfn;
6836
6837	      if (fcode0 == BUILT_IN_COS)
6838		tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6839	      else if (fcode0 == BUILT_IN_COSF)
6840		tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6841	      else if (fcode0 == BUILT_IN_COSL)
6842		tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6843	      else
6844		tanfn = NULL_TREE;
6845
6846	      if (tanfn != NULL_TREE)
6847		{
6848		  tree tmp = TREE_OPERAND (arg0, 1);
6849		  tmp = build_function_call_expr (tanfn, tmp);
6850		  return fold (build (RDIV_EXPR, type,
6851				      build_real (type, dconst1),
6852				      tmp));
6853		}
6854	    }
6855
6856	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
6857	  if (fcode0 == BUILT_IN_POW
6858	      || fcode0 == BUILT_IN_POWF
6859	      || fcode0 == BUILT_IN_POWL)
6860	    {
6861	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6862	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6863	      if (TREE_CODE (arg01) == REAL_CST
6864		  && ! TREE_CONSTANT_OVERFLOW (arg01)
6865		  && operand_equal_p (arg1, arg00, 0))
6866		{
6867		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6868		  REAL_VALUE_TYPE c;
6869		  tree arg, arglist;
6870
6871		  c = TREE_REAL_CST (arg01);
6872		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6873		  arg = build_real (type, c);
6874		  arglist = build_tree_list (NULL_TREE, arg);
6875		  arglist = tree_cons (NULL_TREE, arg1, arglist);
6876		  return build_function_call_expr (powfn, arglist);
6877		}
6878	    }
6879	}
6880      goto binary;
6881
6882    case TRUNC_DIV_EXPR:
6883    case ROUND_DIV_EXPR:
6884    case FLOOR_DIV_EXPR:
6885    case CEIL_DIV_EXPR:
6886    case EXACT_DIV_EXPR:
6887      if (integer_onep (arg1))
6888	return non_lvalue (fold_convert (type, arg0));
6889      if (integer_zerop (arg1))
6890	return t;
6891
6892      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6893	 operation, EXACT_DIV_EXPR.
6894
6895	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6896	 At one time others generated faster code, it's not clear if they do
6897	 after the last round to changes to the DIV code in expmed.c.  */
6898      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6899	  && multiple_of_p (type, arg0, arg1))
6900	return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6901
6902      if (TREE_CODE (arg1) == INTEGER_CST
6903	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6904					 code, NULL_TREE)))
6905	return fold_convert (type, tem);
6906
6907      goto binary;
6908
6909    case CEIL_MOD_EXPR:
6910    case FLOOR_MOD_EXPR:
6911    case ROUND_MOD_EXPR:
6912    case TRUNC_MOD_EXPR:
6913      if (integer_onep (arg1))
6914	return omit_one_operand (type, integer_zero_node, arg0);
6915      if (integer_zerop (arg1))
6916	return t;
6917
6918      if (TREE_CODE (arg1) == INTEGER_CST
6919	  && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6920					 code, NULL_TREE)))
6921	return fold_convert (type, tem);
6922
6923      goto binary;
6924
6925    case LROTATE_EXPR:
6926    case RROTATE_EXPR:
6927      if (integer_all_onesp (arg0))
6928	return omit_one_operand (type, arg0, arg1);
6929      goto shift;
6930
6931    case RSHIFT_EXPR:
6932      /* Optimize -1 >> x for arithmetic right shifts.  */
6933      if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6934	return omit_one_operand (type, arg0, arg1);
6935      /* ... fall through ...  */
6936
6937    case LSHIFT_EXPR:
6938    shift:
6939      if (integer_zerop (arg1))
6940	return non_lvalue (fold_convert (type, arg0));
6941      if (integer_zerop (arg0))
6942	return omit_one_operand (type, arg0, arg1);
6943
6944      /* Since negative shift count is not well-defined,
6945	 don't try to compute it in the compiler.  */
6946      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6947	return t;
6948      /* Rewrite an LROTATE_EXPR by a constant into an
6949	 RROTATE_EXPR by a new constant.  */
6950      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6951	{
6952	  tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6953	  tem = fold_convert (TREE_TYPE (arg1), tem);
6954	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6955	  return fold (build (RROTATE_EXPR, type, arg0, tem));
6956	}
6957
6958      /* If we have a rotate of a bit operation with the rotate count and
6959	 the second operand of the bit operation both constant,
6960	 permute the two operations.  */
6961      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6962	  && (TREE_CODE (arg0) == BIT_AND_EXPR
6963	      || TREE_CODE (arg0) == BIT_IOR_EXPR
6964	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
6965	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6966	return fold (build (TREE_CODE (arg0), type,
6967			    fold (build (code, type,
6968					 TREE_OPERAND (arg0, 0), arg1)),
6969			    fold (build (code, type,
6970					 TREE_OPERAND (arg0, 1), arg1))));
6971
6972      /* Two consecutive rotates adding up to the width of the mode can
6973	 be ignored.  */
6974      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6975	  && TREE_CODE (arg0) == RROTATE_EXPR
6976	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6977	  && TREE_INT_CST_HIGH (arg1) == 0
6978	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6979	  && ((TREE_INT_CST_LOW (arg1)
6980	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6981	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6982	return TREE_OPERAND (arg0, 0);
6983
6984      goto binary;
6985
6986    case MIN_EXPR:
6987      if (operand_equal_p (arg0, arg1, 0))
6988	return omit_one_operand (type, arg0, arg1);
6989      if (INTEGRAL_TYPE_P (type)
6990	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6991	return omit_one_operand (type, arg1, arg0);
6992      goto associate;
6993
6994    case MAX_EXPR:
6995      if (operand_equal_p (arg0, arg1, 0))
6996	return omit_one_operand (type, arg0, arg1);
6997      if (INTEGRAL_TYPE_P (type)
6998	  && TYPE_MAX_VALUE (type)
6999	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
7000	return omit_one_operand (type, arg1, arg0);
7001      goto associate;
7002
7003    case TRUTH_NOT_EXPR:
7004      /* Note that the operand of this must be an int
7005	 and its values must be 0 or 1.
7006	 ("true" is a fixed value perhaps depending on the language,
7007	 but we don't handle values other than 1 correctly yet.)  */
7008      tem = invert_truthvalue (arg0);
7009      /* Avoid infinite recursion.  */
7010      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7011	{
7012	  tem = fold_single_bit_test (code, arg0, arg1, type);
7013	  if (tem)
7014	    return tem;
7015	  return t;
7016	}
7017      return fold_convert (type, tem);
7018
7019    case TRUTH_ANDIF_EXPR:
7020      /* Note that the operands of this must be ints
7021	 and their values must be 0 or 1.
7022	 ("true" is a fixed value perhaps depending on the language.)  */
7023      /* If first arg is constant zero, return it.  */
7024      if (integer_zerop (arg0))
7025	return fold_convert (type, arg0);
7026    case TRUTH_AND_EXPR:
7027      /* If either arg is constant true, drop it.  */
7028      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7029	return non_lvalue (fold_convert (type, arg1));
7030      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
7031	  /* Preserve sequence points.  */
7032	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7033	return non_lvalue (fold_convert (type, arg0));
7034      /* If second arg is constant zero, result is zero, but first arg
7035	 must be evaluated.  */
7036      if (integer_zerop (arg1))
7037	return omit_one_operand (type, arg1, arg0);
7038      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
7039	 case will be handled here.  */
7040      if (integer_zerop (arg0))
7041	return omit_one_operand (type, arg0, arg1);
7042
7043    truth_andor:
7044      /* We only do these simplifications if we are optimizing.  */
7045      if (!optimize)
7046	return t;
7047
7048      /* Check for things like (A || B) && (A || C).  We can convert this
7049	 to A || (B && C).  Note that either operator can be any of the four
7050	 truth and/or operations and the transformation will still be
7051	 valid.   Also note that we only care about order for the
7052	 ANDIF and ORIF operators.  If B contains side effects, this
7053	 might change the truth-value of A.  */
7054      if (TREE_CODE (arg0) == TREE_CODE (arg1)
7055	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
7056	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
7057	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
7058	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
7059	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
7060	{
7061	  tree a00 = TREE_OPERAND (arg0, 0);
7062	  tree a01 = TREE_OPERAND (arg0, 1);
7063	  tree a10 = TREE_OPERAND (arg1, 0);
7064	  tree a11 = TREE_OPERAND (arg1, 1);
7065	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
7066			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
7067			     && (code == TRUTH_AND_EXPR
7068				 || code == TRUTH_OR_EXPR));
7069
7070	  if (operand_equal_p (a00, a10, 0))
7071	    return fold (build (TREE_CODE (arg0), type, a00,
7072				fold (build (code, type, a01, a11))));
7073	  else if (commutative && operand_equal_p (a00, a11, 0))
7074	    return fold (build (TREE_CODE (arg0), type, a00,
7075				fold (build (code, type, a01, a10))));
7076	  else if (commutative && operand_equal_p (a01, a10, 0))
7077	    return fold (build (TREE_CODE (arg0), type, a01,
7078				fold (build (code, type, a00, a11))));
7079
7080	  /* This case if tricky because we must either have commutative
7081	     operators or else A10 must not have side-effects.  */
7082
7083	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
7084		   && operand_equal_p (a01, a11, 0))
7085	    return fold (build (TREE_CODE (arg0), type,
7086				fold (build (code, type, a00, a10)),
7087				a01));
7088	}
7089
7090      /* See if we can build a range comparison.  */
7091      if (0 != (tem = fold_range_test (t)))
7092	return tem;
7093
7094      /* Check for the possibility of merging component references.  If our
7095	 lhs is another similar operation, try to merge its rhs with our
7096	 rhs.  Then try to merge our lhs and rhs.  */
7097      if (TREE_CODE (arg0) == code
7098	  && 0 != (tem = fold_truthop (code, type,
7099				       TREE_OPERAND (arg0, 1), arg1)))
7100	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7101
7102      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
7103	return tem;
7104
7105      return t;
7106
7107    case TRUTH_ORIF_EXPR:
7108      /* Note that the operands of this must be ints
7109	 and their values must be 0 or true.
7110	 ("true" is a fixed value perhaps depending on the language.)  */
7111      /* If first arg is constant true, return it.  */
7112      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7113	return fold_convert (type, arg0);
7114    case TRUTH_OR_EXPR:
7115      /* If either arg is constant zero, drop it.  */
7116      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7117	return non_lvalue (fold_convert (type, arg1));
7118      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7119	  /* Preserve sequence points.  */
7120	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7121	return non_lvalue (fold_convert (type, arg0));
7122      /* If second arg is constant true, result is true, but we must
7123	 evaluate first arg.  */
7124      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7125	return omit_one_operand (type, arg1, arg0);
7126      /* Likewise for first arg, but note this only occurs here for
7127	 TRUTH_OR_EXPR.  */
7128      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7129	return omit_one_operand (type, arg0, arg1);
7130      goto truth_andor;
7131
7132    case TRUTH_XOR_EXPR:
7133      /* If either arg is constant zero, drop it.  */
7134      if (integer_zerop (arg0))
7135	return non_lvalue (fold_convert (type, arg1));
7136      if (integer_zerop (arg1))
7137	return non_lvalue (fold_convert (type, arg0));
7138      /* If either arg is constant true, this is a logical inversion.  */
7139      if (integer_onep (arg0))
7140	return non_lvalue (fold_convert (type, invert_truthvalue (arg1)));
7141      if (integer_onep (arg1))
7142	return non_lvalue (fold_convert (type, invert_truthvalue (arg0)));
7143      return t;
7144
7145    case EQ_EXPR:
7146    case NE_EXPR:
7147    case LT_EXPR:
7148    case GT_EXPR:
7149    case LE_EXPR:
7150    case GE_EXPR:
7151      /* If one arg is a real or integer constant, put it last.  */
7152      if (tree_swap_operands_p (arg0, arg1, true))
7153	return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7154
7155      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7156	{
7157	  tree targ0 = strip_float_extensions (arg0);
7158	  tree targ1 = strip_float_extensions (arg1);
7159	  tree newtype = TREE_TYPE (targ0);
7160
7161	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7162	    newtype = TREE_TYPE (targ1);
7163
7164	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
7165	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7166	    return fold (build (code, type, fold_convert (newtype, targ0),
7167				fold_convert (newtype, targ1)));
7168
7169	  /* (-a) CMP (-b) -> b CMP a  */
7170	  if (TREE_CODE (arg0) == NEGATE_EXPR
7171	      && TREE_CODE (arg1) == NEGATE_EXPR)
7172	    return fold (build (code, type, TREE_OPERAND (arg1, 0),
7173				TREE_OPERAND (arg0, 0)));
7174
7175	  if (TREE_CODE (arg1) == REAL_CST)
7176	  {
7177	    REAL_VALUE_TYPE cst;
7178	    cst = TREE_REAL_CST (arg1);
7179
7180	    /* (-a) CMP CST -> a swap(CMP) (-CST)  */
7181	    if (TREE_CODE (arg0) == NEGATE_EXPR)
7182	      return
7183		fold (build (swap_tree_comparison (code), type,
7184			     TREE_OPERAND (arg0, 0),
7185			     build_real (TREE_TYPE (arg1),
7186					 REAL_VALUE_NEGATE (cst))));
7187
7188	    /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
7189	    /* a CMP (-0) -> a CMP 0  */
7190	    if (REAL_VALUE_MINUS_ZERO (cst))
7191	      return fold (build (code, type, arg0,
7192				  build_real (TREE_TYPE (arg1), dconst0)));
7193
7194	    /* x != NaN is always true, other ops are always false.  */
7195	    if (REAL_VALUE_ISNAN (cst)
7196		&& ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7197	      {
7198		t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7199		return omit_one_operand (type, fold_convert (type, t), arg0);
7200	      }
7201
7202	    /* Fold comparisons against infinity.  */
7203	    if (REAL_VALUE_ISINF (cst))
7204	      {
7205		tem = fold_inf_compare (code, type, arg0, arg1);
7206		if (tem != NULL_TREE)
7207		  return tem;
7208	      }
7209	  }
7210
7211	  /* If this is a comparison of a real constant with a PLUS_EXPR
7212	     or a MINUS_EXPR of a real constant, we can convert it into a
7213	     comparison with a revised real constant as long as no overflow
7214	     occurs when unsafe_math_optimizations are enabled.  */
7215	  if (flag_unsafe_math_optimizations
7216	      && TREE_CODE (arg1) == REAL_CST
7217	      && (TREE_CODE (arg0) == PLUS_EXPR
7218		  || TREE_CODE (arg0) == MINUS_EXPR)
7219	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7220	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7221					  ? MINUS_EXPR : PLUS_EXPR,
7222					  arg1, TREE_OPERAND (arg0, 1), 0))
7223	      && ! TREE_CONSTANT_OVERFLOW (tem))
7224	    return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7225
7226	  /* Likewise, we can simplify a comparison of a real constant with
7227	     a MINUS_EXPR whose first operand is also a real constant, i.e.
7228	     (c1 - x) < c2 becomes x > c1-c2.  */
7229	  if (flag_unsafe_math_optimizations
7230	      && TREE_CODE (arg1) == REAL_CST
7231	      && TREE_CODE (arg0) == MINUS_EXPR
7232	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7233	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7234					  arg1, 0))
7235	      && ! TREE_CONSTANT_OVERFLOW (tem))
7236	    return fold (build (swap_tree_comparison (code), type,
7237				TREE_OPERAND (arg0, 1), tem));
7238
7239	  /* Fold comparisons against built-in math functions.  */
7240	  if (TREE_CODE (arg1) == REAL_CST
7241	      && flag_unsafe_math_optimizations
7242	      && ! flag_errno_math)
7243	    {
7244	      enum built_in_function fcode = builtin_mathfn_code (arg0);
7245
7246	      if (fcode != END_BUILTINS)
7247		{
7248		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7249		  if (tem != NULL_TREE)
7250		    return tem;
7251		}
7252	    }
7253	}
7254
7255      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
7256      if (TREE_CONSTANT (arg1)
7257	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
7258	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
7259	  /* This optimization is invalid for ordered comparisons
7260	     if CONST+INCR overflows or if foo+incr might overflow.
7261	     This optimization is invalid for floating point due to rounding.
7262	     For pointer types we assume overflow doesn't happen.  */
7263	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
7264	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
7265		  && (code == EQ_EXPR || code == NE_EXPR))))
7266	{
7267	  tree varop, newconst;
7268
7269	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
7270	    {
7271	      newconst = fold (build (PLUS_EXPR, TREE_TYPE (arg0),
7272				      arg1, TREE_OPERAND (arg0, 1)));
7273	      varop = build (PREINCREMENT_EXPR, TREE_TYPE (arg0),
7274			     TREE_OPERAND (arg0, 0),
7275			     TREE_OPERAND (arg0, 1));
7276	    }
7277	  else
7278	    {
7279	      newconst = fold (build (MINUS_EXPR, TREE_TYPE (arg0),
7280				      arg1, TREE_OPERAND (arg0, 1)));
7281	      varop = build (PREDECREMENT_EXPR, TREE_TYPE (arg0),
7282			     TREE_OPERAND (arg0, 0),
7283			     TREE_OPERAND (arg0, 1));
7284	    }
7285
7286
7287	  /* If VAROP is a reference to a bitfield, we must mask
7288	     the constant by the width of the field.  */
7289	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7290	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1)))
7291	    {
7292	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
7293	      int size = TREE_INT_CST_LOW (DECL_SIZE (fielddecl));
7294	      tree folded_compare, shift;
7295
7296	      /* First check whether the comparison would come out
7297		 always the same.  If we don't do that we would
7298		 change the meaning with the masking.  */
7299	      folded_compare = fold (build (code, type,
7300					    TREE_OPERAND (varop, 0),
7301					    arg1));
7302	      if (integer_zerop (folded_compare)
7303		  || integer_onep (folded_compare))
7304		return omit_one_operand (type, folded_compare, varop);
7305
7306	      shift = build_int_2 (TYPE_PRECISION (TREE_TYPE (varop)) - size,
7307				   0);
7308	      newconst = fold (build (LSHIFT_EXPR, TREE_TYPE (varop),
7309				      newconst, shift));
7310	      newconst = fold (build (RSHIFT_EXPR, TREE_TYPE (varop),
7311				      newconst, shift));
7312	    }
7313
7314	  return fold (build (code, type, varop, newconst));
7315	}
7316
7317      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7318	 This transformation affects the cases which are handled in later
7319	 optimizations involving comparisons with non-negative constants.  */
7320      if (TREE_CODE (arg1) == INTEGER_CST
7321	  && TREE_CODE (arg0) != INTEGER_CST
7322	  && tree_int_cst_sgn (arg1) > 0)
7323	{
7324	  switch (code)
7325	    {
7326	    case GE_EXPR:
7327	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7328	      return fold (build (GT_EXPR, type, arg0, arg1));
7329
7330	    case LT_EXPR:
7331	      arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7332	      return fold (build (LE_EXPR, type, arg0, arg1));
7333
7334	    default:
7335	      break;
7336	    }
7337	}
7338
7339      /* Comparisons with the highest or lowest possible integer of
7340	 the specified size will have known values.  */
7341      {
7342	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7343
7344	if (TREE_CODE (arg1) == INTEGER_CST
7345	    && ! TREE_CONSTANT_OVERFLOW (arg1)
7346	    && width <= HOST_BITS_PER_WIDE_INT
7347	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7348		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
7349	  {
7350	    unsigned HOST_WIDE_INT signed_max;
7351	    unsigned HOST_WIDE_INT max, min;
7352
7353	    signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7354
7355	    if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7356	      {
7357	        max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7358		min = 0;
7359	      }
7360	    else
7361	      {
7362	        max = signed_max;
7363		min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7364	      }
7365
7366	    if (TREE_INT_CST_HIGH (arg1) == 0
7367		&& TREE_INT_CST_LOW (arg1) == max)
7368	      switch (code)
7369		{
7370		case GT_EXPR:
7371		  return omit_one_operand (type,
7372					   fold_convert (type,
7373							 integer_zero_node),
7374					   arg0);
7375		case GE_EXPR:
7376		  return fold (build (EQ_EXPR, type, arg0, arg1));
7377
7378		case LE_EXPR:
7379		  return omit_one_operand (type,
7380					   fold_convert (type,
7381							 integer_one_node),
7382					   arg0);
7383		case LT_EXPR:
7384		  return fold (build (NE_EXPR, type, arg0, arg1));
7385
7386		/* The GE_EXPR and LT_EXPR cases above are not normally
7387		   reached because of previous transformations.  */
7388
7389		default:
7390		  break;
7391		}
7392	    else if (TREE_INT_CST_HIGH (arg1) == 0
7393		     && TREE_INT_CST_LOW (arg1) == max - 1)
7394	      switch (code)
7395		{
7396		case GT_EXPR:
7397		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7398		  return fold (build (EQ_EXPR, type, arg0, arg1));
7399		case LE_EXPR:
7400		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7401		  return fold (build (NE_EXPR, type, arg0, arg1));
7402		default:
7403		  break;
7404		}
7405	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7406		     && TREE_INT_CST_LOW (arg1) == min)
7407	      switch (code)
7408		{
7409		case LT_EXPR:
7410		  return omit_one_operand (type,
7411					   fold_convert (type,
7412							 integer_zero_node),
7413					   arg0);
7414		case LE_EXPR:
7415		  return fold (build (EQ_EXPR, type, arg0, arg1));
7416
7417		case GE_EXPR:
7418		  return omit_one_operand (type,
7419					   fold_convert (type,
7420							 integer_one_node),
7421					   arg0);
7422		case GT_EXPR:
7423		  return fold (build (NE_EXPR, type, arg0, arg1));
7424
7425		default:
7426		  break;
7427		}
7428	    else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7429		     && TREE_INT_CST_LOW (arg1) == min + 1)
7430	      switch (code)
7431		{
7432		case GE_EXPR:
7433		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7434		  return fold (build (NE_EXPR, type, arg0, arg1));
7435		case LT_EXPR:
7436		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7437		  return fold (build (EQ_EXPR, type, arg0, arg1));
7438		default:
7439		  break;
7440		}
7441
7442	    else if (TREE_INT_CST_HIGH (arg1) == 0
7443		     && TREE_INT_CST_LOW (arg1) == signed_max
7444		     && TREE_UNSIGNED (TREE_TYPE (arg1))
7445		     /* signed_type does not work on pointer types.  */
7446		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7447	      {
7448		/* The following case also applies to X < signed_max+1
7449		   and X >= signed_max+1 because previous transformations.  */
7450		if (code == LE_EXPR || code == GT_EXPR)
7451		  {
7452		    tree st0, st1;
7453		    st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7454		    st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7455		    return fold
7456		      (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7457			      type, fold_convert (st0, arg0),
7458			      fold_convert (st1, integer_zero_node)));
7459		  }
7460	      }
7461	  }
7462      }
7463
7464      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7465	 a MINUS_EXPR of a constant, we can convert it into a comparison with
7466	 a revised constant as long as no overflow occurs.  */
7467      if ((code == EQ_EXPR || code == NE_EXPR)
7468	  && TREE_CODE (arg1) == INTEGER_CST
7469	  && (TREE_CODE (arg0) == PLUS_EXPR
7470	      || TREE_CODE (arg0) == MINUS_EXPR)
7471	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7472	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7473				      ? MINUS_EXPR : PLUS_EXPR,
7474				      arg1, TREE_OPERAND (arg0, 1), 0))
7475	  && ! TREE_CONSTANT_OVERFLOW (tem))
7476	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7477
7478      /* Similarly for a NEGATE_EXPR.  */
7479      else if ((code == EQ_EXPR || code == NE_EXPR)
7480	       && TREE_CODE (arg0) == NEGATE_EXPR
7481	       && TREE_CODE (arg1) == INTEGER_CST
7482	       && 0 != (tem = negate_expr (arg1))
7483	       && TREE_CODE (tem) == INTEGER_CST
7484	       && ! TREE_CONSTANT_OVERFLOW (tem))
7485	return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7486
7487      /* If we have X - Y == 0, we can convert that to X == Y and similarly
7488	 for !=.  Don't do this for ordered comparisons due to overflow.  */
7489      else if ((code == NE_EXPR || code == EQ_EXPR)
7490	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7491	return fold (build (code, type,
7492			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7493
7494      /* If we are widening one operand of an integer comparison,
7495	 see if the other operand is similarly being widened.  Perhaps we
7496	 can do the comparison in the narrower type.  */
7497      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7498	       && TREE_CODE (arg0) == NOP_EXPR
7499	       && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7500	       && (code == EQ_EXPR || code == NE_EXPR
7501		   || TREE_UNSIGNED (TREE_TYPE (arg0))
7502		      == TREE_UNSIGNED (TREE_TYPE (tem)))
7503	       && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7504	       && (TREE_TYPE (t1) == TREE_TYPE (tem)
7505		   || (TREE_CODE (t1) == INTEGER_CST
7506		       && int_fits_type_p (t1, TREE_TYPE (tem)))))
7507	return fold (build (code, type, tem,
7508			    fold_convert (TREE_TYPE (tem), t1)));
7509
7510      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7511	 constant, we can simplify it.  */
7512      else if (TREE_CODE (arg1) == INTEGER_CST
7513	       && (TREE_CODE (arg0) == MIN_EXPR
7514		   || TREE_CODE (arg0) == MAX_EXPR)
7515	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7516	return optimize_minmax_comparison (t);
7517
7518      /* If we are comparing an ABS_EXPR with a constant, we can
7519	 convert all the cases into explicit comparisons, but they may
7520	 well not be faster than doing the ABS and one comparison.
7521	 But ABS (X) <= C is a range comparison, which becomes a subtraction
7522	 and a comparison, and is probably faster.  */
7523      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7524	       && TREE_CODE (arg0) == ABS_EXPR
7525	       && ! TREE_SIDE_EFFECTS (arg0)
7526	       && (0 != (tem = negate_expr (arg1)))
7527	       && TREE_CODE (tem) == INTEGER_CST
7528	       && ! TREE_CONSTANT_OVERFLOW (tem))
7529	return fold (build (TRUTH_ANDIF_EXPR, type,
7530			    build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7531			    build (LE_EXPR, type,
7532				   TREE_OPERAND (arg0, 0), arg1)));
7533
7534      /* If this is an EQ or NE comparison with zero and ARG0 is
7535	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
7536	 two operations, but the latter can be done in one less insn
7537	 on machines that have only two-operand insns or on which a
7538	 constant cannot be the first operand.  */
7539      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7540	  && TREE_CODE (arg0) == BIT_AND_EXPR)
7541	{
7542	  if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7543	      && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7544	    return
7545	      fold (build (code, type,
7546			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
7547				  build (RSHIFT_EXPR,
7548					 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7549					 TREE_OPERAND (arg0, 1),
7550					 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7551				  fold_convert (TREE_TYPE (arg0),
7552						integer_one_node)),
7553			   arg1));
7554	  else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7555		   && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7556	    return
7557	      fold (build (code, type,
7558			   build (BIT_AND_EXPR, TREE_TYPE (arg0),
7559				  build (RSHIFT_EXPR,
7560					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7561					 TREE_OPERAND (arg0, 0),
7562					 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7563				  fold_convert (TREE_TYPE (arg0),
7564						integer_one_node)),
7565			   arg1));
7566	}
7567
7568      /* If this is an NE or EQ comparison of zero against the result of a
7569	 signed MOD operation whose second operand is a power of 2, make
7570	 the MOD operation unsigned since it is simpler and equivalent.  */
7571      if ((code == NE_EXPR || code == EQ_EXPR)
7572	  && integer_zerop (arg1)
7573	  && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7574	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7575	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
7576	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7577	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7578	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
7579	{
7580	  tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7581	  tree newmod = build (TREE_CODE (arg0), newtype,
7582			       fold_convert (newtype,
7583					     TREE_OPERAND (arg0, 0)),
7584			       fold_convert (newtype,
7585					     TREE_OPERAND (arg0, 1)));
7586
7587	  return build (code, type, newmod, fold_convert (newtype, arg1));
7588	}
7589
7590      /* If this is an NE comparison of zero with an AND of one, remove the
7591	 comparison since the AND will give the correct value.  */
7592      if (code == NE_EXPR && integer_zerop (arg1)
7593	  && TREE_CODE (arg0) == BIT_AND_EXPR
7594	  && integer_onep (TREE_OPERAND (arg0, 1)))
7595	return fold_convert (type, arg0);
7596
7597      /* If we have (A & C) == C where C is a power of 2, convert this into
7598	 (A & C) != 0.  Similarly for NE_EXPR.  */
7599      if ((code == EQ_EXPR || code == NE_EXPR)
7600	  && TREE_CODE (arg0) == BIT_AND_EXPR
7601	  && integer_pow2p (TREE_OPERAND (arg0, 1))
7602	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7603	return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7604			    arg0, integer_zero_node));
7605
7606      /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7607	 2, then fold the expression into shifts and logical operations.  */
7608      tem = fold_single_bit_test (code, arg0, arg1, type);
7609      if (tem)
7610	return tem;
7611
7612      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7613	 Similarly for NE_EXPR.  */
7614      if ((code == EQ_EXPR || code == NE_EXPR)
7615	  && TREE_CODE (arg0) == BIT_AND_EXPR
7616	  && TREE_CODE (arg1) == INTEGER_CST
7617	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7618	{
7619	  tree dandnotc
7620	    = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7621			   arg1, build1 (BIT_NOT_EXPR,
7622					 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7623					 TREE_OPERAND (arg0, 1))));
7624	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7625	  if (integer_nonzerop (dandnotc))
7626	    return omit_one_operand (type, rslt, arg0);
7627	}
7628
7629      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7630	 Similarly for NE_EXPR.  */
7631      if ((code == EQ_EXPR || code == NE_EXPR)
7632	  && TREE_CODE (arg0) == BIT_IOR_EXPR
7633	  && TREE_CODE (arg1) == INTEGER_CST
7634	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7635	{
7636	  tree candnotd
7637	    = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7638			   TREE_OPERAND (arg0, 1),
7639			   build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7640	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7641	  if (integer_nonzerop (candnotd))
7642	    return omit_one_operand (type, rslt, arg0);
7643	}
7644
7645      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7646	 and similarly for >= into !=.  */
7647      if ((code == LT_EXPR || code == GE_EXPR)
7648	  && TREE_UNSIGNED (TREE_TYPE (arg0))
7649	  && TREE_CODE (arg1) == LSHIFT_EXPR
7650	  && integer_onep (TREE_OPERAND (arg1, 0)))
7651	return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7652		      build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7653			     TREE_OPERAND (arg1, 1)),
7654		      fold_convert (TREE_TYPE (arg0), integer_zero_node));
7655
7656      else if ((code == LT_EXPR || code == GE_EXPR)
7657	       && TREE_UNSIGNED (TREE_TYPE (arg0))
7658	       && (TREE_CODE (arg1) == NOP_EXPR
7659		   || TREE_CODE (arg1) == CONVERT_EXPR)
7660	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7661	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7662	return
7663	  build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7664		 fold_convert (TREE_TYPE (arg0),
7665			       build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7666				      TREE_OPERAND (TREE_OPERAND (arg1, 0),
7667						    1))),
7668		 fold_convert (TREE_TYPE (arg0), integer_zero_node));
7669
7670      /* Simplify comparison of something with itself.  (For IEEE
7671	 floating-point, we can only do some of these simplifications.)  */
7672      if (operand_equal_p (arg0, arg1, 0))
7673	{
7674	  switch (code)
7675	    {
7676	    case EQ_EXPR:
7677	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7678		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7679		return constant_boolean_node (1, type);
7680	      break;
7681
7682	    case GE_EXPR:
7683	    case LE_EXPR:
7684	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7685		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7686		return constant_boolean_node (1, type);
7687	      return fold (build (EQ_EXPR, type, arg0, arg1));
7688
7689	    case NE_EXPR:
7690	      /* For NE, we can only do this simplification if integer
7691		 or we don't honor IEEE floating point NaNs.  */
7692	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7693		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7694		break;
7695	      /* ... fall through ...  */
7696	    case GT_EXPR:
7697	    case LT_EXPR:
7698	      return constant_boolean_node (0, type);
7699	    default:
7700	      abort ();
7701	    }
7702	}
7703
7704      /* If we are comparing an expression that just has comparisons
7705	 of two integer values, arithmetic expressions of those comparisons,
7706	 and constants, we can simplify it.  There are only three cases
7707	 to check: the two values can either be equal, the first can be
7708	 greater, or the second can be greater.  Fold the expression for
7709	 those three values.  Since each value must be 0 or 1, we have
7710	 eight possibilities, each of which corresponds to the constant 0
7711	 or 1 or one of the six possible comparisons.
7712
7713	 This handles common cases like (a > b) == 0 but also handles
7714	 expressions like  ((x > y) - (y > x)) > 0, which supposedly
7715	 occur in macroized code.  */
7716
7717      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7718	{
7719	  tree cval1 = 0, cval2 = 0;
7720	  int save_p = 0;
7721
7722	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7723	      /* Don't handle degenerate cases here; they should already
7724		 have been handled anyway.  */
7725	      && cval1 != 0 && cval2 != 0
7726	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7727	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7728	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7729	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7730	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7731	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7732				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7733	    {
7734	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7735	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7736
7737	      /* We can't just pass T to eval_subst in case cval1 or cval2
7738		 was the same as ARG1.  */
7739
7740	      tree high_result
7741		= fold (build (code, type,
7742			       eval_subst (arg0, cval1, maxval, cval2, minval),
7743			       arg1));
7744	      tree equal_result
7745		= fold (build (code, type,
7746			       eval_subst (arg0, cval1, maxval, cval2, maxval),
7747			       arg1));
7748	      tree low_result
7749		= fold (build (code, type,
7750			       eval_subst (arg0, cval1, minval, cval2, maxval),
7751			       arg1));
7752
7753	      /* All three of these results should be 0 or 1.  Confirm they
7754		 are.  Then use those values to select the proper code
7755		 to use.  */
7756
7757	      if ((integer_zerop (high_result)
7758		   || integer_onep (high_result))
7759		  && (integer_zerop (equal_result)
7760		      || integer_onep (equal_result))
7761		  && (integer_zerop (low_result)
7762		      || integer_onep (low_result)))
7763		{
7764		  /* Make a 3-bit mask with the high-order bit being the
7765		     value for `>', the next for '=', and the low for '<'.  */
7766		  switch ((integer_onep (high_result) * 4)
7767			  + (integer_onep (equal_result) * 2)
7768			  + integer_onep (low_result))
7769		    {
7770		    case 0:
7771		      /* Always false.  */
7772		      return omit_one_operand (type, integer_zero_node, arg0);
7773		    case 1:
7774		      code = LT_EXPR;
7775		      break;
7776		    case 2:
7777		      code = EQ_EXPR;
7778		      break;
7779		    case 3:
7780		      code = LE_EXPR;
7781		      break;
7782		    case 4:
7783		      code = GT_EXPR;
7784		      break;
7785		    case 5:
7786		      code = NE_EXPR;
7787		      break;
7788		    case 6:
7789		      code = GE_EXPR;
7790		      break;
7791		    case 7:
7792		      /* Always true.  */
7793		      return omit_one_operand (type, integer_one_node, arg0);
7794		    }
7795
7796		  t = build (code, type, cval1, cval2);
7797		  if (save_p)
7798		    return save_expr (t);
7799		  else
7800		    return fold (t);
7801		}
7802	    }
7803	}
7804
7805      /* If this is a comparison of a field, we may be able to simplify it.  */
7806      if (((TREE_CODE (arg0) == COMPONENT_REF
7807	    && (*lang_hooks.can_use_bit_fields_p) ())
7808	   || TREE_CODE (arg0) == BIT_FIELD_REF)
7809	  && (code == EQ_EXPR || code == NE_EXPR)
7810	  /* Handle the constant case even without -O
7811	     to make sure the warnings are given.  */
7812	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7813	{
7814	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7815	  if (t1)
7816	    return t1;
7817	}
7818
7819      /* If this is a comparison of complex values and either or both sides
7820	 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7821	 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7822	 This may prevent needless evaluations.  */
7823      if ((code == EQ_EXPR || code == NE_EXPR)
7824	  && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7825	  && (TREE_CODE (arg0) == COMPLEX_EXPR
7826	      || TREE_CODE (arg1) == COMPLEX_EXPR
7827	      || TREE_CODE (arg0) == COMPLEX_CST
7828	      || TREE_CODE (arg1) == COMPLEX_CST))
7829	{
7830	  tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7831	  tree real0, imag0, real1, imag1;
7832
7833	  arg0 = save_expr (arg0);
7834	  arg1 = save_expr (arg1);
7835	  real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7836	  imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7837	  real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7838	  imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7839
7840	  return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7841			       : TRUTH_ORIF_EXPR),
7842			      type,
7843			      fold (build (code, type, real0, real1)),
7844			      fold (build (code, type, imag0, imag1))));
7845	}
7846
7847      /* Optimize comparisons of strlen vs zero to a compare of the
7848	 first character of the string vs zero.  To wit,
7849		strlen(ptr) == 0   =>  *ptr == 0
7850		strlen(ptr) != 0   =>  *ptr != 0
7851	 Other cases should reduce to one of these two (or a constant)
7852	 due to the return value of strlen being unsigned.  */
7853      if ((code == EQ_EXPR || code == NE_EXPR)
7854	  && integer_zerop (arg1)
7855	  && TREE_CODE (arg0) == CALL_EXPR)
7856	{
7857	  tree fndecl = get_callee_fndecl (arg0);
7858	  tree arglist;
7859
7860	  if (fndecl
7861	      && DECL_BUILT_IN (fndecl)
7862	      && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7863	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7864	      && (arglist = TREE_OPERAND (arg0, 1))
7865	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7866	      && ! TREE_CHAIN (arglist))
7867	    return fold (build (code, type,
7868				build1 (INDIRECT_REF, char_type_node,
7869					TREE_VALUE(arglist)),
7870				integer_zero_node));
7871	}
7872
7873      /* From here on, the only cases we handle are when the result is
7874	 known to be a constant.
7875
7876	 To compute GT, swap the arguments and do LT.
7877	 To compute GE, do LT and invert the result.
7878	 To compute LE, swap the arguments, do LT and invert the result.
7879	 To compute NE, do EQ and invert the result.
7880
7881	 Therefore, the code below must handle only EQ and LT.  */
7882
7883      if (code == LE_EXPR || code == GT_EXPR)
7884	{
7885	  tem = arg0, arg0 = arg1, arg1 = tem;
7886	  code = swap_tree_comparison (code);
7887	}
7888
7889      /* Note that it is safe to invert for real values here because we
7890	 will check below in the one case that it matters.  */
7891
7892      t1 = NULL_TREE;
7893      invert = 0;
7894      if (code == NE_EXPR || code == GE_EXPR)
7895	{
7896	  invert = 1;
7897	  code = invert_tree_comparison (code);
7898	}
7899
7900      /* Compute a result for LT or EQ if args permit;
7901	 otherwise return T.  */
7902      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7903	{
7904	  if (code == EQ_EXPR)
7905	    t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7906	  else
7907	    t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7908			       ? INT_CST_LT_UNSIGNED (arg0, arg1)
7909			       : INT_CST_LT (arg0, arg1)),
7910			      0);
7911	}
7912
7913#if 0 /* This is no longer useful, but breaks some real code.  */
7914      /* Assume a nonexplicit constant cannot equal an explicit one,
7915	 since such code would be undefined anyway.
7916	 Exception: on sysvr4, using #pragma weak,
7917	 a label can come out as 0.  */
7918      else if (TREE_CODE (arg1) == INTEGER_CST
7919	       && !integer_zerop (arg1)
7920	       && TREE_CONSTANT (arg0)
7921	       && TREE_CODE (arg0) == ADDR_EXPR
7922	       && code == EQ_EXPR)
7923	t1 = build_int_2 (0, 0);
7924#endif
7925      /* Two real constants can be compared explicitly.  */
7926      else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7927	{
7928	  /* If either operand is a NaN, the result is false with two
7929	     exceptions: First, an NE_EXPR is true on NaNs, but that case
7930	     is already handled correctly since we will be inverting the
7931	     result for NE_EXPR.  Second, if we had inverted a LE_EXPR
7932	     or a GE_EXPR into a LT_EXPR, we must return true so that it
7933	     will be inverted into false.  */
7934
7935	  if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7936	      || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7937	    t1 = build_int_2 (invert && code == LT_EXPR, 0);
7938
7939	  else if (code == EQ_EXPR)
7940	    t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7941						 TREE_REAL_CST (arg1)),
7942			      0);
7943	  else
7944	    t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7945						TREE_REAL_CST (arg1)),
7946			      0);
7947	}
7948
7949      if (t1 == NULL_TREE)
7950	return t;
7951
7952      if (invert)
7953	TREE_INT_CST_LOW (t1) ^= 1;
7954
7955      TREE_TYPE (t1) = type;
7956      if (TREE_CODE (type) == BOOLEAN_TYPE)
7957	return (*lang_hooks.truthvalue_conversion) (t1);
7958      return t1;
7959
7960    case COND_EXPR:
7961      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7962	 so all simple results must be passed through pedantic_non_lvalue.  */
7963      if (TREE_CODE (arg0) == INTEGER_CST)
7964	{
7965	  tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7966	  /* Only optimize constant conditions when the selected branch
7967	     has the same type as the COND_EXPR.  This avoids optimizing
7968	     away "c ? x : throw", where the throw has a void type.  */
7969	  if (! VOID_TYPE_P (TREE_TYPE (tem))
7970	      || VOID_TYPE_P (TREE_TYPE (t)))
7971	    return pedantic_non_lvalue (tem);
7972	  return t;
7973	}
7974      if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7975	return pedantic_omit_one_operand (type, arg1, arg0);
7976
7977      /* If we have A op B ? A : C, we may be able to convert this to a
7978	 simpler expression, depending on the operation and the values
7979	 of B and C.  Signed zeros prevent all of these transformations,
7980	 for reasons given above each one.  */
7981
7982      if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7983	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7984					     arg1, TREE_OPERAND (arg0, 1))
7985	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7986	{
7987	  tree arg2 = TREE_OPERAND (t, 2);
7988	  enum tree_code comp_code = TREE_CODE (arg0);
7989
7990	  STRIP_NOPS (arg2);
7991
7992	  /* If we have A op 0 ? A : -A, consider applying the following
7993	     transformations:
7994
7995	     A == 0? A : -A    same as -A
7996	     A != 0? A : -A    same as A
7997	     A >= 0? A : -A    same as abs (A)
7998	     A > 0?  A : -A    same as abs (A)
7999	     A <= 0? A : -A    same as -abs (A)
8000	     A < 0?  A : -A    same as -abs (A)
8001
8002	     None of these transformations work for modes with signed
8003	     zeros.  If A is +/-0, the first two transformations will
8004	     change the sign of the result (from +0 to -0, or vice
8005	     versa).  The last four will fix the sign of the result,
8006	     even though the original expressions could be positive or
8007	     negative, depending on the sign of A.
8008
8009	     Note that all these transformations are correct if A is
8010	     NaN, since the two alternatives (A and -A) are also NaNs.  */
8011	  if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
8012	       ? real_zerop (TREE_OPERAND (arg0, 1))
8013	       : integer_zerop (TREE_OPERAND (arg0, 1)))
8014	      && TREE_CODE (arg2) == NEGATE_EXPR
8015	      && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
8016	    switch (comp_code)
8017	      {
8018	      case EQ_EXPR:
8019		tem = fold_convert (TREE_TYPE (TREE_OPERAND (t, 1)), arg1);
8020		tem = fold_convert (type, negate_expr (tem));
8021		return pedantic_non_lvalue (tem);
8022	      case NE_EXPR:
8023		return pedantic_non_lvalue (fold_convert (type, arg1));
8024	      case GE_EXPR:
8025	      case GT_EXPR:
8026		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8027		  arg1 = fold_convert ((*lang_hooks.types.signed_type)
8028				       (TREE_TYPE (arg1)), arg1);
8029		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8030		return pedantic_non_lvalue (fold_convert (type, arg1));
8031	      case LE_EXPR:
8032	      case LT_EXPR:
8033		if (TREE_UNSIGNED (TREE_TYPE (arg1)))
8034		  arg1 = fold_convert ((lang_hooks.types.signed_type)
8035				       (TREE_TYPE (arg1)), arg1);
8036		arg1 = fold (build1 (ABS_EXPR, TREE_TYPE (arg1), arg1));
8037		arg1 = negate_expr (fold_convert (type, arg1));
8038		return pedantic_non_lvalue (arg1);
8039	      default:
8040		abort ();
8041	      }
8042
8043	  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
8044	     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
8045	     both transformations are correct when A is NaN: A != 0
8046	     is then true, and A == 0 is false.  */
8047
8048	  if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8049	    {
8050	      if (comp_code == NE_EXPR)
8051		return pedantic_non_lvalue (fold_convert (type, arg1));
8052	      else if (comp_code == EQ_EXPR)
8053		return pedantic_non_lvalue (fold_convert (type, integer_zero_node));
8054	    }
8055
8056	  /* Try some transformations of A op B ? A : B.
8057
8058	     A == B? A : B    same as B
8059	     A != B? A : B    same as A
8060	     A >= B? A : B    same as max (A, B)
8061	     A > B?  A : B    same as max (B, A)
8062	     A <= B? A : B    same as min (A, B)
8063	     A < B?  A : B    same as min (B, A)
8064
8065	     As above, these transformations don't work in the presence
8066	     of signed zeros.  For example, if A and B are zeros of
8067	     opposite sign, the first two transformations will change
8068	     the sign of the result.  In the last four, the original
8069	     expressions give different results for (A=+0, B=-0) and
8070	     (A=-0, B=+0), but the transformed expressions do not.
8071
8072	     The first two transformations are correct if either A or B
8073	     is a NaN.  In the first transformation, the condition will
8074	     be false, and B will indeed be chosen.  In the case of the
8075	     second transformation, the condition A != B will be true,
8076	     and A will be chosen.
8077
8078	     The conversions to max() and min() are not correct if B is
8079	     a number and A is not.  The conditions in the original
8080	     expressions will be false, so all four give B.  The min()
8081	     and max() versions would give a NaN instead.  */
8082	  if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8083					      arg2, TREE_OPERAND (arg0, 0)))
8084	    {
8085	      tree comp_op0 = TREE_OPERAND (arg0, 0);
8086	      tree comp_op1 = TREE_OPERAND (arg0, 1);
8087	      tree comp_type = TREE_TYPE (comp_op0);
8088
8089	      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
8090	      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8091		{
8092		  comp_type = type;
8093		  comp_op0 = arg1;
8094		  comp_op1 = arg2;
8095		}
8096
8097	      switch (comp_code)
8098		{
8099		case EQ_EXPR:
8100		  return pedantic_non_lvalue (fold_convert (type, arg2));
8101		case NE_EXPR:
8102		  return pedantic_non_lvalue (fold_convert (type, arg1));
8103		case LE_EXPR:
8104		case LT_EXPR:
8105		  /* In C++ a ?: expression can be an lvalue, so put the
8106		     operand which will be used if they are equal first
8107		     so that we can convert this back to the
8108		     corresponding COND_EXPR.  */
8109		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8110		    return pedantic_non_lvalue (fold_convert
8111		      (type, fold (build (MIN_EXPR, comp_type,
8112					  (comp_code == LE_EXPR
8113					   ? comp_op0 : comp_op1),
8114					  (comp_code == LE_EXPR
8115					   ? comp_op1 : comp_op0)))));
8116		  break;
8117		case GE_EXPR:
8118		case GT_EXPR:
8119		  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8120		    return pedantic_non_lvalue (fold_convert
8121		      (type, fold (build (MAX_EXPR, comp_type,
8122					  (comp_code == GE_EXPR
8123					   ? comp_op0 : comp_op1),
8124					  (comp_code == GE_EXPR
8125					   ? comp_op1 : comp_op0)))));
8126		  break;
8127		default:
8128		  abort ();
8129		}
8130	    }
8131
8132	  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8133	     we might still be able to simplify this.  For example,
8134	     if C1 is one less or one more than C2, this might have started
8135	     out as a MIN or MAX and been transformed by this function.
8136	     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
8137
8138	  if (INTEGRAL_TYPE_P (type)
8139	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8140	      && TREE_CODE (arg2) == INTEGER_CST)
8141	    switch (comp_code)
8142	      {
8143	      case EQ_EXPR:
8144		/* We can replace A with C1 in this case.  */
8145		arg1 = fold_convert (type, TREE_OPERAND (arg0, 1));
8146		return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8147				    TREE_OPERAND (t, 2)));
8148
8149	      case LT_EXPR:
8150		/* If C1 is C2 + 1, this is min(A, C2).  */
8151		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8152		    && operand_equal_p (TREE_OPERAND (arg0, 1),
8153					const_binop (PLUS_EXPR, arg2,
8154						     integer_one_node, 0), 1))
8155		  return pedantic_non_lvalue
8156		    (fold (build (MIN_EXPR, type, arg1, arg2)));
8157		break;
8158
8159	      case LE_EXPR:
8160		/* If C1 is C2 - 1, this is min(A, C2).  */
8161		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8162		    && operand_equal_p (TREE_OPERAND (arg0, 1),
8163					const_binop (MINUS_EXPR, arg2,
8164						     integer_one_node, 0), 1))
8165		  return pedantic_non_lvalue
8166		    (fold (build (MIN_EXPR, type, arg1, arg2)));
8167		break;
8168
8169	      case GT_EXPR:
8170		/* If C1 is C2 - 1, this is max(A, C2).  */
8171		if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8172		    && operand_equal_p (TREE_OPERAND (arg0, 1),
8173					const_binop (MINUS_EXPR, arg2,
8174						     integer_one_node, 0), 1))
8175		  return pedantic_non_lvalue
8176		    (fold (build (MAX_EXPR, type, arg1, arg2)));
8177		break;
8178
8179	      case GE_EXPR:
8180		/* If C1 is C2 + 1, this is max(A, C2).  */
8181		if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8182		    && operand_equal_p (TREE_OPERAND (arg0, 1),
8183					const_binop (PLUS_EXPR, arg2,
8184						     integer_one_node, 0), 1))
8185		  return pedantic_non_lvalue
8186		    (fold (build (MAX_EXPR, type, arg1, arg2)));
8187		break;
8188	      case NE_EXPR:
8189		break;
8190	      default:
8191		abort ();
8192	      }
8193	}
8194
8195      /* If the second operand is simpler than the third, swap them
8196	 since that produces better jump optimization results.  */
8197      if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8198				TREE_OPERAND (t, 2), false))
8199	{
8200	  /* See if this can be inverted.  If it can't, possibly because
8201	     it was a floating-point inequality comparison, don't do
8202	     anything.  */
8203	  tem = invert_truthvalue (arg0);
8204
8205	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8206	    return fold (build (code, type, tem,
8207			 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8208	}
8209
8210      /* Convert A ? 1 : 0 to simply A.  */
8211      if (integer_onep (TREE_OPERAND (t, 1))
8212	  && integer_zerop (TREE_OPERAND (t, 2))
8213	  /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8214	     call to fold will try to move the conversion inside
8215	     a COND, which will recurse.  In that case, the COND_EXPR
8216	     is probably the best choice, so leave it alone.  */
8217	  && type == TREE_TYPE (arg0))
8218	return pedantic_non_lvalue (arg0);
8219
8220      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
8221	 over COND_EXPR in cases such as floating point comparisons.  */
8222      if (integer_zerop (TREE_OPERAND (t, 1))
8223	  && integer_onep (TREE_OPERAND (t, 2))
8224	  && truth_value_p (TREE_CODE (arg0)))
8225	return pedantic_non_lvalue (fold_convert (type,
8226						  invert_truthvalue (arg0)));
8227
8228      /* Look for expressions of the form A & 2 ? 2 : 0.  The result of this
8229	 operation is simply A & 2.  */
8230
8231      if (integer_zerop (TREE_OPERAND (t, 2))
8232	  && TREE_CODE (arg0) == NE_EXPR
8233	  && integer_zerop (TREE_OPERAND (arg0, 1))
8234	  && integer_pow2p (arg1)
8235	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8236	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8237			      arg1, 1))
8238	return pedantic_non_lvalue (fold_convert (type,
8239						  TREE_OPERAND (arg0, 0)));
8240
8241      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
8242      if (integer_zerop (TREE_OPERAND (t, 2))
8243	  && truth_value_p (TREE_CODE (arg0))
8244	  && truth_value_p (TREE_CODE (arg1)))
8245	return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8246						 arg0, arg1)));
8247
8248      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
8249      if (integer_onep (TREE_OPERAND (t, 2))
8250	  && truth_value_p (TREE_CODE (arg0))
8251	  && truth_value_p (TREE_CODE (arg1)))
8252	{
8253	  /* Only perform transformation if ARG0 is easily inverted.  */
8254	  tem = invert_truthvalue (arg0);
8255	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8256	    return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8257						     tem, arg1)));
8258	}
8259
8260      return t;
8261
8262    case COMPOUND_EXPR:
8263      /* When pedantic, a compound expression can be neither an lvalue
8264	 nor an integer constant expression.  */
8265      if (TREE_SIDE_EFFECTS (arg0) || pedantic)
8266	return t;
8267      /* Don't let (0, 0) be null pointer constant.  */
8268      if (integer_zerop (arg1))
8269	return build1 (NOP_EXPR, type, arg1);
8270      return fold_convert (type, arg1);
8271
8272    case COMPLEX_EXPR:
8273      if (wins)
8274	return build_complex (type, arg0, arg1);
8275      return t;
8276
8277    case REALPART_EXPR:
8278      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8279	return t;
8280      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8281	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8282				 TREE_OPERAND (arg0, 1));
8283      else if (TREE_CODE (arg0) == COMPLEX_CST)
8284	return TREE_REALPART (arg0);
8285      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8286	return fold (build (TREE_CODE (arg0), type,
8287			    fold (build1 (REALPART_EXPR, type,
8288					  TREE_OPERAND (arg0, 0))),
8289			    fold (build1 (REALPART_EXPR,
8290					  type, TREE_OPERAND (arg0, 1)))));
8291      return t;
8292
8293    case IMAGPART_EXPR:
8294      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8295	return fold_convert (type, integer_zero_node);
8296      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8297	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8298				 TREE_OPERAND (arg0, 0));
8299      else if (TREE_CODE (arg0) == COMPLEX_CST)
8300	return TREE_IMAGPART (arg0);
8301      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8302	return fold (build (TREE_CODE (arg0), type,
8303			    fold (build1 (IMAGPART_EXPR, type,
8304					  TREE_OPERAND (arg0, 0))),
8305			    fold (build1 (IMAGPART_EXPR, type,
8306					  TREE_OPERAND (arg0, 1)))));
8307      return t;
8308
8309      /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8310         appropriate.  */
8311    case CLEANUP_POINT_EXPR:
8312      if (! has_cleanups (arg0))
8313	return TREE_OPERAND (t, 0);
8314
8315      {
8316	enum tree_code code0 = TREE_CODE (arg0);
8317	int kind0 = TREE_CODE_CLASS (code0);
8318	tree arg00 = TREE_OPERAND (arg0, 0);
8319	tree arg01;
8320
8321	if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8322	  return fold (build1 (code0, type,
8323			       fold (build1 (CLEANUP_POINT_EXPR,
8324					     TREE_TYPE (arg00), arg00))));
8325
8326	if (kind0 == '<' || kind0 == '2'
8327	    || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8328	    || code0 == TRUTH_AND_EXPR   || code0 == TRUTH_OR_EXPR
8329	    || code0 == TRUTH_XOR_EXPR)
8330	  {
8331	    arg01 = TREE_OPERAND (arg0, 1);
8332
8333	    if (TREE_CONSTANT (arg00)
8334		|| ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8335		    && ! has_cleanups (arg00)))
8336	      return fold (build (code0, type, arg00,
8337				  fold (build1 (CLEANUP_POINT_EXPR,
8338						TREE_TYPE (arg01), arg01))));
8339
8340	    if (TREE_CONSTANT (arg01))
8341	      return fold (build (code0, type,
8342				  fold (build1 (CLEANUP_POINT_EXPR,
8343						TREE_TYPE (arg00), arg00)),
8344				  arg01));
8345	  }
8346
8347	return t;
8348      }
8349
8350    case CALL_EXPR:
8351      /* Check for a built-in function.  */
8352      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8353	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8354	      == FUNCTION_DECL)
8355	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8356	{
8357	  tree tmp = fold_builtin (expr);
8358	  if (tmp)
8359	    return tmp;
8360	}
8361      return t;
8362
8363    default:
8364      return t;
8365    } /* switch (code) */
8366}
8367
8368#ifdef ENABLE_FOLD_CHECKING
8369#undef fold
8370
8371static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8372static void fold_check_failed (tree, tree);
8373void print_fold_checksum (tree);
8374
8375/* When --enable-checking=fold, compute a digest of expr before
8376   and after actual fold call to see if fold did not accidentally
8377   change original expr.  */
8378
8379tree
8380fold (tree expr)
8381{
8382  tree ret;
8383  struct md5_ctx ctx;
8384  unsigned char checksum_before[16], checksum_after[16];
8385  htab_t ht;
8386
8387  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8388  md5_init_ctx (&ctx);
8389  fold_checksum_tree (expr, &ctx, ht);
8390  md5_finish_ctx (&ctx, checksum_before);
8391  htab_empty (ht);
8392
8393  ret = fold_1 (expr);
8394
8395  md5_init_ctx (&ctx);
8396  fold_checksum_tree (expr, &ctx, ht);
8397  md5_finish_ctx (&ctx, checksum_after);
8398  htab_delete (ht);
8399
8400  if (memcmp (checksum_before, checksum_after, 16))
8401    fold_check_failed (expr, ret);
8402
8403  return ret;
8404}
8405
8406void
8407print_fold_checksum (tree expr)
8408{
8409  struct md5_ctx ctx;
8410  unsigned char checksum[16], cnt;
8411  htab_t ht;
8412
8413  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8414  md5_init_ctx (&ctx);
8415  fold_checksum_tree (expr, &ctx, ht);
8416  md5_finish_ctx (&ctx, checksum);
8417  htab_delete (ht);
8418  for (cnt = 0; cnt < 16; ++cnt)
8419    fprintf (stderr, "%02x", checksum[cnt]);
8420  putc ('\n', stderr);
8421}
8422
8423static void
8424fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8425{
8426  internal_error ("fold check: original tree changed by fold");
8427}
8428
8429static void
8430fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8431{
8432  void **slot;
8433  enum tree_code code;
8434  char buf[sizeof (struct tree_decl)];
8435  int i, len;
8436
8437  if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8438      > sizeof (struct tree_decl)
8439      || sizeof (struct tree_type) > sizeof (struct tree_decl))
8440    abort ();
8441  if (expr == NULL)
8442    return;
8443  slot = htab_find_slot (ht, expr, INSERT);
8444  if (*slot != NULL)
8445    return;
8446  *slot = expr;
8447  code = TREE_CODE (expr);
8448  if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8449    {
8450      /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified.  */
8451      memcpy (buf, expr, tree_size (expr));
8452      expr = (tree) buf;
8453      SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8454    }
8455  else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8456    {
8457      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
8458      memcpy (buf, expr, tree_size (expr));
8459      expr = (tree) buf;
8460      SET_DECL_ASSEMBLER_NAME (expr, NULL);
8461    }
8462  else if (TREE_CODE_CLASS (code) == 't'
8463	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8464    {
8465      /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified.  */
8466      memcpy (buf, expr, tree_size (expr));
8467      expr = (tree) buf;
8468      TYPE_POINTER_TO (expr) = NULL;
8469      TYPE_REFERENCE_TO (expr) = NULL;
8470    }
8471  md5_process_bytes (expr, tree_size (expr), ctx);
8472  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8473  if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8474    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8475  len = TREE_CODE_LENGTH (code);
8476  switch (TREE_CODE_CLASS (code))
8477    {
8478    case 'c':
8479      switch (code)
8480	{
8481	case STRING_CST:
8482	  md5_process_bytes (TREE_STRING_POINTER (expr),
8483			     TREE_STRING_LENGTH (expr), ctx);
8484	  break;
8485	case COMPLEX_CST:
8486	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8487	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8488	  break;
8489	case VECTOR_CST:
8490	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8491	  break;
8492	default:
8493	  break;
8494	}
8495      break;
8496    case 'x':
8497      switch (code)
8498	{
8499	case TREE_LIST:
8500	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8501	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8502	  break;
8503	case TREE_VEC:
8504	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8505	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8506	  break;
8507	default:
8508	  break;
8509	}
8510      break;
8511    case 'e':
8512      switch (code)
8513	{
8514	case SAVE_EXPR: len = 2; break;
8515	case GOTO_SUBROUTINE_EXPR: len = 0; break;
8516	case RTL_EXPR: len = 0; break;
8517	case WITH_CLEANUP_EXPR: len = 2; break;
8518	default: break;
8519	}
8520      /* Fall through.  */
8521    case 'r':
8522    case '<':
8523    case '1':
8524    case '2':
8525    case 's':
8526      for (i = 0; i < len; ++i)
8527	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8528      break;
8529    case 'd':
8530      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8531      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8532      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8533      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8534      fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8535      fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8536      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8537      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8538      fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8539      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8540      fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8541      break;
8542    case 't':
8543      fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8544      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8545      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8546      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8547      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8548      fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8549      fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8550      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8551      fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8552      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8553      break;
8554    default:
8555      break;
8556    }
8557}
8558
8559#endif
8560
8561/* Perform constant folding and related simplification of initializer
8562   expression EXPR.  This behaves identically to "fold" but ignores
8563   potential run-time traps and exceptions that fold must preserve.  */
8564
8565tree
8566fold_initializer (tree expr)
8567{
8568  int saved_signaling_nans = flag_signaling_nans;
8569  int saved_trapping_math = flag_trapping_math;
8570  int saved_trapv = flag_trapv;
8571  tree result;
8572
8573  flag_signaling_nans = 0;
8574  flag_trapping_math = 0;
8575  flag_trapv = 0;
8576
8577  result = fold (expr);
8578
8579  flag_signaling_nans = saved_signaling_nans;
8580  flag_trapping_math = saved_trapping_math;
8581  flag_trapv = saved_trapv;
8582
8583  return result;
8584}
8585
8586/* Determine if first argument is a multiple of second argument.  Return 0 if
8587   it is not, or we cannot easily determined it to be.
8588
8589   An example of the sort of thing we care about (at this point; this routine
8590   could surely be made more general, and expanded to do what the *_DIV_EXPR's
8591   fold cases do now) is discovering that
8592
8593     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8594
8595   is a multiple of
8596
8597     SAVE_EXPR (J * 8)
8598
8599   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8600
8601   This code also handles discovering that
8602
8603     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8604
8605   is a multiple of 8 so we don't have to worry about dealing with a
8606   possible remainder.
8607
8608   Note that we *look* inside a SAVE_EXPR only to determine how it was
8609   calculated; it is not safe for fold to do much of anything else with the
8610   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8611   at run time.  For example, the latter example above *cannot* be implemented
8612   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8613   evaluation time of the original SAVE_EXPR is not necessarily the same at
8614   the time the new expression is evaluated.  The only optimization of this
8615   sort that would be valid is changing
8616
8617     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8618
8619   divided by 8 to
8620
8621     SAVE_EXPR (I) * SAVE_EXPR (J)
8622
8623   (where the same SAVE_EXPR (J) is used in the original and the
8624   transformed version).  */
8625
8626static int
8627multiple_of_p (tree type, tree top, tree bottom)
8628{
8629  if (operand_equal_p (top, bottom, 0))
8630    return 1;
8631
8632  if (TREE_CODE (type) != INTEGER_TYPE)
8633    return 0;
8634
8635  switch (TREE_CODE (top))
8636    {
8637    case MULT_EXPR:
8638      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8639	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8640
8641    case PLUS_EXPR:
8642    case MINUS_EXPR:
8643      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8644	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8645
8646    case LSHIFT_EXPR:
8647      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8648	{
8649	  tree op1, t1;
8650
8651	  op1 = TREE_OPERAND (top, 1);
8652	  /* const_binop may not detect overflow correctly,
8653	     so check for it explicitly here.  */
8654	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8655	      > TREE_INT_CST_LOW (op1)
8656	      && TREE_INT_CST_HIGH (op1) == 0
8657	      && 0 != (t1 = fold_convert (type,
8658					  const_binop (LSHIFT_EXPR,
8659						       size_one_node,
8660						       op1, 0)))
8661	      && ! TREE_OVERFLOW (t1))
8662	    return multiple_of_p (type, t1, bottom);
8663	}
8664      return 0;
8665
8666    case NOP_EXPR:
8667      /* Can't handle conversions from non-integral or wider integral type.  */
8668      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8669	  || (TYPE_PRECISION (type)
8670	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8671	return 0;
8672
8673      /* .. fall through ...  */
8674
8675    case SAVE_EXPR:
8676      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8677
8678    case INTEGER_CST:
8679      if (TREE_CODE (bottom) != INTEGER_CST
8680	  || (TREE_UNSIGNED (type)
8681	      && (tree_int_cst_sgn (top) < 0
8682		  || tree_int_cst_sgn (bottom) < 0)))
8683	return 0;
8684      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8685					 top, bottom, 0));
8686
8687    default:
8688      return 0;
8689    }
8690}
8691
8692/* Return true if `t' is known to be non-negative.  */
8693
8694int
8695tree_expr_nonnegative_p (tree t)
8696{
8697  switch (TREE_CODE (t))
8698    {
8699    case ABS_EXPR:
8700      return 1;
8701
8702    case INTEGER_CST:
8703      return tree_int_cst_sgn (t) >= 0;
8704
8705    case REAL_CST:
8706      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8707
8708    case PLUS_EXPR:
8709      if (FLOAT_TYPE_P (TREE_TYPE (t)))
8710	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8711	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8712
8713      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8714	 both unsigned and at least 2 bits shorter than the result.  */
8715      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8716	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8717	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8718	{
8719	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8720	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8721	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8722	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8723	    {
8724	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
8725				       TYPE_PRECISION (inner2)) + 1;
8726	      return prec < TYPE_PRECISION (TREE_TYPE (t));
8727	    }
8728	}
8729      break;
8730
8731    case MULT_EXPR:
8732      if (FLOAT_TYPE_P (TREE_TYPE (t)))
8733	{
8734	  /* x * x for floating point x is always non-negative.  */
8735	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8736	    return 1;
8737	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8738		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8739	}
8740
8741      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8742	 both unsigned and their total bits is shorter than the result.  */
8743      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8744	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8745	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8746	{
8747	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8748	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8749	  if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8750	      && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8751	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8752		   < TYPE_PRECISION (TREE_TYPE (t));
8753	}
8754      return 0;
8755
8756    case TRUNC_DIV_EXPR:
8757    case CEIL_DIV_EXPR:
8758    case FLOOR_DIV_EXPR:
8759    case ROUND_DIV_EXPR:
8760      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8761	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8762
8763    case TRUNC_MOD_EXPR:
8764    case CEIL_MOD_EXPR:
8765    case FLOOR_MOD_EXPR:
8766    case ROUND_MOD_EXPR:
8767      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8768
8769    case RDIV_EXPR:
8770      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8771	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8772
8773    case NOP_EXPR:
8774      {
8775	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8776	tree outer_type = TREE_TYPE (t);
8777
8778	if (TREE_CODE (outer_type) == REAL_TYPE)
8779	  {
8780	    if (TREE_CODE (inner_type) == REAL_TYPE)
8781	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8782	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
8783	      {
8784		if (TREE_UNSIGNED (inner_type))
8785		  return 1;
8786		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8787	      }
8788	  }
8789	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8790	  {
8791	    if (TREE_CODE (inner_type) == REAL_TYPE)
8792	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
8793	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
8794	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8795		      && TREE_UNSIGNED (inner_type);
8796	  }
8797      }
8798      break;
8799
8800    case COND_EXPR:
8801      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8802	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8803    case COMPOUND_EXPR:
8804      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8805    case MIN_EXPR:
8806      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8807	&& tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8808    case MAX_EXPR:
8809      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8810	|| tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8811    case MODIFY_EXPR:
8812      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8813    case BIND_EXPR:
8814      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8815    case SAVE_EXPR:
8816      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8817    case NON_LVALUE_EXPR:
8818      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8819    case FLOAT_EXPR:
8820      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8821    case RTL_EXPR:
8822      return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8823
8824    case CALL_EXPR:
8825      {
8826	tree fndecl = get_callee_fndecl (t);
8827	tree arglist = TREE_OPERAND (t, 1);
8828	if (fndecl
8829	    && DECL_BUILT_IN (fndecl)
8830	    && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8831	  switch (DECL_FUNCTION_CODE (fndecl))
8832	    {
8833	    case BUILT_IN_CABS:
8834	    case BUILT_IN_CABSL:
8835	    case BUILT_IN_CABSF:
8836	    case BUILT_IN_EXP:
8837	    case BUILT_IN_EXPF:
8838	    case BUILT_IN_EXPL:
8839	    case BUILT_IN_EXP2:
8840	    case BUILT_IN_EXP2F:
8841	    case BUILT_IN_EXP2L:
8842	    case BUILT_IN_EXP10:
8843	    case BUILT_IN_EXP10F:
8844	    case BUILT_IN_EXP10L:
8845	    case BUILT_IN_FABS:
8846	    case BUILT_IN_FABSF:
8847	    case BUILT_IN_FABSL:
8848	    case BUILT_IN_FFS:
8849	    case BUILT_IN_FFSL:
8850	    case BUILT_IN_FFSLL:
8851	    case BUILT_IN_PARITY:
8852	    case BUILT_IN_PARITYL:
8853	    case BUILT_IN_PARITYLL:
8854	    case BUILT_IN_POPCOUNT:
8855	    case BUILT_IN_POPCOUNTL:
8856	    case BUILT_IN_POPCOUNTLL:
8857	    case BUILT_IN_POW10:
8858	    case BUILT_IN_POW10F:
8859	    case BUILT_IN_POW10L:
8860	    case BUILT_IN_SQRT:
8861	    case BUILT_IN_SQRTF:
8862	    case BUILT_IN_SQRTL:
8863	      return 1;
8864
8865	    case BUILT_IN_ATAN:
8866	    case BUILT_IN_ATANF:
8867	    case BUILT_IN_ATANL:
8868	    case BUILT_IN_CEIL:
8869	    case BUILT_IN_CEILF:
8870	    case BUILT_IN_CEILL:
8871	    case BUILT_IN_FLOOR:
8872	    case BUILT_IN_FLOORF:
8873	    case BUILT_IN_FLOORL:
8874	    case BUILT_IN_NEARBYINT:
8875	    case BUILT_IN_NEARBYINTF:
8876	    case BUILT_IN_NEARBYINTL:
8877	    case BUILT_IN_ROUND:
8878	    case BUILT_IN_ROUNDF:
8879	    case BUILT_IN_ROUNDL:
8880	    case BUILT_IN_TRUNC:
8881	    case BUILT_IN_TRUNCF:
8882	    case BUILT_IN_TRUNCL:
8883	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8884
8885	    case BUILT_IN_POW:
8886	    case BUILT_IN_POWF:
8887	    case BUILT_IN_POWL:
8888	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8889
8890	    default:
8891	      break;
8892	    }
8893      }
8894
8895      /* ... fall through ...  */
8896
8897    default:
8898      if (truth_value_p (TREE_CODE (t)))
8899	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
8900	return 1;
8901    }
8902
8903  /* We don't know sign of `t', so be conservative and return false.  */
8904  return 0;
8905}
8906
8907/* Return true if `r' is known to be non-negative.
8908   Only handles constants at the moment.  */
8909
8910int
8911rtl_expr_nonnegative_p (rtx r)
8912{
8913  switch (GET_CODE (r))
8914    {
8915    case CONST_INT:
8916      return INTVAL (r) >= 0;
8917
8918    case CONST_DOUBLE:
8919      if (GET_MODE (r) == VOIDmode)
8920	return CONST_DOUBLE_HIGH (r) >= 0;
8921      return 0;
8922
8923    case CONST_VECTOR:
8924      {
8925	int units, i;
8926	rtx elt;
8927
8928	units = CONST_VECTOR_NUNITS (r);
8929
8930	for (i = 0; i < units; ++i)
8931	  {
8932	    elt = CONST_VECTOR_ELT (r, i);
8933	    if (!rtl_expr_nonnegative_p (elt))
8934	      return 0;
8935	  }
8936
8937	return 1;
8938      }
8939
8940    case SYMBOL_REF:
8941    case LABEL_REF:
8942      /* These are always nonnegative.  */
8943      return 1;
8944
8945    default:
8946      return 0;
8947    }
8948}
8949
8950#include "gt-fold-const.h"
8951