1/* Fold a constant sub-tree into a single node for C-compiler
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4   Free Software Foundation, Inc.
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 2, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING.  If not, write to the Free
20Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
2102110-1301, USA.  */
22
23/*@@ This file should be rewritten to use an arbitrary precision
24  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
25  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
26  @@ The routines that translate from the ap rep should
27  @@ warn if precision et. al. is lost.
28  @@ This would also make life easier when this technology is used
29  @@ for cross-compilers.  */
30
31/* The entry points in this file are fold, size_int_wide, size_binop
32   and force_fit_type.
33
34   fold takes a tree as argument and returns a simplified tree.
35
36   size_binop takes a tree code for an arithmetic operation
37   and two operands that are trees, and produces a tree for the
38   result, assuming the type comes from `sizetype'.
39
40   size_int takes an integer value, and creates a tree constant
41   with type from `sizetype'.
42
43   force_fit_type takes a constant, an overflowable flag and prior
44   overflow indicators.  It forces the value to fit the type and sets
45   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */
46
47#include "config.h"
48#include "system.h"
49#include "coretypes.h"
50#include "tm.h"
51#include "flags.h"
52#include "tree.h"
53#include "real.h"
54#include "rtl.h"
55#include "expr.h"
56#include "tm_p.h"
57#include "toplev.h"
58#include "ggc.h"
59#include "hashtab.h"
60#include "langhooks.h"
61#include "md5.h"
62
63/* The following constants represent a bit based encoding of GCC's
64   comparison operators.  This encoding simplifies transformations
65   on relational comparison operators, such as AND and OR.  */
66enum comparison_code {
67  COMPCODE_FALSE = 0,
68  COMPCODE_LT = 1,
69  COMPCODE_EQ = 2,
70  COMPCODE_LE = 3,
71  COMPCODE_GT = 4,
72  COMPCODE_LTGT = 5,
73  COMPCODE_GE = 6,
74  COMPCODE_ORD = 7,
75  COMPCODE_UNORD = 8,
76  COMPCODE_UNLT = 9,
77  COMPCODE_UNEQ = 10,
78  COMPCODE_UNLE = 11,
79  COMPCODE_UNGT = 12,
80  COMPCODE_NE = 13,
81  COMPCODE_UNGE = 14,
82  COMPCODE_TRUE = 15
83};
84
85static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
86static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
87static bool negate_mathfn_p (enum built_in_function);
88static bool negate_expr_p (tree);
89static tree negate_expr (tree);
90static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
91static tree associate_trees (tree, tree, enum tree_code, tree);
92static tree const_binop (enum tree_code, tree, tree, int);
93static enum comparison_code comparison_to_compcode (enum tree_code);
94static enum tree_code compcode_to_comparison (enum comparison_code);
95static tree combine_comparisons (enum tree_code, enum tree_code,
96				 enum tree_code, tree, tree, tree);
97static int truth_value_p (enum tree_code);
98static int operand_equal_for_comparison_p (tree, tree, tree);
99static int twoval_comparison_p (tree, tree *, tree *, int *);
100static tree eval_subst (tree, tree, tree, tree, tree);
101static tree pedantic_omit_one_operand (tree, tree, tree);
102static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
103static tree make_bit_field_ref (tree, tree, int, int, int);
104static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
105static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
106				    enum machine_mode *, int *, int *,
107				    tree *, tree *);
108static int all_ones_mask_p (tree, int);
109static tree sign_bit_p (tree, tree);
110static int simple_operand_p (tree);
111static tree range_binop (enum tree_code, tree, tree, int, tree, int);
112static tree make_range (tree, int *, tree *, tree *);
113static tree build_range_check (tree, tree, int, tree, tree);
114static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
115			 tree);
116static tree fold_range_test (enum tree_code, tree, tree, tree);
117static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
118static tree unextend (tree, int, int, tree);
119static tree fold_truthop (enum tree_code, tree, tree, tree);
120static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
121static tree extract_muldiv (tree, tree, enum tree_code, tree);
122static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
123static int multiple_of_p (tree, tree, tree);
124static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
125						 tree, tree,
126						 tree, tree, int);
127static bool fold_real_zero_addition_p (tree, tree, int);
128static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
129				 tree, tree, tree);
130static tree fold_inf_compare (enum tree_code, tree, tree, tree);
131static tree fold_div_compare (enum tree_code, tree, tree, tree);
132static bool reorder_operands_p (tree, tree);
133static tree fold_negate_const (tree, tree);
134static tree fold_not_const (tree, tree);
135static tree fold_relational_const (enum tree_code, tree, tree, tree);
136
137/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
138   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
139   and SUM1.  Then this yields nonzero if overflow occurred during the
140   addition.
141
142   Overflow occurs if A and B have the same sign, but A and SUM differ in
143   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
144   sign.  */
145#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
146
147/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
148   We do that by representing the two-word integer in 4 words, with only
149   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
150   number.  The value of the word is LOWPART + HIGHPART * BASE.  */
151
152#define LOWPART(x) \
153  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
154#define HIGHPART(x) \
155  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
156#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
157
158/* Unpack a two-word integer into 4 words.
159   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
160   WORDS points to the array of HOST_WIDE_INTs.  */
161
162static void
163encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
164{
165  words[0] = LOWPART (low);
166  words[1] = HIGHPART (low);
167  words[2] = LOWPART (hi);
168  words[3] = HIGHPART (hi);
169}
170
171/* Pack an array of 4 words into a two-word integer.
172   WORDS points to the array of words.
173   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */
174
175static void
176decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
177	HOST_WIDE_INT *hi)
178{
179  *low = words[0] + words[1] * BASE;
180  *hi = words[2] + words[3] * BASE;
181}
182
183/* T is an INT_CST node.  OVERFLOWABLE indicates if we are interested
184   in overflow of the value, when >0 we are only interested in signed
185   overflow, for <0 we are interested in any overflow.  OVERFLOWED
186   indicates whether overflow has already occurred.  CONST_OVERFLOWED
187   indicates whether constant overflow has already occurred.  We force
188   T's value to be within range of T's type (by setting to 0 or 1 all
189   the bits outside the type's range).  We set TREE_OVERFLOWED if,
190  	OVERFLOWED is nonzero,
191	or OVERFLOWABLE is >0 and signed overflow occurs
192	or OVERFLOWABLE is <0 and any overflow occurs
193   We set TREE_CONSTANT_OVERFLOWED if,
194        CONST_OVERFLOWED is nonzero
195	or we set TREE_OVERFLOWED.
196  We return either the original T, or a copy.  */
197
198tree
199force_fit_type (tree t, int overflowable,
200		bool overflowed, bool overflowed_const)
201{
202  unsigned HOST_WIDE_INT low;
203  HOST_WIDE_INT high;
204  unsigned int prec;
205  int sign_extended_type;
206
207  gcc_assert (TREE_CODE (t) == INTEGER_CST);
208
209  low = TREE_INT_CST_LOW (t);
210  high = TREE_INT_CST_HIGH (t);
211
212  if (POINTER_TYPE_P (TREE_TYPE (t))
213      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
214    prec = POINTER_SIZE;
215  else
216    prec = TYPE_PRECISION (TREE_TYPE (t));
217  /* Size types *are* sign extended.  */
218  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
219			|| (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
220			    && TYPE_IS_SIZETYPE (TREE_TYPE (t))));
221
222  /* First clear all bits that are beyond the type's precision.  */
223
224  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
225    ;
226  else if (prec > HOST_BITS_PER_WIDE_INT)
227    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
228  else
229    {
230      high = 0;
231      if (prec < HOST_BITS_PER_WIDE_INT)
232	low &= ~((HOST_WIDE_INT) (-1) << prec);
233    }
234
235  if (!sign_extended_type)
236    /* No sign extension */;
237  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
238    /* Correct width already.  */;
239  else if (prec > HOST_BITS_PER_WIDE_INT)
240    {
241      /* Sign extend top half? */
242      if (high & ((unsigned HOST_WIDE_INT)1
243		  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
244	high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
245    }
246  else if (prec == HOST_BITS_PER_WIDE_INT)
247    {
248      if ((HOST_WIDE_INT)low < 0)
249	high = -1;
250    }
251  else
252    {
253      /* Sign extend bottom half? */
254      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
255	{
256	  high = -1;
257	  low |= (HOST_WIDE_INT)(-1) << prec;
258	}
259    }
260
261  /* If the value changed, return a new node.  */
262  if (overflowed || overflowed_const
263      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
264    {
265      t = build_int_cst_wide (TREE_TYPE (t), low, high);
266
267      if (overflowed
268	  || overflowable < 0
269	  || (overflowable > 0 && sign_extended_type))
270	{
271	  t = copy_node (t);
272	  TREE_OVERFLOW (t) = 1;
273	  TREE_CONSTANT_OVERFLOW (t) = 1;
274	}
275      else if (overflowed_const)
276	{
277	  t = copy_node (t);
278	  TREE_CONSTANT_OVERFLOW (t) = 1;
279	}
280    }
281
282  return t;
283}
284
285/* Add two doubleword integers with doubleword result.
286   Return nonzero if the operation overflows according to UNSIGNED_P.
287   Each argument is given as two `HOST_WIDE_INT' pieces.
288   One argument is L1 and H1; the other, L2 and H2.
289   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
290
291int
292add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
293		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
294		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
295		      bool unsigned_p)
296{
297  unsigned HOST_WIDE_INT l;
298  HOST_WIDE_INT h;
299
300  l = l1 + l2;
301  h = h1 + h2 + (l < l1);
302
303  *lv = l;
304  *hv = h;
305
306  if (unsigned_p)
307    return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
308  else
309    return OVERFLOW_SUM_SIGN (h1, h2, h);
310}
311
312/* Negate a doubleword integer with doubleword result.
313   Return nonzero if the operation overflows, assuming it's signed.
314   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
315   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
316
317int
318neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
319	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
320{
321  if (l1 == 0)
322    {
323      *lv = 0;
324      *hv = - h1;
325      return (*hv & h1) < 0;
326    }
327  else
328    {
329      *lv = -l1;
330      *hv = ~h1;
331      return 0;
332    }
333}
334
335/* Multiply two doubleword integers with doubleword result.
336   Return nonzero if the operation overflows according to UNSIGNED_P.
337   Each argument is given as two `HOST_WIDE_INT' pieces.
338   One argument is L1 and H1; the other, L2 and H2.
339   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
340
341int
342mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
343		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
344		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
345		      bool unsigned_p)
346{
347  HOST_WIDE_INT arg1[4];
348  HOST_WIDE_INT arg2[4];
349  HOST_WIDE_INT prod[4 * 2];
350  unsigned HOST_WIDE_INT carry;
351  int i, j, k;
352  unsigned HOST_WIDE_INT toplow, neglow;
353  HOST_WIDE_INT tophigh, neghigh;
354
355  encode (arg1, l1, h1);
356  encode (arg2, l2, h2);
357
358  memset (prod, 0, sizeof prod);
359
360  for (i = 0; i < 4; i++)
361    {
362      carry = 0;
363      for (j = 0; j < 4; j++)
364	{
365	  k = i + j;
366	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
367	  carry += arg1[i] * arg2[j];
368	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
369	  carry += prod[k];
370	  prod[k] = LOWPART (carry);
371	  carry = HIGHPART (carry);
372	}
373      prod[i + 4] = carry;
374    }
375
376  decode (prod, lv, hv);
377  decode (prod + 4, &toplow, &tophigh);
378
379  /* Unsigned overflow is immediate.  */
380  if (unsigned_p)
381    return (toplow | tophigh) != 0;
382
383  /* Check for signed overflow by calculating the signed representation of the
384     top half of the result; it should agree with the low half's sign bit.  */
385  if (h1 < 0)
386    {
387      neg_double (l2, h2, &neglow, &neghigh);
388      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
389    }
390  if (h2 < 0)
391    {
392      neg_double (l1, h1, &neglow, &neghigh);
393      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
394    }
395  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
396}
397
398/* Shift the doubleword integer in L1, H1 left by COUNT places
399   keeping only PREC bits of result.
400   Shift right if COUNT is negative.
401   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
402   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
403
404void
405lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
406	       HOST_WIDE_INT count, unsigned int prec,
407	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
408{
409  unsigned HOST_WIDE_INT signmask;
410
411  if (count < 0)
412    {
413      rshift_double (l1, h1, -count, prec, lv, hv, arith);
414      return;
415    }
416
417  if (SHIFT_COUNT_TRUNCATED)
418    count %= prec;
419
420  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
421    {
422      /* Shifting by the host word size is undefined according to the
423	 ANSI standard, so we must handle this as a special case.  */
424      *hv = 0;
425      *lv = 0;
426    }
427  else if (count >= HOST_BITS_PER_WIDE_INT)
428    {
429      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
430      *lv = 0;
431    }
432  else
433    {
434      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
435	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
436      *lv = l1 << count;
437    }
438
439  /* Sign extend all bits that are beyond the precision.  */
440
441  signmask = -((prec > HOST_BITS_PER_WIDE_INT
442		? ((unsigned HOST_WIDE_INT) *hv
443		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
444		: (*lv >> (prec - 1))) & 1);
445
446  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
447    ;
448  else if (prec >= HOST_BITS_PER_WIDE_INT)
449    {
450      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
451      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
452    }
453  else
454    {
455      *hv = signmask;
456      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
457      *lv |= signmask << prec;
458    }
459}
460
461/* Shift the doubleword integer in L1, H1 right by COUNT places
462   keeping only PREC bits of result.  COUNT must be positive.
463   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
464   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
465
466void
467rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
468	       HOST_WIDE_INT count, unsigned int prec,
469	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
470	       int arith)
471{
472  unsigned HOST_WIDE_INT signmask;
473
474  signmask = (arith
475	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
476	      : 0);
477
478  if (SHIFT_COUNT_TRUNCATED)
479    count %= prec;
480
481  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
482    {
483      /* Shifting by the host word size is undefined according to the
484	 ANSI standard, so we must handle this as a special case.  */
485      *hv = 0;
486      *lv = 0;
487    }
488  else if (count >= HOST_BITS_PER_WIDE_INT)
489    {
490      *hv = 0;
491      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
492    }
493  else
494    {
495      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
496      *lv = ((l1 >> count)
497	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
498    }
499
500  /* Zero / sign extend all bits that are beyond the precision.  */
501
502  if (count >= (HOST_WIDE_INT)prec)
503    {
504      *hv = signmask;
505      *lv = signmask;
506    }
507  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
508    ;
509  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
510    {
511      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
512      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
513    }
514  else
515    {
516      *hv = signmask;
517      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
518      *lv |= signmask << (prec - count);
519    }
520}
521
522/* Rotate the doubleword integer in L1, H1 left by COUNT places
523   keeping only PREC bits of result.
524   Rotate right if COUNT is negative.
525   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
526
527void
528lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
529		HOST_WIDE_INT count, unsigned int prec,
530		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
531{
532  unsigned HOST_WIDE_INT s1l, s2l;
533  HOST_WIDE_INT s1h, s2h;
534
535  count %= prec;
536  if (count < 0)
537    count += prec;
538
539  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
540  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
541  *lv = s1l | s2l;
542  *hv = s1h | s2h;
543}
544
545/* Rotate the doubleword integer in L1, H1 left by COUNT places
546   keeping only PREC bits of result.  COUNT must be positive.
547   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */
548
549void
550rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
551		HOST_WIDE_INT count, unsigned int prec,
552		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
553{
554  unsigned HOST_WIDE_INT s1l, s2l;
555  HOST_WIDE_INT s1h, s2h;
556
557  count %= prec;
558  if (count < 0)
559    count += prec;
560
561  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
562  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
563  *lv = s1l | s2l;
564  *hv = s1h | s2h;
565}
566
567/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
568   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
569   CODE is a tree code for a kind of division, one of
570   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
571   or EXACT_DIV_EXPR
572   It controls how the quotient is rounded to an integer.
573   Return nonzero if the operation overflows.
574   UNS nonzero says do unsigned division.  */
575
576int
577div_and_round_double (enum tree_code code, int uns,
578		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
579		      HOST_WIDE_INT hnum_orig,
580		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
581		      HOST_WIDE_INT hden_orig,
582		      unsigned HOST_WIDE_INT *lquo,
583		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
584		      HOST_WIDE_INT *hrem)
585{
586  int quo_neg = 0;
587  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
588  HOST_WIDE_INT den[4], quo[4];
589  int i, j;
590  unsigned HOST_WIDE_INT work;
591  unsigned HOST_WIDE_INT carry = 0;
592  unsigned HOST_WIDE_INT lnum = lnum_orig;
593  HOST_WIDE_INT hnum = hnum_orig;
594  unsigned HOST_WIDE_INT lden = lden_orig;
595  HOST_WIDE_INT hden = hden_orig;
596  int overflow = 0;
597
598  if (hden == 0 && lden == 0)
599    overflow = 1, lden = 1;
600
601  /* Calculate quotient sign and convert operands to unsigned.  */
602  if (!uns)
603    {
604      if (hnum < 0)
605	{
606	  quo_neg = ~ quo_neg;
607	  /* (minimum integer) / (-1) is the only overflow case.  */
608	  if (neg_double (lnum, hnum, &lnum, &hnum)
609	      && ((HOST_WIDE_INT) lden & hden) == -1)
610	    overflow = 1;
611	}
612      if (hden < 0)
613	{
614	  quo_neg = ~ quo_neg;
615	  neg_double (lden, hden, &lden, &hden);
616	}
617    }
618
619  if (hnum == 0 && hden == 0)
620    {				/* single precision */
621      *hquo = *hrem = 0;
622      /* This unsigned division rounds toward zero.  */
623      *lquo = lnum / lden;
624      goto finish_up;
625    }
626
627  if (hnum == 0)
628    {				/* trivial case: dividend < divisor */
629      /* hden != 0 already checked.  */
630      *hquo = *lquo = 0;
631      *hrem = hnum;
632      *lrem = lnum;
633      goto finish_up;
634    }
635
636  memset (quo, 0, sizeof quo);
637
638  memset (num, 0, sizeof num);	/* to zero 9th element */
639  memset (den, 0, sizeof den);
640
641  encode (num, lnum, hnum);
642  encode (den, lden, hden);
643
644  /* Special code for when the divisor < BASE.  */
645  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
646    {
647      /* hnum != 0 already checked.  */
648      for (i = 4 - 1; i >= 0; i--)
649	{
650	  work = num[i] + carry * BASE;
651	  quo[i] = work / lden;
652	  carry = work % lden;
653	}
654    }
655  else
656    {
657      /* Full double precision division,
658	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
659      int num_hi_sig, den_hi_sig;
660      unsigned HOST_WIDE_INT quo_est, scale;
661
662      /* Find the highest nonzero divisor digit.  */
663      for (i = 4 - 1;; i--)
664	if (den[i] != 0)
665	  {
666	    den_hi_sig = i;
667	    break;
668	  }
669
670      /* Insure that the first digit of the divisor is at least BASE/2.
671	 This is required by the quotient digit estimation algorithm.  */
672
673      scale = BASE / (den[den_hi_sig] + 1);
674      if (scale > 1)
675	{		/* scale divisor and dividend */
676	  carry = 0;
677	  for (i = 0; i <= 4 - 1; i++)
678	    {
679	      work = (num[i] * scale) + carry;
680	      num[i] = LOWPART (work);
681	      carry = HIGHPART (work);
682	    }
683
684	  num[4] = carry;
685	  carry = 0;
686	  for (i = 0; i <= 4 - 1; i++)
687	    {
688	      work = (den[i] * scale) + carry;
689	      den[i] = LOWPART (work);
690	      carry = HIGHPART (work);
691	      if (den[i] != 0) den_hi_sig = i;
692	    }
693	}
694
695      num_hi_sig = 4;
696
697      /* Main loop */
698      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
699	{
700	  /* Guess the next quotient digit, quo_est, by dividing the first
701	     two remaining dividend digits by the high order quotient digit.
702	     quo_est is never low and is at most 2 high.  */
703	  unsigned HOST_WIDE_INT tmp;
704
705	  num_hi_sig = i + den_hi_sig + 1;
706	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
707	  if (num[num_hi_sig] != den[den_hi_sig])
708	    quo_est = work / den[den_hi_sig];
709	  else
710	    quo_est = BASE - 1;
711
712	  /* Refine quo_est so it's usually correct, and at most one high.  */
713	  tmp = work - quo_est * den[den_hi_sig];
714	  if (tmp < BASE
715	      && (den[den_hi_sig - 1] * quo_est
716		  > (tmp * BASE + num[num_hi_sig - 2])))
717	    quo_est--;
718
719	  /* Try QUO_EST as the quotient digit, by multiplying the
720	     divisor by QUO_EST and subtracting from the remaining dividend.
721	     Keep in mind that QUO_EST is the I - 1st digit.  */
722
723	  carry = 0;
724	  for (j = 0; j <= den_hi_sig; j++)
725	    {
726	      work = quo_est * den[j] + carry;
727	      carry = HIGHPART (work);
728	      work = num[i + j] - LOWPART (work);
729	      num[i + j] = LOWPART (work);
730	      carry += HIGHPART (work) != 0;
731	    }
732
733	  /* If quo_est was high by one, then num[i] went negative and
734	     we need to correct things.  */
735	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
736	    {
737	      quo_est--;
738	      carry = 0;		/* add divisor back in */
739	      for (j = 0; j <= den_hi_sig; j++)
740		{
741		  work = num[i + j] + den[j] + carry;
742		  carry = HIGHPART (work);
743		  num[i + j] = LOWPART (work);
744		}
745
746	      num [num_hi_sig] += carry;
747	    }
748
749	  /* Store the quotient digit.  */
750	  quo[i] = quo_est;
751	}
752    }
753
754  decode (quo, lquo, hquo);
755
756 finish_up:
757  /* If result is negative, make it so.  */
758  if (quo_neg)
759    neg_double (*lquo, *hquo, lquo, hquo);
760
761  /* Compute trial remainder:  rem = num - (quo * den)  */
762  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
763  neg_double (*lrem, *hrem, lrem, hrem);
764  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
765
766  switch (code)
767    {
768    case TRUNC_DIV_EXPR:
769    case TRUNC_MOD_EXPR:	/* round toward zero */
770    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
771      return overflow;
772
773    case FLOOR_DIV_EXPR:
774    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
775      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
776	{
777	  /* quo = quo - 1;  */
778	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
779		      lquo, hquo);
780	}
781      else
782	return overflow;
783      break;
784
785    case CEIL_DIV_EXPR:
786    case CEIL_MOD_EXPR:		/* round toward positive infinity */
787      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
788	{
789	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
790		      lquo, hquo);
791	}
792      else
793	return overflow;
794      break;
795
796    case ROUND_DIV_EXPR:
797    case ROUND_MOD_EXPR:	/* round to closest integer */
798      {
799	unsigned HOST_WIDE_INT labs_rem = *lrem;
800	HOST_WIDE_INT habs_rem = *hrem;
801	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
802	HOST_WIDE_INT habs_den = hden, htwice;
803
804	/* Get absolute values.  */
805	if (*hrem < 0)
806	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
807	if (hden < 0)
808	  neg_double (lden, hden, &labs_den, &habs_den);
809
810	/* If (2 * abs (lrem) >= abs (lden)) */
811	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
812		    labs_rem, habs_rem, &ltwice, &htwice);
813
814	if (((unsigned HOST_WIDE_INT) habs_den
815	     < (unsigned HOST_WIDE_INT) htwice)
816	    || (((unsigned HOST_WIDE_INT) habs_den
817		 == (unsigned HOST_WIDE_INT) htwice)
818		&& (labs_den < ltwice)))
819	  {
820	    if (*hquo < 0)
821	      /* quo = quo - 1;  */
822	      add_double (*lquo, *hquo,
823			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
824	    else
825	      /* quo = quo + 1; */
826	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
827			  lquo, hquo);
828	  }
829	else
830	  return overflow;
831      }
832      break;
833
834    default:
835      gcc_unreachable ();
836    }
837
838  /* Compute true remainder:  rem = num - (quo * den)  */
839  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
840  neg_double (*lrem, *hrem, lrem, hrem);
841  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
842  return overflow;
843}
844
845/* If ARG2 divides ARG1 with zero remainder, carries out the division
846   of type CODE and returns the quotient.
847   Otherwise returns NULL_TREE.  */
848
849static tree
850div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
851{
852  unsigned HOST_WIDE_INT int1l, int2l;
853  HOST_WIDE_INT int1h, int2h;
854  unsigned HOST_WIDE_INT quol, reml;
855  HOST_WIDE_INT quoh, remh;
856  tree type = TREE_TYPE (arg1);
857  int uns = TYPE_UNSIGNED (type);
858
859  int1l = TREE_INT_CST_LOW (arg1);
860  int1h = TREE_INT_CST_HIGH (arg1);
861  int2l = TREE_INT_CST_LOW (arg2);
862  int2h = TREE_INT_CST_HIGH (arg2);
863
864  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
865		  	&quol, &quoh, &reml, &remh);
866  if (remh != 0 || reml != 0)
867    return NULL_TREE;
868
869  return build_int_cst_wide (type, quol, quoh);
870}
871
872/* Return true if the built-in mathematical function specified by CODE
873   is odd, i.e. -f(x) == f(-x).  */
874
875static bool
876negate_mathfn_p (enum built_in_function code)
877{
878  switch (code)
879    {
880    case BUILT_IN_ASIN:
881    case BUILT_IN_ASINF:
882    case BUILT_IN_ASINL:
883    case BUILT_IN_ATAN:
884    case BUILT_IN_ATANF:
885    case BUILT_IN_ATANL:
886    case BUILT_IN_SIN:
887    case BUILT_IN_SINF:
888    case BUILT_IN_SINL:
889    case BUILT_IN_TAN:
890    case BUILT_IN_TANF:
891    case BUILT_IN_TANL:
892      return true;
893
894    default:
895      break;
896    }
897  return false;
898}
899
900/* Check whether we may negate an integer constant T without causing
901   overflow.  */
902
903bool
904may_negate_without_overflow_p (tree t)
905{
906  unsigned HOST_WIDE_INT val;
907  unsigned int prec;
908  tree type;
909
910  gcc_assert (TREE_CODE (t) == INTEGER_CST);
911
912  type = TREE_TYPE (t);
913  if (TYPE_UNSIGNED (type))
914    return false;
915
916  prec = TYPE_PRECISION (type);
917  if (prec > HOST_BITS_PER_WIDE_INT)
918    {
919      if (TREE_INT_CST_LOW (t) != 0)
920	return true;
921      prec -= HOST_BITS_PER_WIDE_INT;
922      val = TREE_INT_CST_HIGH (t);
923    }
924  else
925    val = TREE_INT_CST_LOW (t);
926  if (prec < HOST_BITS_PER_WIDE_INT)
927    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
928  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
929}
930
931/* Determine whether an expression T can be cheaply negated using
932   the function negate_expr.  */
933
934static bool
935negate_expr_p (tree t)
936{
937  tree type;
938
939  if (t == 0)
940    return false;
941
942  type = TREE_TYPE (t);
943
944  STRIP_SIGN_NOPS (t);
945  switch (TREE_CODE (t))
946    {
947    case INTEGER_CST:
948      if (TYPE_UNSIGNED (type) || ! flag_trapv)
949	return true;
950
951      /* Check that -CST will not overflow type.  */
952      return may_negate_without_overflow_p (t);
953
954    case REAL_CST:
955    case NEGATE_EXPR:
956      return true;
957
958    case COMPLEX_CST:
959      return negate_expr_p (TREE_REALPART (t))
960	     && negate_expr_p (TREE_IMAGPART (t));
961
962    case PLUS_EXPR:
963      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
964	return false;
965      /* -(A + B) -> (-B) - A.  */
966      if (negate_expr_p (TREE_OPERAND (t, 1))
967	  && reorder_operands_p (TREE_OPERAND (t, 0),
968				 TREE_OPERAND (t, 1)))
969	return true;
970      /* -(A + B) -> (-A) - B.  */
971      return negate_expr_p (TREE_OPERAND (t, 0));
972
973    case MINUS_EXPR:
974      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
975      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
976	     && reorder_operands_p (TREE_OPERAND (t, 0),
977				    TREE_OPERAND (t, 1));
978
979    case MULT_EXPR:
980      if (TYPE_UNSIGNED (TREE_TYPE (t)))
981        break;
982
983      /* Fall through.  */
984
985    case RDIV_EXPR:
986      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
987	return negate_expr_p (TREE_OPERAND (t, 1))
988	       || negate_expr_p (TREE_OPERAND (t, 0));
989      break;
990
991    case NOP_EXPR:
992      /* Negate -((double)float) as (double)(-float).  */
993      if (TREE_CODE (type) == REAL_TYPE)
994	{
995	  tree tem = strip_float_extensions (t);
996	  if (tem != t)
997	    return negate_expr_p (tem);
998	}
999      break;
1000
1001    case CALL_EXPR:
1002      /* Negate -f(x) as f(-x).  */
1003      if (negate_mathfn_p (builtin_mathfn_code (t)))
1004	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
1005      break;
1006
1007    case RSHIFT_EXPR:
1008      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
1009      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1010	{
1011	  tree op1 = TREE_OPERAND (t, 1);
1012	  if (TREE_INT_CST_HIGH (op1) == 0
1013	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1014		 == TREE_INT_CST_LOW (op1))
1015	    return true;
1016	}
1017      break;
1018
1019    default:
1020      break;
1021    }
1022  return false;
1023}
1024
1025/* Given T, an expression, return the negation of T.  Allow for T to be
1026   null, in which case return null.  */
1027
1028static tree
1029negate_expr (tree t)
1030{
1031  tree type;
1032  tree tem;
1033
1034  if (t == 0)
1035    return 0;
1036
1037  type = TREE_TYPE (t);
1038  STRIP_SIGN_NOPS (t);
1039
1040  switch (TREE_CODE (t))
1041    {
1042    case INTEGER_CST:
1043      tem = fold_negate_const (t, type);
1044      if (! TREE_OVERFLOW (tem)
1045	  || TYPE_UNSIGNED (type)
1046	  || ! flag_trapv)
1047	return tem;
1048      return build1 (NEGATE_EXPR, type, t);
1049
1050    case REAL_CST:
1051      tem = fold_negate_const (t, type);
1052      /* Two's complement FP formats, such as c4x, may overflow.  */
1053      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
1054	return fold_convert (type, tem);
1055      return build1 (NEGATE_EXPR, type, t);
1056
1057    case COMPLEX_CST:
1058      {
1059	tree rpart = negate_expr (TREE_REALPART (t));
1060	tree ipart = negate_expr (TREE_IMAGPART (t));
1061
1062	if ((TREE_CODE (rpart) == REAL_CST
1063	     && TREE_CODE (ipart) == REAL_CST)
1064	    || (TREE_CODE (rpart) == INTEGER_CST
1065		&& TREE_CODE (ipart) == INTEGER_CST))
1066	  return build_complex (type, rpart, ipart);
1067      }
1068      break;
1069
1070    case NEGATE_EXPR:
1071      return fold_convert (type, TREE_OPERAND (t, 0));
1072
1073    case PLUS_EXPR:
1074      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1075	{
1076	  /* -(A + B) -> (-B) - A.  */
1077	  if (negate_expr_p (TREE_OPERAND (t, 1))
1078	      && reorder_operands_p (TREE_OPERAND (t, 0),
1079				     TREE_OPERAND (t, 1)))
1080	    {
1081	      tem = negate_expr (TREE_OPERAND (t, 1));
1082	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1083				 tem, TREE_OPERAND (t, 0));
1084	      return fold_convert (type, tem);
1085	    }
1086
1087	  /* -(A + B) -> (-A) - B.  */
1088	  if (negate_expr_p (TREE_OPERAND (t, 0)))
1089	    {
1090	      tem = negate_expr (TREE_OPERAND (t, 0));
1091	      tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1092				 tem, TREE_OPERAND (t, 1));
1093	      return fold_convert (type, tem);
1094	    }
1095	}
1096      break;
1097
1098    case MINUS_EXPR:
1099      /* - (A - B) -> B - A  */
1100      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
1101	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
1102	return fold_convert (type,
1103			     fold_build2 (MINUS_EXPR, TREE_TYPE (t),
1104					  TREE_OPERAND (t, 1),
1105					  TREE_OPERAND (t, 0)));
1106      break;
1107
1108    case MULT_EXPR:
1109      if (TYPE_UNSIGNED (TREE_TYPE (t)))
1110        break;
1111
1112      /* Fall through.  */
1113
1114    case RDIV_EXPR:
1115      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
1116	{
1117	  tem = TREE_OPERAND (t, 1);
1118	  if (negate_expr_p (tem))
1119	    return fold_convert (type,
1120				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1121					      TREE_OPERAND (t, 0),
1122					      negate_expr (tem)));
1123	  tem = TREE_OPERAND (t, 0);
1124	  if (negate_expr_p (tem))
1125	    return fold_convert (type,
1126				 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
1127					      negate_expr (tem),
1128					      TREE_OPERAND (t, 1)));
1129	}
1130      break;
1131
1132    case NOP_EXPR:
1133      /* Convert -((double)float) into (double)(-float).  */
1134      if (TREE_CODE (type) == REAL_TYPE)
1135	{
1136	  tem = strip_float_extensions (t);
1137	  if (tem != t && negate_expr_p (tem))
1138	    return fold_convert (type, negate_expr (tem));
1139	}
1140      break;
1141
1142    case CALL_EXPR:
1143      /* Negate -f(x) as f(-x).  */
1144      if (negate_mathfn_p (builtin_mathfn_code (t))
1145	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
1146	{
1147	  tree fndecl, arg, arglist;
1148
1149	  fndecl = get_callee_fndecl (t);
1150	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
1151	  arglist = build_tree_list (NULL_TREE, arg);
1152	  return build_function_call_expr (fndecl, arglist);
1153	}
1154      break;
1155
1156    case RSHIFT_EXPR:
1157      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
1158      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
1159	{
1160	  tree op1 = TREE_OPERAND (t, 1);
1161	  if (TREE_INT_CST_HIGH (op1) == 0
1162	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
1163		 == TREE_INT_CST_LOW (op1))
1164	    {
1165	      tree ntype = TYPE_UNSIGNED (type)
1166			   ? lang_hooks.types.signed_type (type)
1167			   : lang_hooks.types.unsigned_type (type);
1168	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
1169	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
1170	      return fold_convert (type, temp);
1171	    }
1172	}
1173      break;
1174
1175    default:
1176      break;
1177    }
1178
1179  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1180  return fold_convert (type, tem);
1181}
1182
1183/* Split a tree IN into a constant, literal and variable parts that could be
1184   combined with CODE to make IN.  "constant" means an expression with
1185   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
1186   commutative arithmetic operation.  Store the constant part into *CONP,
1187   the literal in *LITP and return the variable part.  If a part isn't
1188   present, set it to null.  If the tree does not decompose in this way,
1189   return the entire tree as the variable part and the other parts as null.
1190
1191   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
1192   case, we negate an operand that was subtracted.  Except if it is a
1193   literal for which we use *MINUS_LITP instead.
1194
1195   If NEGATE_P is true, we are negating all of IN, again except a literal
1196   for which we use *MINUS_LITP instead.
1197
1198   If IN is itself a literal or constant, return it as appropriate.
1199
1200   Note that we do not guarantee that any of the three values will be the
1201   same type as IN, but they will have the same signedness and mode.  */
1202
1203static tree
1204split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
1205	    tree *minus_litp, int negate_p)
1206{
1207  tree var = 0;
1208
1209  *conp = 0;
1210  *litp = 0;
1211  *minus_litp = 0;
1212
1213  /* Strip any conversions that don't change the machine mode or signedness.  */
1214  STRIP_SIGN_NOPS (in);
1215
1216  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
1217    *litp = in;
1218  else if (TREE_CODE (in) == code
1219	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
1220	       /* We can associate addition and subtraction together (even
1221		  though the C standard doesn't say so) for integers because
1222		  the value is not affected.  For reals, the value might be
1223		  affected, so we can't.  */
1224	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
1225		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
1226    {
1227      tree op0 = TREE_OPERAND (in, 0);
1228      tree op1 = TREE_OPERAND (in, 1);
1229      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
1230      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
1231
1232      /* First see if either of the operands is a literal, then a constant.  */
1233      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
1234	*litp = op0, op0 = 0;
1235      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
1236	*litp = op1, neg_litp_p = neg1_p, op1 = 0;
1237
1238      if (op0 != 0 && TREE_CONSTANT (op0))
1239	*conp = op0, op0 = 0;
1240      else if (op1 != 0 && TREE_CONSTANT (op1))
1241	*conp = op1, neg_conp_p = neg1_p, op1 = 0;
1242
1243      /* If we haven't dealt with either operand, this is not a case we can
1244	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
1245      if (op0 != 0 && op1 != 0)
1246	var = in;
1247      else if (op0 != 0)
1248	var = op0;
1249      else
1250	var = op1, neg_var_p = neg1_p;
1251
1252      /* Now do any needed negations.  */
1253      if (neg_litp_p)
1254	*minus_litp = *litp, *litp = 0;
1255      if (neg_conp_p)
1256	*conp = negate_expr (*conp);
1257      if (neg_var_p)
1258	var = negate_expr (var);
1259    }
1260  else if (TREE_CONSTANT (in))
1261    *conp = in;
1262  else
1263    var = in;
1264
1265  if (negate_p)
1266    {
1267      if (*litp)
1268	*minus_litp = *litp, *litp = 0;
1269      else if (*minus_litp)
1270	*litp = *minus_litp, *minus_litp = 0;
1271      *conp = negate_expr (*conp);
1272      var = negate_expr (var);
1273    }
1274
1275  return var;
1276}
1277
1278/* Re-associate trees split by the above function.  T1 and T2 are either
1279   expressions to associate or null.  Return the new expression, if any.  If
1280   we build an operation, do it in TYPE and with CODE.  */
1281
1282static tree
1283associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1284{
1285  if (t1 == 0)
1286    return t2;
1287  else if (t2 == 0)
1288    return t1;
1289
1290  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1291     try to fold this since we will have infinite recursion.  But do
1292     deal with any NEGATE_EXPRs.  */
1293  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1294      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1295    {
1296      if (code == PLUS_EXPR)
1297	{
1298	  if (TREE_CODE (t1) == NEGATE_EXPR)
1299	    return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1300			   fold_convert (type, TREE_OPERAND (t1, 0)));
1301	  else if (TREE_CODE (t2) == NEGATE_EXPR)
1302	    return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1303			   fold_convert (type, TREE_OPERAND (t2, 0)));
1304	  else if (integer_zerop (t2))
1305	    return fold_convert (type, t1);
1306	}
1307      else if (code == MINUS_EXPR)
1308	{
1309	  if (integer_zerop (t2))
1310	    return fold_convert (type, t1);
1311	}
1312
1313      return build2 (code, type, fold_convert (type, t1),
1314		     fold_convert (type, t2));
1315    }
1316
1317  return fold_build2 (code, type, fold_convert (type, t1),
1318		      fold_convert (type, t2));
1319}
1320
1321/* Combine two integer constants ARG1 and ARG2 under operation CODE
1322   to produce a new constant.
1323
1324   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1325
1326tree
1327int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1328{
1329  unsigned HOST_WIDE_INT int1l, int2l;
1330  HOST_WIDE_INT int1h, int2h;
1331  unsigned HOST_WIDE_INT low;
1332  HOST_WIDE_INT hi;
1333  unsigned HOST_WIDE_INT garbagel;
1334  HOST_WIDE_INT garbageh;
1335  tree t;
1336  tree type = TREE_TYPE (arg1);
1337  int uns = TYPE_UNSIGNED (type);
1338  int is_sizetype
1339    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1340  int overflow = 0;
1341
1342  int1l = TREE_INT_CST_LOW (arg1);
1343  int1h = TREE_INT_CST_HIGH (arg1);
1344  int2l = TREE_INT_CST_LOW (arg2);
1345  int2h = TREE_INT_CST_HIGH (arg2);
1346
1347  switch (code)
1348    {
1349    case BIT_IOR_EXPR:
1350      low = int1l | int2l, hi = int1h | int2h;
1351      break;
1352
1353    case BIT_XOR_EXPR:
1354      low = int1l ^ int2l, hi = int1h ^ int2h;
1355      break;
1356
1357    case BIT_AND_EXPR:
1358      low = int1l & int2l, hi = int1h & int2h;
1359      break;
1360
1361    case RSHIFT_EXPR:
1362      int2l = -int2l;
1363    case LSHIFT_EXPR:
1364      /* It's unclear from the C standard whether shifts can overflow.
1365	 The following code ignores overflow; perhaps a C standard
1366	 interpretation ruling is needed.  */
1367      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1368		     &low, &hi, !uns);
1369      break;
1370
1371    case RROTATE_EXPR:
1372      int2l = - int2l;
1373    case LROTATE_EXPR:
1374      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1375		      &low, &hi);
1376      break;
1377
1378    case PLUS_EXPR:
1379      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1380      break;
1381
1382    case MINUS_EXPR:
1383      neg_double (int2l, int2h, &low, &hi);
1384      add_double (int1l, int1h, low, hi, &low, &hi);
1385      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1386      break;
1387
1388    case MULT_EXPR:
1389      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1390      break;
1391
1392    case TRUNC_DIV_EXPR:
1393    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1394    case EXACT_DIV_EXPR:
1395      /* This is a shortcut for a common special case.  */
1396      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1397	  && ! TREE_CONSTANT_OVERFLOW (arg1)
1398	  && ! TREE_CONSTANT_OVERFLOW (arg2)
1399	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1400	{
1401	  if (code == CEIL_DIV_EXPR)
1402	    int1l += int2l - 1;
1403
1404	  low = int1l / int2l, hi = 0;
1405	  break;
1406	}
1407
1408      /* ... fall through ...  */
1409
1410    case ROUND_DIV_EXPR:
1411      if (int2h == 0 && int2l == 1)
1412	{
1413	  low = int1l, hi = int1h;
1414	  break;
1415	}
1416      if (int1l == int2l && int1h == int2h
1417	  && ! (int1l == 0 && int1h == 0))
1418	{
1419	  low = 1, hi = 0;
1420	  break;
1421	}
1422      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1423				       &low, &hi, &garbagel, &garbageh);
1424      break;
1425
1426    case TRUNC_MOD_EXPR:
1427    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1428      /* This is a shortcut for a common special case.  */
1429      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1430	  && ! TREE_CONSTANT_OVERFLOW (arg1)
1431	  && ! TREE_CONSTANT_OVERFLOW (arg2)
1432	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1433	{
1434	  if (code == CEIL_MOD_EXPR)
1435	    int1l += int2l - 1;
1436	  low = int1l % int2l, hi = 0;
1437	  break;
1438	}
1439
1440      /* ... fall through ...  */
1441
1442    case ROUND_MOD_EXPR:
1443      overflow = div_and_round_double (code, uns,
1444				       int1l, int1h, int2l, int2h,
1445				       &garbagel, &garbageh, &low, &hi);
1446      break;
1447
1448    case MIN_EXPR:
1449    case MAX_EXPR:
1450      if (uns)
1451	low = (((unsigned HOST_WIDE_INT) int1h
1452		< (unsigned HOST_WIDE_INT) int2h)
1453	       || (((unsigned HOST_WIDE_INT) int1h
1454		    == (unsigned HOST_WIDE_INT) int2h)
1455		   && int1l < int2l));
1456      else
1457	low = (int1h < int2h
1458	       || (int1h == int2h && int1l < int2l));
1459
1460      if (low == (code == MIN_EXPR))
1461	low = int1l, hi = int1h;
1462      else
1463	low = int2l, hi = int2h;
1464      break;
1465
1466    default:
1467      gcc_unreachable ();
1468    }
1469
1470  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1471
1472  if (notrunc)
1473    {
1474      /* Propagate overflow flags ourselves.  */
1475      if (((!uns || is_sizetype) && overflow)
1476	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1477	{
1478	  t = copy_node (t);
1479	  TREE_OVERFLOW (t) = 1;
1480	  TREE_CONSTANT_OVERFLOW (t) = 1;
1481	}
1482      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
1483	{
1484	  t = copy_node (t);
1485	  TREE_CONSTANT_OVERFLOW (t) = 1;
1486	}
1487    }
1488  else
1489    t = force_fit_type (t, 1,
1490			((!uns || is_sizetype) && overflow)
1491			| TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
1492			TREE_CONSTANT_OVERFLOW (arg1)
1493			| TREE_CONSTANT_OVERFLOW (arg2));
1494
1495  return t;
1496}
1497
1498/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1499   constant.  We assume ARG1 and ARG2 have the same data type, or at least
1500   are the same kind of constant and the same machine mode.  Return zero if
1501   combining the constants is not allowed in the current operating mode.
1502
1503   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1504
1505static tree
1506const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1507{
1508  /* Sanity check for the recursive cases.  */
1509  if (!arg1 || !arg2)
1510    return NULL_TREE;
1511
1512  STRIP_NOPS (arg1);
1513  STRIP_NOPS (arg2);
1514
1515  if (TREE_CODE (arg1) == INTEGER_CST)
1516    return int_const_binop (code, arg1, arg2, notrunc);
1517
1518  if (TREE_CODE (arg1) == REAL_CST)
1519    {
1520      enum machine_mode mode;
1521      REAL_VALUE_TYPE d1;
1522      REAL_VALUE_TYPE d2;
1523      REAL_VALUE_TYPE value;
1524      REAL_VALUE_TYPE result;
1525      bool inexact;
1526      tree t, type;
1527
1528      d1 = TREE_REAL_CST (arg1);
1529      d2 = TREE_REAL_CST (arg2);
1530
1531      type = TREE_TYPE (arg1);
1532      mode = TYPE_MODE (type);
1533
1534      /* Don't perform operation if we honor signaling NaNs and
1535	 either operand is a NaN.  */
1536      if (HONOR_SNANS (mode)
1537	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1538	return NULL_TREE;
1539
1540      /* Don't perform operation if it would raise a division
1541	 by zero exception.  */
1542      if (code == RDIV_EXPR
1543	  && REAL_VALUES_EQUAL (d2, dconst0)
1544	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1545	return NULL_TREE;
1546
1547      /* If either operand is a NaN, just return it.  Otherwise, set up
1548	 for floating-point trap; we return an overflow.  */
1549      if (REAL_VALUE_ISNAN (d1))
1550	return arg1;
1551      else if (REAL_VALUE_ISNAN (d2))
1552	return arg2;
1553
1554      inexact = real_arithmetic (&value, code, &d1, &d2);
1555      real_convert (&result, mode, &value);
1556
1557      /* Don't constant fold this floating point operation if
1558	 the result has overflowed and flag_trapping_math.  */
1559      if (flag_trapping_math
1560	  && MODE_HAS_INFINITIES (mode)
1561	  && REAL_VALUE_ISINF (result)
1562	  && !REAL_VALUE_ISINF (d1)
1563	  && !REAL_VALUE_ISINF (d2))
1564	return NULL_TREE;
1565
1566      /* Don't constant fold this floating point operation if the
1567	 result may dependent upon the run-time rounding mode and
1568	 flag_rounding_math is set, or if GCC's software emulation
1569	 is unable to accurately represent the result.  */
1570      if ((flag_rounding_math
1571	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
1572	       && !flag_unsafe_math_optimizations))
1573	  && (inexact || !real_identical (&result, &value)))
1574	return NULL_TREE;
1575
1576      t = build_real (type, result);
1577
1578      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1579      TREE_CONSTANT_OVERFLOW (t)
1580	= TREE_OVERFLOW (t)
1581	  | TREE_CONSTANT_OVERFLOW (arg1)
1582	  | TREE_CONSTANT_OVERFLOW (arg2);
1583      return t;
1584    }
1585
1586  if (TREE_CODE (arg1) == COMPLEX_CST)
1587    {
1588      tree type = TREE_TYPE (arg1);
1589      tree r1 = TREE_REALPART (arg1);
1590      tree i1 = TREE_IMAGPART (arg1);
1591      tree r2 = TREE_REALPART (arg2);
1592      tree i2 = TREE_IMAGPART (arg2);
1593      tree real, imag;
1594
1595      switch (code)
1596	{
1597	case PLUS_EXPR:
1598	case MINUS_EXPR:
1599	  real = const_binop (code, r1, r2, notrunc);
1600	  imag = const_binop (code, i1, i2, notrunc);
1601	  break;
1602
1603	case MULT_EXPR:
1604	  real = const_binop (MINUS_EXPR,
1605			      const_binop (MULT_EXPR, r1, r2, notrunc),
1606			      const_binop (MULT_EXPR, i1, i2, notrunc),
1607			      notrunc);
1608	  imag = const_binop (PLUS_EXPR,
1609			      const_binop (MULT_EXPR, r1, i2, notrunc),
1610			      const_binop (MULT_EXPR, i1, r2, notrunc),
1611			      notrunc);
1612	  break;
1613
1614	case RDIV_EXPR:
1615	  {
1616	    tree magsquared
1617	      = const_binop (PLUS_EXPR,
1618			     const_binop (MULT_EXPR, r2, r2, notrunc),
1619			     const_binop (MULT_EXPR, i2, i2, notrunc),
1620			     notrunc);
1621	    tree t1
1622	      = const_binop (PLUS_EXPR,
1623			     const_binop (MULT_EXPR, r1, r2, notrunc),
1624			     const_binop (MULT_EXPR, i1, i2, notrunc),
1625			     notrunc);
1626	    tree t2
1627	      = const_binop (MINUS_EXPR,
1628			     const_binop (MULT_EXPR, i1, r2, notrunc),
1629			     const_binop (MULT_EXPR, r1, i2, notrunc),
1630			     notrunc);
1631
1632	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
1633	      code = TRUNC_DIV_EXPR;
1634
1635	    real = const_binop (code, t1, magsquared, notrunc);
1636	    imag = const_binop (code, t2, magsquared, notrunc);
1637	  }
1638	  break;
1639
1640	default:
1641	  gcc_unreachable ();
1642	}
1643
1644      if (real && imag)
1645	return build_complex (type, real, imag);
1646    }
1647
1648  return NULL_TREE;
1649}
1650
1651/* Create a size type INT_CST node with NUMBER sign extended.  KIND
1652   indicates which particular sizetype to create.  */
1653
1654tree
1655size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1656{
1657  return build_int_cst (sizetype_tab[(int) kind], number);
1658}
1659
1660/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
1661   is a tree code.  The type of the result is taken from the operands.
1662   Both must be the same type integer type and it must be a size type.
1663   If the operands are constant, so is the result.  */
1664
1665tree
1666size_binop (enum tree_code code, tree arg0, tree arg1)
1667{
1668  tree type = TREE_TYPE (arg0);
1669
1670  if (arg0 == error_mark_node || arg1 == error_mark_node)
1671    return error_mark_node;
1672
1673  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1674	      && type == TREE_TYPE (arg1));
1675
1676  /* Handle the special case of two integer constants faster.  */
1677  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1678    {
1679      /* And some specific cases even faster than that.  */
1680      if (code == PLUS_EXPR && integer_zerop (arg0))
1681	return arg1;
1682      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1683	       && integer_zerop (arg1))
1684	return arg0;
1685      else if (code == MULT_EXPR && integer_onep (arg0))
1686	return arg1;
1687
1688      /* Handle general case of two integer constants.  */
1689      return int_const_binop (code, arg0, arg1, 0);
1690    }
1691
1692  return fold_build2 (code, type, arg0, arg1);
1693}
1694
1695/* Given two values, either both of sizetype or both of bitsizetype,
1696   compute the difference between the two values.  Return the value
1697   in signed type corresponding to the type of the operands.  */
1698
1699tree
1700size_diffop (tree arg0, tree arg1)
1701{
1702  tree type = TREE_TYPE (arg0);
1703  tree ctype;
1704
1705  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
1706	      && type == TREE_TYPE (arg1));
1707
1708  /* If the type is already signed, just do the simple thing.  */
1709  if (!TYPE_UNSIGNED (type))
1710    return size_binop (MINUS_EXPR, arg0, arg1);
1711
1712  ctype = type == bitsizetype ? sbitsizetype : ssizetype;
1713
1714  /* If either operand is not a constant, do the conversions to the signed
1715     type and subtract.  The hardware will do the right thing with any
1716     overflow in the subtraction.  */
1717  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1718    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1719		       fold_convert (ctype, arg1));
1720
1721  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1722     Otherwise, subtract the other way, convert to CTYPE (we know that can't
1723     overflow) and negate (which can't either).  Special-case a result
1724     of zero while we're here.  */
1725  if (tree_int_cst_equal (arg0, arg1))
1726    return fold_convert (ctype, integer_zero_node);
1727  else if (tree_int_cst_lt (arg1, arg0))
1728    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1729  else
1730    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
1731		       fold_convert (ctype, size_binop (MINUS_EXPR,
1732							arg1, arg0)));
1733}
1734
1735/* A subroutine of fold_convert_const handling conversions of an
1736   INTEGER_CST to another integer type.  */
1737
1738static tree
1739fold_convert_const_int_from_int (tree type, tree arg1)
1740{
1741  tree t;
1742
1743  /* Given an integer constant, make new constant with new type,
1744     appropriately sign-extended or truncated.  */
1745  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
1746			  TREE_INT_CST_HIGH (arg1));
1747
1748  t = force_fit_type (t,
1749		      /* Don't set the overflow when
1750		      	 converting a pointer  */
1751		      !POINTER_TYPE_P (TREE_TYPE (arg1)),
1752		      (TREE_INT_CST_HIGH (arg1) < 0
1753		       && (TYPE_UNSIGNED (type)
1754			   < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1755		      | TREE_OVERFLOW (arg1),
1756		      TREE_CONSTANT_OVERFLOW (arg1));
1757
1758  return t;
1759}
1760
1761/* A subroutine of fold_convert_const handling conversions a REAL_CST
1762   to an integer type.  */
1763
1764static tree
1765fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
1766{
1767  int overflow = 0;
1768  tree t;
1769
1770  /* The following code implements the floating point to integer
1771     conversion rules required by the Java Language Specification,
1772     that IEEE NaNs are mapped to zero and values that overflow
1773     the target precision saturate, i.e. values greater than
1774     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1775     are mapped to INT_MIN.  These semantics are allowed by the
1776     C and C++ standards that simply state that the behavior of
1777     FP-to-integer conversion is unspecified upon overflow.  */
1778
1779  HOST_WIDE_INT high, low;
1780  REAL_VALUE_TYPE r;
1781  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1782
1783  switch (code)
1784    {
1785    case FIX_TRUNC_EXPR:
1786      real_trunc (&r, VOIDmode, &x);
1787      break;
1788
1789    case FIX_CEIL_EXPR:
1790      real_ceil (&r, VOIDmode, &x);
1791      break;
1792
1793    case FIX_FLOOR_EXPR:
1794      real_floor (&r, VOIDmode, &x);
1795      break;
1796
1797    case FIX_ROUND_EXPR:
1798      real_round (&r, VOIDmode, &x);
1799      break;
1800
1801    default:
1802      gcc_unreachable ();
1803    }
1804
1805  /* If R is NaN, return zero and show we have an overflow.  */
1806  if (REAL_VALUE_ISNAN (r))
1807    {
1808      overflow = 1;
1809      high = 0;
1810      low = 0;
1811    }
1812
1813  /* See if R is less than the lower bound or greater than the
1814     upper bound.  */
1815
1816  if (! overflow)
1817    {
1818      tree lt = TYPE_MIN_VALUE (type);
1819      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1820      if (REAL_VALUES_LESS (r, l))
1821	{
1822	  overflow = 1;
1823	  high = TREE_INT_CST_HIGH (lt);
1824	  low = TREE_INT_CST_LOW (lt);
1825	}
1826    }
1827
1828  if (! overflow)
1829    {
1830      tree ut = TYPE_MAX_VALUE (type);
1831      if (ut)
1832	{
1833	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1834	  if (REAL_VALUES_LESS (u, r))
1835	    {
1836	      overflow = 1;
1837	      high = TREE_INT_CST_HIGH (ut);
1838	      low = TREE_INT_CST_LOW (ut);
1839	    }
1840	}
1841    }
1842
1843  if (! overflow)
1844    REAL_VALUE_TO_INT (&low, &high, r);
1845
1846  t = build_int_cst_wide (type, low, high);
1847
1848  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
1849		      TREE_CONSTANT_OVERFLOW (arg1));
1850  return t;
1851}
1852
1853/* A subroutine of fold_convert_const handling conversions a REAL_CST
1854   to another floating point type.  */
1855
1856static tree
1857fold_convert_const_real_from_real (tree type, tree arg1)
1858{
1859  REAL_VALUE_TYPE value;
1860  tree t;
1861
1862  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1863  t = build_real (type, value);
1864
1865  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1866  TREE_CONSTANT_OVERFLOW (t)
1867    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1868  return t;
1869}
1870
1871/* Attempt to fold type conversion operation CODE of expression ARG1 to
1872   type TYPE.  If no simplification can be done return NULL_TREE.  */
1873
1874static tree
1875fold_convert_const (enum tree_code code, tree type, tree arg1)
1876{
1877  if (TREE_TYPE (arg1) == type)
1878    return arg1;
1879
1880  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1881    {
1882      if (TREE_CODE (arg1) == INTEGER_CST)
1883	return fold_convert_const_int_from_int (type, arg1);
1884      else if (TREE_CODE (arg1) == REAL_CST)
1885	return fold_convert_const_int_from_real (code, type, arg1);
1886    }
1887  else if (TREE_CODE (type) == REAL_TYPE)
1888    {
1889      if (TREE_CODE (arg1) == INTEGER_CST)
1890	return build_real_from_int_cst (type, arg1);
1891      if (TREE_CODE (arg1) == REAL_CST)
1892	return fold_convert_const_real_from_real (type, arg1);
1893    }
1894  return NULL_TREE;
1895}
1896
1897/* Construct a vector of zero elements of vector type TYPE.  */
1898
1899static tree
1900build_zero_vector (tree type)
1901{
1902  tree elem, list;
1903  int i, units;
1904
1905  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1906  units = TYPE_VECTOR_SUBPARTS (type);
1907
1908  list = NULL_TREE;
1909  for (i = 0; i < units; i++)
1910    list = tree_cons (NULL_TREE, elem, list);
1911  return build_vector (type, list);
1912}
1913
1914/* Convert expression ARG to type TYPE.  Used by the middle-end for
1915   simple conversions in preference to calling the front-end's convert.  */
1916
1917tree
1918fold_convert (tree type, tree arg)
1919{
1920  tree orig = TREE_TYPE (arg);
1921  tree tem;
1922
1923  if (type == orig)
1924    return arg;
1925
1926  if (TREE_CODE (arg) == ERROR_MARK
1927      || TREE_CODE (type) == ERROR_MARK
1928      || TREE_CODE (orig) == ERROR_MARK)
1929    return error_mark_node;
1930
1931  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1932      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1933					TYPE_MAIN_VARIANT (orig)))
1934    return fold_build1 (NOP_EXPR, type, arg);
1935
1936  switch (TREE_CODE (type))
1937    {
1938    case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1939    case POINTER_TYPE: case REFERENCE_TYPE:
1940    case OFFSET_TYPE:
1941      if (TREE_CODE (arg) == INTEGER_CST)
1942	{
1943	  tem = fold_convert_const (NOP_EXPR, type, arg);
1944	  if (tem != NULL_TREE)
1945	    return tem;
1946	}
1947      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1948	  || TREE_CODE (orig) == OFFSET_TYPE)
1949        return fold_build1 (NOP_EXPR, type, arg);
1950      if (TREE_CODE (orig) == COMPLEX_TYPE)
1951	{
1952	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1953	  return fold_convert (type, tem);
1954	}
1955      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1956		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1957      return fold_build1 (NOP_EXPR, type, arg);
1958
1959    case REAL_TYPE:
1960      if (TREE_CODE (arg) == INTEGER_CST)
1961	{
1962	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
1963	  if (tem != NULL_TREE)
1964	    return tem;
1965	}
1966      else if (TREE_CODE (arg) == REAL_CST)
1967	{
1968	  tem = fold_convert_const (NOP_EXPR, type, arg);
1969	  if (tem != NULL_TREE)
1970	    return tem;
1971	}
1972
1973      switch (TREE_CODE (orig))
1974	{
1975	case INTEGER_TYPE: case CHAR_TYPE:
1976	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1977	case POINTER_TYPE: case REFERENCE_TYPE:
1978	  return fold_build1 (FLOAT_EXPR, type, arg);
1979
1980	case REAL_TYPE:
1981	  return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1982			      type, arg);
1983
1984	case COMPLEX_TYPE:
1985	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1986	  return fold_convert (type, tem);
1987
1988	default:
1989	  gcc_unreachable ();
1990	}
1991
1992    case COMPLEX_TYPE:
1993      switch (TREE_CODE (orig))
1994	{
1995	case INTEGER_TYPE: case CHAR_TYPE:
1996	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1997	case POINTER_TYPE: case REFERENCE_TYPE:
1998	case REAL_TYPE:
1999	  return build2 (COMPLEX_EXPR, type,
2000			 fold_convert (TREE_TYPE (type), arg),
2001			 fold_convert (TREE_TYPE (type), integer_zero_node));
2002	case COMPLEX_TYPE:
2003	  {
2004	    tree rpart, ipart;
2005
2006	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2007	      {
2008		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2009		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2010		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2011	      }
2012
2013	    arg = save_expr (arg);
2014	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2015	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2016	    rpart = fold_convert (TREE_TYPE (type), rpart);
2017	    ipart = fold_convert (TREE_TYPE (type), ipart);
2018	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2019	  }
2020
2021	default:
2022	  gcc_unreachable ();
2023	}
2024
2025    case VECTOR_TYPE:
2026      if (integer_zerop (arg))
2027	return build_zero_vector (type);
2028      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2029      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2030		  || TREE_CODE (orig) == VECTOR_TYPE);
2031      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2032
2033    case VOID_TYPE:
2034      return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2035
2036    default:
2037      gcc_unreachable ();
2038    }
2039}
2040
2041/* Return false if expr can be assumed not to be an lvalue, true
2042   otherwise.  */
2043
2044static bool
2045maybe_lvalue_p (tree x)
2046{
2047  /* We only need to wrap lvalue tree codes.  */
2048  switch (TREE_CODE (x))
2049  {
2050  case VAR_DECL:
2051  case PARM_DECL:
2052  case RESULT_DECL:
2053  case LABEL_DECL:
2054  case FUNCTION_DECL:
2055  case SSA_NAME:
2056
2057  case COMPONENT_REF:
2058  case INDIRECT_REF:
2059  case ALIGN_INDIRECT_REF:
2060  case MISALIGNED_INDIRECT_REF:
2061  case ARRAY_REF:
2062  case ARRAY_RANGE_REF:
2063  case BIT_FIELD_REF:
2064  case OBJ_TYPE_REF:
2065
2066  case REALPART_EXPR:
2067  case IMAGPART_EXPR:
2068  case PREINCREMENT_EXPR:
2069  case PREDECREMENT_EXPR:
2070  case SAVE_EXPR:
2071  case TRY_CATCH_EXPR:
2072  case WITH_CLEANUP_EXPR:
2073  case COMPOUND_EXPR:
2074  case MODIFY_EXPR:
2075  case TARGET_EXPR:
2076  case COND_EXPR:
2077  case BIND_EXPR:
2078  case MIN_EXPR:
2079  case MAX_EXPR:
2080    break;
2081
2082  default:
2083    /* Assume the worst for front-end tree codes.  */
2084    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2085      break;
2086    return false;
2087  }
2088
2089  return true;
2090}
2091
2092/* Return an expr equal to X but certainly not valid as an lvalue.  */
2093
2094tree
2095non_lvalue (tree x)
2096{
2097  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2098     us.  */
2099  if (in_gimple_form)
2100    return x;
2101
2102  if (! maybe_lvalue_p (x))
2103    return x;
2104  return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2105}
2106
2107/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2108   Zero means allow extended lvalues.  */
2109
2110int pedantic_lvalues;
2111
2112/* When pedantic, return an expr equal to X but certainly not valid as a
2113   pedantic lvalue.  Otherwise, return X.  */
2114
2115static tree
2116pedantic_non_lvalue (tree x)
2117{
2118  if (pedantic_lvalues)
2119    return non_lvalue (x);
2120  else
2121    return x;
2122}
2123
2124/* Given a tree comparison code, return the code that is the logical inverse
2125   of the given code.  It is not safe to do this for floating-point
2126   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2127   as well: if reversing the comparison is unsafe, return ERROR_MARK.  */
2128
2129enum tree_code
2130invert_tree_comparison (enum tree_code code, bool honor_nans)
2131{
2132  if (honor_nans && flag_trapping_math)
2133    return ERROR_MARK;
2134
2135  switch (code)
2136    {
2137    case EQ_EXPR:
2138      return NE_EXPR;
2139    case NE_EXPR:
2140      return EQ_EXPR;
2141    case GT_EXPR:
2142      return honor_nans ? UNLE_EXPR : LE_EXPR;
2143    case GE_EXPR:
2144      return honor_nans ? UNLT_EXPR : LT_EXPR;
2145    case LT_EXPR:
2146      return honor_nans ? UNGE_EXPR : GE_EXPR;
2147    case LE_EXPR:
2148      return honor_nans ? UNGT_EXPR : GT_EXPR;
2149    case LTGT_EXPR:
2150      return UNEQ_EXPR;
2151    case UNEQ_EXPR:
2152      return LTGT_EXPR;
2153    case UNGT_EXPR:
2154      return LE_EXPR;
2155    case UNGE_EXPR:
2156      return LT_EXPR;
2157    case UNLT_EXPR:
2158      return GE_EXPR;
2159    case UNLE_EXPR:
2160      return GT_EXPR;
2161    case ORDERED_EXPR:
2162      return UNORDERED_EXPR;
2163    case UNORDERED_EXPR:
2164      return ORDERED_EXPR;
2165    default:
2166      gcc_unreachable ();
2167    }
2168}
2169
2170/* Similar, but return the comparison that results if the operands are
2171   swapped.  This is safe for floating-point.  */
2172
2173enum tree_code
2174swap_tree_comparison (enum tree_code code)
2175{
2176  switch (code)
2177    {
2178    case EQ_EXPR:
2179    case NE_EXPR:
2180    case ORDERED_EXPR:
2181    case UNORDERED_EXPR:
2182    case LTGT_EXPR:
2183    case UNEQ_EXPR:
2184      return code;
2185    case GT_EXPR:
2186      return LT_EXPR;
2187    case GE_EXPR:
2188      return LE_EXPR;
2189    case LT_EXPR:
2190      return GT_EXPR;
2191    case LE_EXPR:
2192      return GE_EXPR;
2193    case UNGT_EXPR:
2194      return UNLT_EXPR;
2195    case UNGE_EXPR:
2196      return UNLE_EXPR;
2197    case UNLT_EXPR:
2198      return UNGT_EXPR;
2199    case UNLE_EXPR:
2200      return UNGE_EXPR;
2201    default:
2202      gcc_unreachable ();
2203    }
2204}
2205
2206
2207/* Convert a comparison tree code from an enum tree_code representation
2208   into a compcode bit-based encoding.  This function is the inverse of
2209   compcode_to_comparison.  */
2210
2211static enum comparison_code
2212comparison_to_compcode (enum tree_code code)
2213{
2214  switch (code)
2215    {
2216    case LT_EXPR:
2217      return COMPCODE_LT;
2218    case EQ_EXPR:
2219      return COMPCODE_EQ;
2220    case LE_EXPR:
2221      return COMPCODE_LE;
2222    case GT_EXPR:
2223      return COMPCODE_GT;
2224    case NE_EXPR:
2225      return COMPCODE_NE;
2226    case GE_EXPR:
2227      return COMPCODE_GE;
2228    case ORDERED_EXPR:
2229      return COMPCODE_ORD;
2230    case UNORDERED_EXPR:
2231      return COMPCODE_UNORD;
2232    case UNLT_EXPR:
2233      return COMPCODE_UNLT;
2234    case UNEQ_EXPR:
2235      return COMPCODE_UNEQ;
2236    case UNLE_EXPR:
2237      return COMPCODE_UNLE;
2238    case UNGT_EXPR:
2239      return COMPCODE_UNGT;
2240    case LTGT_EXPR:
2241      return COMPCODE_LTGT;
2242    case UNGE_EXPR:
2243      return COMPCODE_UNGE;
2244    default:
2245      gcc_unreachable ();
2246    }
2247}
2248
2249/* Convert a compcode bit-based encoding of a comparison operator back
2250   to GCC's enum tree_code representation.  This function is the
2251   inverse of comparison_to_compcode.  */
2252
2253static enum tree_code
2254compcode_to_comparison (enum comparison_code code)
2255{
2256  switch (code)
2257    {
2258    case COMPCODE_LT:
2259      return LT_EXPR;
2260    case COMPCODE_EQ:
2261      return EQ_EXPR;
2262    case COMPCODE_LE:
2263      return LE_EXPR;
2264    case COMPCODE_GT:
2265      return GT_EXPR;
2266    case COMPCODE_NE:
2267      return NE_EXPR;
2268    case COMPCODE_GE:
2269      return GE_EXPR;
2270    case COMPCODE_ORD:
2271      return ORDERED_EXPR;
2272    case COMPCODE_UNORD:
2273      return UNORDERED_EXPR;
2274    case COMPCODE_UNLT:
2275      return UNLT_EXPR;
2276    case COMPCODE_UNEQ:
2277      return UNEQ_EXPR;
2278    case COMPCODE_UNLE:
2279      return UNLE_EXPR;
2280    case COMPCODE_UNGT:
2281      return UNGT_EXPR;
2282    case COMPCODE_LTGT:
2283      return LTGT_EXPR;
2284    case COMPCODE_UNGE:
2285      return UNGE_EXPR;
2286    default:
2287      gcc_unreachable ();
2288    }
2289}
2290
2291/* Return a tree for the comparison which is the combination of
2292   doing the AND or OR (depending on CODE) of the two operations LCODE
2293   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
2294   the possibility of trapping if the mode has NaNs, and return NULL_TREE
2295   if this makes the transformation invalid.  */
2296
2297tree
2298combine_comparisons (enum tree_code code, enum tree_code lcode,
2299		     enum tree_code rcode, tree truth_type,
2300		     tree ll_arg, tree lr_arg)
2301{
2302  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2303  enum comparison_code lcompcode = comparison_to_compcode (lcode);
2304  enum comparison_code rcompcode = comparison_to_compcode (rcode);
2305  enum comparison_code compcode;
2306
2307  switch (code)
2308    {
2309    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2310      compcode = lcompcode & rcompcode;
2311      break;
2312
2313    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2314      compcode = lcompcode | rcompcode;
2315      break;
2316
2317    default:
2318      return NULL_TREE;
2319    }
2320
2321  if (!honor_nans)
2322    {
2323      /* Eliminate unordered comparisons, as well as LTGT and ORD
2324	 which are not used unless the mode has NaNs.  */
2325      compcode &= ~COMPCODE_UNORD;
2326      if (compcode == COMPCODE_LTGT)
2327	compcode = COMPCODE_NE;
2328      else if (compcode == COMPCODE_ORD)
2329	compcode = COMPCODE_TRUE;
2330    }
2331   else if (flag_trapping_math)
2332     {
2333	/* Check that the original operation and the optimized ones will trap
2334	   under the same condition.  */
2335	bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2336		     && (lcompcode != COMPCODE_EQ)
2337		     && (lcompcode != COMPCODE_ORD);
2338	bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2339		     && (rcompcode != COMPCODE_EQ)
2340		     && (rcompcode != COMPCODE_ORD);
2341	bool trap = (compcode & COMPCODE_UNORD) == 0
2342		    && (compcode != COMPCODE_EQ)
2343		    && (compcode != COMPCODE_ORD);
2344
2345        /* In a short-circuited boolean expression the LHS might be
2346	   such that the RHS, if evaluated, will never trap.  For
2347	   example, in ORD (x, y) && (x < y), we evaluate the RHS only
2348	   if neither x nor y is NaN.  (This is a mixed blessing: for
2349	   example, the expression above will never trap, hence
2350	   optimizing it to x < y would be invalid).  */
2351        if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2352            || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2353          rtrap = false;
2354
2355        /* If the comparison was short-circuited, and only the RHS
2356	   trapped, we may now generate a spurious trap.  */
2357	if (rtrap && !ltrap
2358	    && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2359	  return NULL_TREE;
2360
2361	/* If we changed the conditions that cause a trap, we lose.  */
2362	if ((ltrap || rtrap) != trap)
2363	  return NULL_TREE;
2364      }
2365
2366  if (compcode == COMPCODE_TRUE)
2367    return constant_boolean_node (true, truth_type);
2368  else if (compcode == COMPCODE_FALSE)
2369    return constant_boolean_node (false, truth_type);
2370  else
2371    return fold_build2 (compcode_to_comparison (compcode),
2372			truth_type, ll_arg, lr_arg);
2373}
2374
2375/* Return nonzero if CODE is a tree code that represents a truth value.  */
2376
2377static int
2378truth_value_p (enum tree_code code)
2379{
2380  return (TREE_CODE_CLASS (code) == tcc_comparison
2381	  || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2382	  || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2383	  || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2384}
2385
2386/* Return nonzero if two operands (typically of the same tree node)
2387   are necessarily equal.  If either argument has side-effects this
2388   function returns zero.  FLAGS modifies behavior as follows:
2389
2390   If OEP_ONLY_CONST is set, only return nonzero for constants.
2391   This function tests whether the operands are indistinguishable;
2392   it does not test whether they are equal using C's == operation.
2393   The distinction is important for IEEE floating point, because
2394   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2395   (2) two NaNs may be indistinguishable, but NaN!=NaN.
2396
2397   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2398   even though it may hold multiple values during a function.
2399   This is because a GCC tree node guarantees that nothing else is
2400   executed between the evaluation of its "operands" (which may often
2401   be evaluated in arbitrary order).  Hence if the operands themselves
2402   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2403   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
2404   unset means assuming isochronic (or instantaneous) tree equivalence.
2405   Unless comparing arbitrary expression trees, such as from different
2406   statements, this flag can usually be left unset.
2407
2408   If OEP_PURE_SAME is set, then pure functions with identical arguments
2409   are considered the same.  It is used when the caller has other ways
2410   to ensure that global memory is unchanged in between.  */
2411
2412int
2413operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2414{
2415  /* If either is ERROR_MARK, they aren't equal.  */
2416  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2417    return 0;
2418
2419  /* If both types don't have the same signedness, then we can't consider
2420     them equal.  We must check this before the STRIP_NOPS calls
2421     because they may change the signedness of the arguments.  */
2422  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2423    return 0;
2424
2425  /* If both types don't have the same precision, then it is not safe
2426     to strip NOPs.  */
2427  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
2428    return 0;
2429
2430  STRIP_NOPS (arg0);
2431  STRIP_NOPS (arg1);
2432
2433  if (TREE_CODE (arg0) != TREE_CODE (arg1)
2434      /* This is needed for conversions and for COMPONENT_REF.
2435	 Might as well play it safe and always test this.  */
2436      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2437      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2438      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2439    return 0;
2440
2441  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2442     We don't care about side effects in that case because the SAVE_EXPR
2443     takes care of that for us. In all other cases, two expressions are
2444     equal if they have no side effects.  If we have two identical
2445     expressions with side effects that should be treated the same due
2446     to the only side effects being identical SAVE_EXPR's, that will
2447     be detected in the recursive calls below.  */
2448  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2449      && (TREE_CODE (arg0) == SAVE_EXPR
2450	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2451    return 1;
2452
2453  /* Next handle constant cases, those for which we can return 1 even
2454     if ONLY_CONST is set.  */
2455  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2456    switch (TREE_CODE (arg0))
2457      {
2458      case INTEGER_CST:
2459	return (! TREE_CONSTANT_OVERFLOW (arg0)
2460		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2461		&& tree_int_cst_equal (arg0, arg1));
2462
2463      case REAL_CST:
2464	return (! TREE_CONSTANT_OVERFLOW (arg0)
2465		&& ! TREE_CONSTANT_OVERFLOW (arg1)
2466		&& REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2467					  TREE_REAL_CST (arg1)));
2468
2469      case VECTOR_CST:
2470	{
2471	  tree v1, v2;
2472
2473	  if (TREE_CONSTANT_OVERFLOW (arg0)
2474	      || TREE_CONSTANT_OVERFLOW (arg1))
2475	    return 0;
2476
2477	  v1 = TREE_VECTOR_CST_ELTS (arg0);
2478	  v2 = TREE_VECTOR_CST_ELTS (arg1);
2479	  while (v1 && v2)
2480	    {
2481	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2482				    flags))
2483		return 0;
2484	      v1 = TREE_CHAIN (v1);
2485	      v2 = TREE_CHAIN (v2);
2486	    }
2487
2488	  return v1 == v2;
2489	}
2490
2491      case COMPLEX_CST:
2492	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2493				 flags)
2494		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2495				    flags));
2496
2497      case STRING_CST:
2498	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2499		&& ! memcmp (TREE_STRING_POINTER (arg0),
2500			      TREE_STRING_POINTER (arg1),
2501			      TREE_STRING_LENGTH (arg0)));
2502
2503      case ADDR_EXPR:
2504	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2505				0);
2506      default:
2507	break;
2508      }
2509
2510  if (flags & OEP_ONLY_CONST)
2511    return 0;
2512
2513/* Define macros to test an operand from arg0 and arg1 for equality and a
2514   variant that allows null and views null as being different from any
2515   non-null value.  In the latter case, if either is null, the both
2516   must be; otherwise, do the normal comparison.  */
2517#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
2518				    TREE_OPERAND (arg1, N), flags)
2519
2520#define OP_SAME_WITH_NULL(N)				\
2521  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
2522   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2523
2524  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2525    {
2526    case tcc_unary:
2527      /* Two conversions are equal only if signedness and modes match.  */
2528      switch (TREE_CODE (arg0))
2529        {
2530        case NOP_EXPR:
2531        case CONVERT_EXPR:
2532        case FIX_CEIL_EXPR:
2533        case FIX_TRUNC_EXPR:
2534        case FIX_FLOOR_EXPR:
2535        case FIX_ROUND_EXPR:
2536	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2537	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2538	    return 0;
2539	  break;
2540	default:
2541	  break;
2542	}
2543
2544      return OP_SAME (0);
2545
2546
2547    case tcc_comparison:
2548    case tcc_binary:
2549      if (OP_SAME (0) && OP_SAME (1))
2550	return 1;
2551
2552      /* For commutative ops, allow the other order.  */
2553      return (commutative_tree_code (TREE_CODE (arg0))
2554	      && operand_equal_p (TREE_OPERAND (arg0, 0),
2555				  TREE_OPERAND (arg1, 1), flags)
2556	      && operand_equal_p (TREE_OPERAND (arg0, 1),
2557				  TREE_OPERAND (arg1, 0), flags));
2558
2559    case tcc_reference:
2560      /* If either of the pointer (or reference) expressions we are
2561	 dereferencing contain a side effect, these cannot be equal.  */
2562      if (TREE_SIDE_EFFECTS (arg0)
2563	  || TREE_SIDE_EFFECTS (arg1))
2564	return 0;
2565
2566      switch (TREE_CODE (arg0))
2567	{
2568	case INDIRECT_REF:
2569	case ALIGN_INDIRECT_REF:
2570	case MISALIGNED_INDIRECT_REF:
2571	case REALPART_EXPR:
2572	case IMAGPART_EXPR:
2573	  return OP_SAME (0);
2574
2575	case ARRAY_REF:
2576	case ARRAY_RANGE_REF:
2577	  /* Operands 2 and 3 may be null.  */
2578	  return (OP_SAME (0)
2579		  && OP_SAME (1)
2580		  && OP_SAME_WITH_NULL (2)
2581		  && OP_SAME_WITH_NULL (3));
2582
2583	case COMPONENT_REF:
2584	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
2585	     may be NULL when we're called to compare MEM_EXPRs.  */
2586	  return OP_SAME_WITH_NULL (0)
2587		 && OP_SAME (1)
2588		 && OP_SAME_WITH_NULL (2);
2589
2590	case BIT_FIELD_REF:
2591	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2592
2593	default:
2594	  return 0;
2595	}
2596
2597    case tcc_expression:
2598      switch (TREE_CODE (arg0))
2599	{
2600	case ADDR_EXPR:
2601	case TRUTH_NOT_EXPR:
2602	  return OP_SAME (0);
2603
2604	case TRUTH_ANDIF_EXPR:
2605	case TRUTH_ORIF_EXPR:
2606	  return OP_SAME (0) && OP_SAME (1);
2607
2608	case TRUTH_AND_EXPR:
2609	case TRUTH_OR_EXPR:
2610	case TRUTH_XOR_EXPR:
2611	  if (OP_SAME (0) && OP_SAME (1))
2612	    return 1;
2613
2614	  /* Otherwise take into account this is a commutative operation.  */
2615	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
2616				   TREE_OPERAND (arg1, 1), flags)
2617		  && operand_equal_p (TREE_OPERAND (arg0, 1),
2618				      TREE_OPERAND (arg1, 0), flags));
2619
2620	case CALL_EXPR:
2621	  /* If the CALL_EXPRs call different functions, then they
2622	     clearly can not be equal.  */
2623	  if (!OP_SAME (0))
2624	    return 0;
2625
2626	  {
2627	    unsigned int cef = call_expr_flags (arg0);
2628	    if (flags & OEP_PURE_SAME)
2629	      cef &= ECF_CONST | ECF_PURE;
2630	    else
2631	      cef &= ECF_CONST;
2632	    if (!cef)
2633	      return 0;
2634	  }
2635
2636	  /* Now see if all the arguments are the same.  operand_equal_p
2637	     does not handle TREE_LIST, so we walk the operands here
2638	     feeding them to operand_equal_p.  */
2639	  arg0 = TREE_OPERAND (arg0, 1);
2640	  arg1 = TREE_OPERAND (arg1, 1);
2641	  while (arg0 && arg1)
2642	    {
2643	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2644				     flags))
2645		return 0;
2646
2647	      arg0 = TREE_CHAIN (arg0);
2648	      arg1 = TREE_CHAIN (arg1);
2649	    }
2650
2651	  /* If we get here and both argument lists are exhausted
2652	     then the CALL_EXPRs are equal.  */
2653	  return ! (arg0 || arg1);
2654
2655	default:
2656	  return 0;
2657	}
2658
2659    case tcc_declaration:
2660      /* Consider __builtin_sqrt equal to sqrt.  */
2661      return (TREE_CODE (arg0) == FUNCTION_DECL
2662	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2663	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2664	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2665
2666    default:
2667      return 0;
2668    }
2669
2670#undef OP_SAME
2671#undef OP_SAME_WITH_NULL
2672}
2673
2674/* Similar to operand_equal_p, but see if ARG0 might have been made by
2675   shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2676
2677   When in doubt, return 0.  */
2678
2679static int
2680operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2681{
2682  int unsignedp1, unsignedpo;
2683  tree primarg0, primarg1, primother;
2684  unsigned int correct_width;
2685
2686  if (operand_equal_p (arg0, arg1, 0))
2687    return 1;
2688
2689  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2690      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2691    return 0;
2692
2693  /* Discard any conversions that don't change the modes of ARG0 and ARG1
2694     and see if the inner values are the same.  This removes any
2695     signedness comparison, which doesn't matter here.  */
2696  primarg0 = arg0, primarg1 = arg1;
2697  STRIP_NOPS (primarg0);
2698  STRIP_NOPS (primarg1);
2699  if (operand_equal_p (primarg0, primarg1, 0))
2700    return 1;
2701
2702  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2703     actual comparison operand, ARG0.
2704
2705     First throw away any conversions to wider types
2706     already present in the operands.  */
2707
2708  primarg1 = get_narrower (arg1, &unsignedp1);
2709  primother = get_narrower (other, &unsignedpo);
2710
2711  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2712  if (unsignedp1 == unsignedpo
2713      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2714      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2715    {
2716      tree type = TREE_TYPE (arg0);
2717
2718      /* Make sure shorter operand is extended the right way
2719	 to match the longer operand.  */
2720      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2721			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2722
2723      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2724	return 1;
2725    }
2726
2727  return 0;
2728}
2729
2730/* See if ARG is an expression that is either a comparison or is performing
2731   arithmetic on comparisons.  The comparisons must only be comparing
2732   two different values, which will be stored in *CVAL1 and *CVAL2; if
2733   they are nonzero it means that some operands have already been found.
2734   No variables may be used anywhere else in the expression except in the
2735   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
2736   the expression and save_expr needs to be called with CVAL1 and CVAL2.
2737
2738   If this is true, return 1.  Otherwise, return zero.  */
2739
2740static int
2741twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2742{
2743  enum tree_code code = TREE_CODE (arg);
2744  enum tree_code_class class = TREE_CODE_CLASS (code);
2745
2746  /* We can handle some of the tcc_expression cases here.  */
2747  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2748    class = tcc_unary;
2749  else if (class == tcc_expression
2750	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2751	       || code == COMPOUND_EXPR))
2752    class = tcc_binary;
2753
2754  else if (class == tcc_expression && code == SAVE_EXPR
2755	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2756    {
2757      /* If we've already found a CVAL1 or CVAL2, this expression is
2758	 two complex to handle.  */
2759      if (*cval1 || *cval2)
2760	return 0;
2761
2762      class = tcc_unary;
2763      *save_p = 1;
2764    }
2765
2766  switch (class)
2767    {
2768    case tcc_unary:
2769      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2770
2771    case tcc_binary:
2772      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2773	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
2774				      cval1, cval2, save_p));
2775
2776    case tcc_constant:
2777      return 1;
2778
2779    case tcc_expression:
2780      if (code == COND_EXPR)
2781	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2782				     cval1, cval2, save_p)
2783		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
2784					cval1, cval2, save_p)
2785		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
2786					cval1, cval2, save_p));
2787      return 0;
2788
2789    case tcc_comparison:
2790      /* First see if we can handle the first operand, then the second.  For
2791	 the second operand, we know *CVAL1 can't be zero.  It must be that
2792	 one side of the comparison is each of the values; test for the
2793	 case where this isn't true by failing if the two operands
2794	 are the same.  */
2795
2796      if (operand_equal_p (TREE_OPERAND (arg, 0),
2797			   TREE_OPERAND (arg, 1), 0))
2798	return 0;
2799
2800      if (*cval1 == 0)
2801	*cval1 = TREE_OPERAND (arg, 0);
2802      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2803	;
2804      else if (*cval2 == 0)
2805	*cval2 = TREE_OPERAND (arg, 0);
2806      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2807	;
2808      else
2809	return 0;
2810
2811      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2812	;
2813      else if (*cval2 == 0)
2814	*cval2 = TREE_OPERAND (arg, 1);
2815      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2816	;
2817      else
2818	return 0;
2819
2820      return 1;
2821
2822    default:
2823      return 0;
2824    }
2825}
2826
2827/* ARG is a tree that is known to contain just arithmetic operations and
2828   comparisons.  Evaluate the operations in the tree substituting NEW0 for
2829   any occurrence of OLD0 as an operand of a comparison and likewise for
2830   NEW1 and OLD1.  */
2831
2832static tree
2833eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2834{
2835  tree type = TREE_TYPE (arg);
2836  enum tree_code code = TREE_CODE (arg);
2837  enum tree_code_class class = TREE_CODE_CLASS (code);
2838
2839  /* We can handle some of the tcc_expression cases here.  */
2840  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2841    class = tcc_unary;
2842  else if (class == tcc_expression
2843	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2844    class = tcc_binary;
2845
2846  switch (class)
2847    {
2848    case tcc_unary:
2849      return fold_build1 (code, type,
2850			  eval_subst (TREE_OPERAND (arg, 0),
2851				      old0, new0, old1, new1));
2852
2853    case tcc_binary:
2854      return fold_build2 (code, type,
2855			  eval_subst (TREE_OPERAND (arg, 0),
2856				      old0, new0, old1, new1),
2857			  eval_subst (TREE_OPERAND (arg, 1),
2858				      old0, new0, old1, new1));
2859
2860    case tcc_expression:
2861      switch (code)
2862	{
2863	case SAVE_EXPR:
2864	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2865
2866	case COMPOUND_EXPR:
2867	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2868
2869	case COND_EXPR:
2870	  return fold_build3 (code, type,
2871			      eval_subst (TREE_OPERAND (arg, 0),
2872					  old0, new0, old1, new1),
2873			      eval_subst (TREE_OPERAND (arg, 1),
2874					  old0, new0, old1, new1),
2875			      eval_subst (TREE_OPERAND (arg, 2),
2876					  old0, new0, old1, new1));
2877	default:
2878	  break;
2879	}
2880      /* Fall through - ???  */
2881
2882    case tcc_comparison:
2883      {
2884	tree arg0 = TREE_OPERAND (arg, 0);
2885	tree arg1 = TREE_OPERAND (arg, 1);
2886
2887	/* We need to check both for exact equality and tree equality.  The
2888	   former will be true if the operand has a side-effect.  In that
2889	   case, we know the operand occurred exactly once.  */
2890
2891	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2892	  arg0 = new0;
2893	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2894	  arg0 = new1;
2895
2896	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2897	  arg1 = new0;
2898	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2899	  arg1 = new1;
2900
2901	return fold_build2 (code, type, arg0, arg1);
2902      }
2903
2904    default:
2905      return arg;
2906    }
2907}
2908
2909/* Return a tree for the case when the result of an expression is RESULT
2910   converted to TYPE and OMITTED was previously an operand of the expression
2911   but is now not needed (e.g., we folded OMITTED * 0).
2912
2913   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
2914   the conversion of RESULT to TYPE.  */
2915
2916tree
2917omit_one_operand (tree type, tree result, tree omitted)
2918{
2919  tree t = fold_convert (type, result);
2920
2921  if (TREE_SIDE_EFFECTS (omitted))
2922    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2923
2924  return non_lvalue (t);
2925}
2926
2927/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */
2928
2929static tree
2930pedantic_omit_one_operand (tree type, tree result, tree omitted)
2931{
2932  tree t = fold_convert (type, result);
2933
2934  if (TREE_SIDE_EFFECTS (omitted))
2935    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2936
2937  return pedantic_non_lvalue (t);
2938}
2939
2940/* Return a tree for the case when the result of an expression is RESULT
2941   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2942   of the expression but are now not needed.
2943
2944   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2945   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2946   evaluated before OMITTED2.  Otherwise, if neither has side effects,
2947   just do the conversion of RESULT to TYPE.  */
2948
2949tree
2950omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2951{
2952  tree t = fold_convert (type, result);
2953
2954  if (TREE_SIDE_EFFECTS (omitted2))
2955    t = build2 (COMPOUND_EXPR, type, omitted2, t);
2956  if (TREE_SIDE_EFFECTS (omitted1))
2957    t = build2 (COMPOUND_EXPR, type, omitted1, t);
2958
2959  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2960}
2961
2962
2963/* Return a simplified tree node for the truth-negation of ARG.  This
2964   never alters ARG itself.  We assume that ARG is an operation that
2965   returns a truth value (0 or 1).
2966
2967   FIXME: one would think we would fold the result, but it causes
2968   problems with the dominator optimizer.  */
2969tree
2970invert_truthvalue (tree arg)
2971{
2972  tree type = TREE_TYPE (arg);
2973  enum tree_code code = TREE_CODE (arg);
2974
2975  if (code == ERROR_MARK)
2976    return arg;
2977
2978  /* If this is a comparison, we can simply invert it, except for
2979     floating-point non-equality comparisons, in which case we just
2980     enclose a TRUTH_NOT_EXPR around what we have.  */
2981
2982  if (TREE_CODE_CLASS (code) == tcc_comparison)
2983    {
2984      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2985      if (FLOAT_TYPE_P (op_type)
2986	  && flag_trapping_math
2987	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
2988	  && code != NE_EXPR && code != EQ_EXPR)
2989	return build1 (TRUTH_NOT_EXPR, type, arg);
2990      else
2991	{
2992	  code = invert_tree_comparison (code,
2993					 HONOR_NANS (TYPE_MODE (op_type)));
2994	  if (code == ERROR_MARK)
2995	    return build1 (TRUTH_NOT_EXPR, type, arg);
2996	  else
2997	    return build2 (code, type,
2998			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2999	}
3000    }
3001
3002  switch (code)
3003    {
3004    case INTEGER_CST:
3005      return constant_boolean_node (integer_zerop (arg), type);
3006
3007    case TRUTH_AND_EXPR:
3008      return build2 (TRUTH_OR_EXPR, type,
3009		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3010		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3011
3012    case TRUTH_OR_EXPR:
3013      return build2 (TRUTH_AND_EXPR, type,
3014		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3015		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3016
3017    case TRUTH_XOR_EXPR:
3018      /* Here we can invert either operand.  We invert the first operand
3019	 unless the second operand is a TRUTH_NOT_EXPR in which case our
3020	 result is the XOR of the first operand with the inside of the
3021	 negation of the second operand.  */
3022
3023      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3024	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3025		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3026      else
3027	return build2 (TRUTH_XOR_EXPR, type,
3028		       invert_truthvalue (TREE_OPERAND (arg, 0)),
3029		       TREE_OPERAND (arg, 1));
3030
3031    case TRUTH_ANDIF_EXPR:
3032      return build2 (TRUTH_ORIF_EXPR, type,
3033		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3034		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3035
3036    case TRUTH_ORIF_EXPR:
3037      return build2 (TRUTH_ANDIF_EXPR, type,
3038		     invert_truthvalue (TREE_OPERAND (arg, 0)),
3039		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3040
3041    case TRUTH_NOT_EXPR:
3042      return TREE_OPERAND (arg, 0);
3043
3044    case COND_EXPR:
3045      {
3046	tree arg1 = TREE_OPERAND (arg, 1);
3047	tree arg2 = TREE_OPERAND (arg, 2);
3048	/* A COND_EXPR may have a throw as one operand, which
3049	   then has void type.  Just leave void operands
3050	   as they are.  */
3051	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3052		       VOID_TYPE_P (TREE_TYPE (arg1))
3053		       ? arg1 : invert_truthvalue (arg1),
3054		       VOID_TYPE_P (TREE_TYPE (arg2))
3055		       ? arg2 : invert_truthvalue (arg2));
3056      }
3057
3058    case COMPOUND_EXPR:
3059      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3060		     invert_truthvalue (TREE_OPERAND (arg, 1)));
3061
3062    case NON_LVALUE_EXPR:
3063      return invert_truthvalue (TREE_OPERAND (arg, 0));
3064
3065    case NOP_EXPR:
3066      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3067        break;
3068
3069    case CONVERT_EXPR:
3070    case FLOAT_EXPR:
3071      return build1 (TREE_CODE (arg), type,
3072		     invert_truthvalue (TREE_OPERAND (arg, 0)));
3073
3074    case BIT_AND_EXPR:
3075      if (!integer_onep (TREE_OPERAND (arg, 1)))
3076	break;
3077      return build2 (EQ_EXPR, type, arg,
3078		     fold_convert (type, integer_zero_node));
3079
3080    case SAVE_EXPR:
3081      return build1 (TRUTH_NOT_EXPR, type, arg);
3082
3083    case CLEANUP_POINT_EXPR:
3084      return build1 (CLEANUP_POINT_EXPR, type,
3085		     invert_truthvalue (TREE_OPERAND (arg, 0)));
3086
3087    default:
3088      break;
3089    }
3090  gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3091  return build1 (TRUTH_NOT_EXPR, type, arg);
3092}
3093
3094/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3095   operands are another bit-wise operation with a common input.  If so,
3096   distribute the bit operations to save an operation and possibly two if
3097   constants are involved.  For example, convert
3098	(A | B) & (A | C) into A | (B & C)
3099   Further simplification will occur if B and C are constants.
3100
3101   If this optimization cannot be done, 0 will be returned.  */
3102
3103static tree
3104distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3105{
3106  tree common;
3107  tree left, right;
3108
3109  if (TREE_CODE (arg0) != TREE_CODE (arg1)
3110      || TREE_CODE (arg0) == code
3111      || (TREE_CODE (arg0) != BIT_AND_EXPR
3112	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
3113    return 0;
3114
3115  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3116    {
3117      common = TREE_OPERAND (arg0, 0);
3118      left = TREE_OPERAND (arg0, 1);
3119      right = TREE_OPERAND (arg1, 1);
3120    }
3121  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3122    {
3123      common = TREE_OPERAND (arg0, 0);
3124      left = TREE_OPERAND (arg0, 1);
3125      right = TREE_OPERAND (arg1, 0);
3126    }
3127  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3128    {
3129      common = TREE_OPERAND (arg0, 1);
3130      left = TREE_OPERAND (arg0, 0);
3131      right = TREE_OPERAND (arg1, 1);
3132    }
3133  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3134    {
3135      common = TREE_OPERAND (arg0, 1);
3136      left = TREE_OPERAND (arg0, 0);
3137      right = TREE_OPERAND (arg1, 0);
3138    }
3139  else
3140    return 0;
3141
3142  return fold_build2 (TREE_CODE (arg0), type, common,
3143		      fold_build2 (code, type, left, right));
3144}
3145
3146/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3147   with code CODE.  This optimization is unsafe.  */
3148static tree
3149distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3150{
3151  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3152  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3153
3154  /* (A / C) +- (B / C) -> (A +- B) / C.  */
3155  if (mul0 == mul1
3156      && operand_equal_p (TREE_OPERAND (arg0, 1),
3157		       TREE_OPERAND (arg1, 1), 0))
3158    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3159			fold_build2 (code, type,
3160				     TREE_OPERAND (arg0, 0),
3161				     TREE_OPERAND (arg1, 0)),
3162			TREE_OPERAND (arg0, 1));
3163
3164  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
3165  if (operand_equal_p (TREE_OPERAND (arg0, 0),
3166		       TREE_OPERAND (arg1, 0), 0)
3167      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3168      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3169    {
3170      REAL_VALUE_TYPE r0, r1;
3171      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3172      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3173      if (!mul0)
3174	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3175      if (!mul1)
3176        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3177      real_arithmetic (&r0, code, &r0, &r1);
3178      return fold_build2 (MULT_EXPR, type,
3179			  TREE_OPERAND (arg0, 0),
3180			  build_real (type, r0));
3181    }
3182
3183  return NULL_TREE;
3184}
3185
3186/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3187   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */
3188
3189static tree
3190make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3191		    int unsignedp)
3192{
3193  tree result;
3194
3195  if (bitpos == 0)
3196    {
3197      tree size = TYPE_SIZE (TREE_TYPE (inner));
3198      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3199	   || POINTER_TYPE_P (TREE_TYPE (inner)))
3200	  && host_integerp (size, 0)
3201	  && tree_low_cst (size, 0) == bitsize)
3202	return fold_convert (type, inner);
3203    }
3204
3205  result = build3 (BIT_FIELD_REF, type, inner,
3206		   size_int (bitsize), bitsize_int (bitpos));
3207
3208  BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3209
3210  return result;
3211}
3212
3213/* Optimize a bit-field compare.
3214
3215   There are two cases:  First is a compare against a constant and the
3216   second is a comparison of two items where the fields are at the same
3217   bit position relative to the start of a chunk (byte, halfword, word)
3218   large enough to contain it.  In these cases we can avoid the shift
3219   implicit in bitfield extractions.
3220
3221   For constants, we emit a compare of the shifted constant with the
3222   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3223   compared.  For two fields at the same position, we do the ANDs with the
3224   similar mask and compare the result of the ANDs.
3225
3226   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3227   COMPARE_TYPE is the type of the comparison, and LHS and RHS
3228   are the left and right operands of the comparison, respectively.
3229
3230   If the optimization described above can be done, we return the resulting
3231   tree.  Otherwise we return zero.  */
3232
3233static tree
3234optimize_bit_field_compare (enum tree_code code, tree compare_type,
3235			    tree lhs, tree rhs)
3236{
3237  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3238  tree type = TREE_TYPE (lhs);
3239  tree signed_type, unsigned_type;
3240  int const_p = TREE_CODE (rhs) == INTEGER_CST;
3241  enum machine_mode lmode, rmode, nmode;
3242  int lunsignedp, runsignedp;
3243  int lvolatilep = 0, rvolatilep = 0;
3244  tree linner, rinner = NULL_TREE;
3245  tree mask;
3246  tree offset;
3247
3248  /* Get all the information about the extractions being done.  If the bit size
3249     if the same as the size of the underlying object, we aren't doing an
3250     extraction at all and so can do nothing.  We also don't want to
3251     do anything if the inner expression is a PLACEHOLDER_EXPR since we
3252     then will no longer be able to replace it.  */
3253  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3254				&lunsignedp, &lvolatilep, false);
3255  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3256      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3257    return 0;
3258
3259 if (!const_p)
3260   {
3261     /* If this is not a constant, we can only do something if bit positions,
3262	sizes, and signedness are the same.  */
3263     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3264				   &runsignedp, &rvolatilep, false);
3265
3266     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3267	 || lunsignedp != runsignedp || offset != 0
3268	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3269       return 0;
3270   }
3271
3272  /* See if we can find a mode to refer to this field.  We should be able to,
3273     but fail if we can't.  */
3274  nmode = get_best_mode (lbitsize, lbitpos,
3275			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3276			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3277				TYPE_ALIGN (TREE_TYPE (rinner))),
3278			 word_mode, lvolatilep || rvolatilep);
3279  if (nmode == VOIDmode)
3280    return 0;
3281
3282  /* Set signed and unsigned types of the precision of this mode for the
3283     shifts below.  */
3284  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3285  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3286
3287  /* Compute the bit position and size for the new reference and our offset
3288     within it. If the new reference is the same size as the original, we
3289     won't optimize anything, so return zero.  */
3290  nbitsize = GET_MODE_BITSIZE (nmode);
3291  nbitpos = lbitpos & ~ (nbitsize - 1);
3292  lbitpos -= nbitpos;
3293  if (nbitsize == lbitsize)
3294    return 0;
3295
3296  if (BYTES_BIG_ENDIAN)
3297    lbitpos = nbitsize - lbitsize - lbitpos;
3298
3299  /* Make the mask to be used against the extracted field.  */
3300  mask = build_int_cst (unsigned_type, -1);
3301  mask = force_fit_type (mask, 0, false, false);
3302  mask = fold_convert (unsigned_type, mask);
3303  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3304  mask = const_binop (RSHIFT_EXPR, mask,
3305		      size_int (nbitsize - lbitsize - lbitpos), 0);
3306
3307  if (! const_p)
3308    /* If not comparing with constant, just rework the comparison
3309       and return.  */
3310    return build2 (code, compare_type,
3311		   build2 (BIT_AND_EXPR, unsigned_type,
3312			   make_bit_field_ref (linner, unsigned_type,
3313					       nbitsize, nbitpos, 1),
3314			   mask),
3315		   build2 (BIT_AND_EXPR, unsigned_type,
3316			   make_bit_field_ref (rinner, unsigned_type,
3317					       nbitsize, nbitpos, 1),
3318			   mask));
3319
3320  /* Otherwise, we are handling the constant case. See if the constant is too
3321     big for the field.  Warn and return a tree of for 0 (false) if so.  We do
3322     this not only for its own sake, but to avoid having to test for this
3323     error case below.  If we didn't, we might generate wrong code.
3324
3325     For unsigned fields, the constant shifted right by the field length should
3326     be all zero.  For signed fields, the high-order bits should agree with
3327     the sign bit.  */
3328
3329  if (lunsignedp)
3330    {
3331      if (! integer_zerop (const_binop (RSHIFT_EXPR,
3332					fold_convert (unsigned_type, rhs),
3333					size_int (lbitsize), 0)))
3334	{
3335	  warning (0, "comparison is always %d due to width of bit-field",
3336		   code == NE_EXPR);
3337	  return constant_boolean_node (code == NE_EXPR, compare_type);
3338	}
3339    }
3340  else
3341    {
3342      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3343			      size_int (lbitsize - 1), 0);
3344      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3345	{
3346	  warning (0, "comparison is always %d due to width of bit-field",
3347		   code == NE_EXPR);
3348	  return constant_boolean_node (code == NE_EXPR, compare_type);
3349	}
3350    }
3351
3352  /* Single-bit compares should always be against zero.  */
3353  if (lbitsize == 1 && ! integer_zerop (rhs))
3354    {
3355      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3356      rhs = fold_convert (type, integer_zero_node);
3357    }
3358
3359  /* Make a new bitfield reference, shift the constant over the
3360     appropriate number of bits and mask it with the computed mask
3361     (in case this was a signed field).  If we changed it, make a new one.  */
3362  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3363  if (lvolatilep)
3364    {
3365      TREE_SIDE_EFFECTS (lhs) = 1;
3366      TREE_THIS_VOLATILE (lhs) = 1;
3367    }
3368
3369  rhs = const_binop (BIT_AND_EXPR,
3370		     const_binop (LSHIFT_EXPR,
3371				  fold_convert (unsigned_type, rhs),
3372				  size_int (lbitpos), 0),
3373		     mask, 0);
3374
3375  return build2 (code, compare_type,
3376		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3377		 rhs);
3378}
3379
3380/* Subroutine for fold_truthop: decode a field reference.
3381
3382   If EXP is a comparison reference, we return the innermost reference.
3383
3384   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3385   set to the starting bit number.
3386
3387   If the innermost field can be completely contained in a mode-sized
3388   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.
3389
3390   *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3391   otherwise it is not changed.
3392
3393   *PUNSIGNEDP is set to the signedness of the field.
3394
3395   *PMASK is set to the mask used.  This is either contained in a
3396   BIT_AND_EXPR or derived from the width of the field.
3397
3398   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3399
3400   Return 0 if this is not a component reference or is one that we can't
3401   do anything with.  */
3402
3403static tree
3404decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3405			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3406			int *punsignedp, int *pvolatilep,
3407			tree *pmask, tree *pand_mask)
3408{
3409  tree outer_type = 0;
3410  tree and_mask = 0;
3411  tree mask, inner, offset;
3412  tree unsigned_type;
3413  unsigned int precision;
3414
3415  /* All the optimizations using this function assume integer fields.
3416     There are problems with FP fields since the type_for_size call
3417     below can fail for, e.g., XFmode.  */
3418  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3419    return 0;
3420
3421  /* We are interested in the bare arrangement of bits, so strip everything
3422     that doesn't affect the machine mode.  However, record the type of the
3423     outermost expression if it may matter below.  */
3424  if (TREE_CODE (exp) == NOP_EXPR
3425      || TREE_CODE (exp) == CONVERT_EXPR
3426      || TREE_CODE (exp) == NON_LVALUE_EXPR)
3427    outer_type = TREE_TYPE (exp);
3428  STRIP_NOPS (exp);
3429
3430  if (TREE_CODE (exp) == BIT_AND_EXPR)
3431    {
3432      and_mask = TREE_OPERAND (exp, 1);
3433      exp = TREE_OPERAND (exp, 0);
3434      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3435      if (TREE_CODE (and_mask) != INTEGER_CST)
3436	return 0;
3437    }
3438
3439  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3440			       punsignedp, pvolatilep, false);
3441  if ((inner == exp && and_mask == 0)
3442      || *pbitsize < 0 || offset != 0
3443      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3444    return 0;
3445
3446  /* If the number of bits in the reference is the same as the bitsize of
3447     the outer type, then the outer type gives the signedness. Otherwise
3448     (in case of a small bitfield) the signedness is unchanged.  */
3449  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3450    *punsignedp = TYPE_UNSIGNED (outer_type);
3451
3452  /* Compute the mask to access the bitfield.  */
3453  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3454  precision = TYPE_PRECISION (unsigned_type);
3455
3456  mask = build_int_cst (unsigned_type, -1);
3457  mask = force_fit_type (mask, 0, false, false);
3458
3459  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3460  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3461
3462  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
3463  if (and_mask != 0)
3464    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3465			fold_convert (unsigned_type, and_mask), mask);
3466
3467  *pmask = mask;
3468  *pand_mask = and_mask;
3469  return inner;
3470}
3471
3472/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3473   bit positions.  */
3474
3475static int
3476all_ones_mask_p (tree mask, int size)
3477{
3478  tree type = TREE_TYPE (mask);
3479  unsigned int precision = TYPE_PRECISION (type);
3480  tree tmask;
3481
3482  tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3483  tmask = force_fit_type (tmask, 0, false, false);
3484
3485  return
3486    tree_int_cst_equal (mask,
3487			const_binop (RSHIFT_EXPR,
3488				     const_binop (LSHIFT_EXPR, tmask,
3489						  size_int (precision - size),
3490						  0),
3491				     size_int (precision - size), 0));
3492}
3493
3494/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3495   represents the sign bit of EXP's type.  If EXP represents a sign
3496   or zero extension, also test VAL against the unextended type.
3497   The return value is the (sub)expression whose sign bit is VAL,
3498   or NULL_TREE otherwise.  */
3499
3500static tree
3501sign_bit_p (tree exp, tree val)
3502{
3503  unsigned HOST_WIDE_INT mask_lo, lo;
3504  HOST_WIDE_INT mask_hi, hi;
3505  int width;
3506  tree t;
3507
3508  /* Tree EXP must have an integral type.  */
3509  t = TREE_TYPE (exp);
3510  if (! INTEGRAL_TYPE_P (t))
3511    return NULL_TREE;
3512
3513  /* Tree VAL must be an integer constant.  */
3514  if (TREE_CODE (val) != INTEGER_CST
3515      || TREE_CONSTANT_OVERFLOW (val))
3516    return NULL_TREE;
3517
3518  width = TYPE_PRECISION (t);
3519  if (width > HOST_BITS_PER_WIDE_INT)
3520    {
3521      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3522      lo = 0;
3523
3524      mask_hi = ((unsigned HOST_WIDE_INT) -1
3525		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3526      mask_lo = -1;
3527    }
3528  else
3529    {
3530      hi = 0;
3531      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3532
3533      mask_hi = 0;
3534      mask_lo = ((unsigned HOST_WIDE_INT) -1
3535		 >> (HOST_BITS_PER_WIDE_INT - width));
3536    }
3537
3538  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3539     treat VAL as if it were unsigned.  */
3540  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3541      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3542    return exp;
3543
3544  /* Handle extension from a narrower type.  */
3545  if (TREE_CODE (exp) == NOP_EXPR
3546      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3547    return sign_bit_p (TREE_OPERAND (exp, 0), val);
3548
3549  return NULL_TREE;
3550}
3551
3552/* Subroutine for fold_truthop: determine if an operand is simple enough
3553   to be evaluated unconditionally.  */
3554
3555static int
3556simple_operand_p (tree exp)
3557{
3558  /* Strip any conversions that don't change the machine mode.  */
3559  STRIP_NOPS (exp);
3560
3561  return (CONSTANT_CLASS_P (exp)
3562	  || TREE_CODE (exp) == SSA_NAME
3563	  || (DECL_P (exp)
3564	      && ! TREE_ADDRESSABLE (exp)
3565	      && ! TREE_THIS_VOLATILE (exp)
3566	      && ! DECL_NONLOCAL (exp)
3567	      /* Don't regard global variables as simple.  They may be
3568		 allocated in ways unknown to the compiler (shared memory,
3569		 #pragma weak, etc).  */
3570	      && ! TREE_PUBLIC (exp)
3571	      && ! DECL_EXTERNAL (exp)
3572	      /* Loading a static variable is unduly expensive, but global
3573		 registers aren't expensive.  */
3574	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3575}
3576
3577/* The following functions are subroutines to fold_range_test and allow it to
3578   try to change a logical combination of comparisons into a range test.
3579
3580   For example, both
3581	X == 2 || X == 3 || X == 4 || X == 5
3582   and
3583	X >= 2 && X <= 5
3584   are converted to
3585	(unsigned) (X - 2) <= 3
3586
3587   We describe each set of comparisons as being either inside or outside
3588   a range, using a variable named like IN_P, and then describe the
3589   range with a lower and upper bound.  If one of the bounds is omitted,
3590   it represents either the highest or lowest value of the type.
3591
3592   In the comments below, we represent a range by two numbers in brackets
3593   preceded by a "+" to designate being inside that range, or a "-" to
3594   designate being outside that range, so the condition can be inverted by
3595   flipping the prefix.  An omitted bound is represented by a "-".  For
3596   example, "- [-, 10]" means being outside the range starting at the lowest
3597   possible value and ending at 10, in other words, being greater than 10.
3598   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3599   always false.
3600
3601   We set up things so that the missing bounds are handled in a consistent
3602   manner so neither a missing bound nor "true" and "false" need to be
3603   handled using a special case.  */
3604
3605/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3606   of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3607   and UPPER1_P are nonzero if the respective argument is an upper bound
3608   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
3609   must be specified for a comparison.  ARG1 will be converted to ARG0's
3610   type if both are specified.  */
3611
3612static tree
3613range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3614	     tree arg1, int upper1_p)
3615{
3616  tree tem;
3617  int result;
3618  int sgn0, sgn1;
3619
3620  /* If neither arg represents infinity, do the normal operation.
3621     Else, if not a comparison, return infinity.  Else handle the special
3622     comparison rules. Note that most of the cases below won't occur, but
3623     are handled for consistency.  */
3624
3625  if (arg0 != 0 && arg1 != 0)
3626    {
3627      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3628			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3629      STRIP_NOPS (tem);
3630      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3631    }
3632
3633  if (TREE_CODE_CLASS (code) != tcc_comparison)
3634    return 0;
3635
3636  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3637     for neither.  In real maths, we cannot assume open ended ranges are
3638     the same. But, this is computer arithmetic, where numbers are finite.
3639     We can therefore make the transformation of any unbounded range with
3640     the value Z, Z being greater than any representable number. This permits
3641     us to treat unbounded ranges as equal.  */
3642  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3643  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3644  switch (code)
3645    {
3646    case EQ_EXPR:
3647      result = sgn0 == sgn1;
3648      break;
3649    case NE_EXPR:
3650      result = sgn0 != sgn1;
3651      break;
3652    case LT_EXPR:
3653      result = sgn0 < sgn1;
3654      break;
3655    case LE_EXPR:
3656      result = sgn0 <= sgn1;
3657      break;
3658    case GT_EXPR:
3659      result = sgn0 > sgn1;
3660      break;
3661    case GE_EXPR:
3662      result = sgn0 >= sgn1;
3663      break;
3664    default:
3665      gcc_unreachable ();
3666    }
3667
3668  return constant_boolean_node (result, type);
3669}
3670
3671/* Given EXP, a logical expression, set the range it is testing into
3672   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
3673   actually being tested.  *PLOW and *PHIGH will be made of the same type
3674   as the returned expression.  If EXP is not a comparison, we will most
3675   likely not be returning a useful value and range.  */
3676
3677static tree
3678make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3679{
3680  enum tree_code code;
3681  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3682  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3683  int in_p, n_in_p;
3684  tree low, high, n_low, n_high;
3685
3686  /* Start with simply saying "EXP != 0" and then look at the code of EXP
3687     and see if we can refine the range.  Some of the cases below may not
3688     happen, but it doesn't seem worth worrying about this.  We "continue"
3689     the outer loop when we've changed something; otherwise we "break"
3690     the switch, which will "break" the while.  */
3691
3692  in_p = 0;
3693  low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3694
3695  while (1)
3696    {
3697      code = TREE_CODE (exp);
3698      exp_type = TREE_TYPE (exp);
3699
3700      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3701	{
3702	  if (TREE_CODE_LENGTH (code) > 0)
3703	    arg0 = TREE_OPERAND (exp, 0);
3704	  if (TREE_CODE_CLASS (code) == tcc_comparison
3705	      || TREE_CODE_CLASS (code) == tcc_unary
3706	      || TREE_CODE_CLASS (code) == tcc_binary)
3707	    arg0_type = TREE_TYPE (arg0);
3708	  if (TREE_CODE_CLASS (code) == tcc_binary
3709	      || TREE_CODE_CLASS (code) == tcc_comparison
3710	      || (TREE_CODE_CLASS (code) == tcc_expression
3711		  && TREE_CODE_LENGTH (code) > 1))
3712	    arg1 = TREE_OPERAND (exp, 1);
3713	}
3714
3715      switch (code)
3716	{
3717	case TRUTH_NOT_EXPR:
3718	  in_p = ! in_p, exp = arg0;
3719	  continue;
3720
3721	case EQ_EXPR: case NE_EXPR:
3722	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3723	  /* We can only do something if the range is testing for zero
3724	     and if the second operand is an integer constant.  Note that
3725	     saying something is "in" the range we make is done by
3726	     complementing IN_P since it will set in the initial case of
3727	     being not equal to zero; "out" is leaving it alone.  */
3728	  if (low == 0 || high == 0
3729	      || ! integer_zerop (low) || ! integer_zerop (high)
3730	      || TREE_CODE (arg1) != INTEGER_CST)
3731	    break;
3732
3733	  switch (code)
3734	    {
3735	    case NE_EXPR:  /* - [c, c]  */
3736	      low = high = arg1;
3737	      break;
3738	    case EQ_EXPR:  /* + [c, c]  */
3739	      in_p = ! in_p, low = high = arg1;
3740	      break;
3741	    case GT_EXPR:  /* - [-, c] */
3742	      low = 0, high = arg1;
3743	      break;
3744	    case GE_EXPR:  /* + [c, -] */
3745	      in_p = ! in_p, low = arg1, high = 0;
3746	      break;
3747	    case LT_EXPR:  /* - [c, -] */
3748	      low = arg1, high = 0;
3749	      break;
3750	    case LE_EXPR:  /* + [-, c] */
3751	      in_p = ! in_p, low = 0, high = arg1;
3752	      break;
3753	    default:
3754	      gcc_unreachable ();
3755	    }
3756
3757	  /* If this is an unsigned comparison, we also know that EXP is
3758	     greater than or equal to zero.  We base the range tests we make
3759	     on that fact, so we record it here so we can parse existing
3760	     range tests.  We test arg0_type since often the return type
3761	     of, e.g. EQ_EXPR, is boolean.  */
3762	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3763	    {
3764	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
3765				  in_p, low, high, 1,
3766				  fold_convert (arg0_type, integer_zero_node),
3767				  NULL_TREE))
3768		break;
3769
3770	      in_p = n_in_p, low = n_low, high = n_high;
3771
3772	      /* If the high bound is missing, but we have a nonzero low
3773		 bound, reverse the range so it goes from zero to the low bound
3774		 minus 1.  */
3775	      if (high == 0 && low && ! integer_zerop (low))
3776		{
3777		  in_p = ! in_p;
3778		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3779				      integer_one_node, 0);
3780		  low = fold_convert (arg0_type, integer_zero_node);
3781		}
3782	    }
3783
3784	  exp = arg0;
3785	  continue;
3786
3787	case NEGATE_EXPR:
3788	  /* (-x) IN [a,b] -> x in [-b, -a]  */
3789	  n_low = range_binop (MINUS_EXPR, exp_type,
3790			       fold_convert (exp_type, integer_zero_node),
3791			       0, high, 1);
3792	  n_high = range_binop (MINUS_EXPR, exp_type,
3793				fold_convert (exp_type, integer_zero_node),
3794				0, low, 0);
3795	  low = n_low, high = n_high;
3796	  exp = arg0;
3797	  continue;
3798
3799	case BIT_NOT_EXPR:
3800	  /* ~ X -> -X - 1  */
3801	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3802			fold_convert (exp_type, integer_one_node));
3803	  continue;
3804
3805	case PLUS_EXPR:  case MINUS_EXPR:
3806	  if (TREE_CODE (arg1) != INTEGER_CST)
3807	    break;
3808
3809	  /* If EXP is signed, any overflow in the computation is undefined,
3810	     so we don't worry about it so long as our computations on
3811	     the bounds don't overflow.  For unsigned, overflow is defined
3812	     and this is exactly the right thing.  */
3813	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3814			       arg0_type, low, 0, arg1, 0);
3815	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3816				arg0_type, high, 1, arg1, 0);
3817	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
3818	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
3819	    break;
3820
3821	  /* Check for an unsigned range which has wrapped around the maximum
3822	     value thus making n_high < n_low, and normalize it.  */
3823	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3824	    {
3825	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3826				 integer_one_node, 0);
3827	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3828				  integer_one_node, 0);
3829
3830	      /* If the range is of the form +/- [ x+1, x ], we won't
3831		 be able to normalize it.  But then, it represents the
3832		 whole range or the empty set, so make it
3833		 +/- [ -, - ].  */
3834	      if (tree_int_cst_equal (n_low, low)
3835		  && tree_int_cst_equal (n_high, high))
3836		low = high = 0;
3837	      else
3838		in_p = ! in_p;
3839	    }
3840	  else
3841	    low = n_low, high = n_high;
3842
3843	  exp = arg0;
3844	  continue;
3845
3846	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
3847	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3848	    break;
3849
3850	  if (! INTEGRAL_TYPE_P (arg0_type)
3851	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
3852	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3853	    break;
3854
3855	  n_low = low, n_high = high;
3856
3857	  if (n_low != 0)
3858	    n_low = fold_convert (arg0_type, n_low);
3859
3860	  if (n_high != 0)
3861	    n_high = fold_convert (arg0_type, n_high);
3862
3863
3864	  /* If we're converting arg0 from an unsigned type, to exp,
3865	     a signed type,  we will be doing the comparison as unsigned.
3866	     The tests above have already verified that LOW and HIGH
3867	     are both positive.
3868
3869	     So we have to ensure that we will handle large unsigned
3870	     values the same way that the current signed bounds treat
3871	     negative values.  */
3872
3873	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3874	    {
3875	      tree high_positive;
3876	      tree equiv_type = lang_hooks.types.type_for_mode
3877		(TYPE_MODE (arg0_type), 1);
3878
3879	      /* A range without an upper bound is, naturally, unbounded.
3880		 Since convert would have cropped a very large value, use
3881		 the max value for the destination type.  */
3882	      high_positive
3883		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3884		: TYPE_MAX_VALUE (arg0_type);
3885
3886	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3887		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3888					     fold_convert (arg0_type,
3889							   high_positive),
3890					     fold_convert (arg0_type,
3891							   integer_one_node));
3892
3893	      /* If the low bound is specified, "and" the range with the
3894		 range for which the original unsigned value will be
3895		 positive.  */
3896	      if (low != 0)
3897		{
3898		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3899				      1, n_low, n_high, 1,
3900				      fold_convert (arg0_type,
3901						    integer_zero_node),
3902				      high_positive))
3903		    break;
3904
3905		  in_p = (n_in_p == in_p);
3906		}
3907	      else
3908		{
3909		  /* Otherwise, "or" the range with the range of the input
3910		     that will be interpreted as negative.  */
3911		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
3912				      0, n_low, n_high, 1,
3913				      fold_convert (arg0_type,
3914						    integer_zero_node),
3915				      high_positive))
3916		    break;
3917
3918		  in_p = (in_p != n_in_p);
3919		}
3920	    }
3921
3922	  exp = arg0;
3923	  low = n_low, high = n_high;
3924	  continue;
3925
3926	default:
3927	  break;
3928	}
3929
3930      break;
3931    }
3932
3933  /* If EXP is a constant, we can evaluate whether this is true or false.  */
3934  if (TREE_CODE (exp) == INTEGER_CST)
3935    {
3936      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3937						 exp, 0, low, 0))
3938		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
3939						    exp, 1, high, 1)));
3940      low = high = 0;
3941      exp = 0;
3942    }
3943
3944  *pin_p = in_p, *plow = low, *phigh = high;
3945  return exp;
3946}
3947
3948/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3949   type, TYPE, return an expression to test if EXP is in (or out of, depending
3950   on IN_P) the range.  Return 0 if the test couldn't be created.  */
3951
3952static tree
3953build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3954{
3955  tree etype = TREE_TYPE (exp);
3956  tree value;
3957
3958#ifdef HAVE_canonicalize_funcptr_for_compare
3959  /* Disable this optimization for function pointer expressions
3960     on targets that require function pointer canonicalization.  */
3961  if (HAVE_canonicalize_funcptr_for_compare
3962      && TREE_CODE (etype) == POINTER_TYPE
3963      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3964    return NULL_TREE;
3965#endif
3966
3967  if (! in_p)
3968    {
3969      value = build_range_check (type, exp, 1, low, high);
3970      if (value != 0)
3971        return invert_truthvalue (value);
3972
3973      return 0;
3974    }
3975
3976  if (low == 0 && high == 0)
3977    return fold_convert (type, integer_one_node);
3978
3979  if (low == 0)
3980    return fold_build2 (LE_EXPR, type, exp,
3981			fold_convert (etype, high));
3982
3983  if (high == 0)
3984    return fold_build2 (GE_EXPR, type, exp,
3985			fold_convert (etype, low));
3986
3987  if (operand_equal_p (low, high, 0))
3988    return fold_build2 (EQ_EXPR, type, exp,
3989			fold_convert (etype, low));
3990
3991  if (integer_zerop (low))
3992    {
3993      if (! TYPE_UNSIGNED (etype))
3994	{
3995	  etype = lang_hooks.types.unsigned_type (etype);
3996	  high = fold_convert (etype, high);
3997	  exp = fold_convert (etype, exp);
3998	}
3999      return build_range_check (type, exp, 1, 0, high);
4000    }
4001
4002  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4003  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4004    {
4005      unsigned HOST_WIDE_INT lo;
4006      HOST_WIDE_INT hi;
4007      int prec;
4008
4009      prec = TYPE_PRECISION (etype);
4010      if (prec <= HOST_BITS_PER_WIDE_INT)
4011	{
4012	  hi = 0;
4013	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4014	}
4015      else
4016	{
4017	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4018	  lo = (unsigned HOST_WIDE_INT) -1;
4019	}
4020
4021      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4022	{
4023	  if (TYPE_UNSIGNED (etype))
4024	    {
4025	      etype = lang_hooks.types.signed_type (etype);
4026	      exp = fold_convert (etype, exp);
4027	    }
4028	  return fold_build2 (GT_EXPR, type, exp,
4029			      fold_convert (etype, integer_zero_node));
4030	}
4031    }
4032
4033  value = const_binop (MINUS_EXPR, high, low, 0);
4034  if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4035      && ! TYPE_UNSIGNED (etype))
4036    {
4037      tree utype, minv, maxv;
4038
4039      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4040	 for the type in question, as we rely on this here.  */
4041      switch (TREE_CODE (etype))
4042	{
4043	case INTEGER_TYPE:
4044	case ENUMERAL_TYPE:
4045	case CHAR_TYPE:
4046	  /* There is no requirement that LOW be within the range of ETYPE
4047	     if the latter is a subtype.  It must, however, be within the base
4048	     type of ETYPE.  So be sure we do the subtraction in that type.  */
4049	  if (TREE_TYPE (etype))
4050	    etype = TREE_TYPE (etype);
4051	  utype = lang_hooks.types.unsigned_type (etype);
4052	  maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4053	  maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4054			      integer_one_node, 1);
4055	  minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4056	  if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4057					  minv, 1, maxv, 1)))
4058	    {
4059	      etype = utype;
4060	      high = fold_convert (etype, high);
4061	      low = fold_convert (etype, low);
4062	      exp = fold_convert (etype, exp);
4063	      value = const_binop (MINUS_EXPR, high, low, 0);
4064	    }
4065	  break;
4066	default:
4067	  break;
4068	}
4069    }
4070
4071  if (value != 0 && ! TREE_OVERFLOW (value))
4072    {
4073      /* There is no requirement that LOW be within the range of ETYPE
4074	 if the latter is a subtype.  It must, however, be within the base
4075	 type of ETYPE.  So be sure we do the subtraction in that type.  */
4076      if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4077	{
4078	  etype = TREE_TYPE (etype);
4079	  exp = fold_convert (etype, exp);
4080	  low = fold_convert (etype, low);
4081	  value = fold_convert (etype, value);
4082	}
4083
4084      return build_range_check (type,
4085				fold_build2 (MINUS_EXPR, etype, exp, low),
4086				1, build_int_cst (etype, 0), value);
4087    }
4088
4089  return 0;
4090}
4091
4092/* Given two ranges, see if we can merge them into one.  Return 1 if we
4093   can, 0 if we can't.  Set the output range into the specified parameters.  */
4094
4095static int
4096merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4097	      tree high0, int in1_p, tree low1, tree high1)
4098{
4099  int no_overlap;
4100  int subset;
4101  int temp;
4102  tree tem;
4103  int in_p;
4104  tree low, high;
4105  int lowequal = ((low0 == 0 && low1 == 0)
4106		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4107						low0, 0, low1, 0)));
4108  int highequal = ((high0 == 0 && high1 == 0)
4109		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4110						 high0, 1, high1, 1)));
4111
4112  /* Make range 0 be the range that starts first, or ends last if they
4113     start at the same value.  Swap them if it isn't.  */
4114  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4115				 low0, 0, low1, 0))
4116      || (lowequal
4117	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
4118					high1, 1, high0, 1))))
4119    {
4120      temp = in0_p, in0_p = in1_p, in1_p = temp;
4121      tem = low0, low0 = low1, low1 = tem;
4122      tem = high0, high0 = high1, high1 = tem;
4123    }
4124
4125  /* Now flag two cases, whether the ranges are disjoint or whether the
4126     second range is totally subsumed in the first.  Note that the tests
4127     below are simplified by the ones above.  */
4128  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4129					  high0, 1, low1, 0));
4130  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4131				      high1, 1, high0, 1));
4132
4133  /* We now have four cases, depending on whether we are including or
4134     excluding the two ranges.  */
4135  if (in0_p && in1_p)
4136    {
4137      /* If they don't overlap, the result is false.  If the second range
4138	 is a subset it is the result.  Otherwise, the range is from the start
4139	 of the second to the end of the first.  */
4140      if (no_overlap)
4141	in_p = 0, low = high = 0;
4142      else if (subset)
4143	in_p = 1, low = low1, high = high1;
4144      else
4145	in_p = 1, low = low1, high = high0;
4146    }
4147
4148  else if (in0_p && ! in1_p)
4149    {
4150      /* If they don't overlap, the result is the first range.  If they are
4151	 equal, the result is false.  If the second range is a subset of the
4152	 first, and the ranges begin at the same place, we go from just after
4153	 the end of the first range to the end of the second.  If the second
4154	 range is not a subset of the first, or if it is a subset and both
4155	 ranges end at the same place, the range starts at the start of the
4156	 first range and ends just before the second range.
4157	 Otherwise, we can't describe this as a single range.  */
4158      if (no_overlap)
4159	in_p = 1, low = low0, high = high0;
4160      else if (lowequal && highequal)
4161	in_p = 0, low = high = 0;
4162      else if (subset && lowequal)
4163	{
4164	  in_p = 1, high = high0;
4165	  low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4166			     integer_one_node, 0);
4167	}
4168      else if (! subset || highequal)
4169	{
4170	  in_p = 1, low = low0;
4171	  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4172			      integer_one_node, 0);
4173	}
4174      else
4175	return 0;
4176    }
4177
4178  else if (! in0_p && in1_p)
4179    {
4180      /* If they don't overlap, the result is the second range.  If the second
4181	 is a subset of the first, the result is false.  Otherwise,
4182	 the range starts just after the first range and ends at the
4183	 end of the second.  */
4184      if (no_overlap)
4185	in_p = 1, low = low1, high = high1;
4186      else if (subset || highequal)
4187	in_p = 0, low = high = 0;
4188      else
4189	{
4190	  in_p = 1, high = high1;
4191	  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4192			     integer_one_node, 0);
4193	}
4194    }
4195
4196  else
4197    {
4198      /* The case where we are excluding both ranges.  Here the complex case
4199	 is if they don't overlap.  In that case, the only time we have a
4200	 range is if they are adjacent.  If the second is a subset of the
4201	 first, the result is the first.  Otherwise, the range to exclude
4202	 starts at the beginning of the first range and ends at the end of the
4203	 second.  */
4204      if (no_overlap)
4205	{
4206	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4207					 range_binop (PLUS_EXPR, NULL_TREE,
4208						      high0, 1,
4209						      integer_one_node, 1),
4210					 1, low1, 0)))
4211	    in_p = 0, low = low0, high = high1;
4212	  else
4213	    {
4214	      /* Canonicalize - [min, x] into - [-, x].  */
4215	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
4216		switch (TREE_CODE (TREE_TYPE (low0)))
4217		  {
4218		  case ENUMERAL_TYPE:
4219		    if (TYPE_PRECISION (TREE_TYPE (low0))
4220			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4221		      break;
4222		    /* FALLTHROUGH */
4223		  case INTEGER_TYPE:
4224		  case CHAR_TYPE:
4225		    if (tree_int_cst_equal (low0,
4226					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
4227		      low0 = 0;
4228		    break;
4229		  case POINTER_TYPE:
4230		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
4231			&& integer_zerop (low0))
4232		      low0 = 0;
4233		    break;
4234		  default:
4235		    break;
4236		  }
4237
4238	      /* Canonicalize - [x, max] into - [x, -].  */
4239	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
4240		switch (TREE_CODE (TREE_TYPE (high1)))
4241		  {
4242		  case ENUMERAL_TYPE:
4243		    if (TYPE_PRECISION (TREE_TYPE (high1))
4244			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4245		      break;
4246		    /* FALLTHROUGH */
4247		  case INTEGER_TYPE:
4248		  case CHAR_TYPE:
4249		    if (tree_int_cst_equal (high1,
4250					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
4251		      high1 = 0;
4252		    break;
4253		  case POINTER_TYPE:
4254		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
4255			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4256						       high1, 1,
4257						       integer_one_node, 1)))
4258		      high1 = 0;
4259		    break;
4260		  default:
4261		    break;
4262		  }
4263
4264	      /* The ranges might be also adjacent between the maximum and
4265	         minimum values of the given type.  For
4266	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4267	         return + [x + 1, y - 1].  */
4268	      if (low0 == 0 && high1 == 0)
4269	        {
4270		  low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4271				     integer_one_node, 1);
4272		  high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4273				      integer_one_node, 0);
4274		  if (low == 0 || high == 0)
4275		    return 0;
4276
4277		  in_p = 1;
4278		}
4279	      else
4280		return 0;
4281	    }
4282	}
4283      else if (subset)
4284	in_p = 0, low = low0, high = high0;
4285      else
4286	in_p = 0, low = low0, high = high1;
4287    }
4288
4289  *pin_p = in_p, *plow = low, *phigh = high;
4290  return 1;
4291}
4292
4293
4294/* Subroutine of fold, looking inside expressions of the form
4295   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4296   of the COND_EXPR.  This function is being used also to optimize
4297   A op B ? C : A, by reversing the comparison first.
4298
4299   Return a folded expression whose code is not a COND_EXPR
4300   anymore, or NULL_TREE if no folding opportunity is found.  */
4301
4302static tree
4303fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4304{
4305  enum tree_code comp_code = TREE_CODE (arg0);
4306  tree arg00 = TREE_OPERAND (arg0, 0);
4307  tree arg01 = TREE_OPERAND (arg0, 1);
4308  tree arg1_type = TREE_TYPE (arg1);
4309  tree tem;
4310
4311  STRIP_NOPS (arg1);
4312  STRIP_NOPS (arg2);
4313
4314  /* If we have A op 0 ? A : -A, consider applying the following
4315     transformations:
4316
4317     A == 0? A : -A    same as -A
4318     A != 0? A : -A    same as A
4319     A >= 0? A : -A    same as abs (A)
4320     A > 0?  A : -A    same as abs (A)
4321     A <= 0? A : -A    same as -abs (A)
4322     A < 0?  A : -A    same as -abs (A)
4323
4324     None of these transformations work for modes with signed
4325     zeros.  If A is +/-0, the first two transformations will
4326     change the sign of the result (from +0 to -0, or vice
4327     versa).  The last four will fix the sign of the result,
4328     even though the original expressions could be positive or
4329     negative, depending on the sign of A.
4330
4331     Note that all these transformations are correct if A is
4332     NaN, since the two alternatives (A and -A) are also NaNs.  */
4333  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4334       ? real_zerop (arg01)
4335       : integer_zerop (arg01))
4336      && ((TREE_CODE (arg2) == NEGATE_EXPR
4337	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4338	     /* In the case that A is of the form X-Y, '-A' (arg2) may
4339	        have already been folded to Y-X, check for that. */
4340	  || (TREE_CODE (arg1) == MINUS_EXPR
4341	      && TREE_CODE (arg2) == MINUS_EXPR
4342	      && operand_equal_p (TREE_OPERAND (arg1, 0),
4343				  TREE_OPERAND (arg2, 1), 0)
4344	      && operand_equal_p (TREE_OPERAND (arg1, 1),
4345				  TREE_OPERAND (arg2, 0), 0))))
4346    switch (comp_code)
4347      {
4348      case EQ_EXPR:
4349      case UNEQ_EXPR:
4350	tem = fold_convert (arg1_type, arg1);
4351	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4352      case NE_EXPR:
4353      case LTGT_EXPR:
4354	return pedantic_non_lvalue (fold_convert (type, arg1));
4355      case UNGE_EXPR:
4356      case UNGT_EXPR:
4357	if (flag_trapping_math)
4358	  break;
4359	/* Fall through.  */
4360      case GE_EXPR:
4361      case GT_EXPR:
4362	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4363	  arg1 = fold_convert (lang_hooks.types.signed_type
4364			       (TREE_TYPE (arg1)), arg1);
4365	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4366	return pedantic_non_lvalue (fold_convert (type, tem));
4367      case UNLE_EXPR:
4368      case UNLT_EXPR:
4369	if (flag_trapping_math)
4370	  break;
4371      case LE_EXPR:
4372      case LT_EXPR:
4373	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4374	  arg1 = fold_convert (lang_hooks.types.signed_type
4375			       (TREE_TYPE (arg1)), arg1);
4376	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4377	return negate_expr (fold_convert (type, tem));
4378      default:
4379	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4380	break;
4381      }
4382
4383  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
4384     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
4385     both transformations are correct when A is NaN: A != 0
4386     is then true, and A == 0 is false.  */
4387
4388  if (integer_zerop (arg01) && integer_zerop (arg2))
4389    {
4390      if (comp_code == NE_EXPR)
4391	return pedantic_non_lvalue (fold_convert (type, arg1));
4392      else if (comp_code == EQ_EXPR)
4393	return fold_convert (type, integer_zero_node);
4394    }
4395
4396  /* Try some transformations of A op B ? A : B.
4397
4398     A == B? A : B    same as B
4399     A != B? A : B    same as A
4400     A >= B? A : B    same as max (A, B)
4401     A > B?  A : B    same as max (B, A)
4402     A <= B? A : B    same as min (A, B)
4403     A < B?  A : B    same as min (B, A)
4404
4405     As above, these transformations don't work in the presence
4406     of signed zeros.  For example, if A and B are zeros of
4407     opposite sign, the first two transformations will change
4408     the sign of the result.  In the last four, the original
4409     expressions give different results for (A=+0, B=-0) and
4410     (A=-0, B=+0), but the transformed expressions do not.
4411
4412     The first two transformations are correct if either A or B
4413     is a NaN.  In the first transformation, the condition will
4414     be false, and B will indeed be chosen.  In the case of the
4415     second transformation, the condition A != B will be true,
4416     and A will be chosen.
4417
4418     The conversions to max() and min() are not correct if B is
4419     a number and A is not.  The conditions in the original
4420     expressions will be false, so all four give B.  The min()
4421     and max() versions would give a NaN instead.  */
4422  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4423      /* Avoid these transformations if the COND_EXPR may be used
4424	 as an lvalue in the C++ front-end.  PR c++/19199.  */
4425      && (in_gimple_form
4426	  || strcmp (lang_hooks.name, "GNU C++") != 0
4427	  || ! maybe_lvalue_p (arg1)
4428	  || ! maybe_lvalue_p (arg2)))
4429    {
4430      tree comp_op0 = arg00;
4431      tree comp_op1 = arg01;
4432      tree comp_type = TREE_TYPE (comp_op0);
4433
4434      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
4435      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4436	{
4437	  comp_type = type;
4438	  comp_op0 = arg1;
4439	  comp_op1 = arg2;
4440	}
4441
4442      switch (comp_code)
4443	{
4444	case EQ_EXPR:
4445	  return pedantic_non_lvalue (fold_convert (type, arg2));
4446	case NE_EXPR:
4447	  return pedantic_non_lvalue (fold_convert (type, arg1));
4448	case LE_EXPR:
4449	case LT_EXPR:
4450	case UNLE_EXPR:
4451	case UNLT_EXPR:
4452	  /* In C++ a ?: expression can be an lvalue, so put the
4453	     operand which will be used if they are equal first
4454	     so that we can convert this back to the
4455	     corresponding COND_EXPR.  */
4456	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4457	    {
4458	      comp_op0 = fold_convert (comp_type, comp_op0);
4459	      comp_op1 = fold_convert (comp_type, comp_op1);
4460	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4461		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4462		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4463	      return pedantic_non_lvalue (fold_convert (type, tem));
4464	    }
4465	  break;
4466	case GE_EXPR:
4467	case GT_EXPR:
4468	case UNGE_EXPR:
4469	case UNGT_EXPR:
4470	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4471	    {
4472	      comp_op0 = fold_convert (comp_type, comp_op0);
4473	      comp_op1 = fold_convert (comp_type, comp_op1);
4474	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4475		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4476		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4477	      return pedantic_non_lvalue (fold_convert (type, tem));
4478	    }
4479	  break;
4480	case UNEQ_EXPR:
4481	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4482	    return pedantic_non_lvalue (fold_convert (type, arg2));
4483	  break;
4484	case LTGT_EXPR:
4485	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4486	    return pedantic_non_lvalue (fold_convert (type, arg1));
4487	  break;
4488	default:
4489	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4490	  break;
4491	}
4492    }
4493
4494  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4495     we might still be able to simplify this.  For example,
4496     if C1 is one less or one more than C2, this might have started
4497     out as a MIN or MAX and been transformed by this function.
4498     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
4499
4500  if (INTEGRAL_TYPE_P (type)
4501      && TREE_CODE (arg01) == INTEGER_CST
4502      && TREE_CODE (arg2) == INTEGER_CST)
4503    switch (comp_code)
4504      {
4505      case EQ_EXPR:
4506	/* We can replace A with C1 in this case.  */
4507	arg1 = fold_convert (type, arg01);
4508	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4509
4510      case LT_EXPR:
4511	/* If C1 is C2 + 1, this is min(A, C2).  */
4512	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4513			       OEP_ONLY_CONST)
4514	    && operand_equal_p (arg01,
4515				const_binop (PLUS_EXPR, arg2,
4516					     integer_one_node, 0),
4517				OEP_ONLY_CONST))
4518	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4519						   type, arg1, arg2));
4520	break;
4521
4522      case LE_EXPR:
4523	/* If C1 is C2 - 1, this is min(A, C2).  */
4524	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4525			       OEP_ONLY_CONST)
4526	    && operand_equal_p (arg01,
4527				const_binop (MINUS_EXPR, arg2,
4528					     integer_one_node, 0),
4529				OEP_ONLY_CONST))
4530	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4531						   type, arg1, arg2));
4532	break;
4533
4534      case GT_EXPR:
4535	/* If C1 is C2 - 1, this is max(A, C2).  */
4536	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4537			       OEP_ONLY_CONST)
4538	    && operand_equal_p (arg01,
4539				const_binop (MINUS_EXPR, arg2,
4540					     integer_one_node, 0),
4541				OEP_ONLY_CONST))
4542	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4543						   type, arg1, arg2));
4544	break;
4545
4546      case GE_EXPR:
4547	/* If C1 is C2 + 1, this is max(A, C2).  */
4548	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4549			       OEP_ONLY_CONST)
4550	    && operand_equal_p (arg01,
4551				const_binop (PLUS_EXPR, arg2,
4552					     integer_one_node, 0),
4553				OEP_ONLY_CONST))
4554	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4555						   type, arg1, arg2));
4556	break;
4557      case NE_EXPR:
4558	break;
4559      default:
4560	gcc_unreachable ();
4561      }
4562
4563  return NULL_TREE;
4564}
4565
4566
4567
4568#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4569#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4570#endif
4571
4572/* EXP is some logical combination of boolean tests.  See if we can
4573   merge it into some range test.  Return the new tree if so.  */
4574
4575static tree
4576fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4577{
4578  int or_op = (code == TRUTH_ORIF_EXPR
4579	       || code == TRUTH_OR_EXPR);
4580  int in0_p, in1_p, in_p;
4581  tree low0, low1, low, high0, high1, high;
4582  tree lhs = make_range (op0, &in0_p, &low0, &high0);
4583  tree rhs = make_range (op1, &in1_p, &low1, &high1);
4584  tree tem;
4585
4586  /* If this is an OR operation, invert both sides; we will invert
4587     again at the end.  */
4588  if (or_op)
4589    in0_p = ! in0_p, in1_p = ! in1_p;
4590
4591  /* If both expressions are the same, if we can merge the ranges, and we
4592     can build the range test, return it or it inverted.  If one of the
4593     ranges is always true or always false, consider it to be the same
4594     expression as the other.  */
4595  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4596      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4597		       in1_p, low1, high1)
4598      && 0 != (tem = (build_range_check (type,
4599					 lhs != 0 ? lhs
4600					 : rhs != 0 ? rhs : integer_zero_node,
4601					 in_p, low, high))))
4602    return or_op ? invert_truthvalue (tem) : tem;
4603
4604  /* On machines where the branch cost is expensive, if this is a
4605     short-circuited branch and the underlying object on both sides
4606     is the same, make a non-short-circuit operation.  */
4607  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4608	   && lhs != 0 && rhs != 0
4609	   && (code == TRUTH_ANDIF_EXPR
4610	       || code == TRUTH_ORIF_EXPR)
4611	   && operand_equal_p (lhs, rhs, 0))
4612    {
4613      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
4614	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4615	 which cases we can't do this.  */
4616      if (simple_operand_p (lhs))
4617	return build2 (code == TRUTH_ANDIF_EXPR
4618		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4619		       type, op0, op1);
4620
4621      else if (lang_hooks.decls.global_bindings_p () == 0
4622	       && ! CONTAINS_PLACEHOLDER_P (lhs))
4623	{
4624	  tree common = save_expr (lhs);
4625
4626	  if (0 != (lhs = build_range_check (type, common,
4627					     or_op ? ! in0_p : in0_p,
4628					     low0, high0))
4629	      && (0 != (rhs = build_range_check (type, common,
4630						 or_op ? ! in1_p : in1_p,
4631						 low1, high1))))
4632	    return build2 (code == TRUTH_ANDIF_EXPR
4633			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4634			   type, lhs, rhs);
4635	}
4636    }
4637
4638  return 0;
4639}
4640
4641/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4642   bit value.  Arrange things so the extra bits will be set to zero if and
4643   only if C is signed-extended to its full width.  If MASK is nonzero,
4644   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
4645
4646static tree
4647unextend (tree c, int p, int unsignedp, tree mask)
4648{
4649  tree type = TREE_TYPE (c);
4650  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4651  tree temp;
4652
4653  if (p == modesize || unsignedp)
4654    return c;
4655
4656  /* We work by getting just the sign bit into the low-order bit, then
4657     into the high-order bit, then sign-extend.  We then XOR that value
4658     with C.  */
4659  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4660  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4661
4662  /* We must use a signed type in order to get an arithmetic right shift.
4663     However, we must also avoid introducing accidental overflows, so that
4664     a subsequent call to integer_zerop will work.  Hence we must
4665     do the type conversion here.  At this point, the constant is either
4666     zero or one, and the conversion to a signed type can never overflow.
4667     We could get an overflow if this conversion is done anywhere else.  */
4668  if (TYPE_UNSIGNED (type))
4669    temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4670
4671  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4672  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4673  if (mask != 0)
4674    temp = const_binop (BIT_AND_EXPR, temp,
4675			fold_convert (TREE_TYPE (c), mask), 0);
4676  /* If necessary, convert the type back to match the type of C.  */
4677  if (TYPE_UNSIGNED (type))
4678    temp = fold_convert (type, temp);
4679
4680  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4681}
4682
4683/* Find ways of folding logical expressions of LHS and RHS:
4684   Try to merge two comparisons to the same innermost item.
4685   Look for range tests like "ch >= '0' && ch <= '9'".
4686   Look for combinations of simple terms on machines with expensive branches
4687   and evaluate the RHS unconditionally.
4688
4689   For example, if we have p->a == 2 && p->b == 4 and we can make an
4690   object large enough to span both A and B, we can do this with a comparison
4691   against the object ANDed with the a mask.
4692
4693   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4694   operations to do this with one comparison.
4695
4696   We check for both normal comparisons and the BIT_AND_EXPRs made this by
4697   function and the one above.
4698
4699   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
4700   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4701
4702   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4703   two operands.
4704
4705   We return the simplified tree or 0 if no optimization is possible.  */
4706
4707static tree
4708fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4709{
4710  /* If this is the "or" of two comparisons, we can do something if
4711     the comparisons are NE_EXPR.  If this is the "and", we can do something
4712     if the comparisons are EQ_EXPR.  I.e.,
4713	(a->b == 2 && a->c == 4) can become (a->new == NEW).
4714
4715     WANTED_CODE is this operation code.  For single bit fields, we can
4716     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4717     comparison for one-bit fields.  */
4718
4719  enum tree_code wanted_code;
4720  enum tree_code lcode, rcode;
4721  tree ll_arg, lr_arg, rl_arg, rr_arg;
4722  tree ll_inner, lr_inner, rl_inner, rr_inner;
4723  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4724  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4725  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4726  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4727  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4728  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4729  enum machine_mode lnmode, rnmode;
4730  tree ll_mask, lr_mask, rl_mask, rr_mask;
4731  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4732  tree l_const, r_const;
4733  tree lntype, rntype, result;
4734  int first_bit, end_bit;
4735  int volatilep;
4736
4737  /* Start by getting the comparison codes.  Fail if anything is volatile.
4738     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4739     it were surrounded with a NE_EXPR.  */
4740
4741  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4742    return 0;
4743
4744  lcode = TREE_CODE (lhs);
4745  rcode = TREE_CODE (rhs);
4746
4747  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4748    {
4749      lhs = build2 (NE_EXPR, truth_type, lhs,
4750		    fold_convert (TREE_TYPE (lhs), integer_zero_node));
4751      lcode = NE_EXPR;
4752    }
4753
4754  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4755    {
4756      rhs = build2 (NE_EXPR, truth_type, rhs,
4757		    fold_convert (TREE_TYPE (rhs), integer_zero_node));
4758      rcode = NE_EXPR;
4759    }
4760
4761  if (TREE_CODE_CLASS (lcode) != tcc_comparison
4762      || TREE_CODE_CLASS (rcode) != tcc_comparison)
4763    return 0;
4764
4765  ll_arg = TREE_OPERAND (lhs, 0);
4766  lr_arg = TREE_OPERAND (lhs, 1);
4767  rl_arg = TREE_OPERAND (rhs, 0);
4768  rr_arg = TREE_OPERAND (rhs, 1);
4769
4770  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
4771  if (simple_operand_p (ll_arg)
4772      && simple_operand_p (lr_arg))
4773    {
4774      tree result;
4775      if (operand_equal_p (ll_arg, rl_arg, 0)
4776          && operand_equal_p (lr_arg, rr_arg, 0))
4777	{
4778          result = combine_comparisons (code, lcode, rcode,
4779					truth_type, ll_arg, lr_arg);
4780	  if (result)
4781	    return result;
4782	}
4783      else if (operand_equal_p (ll_arg, rr_arg, 0)
4784               && operand_equal_p (lr_arg, rl_arg, 0))
4785	{
4786          result = combine_comparisons (code, lcode,
4787					swap_tree_comparison (rcode),
4788					truth_type, ll_arg, lr_arg);
4789	  if (result)
4790	    return result;
4791	}
4792    }
4793
4794  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4795	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4796
4797  /* If the RHS can be evaluated unconditionally and its operands are
4798     simple, it wins to evaluate the RHS unconditionally on machines
4799     with expensive branches.  In this case, this isn't a comparison
4800     that can be merged.  Avoid doing this if the RHS is a floating-point
4801     comparison since those can trap.  */
4802
4803  if (BRANCH_COST >= 2
4804      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4805      && simple_operand_p (rl_arg)
4806      && simple_operand_p (rr_arg))
4807    {
4808      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
4809      if (code == TRUTH_OR_EXPR
4810	  && lcode == NE_EXPR && integer_zerop (lr_arg)
4811	  && rcode == NE_EXPR && integer_zerop (rr_arg)
4812	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4813	return build2 (NE_EXPR, truth_type,
4814		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4815			       ll_arg, rl_arg),
4816		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4817
4818      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
4819      if (code == TRUTH_AND_EXPR
4820	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
4821	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
4822	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4823	return build2 (EQ_EXPR, truth_type,
4824		       build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4825			       ll_arg, rl_arg),
4826		       fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4827
4828      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4829	return build2 (code, truth_type, lhs, rhs);
4830    }
4831
4832  /* See if the comparisons can be merged.  Then get all the parameters for
4833     each side.  */
4834
4835  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4836      || (rcode != EQ_EXPR && rcode != NE_EXPR))
4837    return 0;
4838
4839  volatilep = 0;
4840  ll_inner = decode_field_reference (ll_arg,
4841				     &ll_bitsize, &ll_bitpos, &ll_mode,
4842				     &ll_unsignedp, &volatilep, &ll_mask,
4843				     &ll_and_mask);
4844  lr_inner = decode_field_reference (lr_arg,
4845				     &lr_bitsize, &lr_bitpos, &lr_mode,
4846				     &lr_unsignedp, &volatilep, &lr_mask,
4847				     &lr_and_mask);
4848  rl_inner = decode_field_reference (rl_arg,
4849				     &rl_bitsize, &rl_bitpos, &rl_mode,
4850				     &rl_unsignedp, &volatilep, &rl_mask,
4851				     &rl_and_mask);
4852  rr_inner = decode_field_reference (rr_arg,
4853				     &rr_bitsize, &rr_bitpos, &rr_mode,
4854				     &rr_unsignedp, &volatilep, &rr_mask,
4855				     &rr_and_mask);
4856
4857  /* It must be true that the inner operation on the lhs of each
4858     comparison must be the same if we are to be able to do anything.
4859     Then see if we have constants.  If not, the same must be true for
4860     the rhs's.  */
4861  if (volatilep || ll_inner == 0 || rl_inner == 0
4862      || ! operand_equal_p (ll_inner, rl_inner, 0))
4863    return 0;
4864
4865  if (TREE_CODE (lr_arg) == INTEGER_CST
4866      && TREE_CODE (rr_arg) == INTEGER_CST)
4867    l_const = lr_arg, r_const = rr_arg;
4868  else if (lr_inner == 0 || rr_inner == 0
4869	   || ! operand_equal_p (lr_inner, rr_inner, 0))
4870    return 0;
4871  else
4872    l_const = r_const = 0;
4873
4874  /* If either comparison code is not correct for our logical operation,
4875     fail.  However, we can convert a one-bit comparison against zero into
4876     the opposite comparison against that bit being set in the field.  */
4877
4878  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4879  if (lcode != wanted_code)
4880    {
4881      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4882	{
4883	  /* Make the left operand unsigned, since we are only interested
4884	     in the value of one bit.  Otherwise we are doing the wrong
4885	     thing below.  */
4886	  ll_unsignedp = 1;
4887	  l_const = ll_mask;
4888	}
4889      else
4890	return 0;
4891    }
4892
4893  /* This is analogous to the code for l_const above.  */
4894  if (rcode != wanted_code)
4895    {
4896      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4897	{
4898	  rl_unsignedp = 1;
4899	  r_const = rl_mask;
4900	}
4901      else
4902	return 0;
4903    }
4904
4905  /* After this point all optimizations will generate bit-field
4906     references, which we might not want.  */
4907  if (! lang_hooks.can_use_bit_fields_p ())
4908    return 0;
4909
4910  /* See if we can find a mode that contains both fields being compared on
4911     the left.  If we can't, fail.  Otherwise, update all constants and masks
4912     to be relative to a field of that size.  */
4913  first_bit = MIN (ll_bitpos, rl_bitpos);
4914  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4915  lnmode = get_best_mode (end_bit - first_bit, first_bit,
4916			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4917			  volatilep);
4918  if (lnmode == VOIDmode)
4919    return 0;
4920
4921  lnbitsize = GET_MODE_BITSIZE (lnmode);
4922  lnbitpos = first_bit & ~ (lnbitsize - 1);
4923  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4924  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4925
4926  if (BYTES_BIG_ENDIAN)
4927    {
4928      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4929      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4930    }
4931
4932  ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4933			 size_int (xll_bitpos), 0);
4934  rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4935			 size_int (xrl_bitpos), 0);
4936
4937  if (l_const)
4938    {
4939      l_const = fold_convert (lntype, l_const);
4940      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4941      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4942      if (integer_nonzerop (const_binop (BIT_AND_EXPR, l_const,
4943					 fold_build1 (BIT_NOT_EXPR,
4944						      lntype, ll_mask),
4945					 0)))
4946	{
4947	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4948
4949	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4950	}
4951    }
4952  if (r_const)
4953    {
4954      r_const = fold_convert (lntype, r_const);
4955      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4956      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4957      if (integer_nonzerop (const_binop (BIT_AND_EXPR, r_const,
4958					 fold_build1 (BIT_NOT_EXPR,
4959						      lntype, rl_mask),
4960					 0)))
4961	{
4962	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4963
4964	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4965	}
4966    }
4967
4968  /* If the right sides are not constant, do the same for it.  Also,
4969     disallow this optimization if a size or signedness mismatch occurs
4970     between the left and right sides.  */
4971  if (l_const == 0)
4972    {
4973      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4974	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4975	  /* Make sure the two fields on the right
4976	     correspond to the left without being swapped.  */
4977	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4978	return 0;
4979
4980      first_bit = MIN (lr_bitpos, rr_bitpos);
4981      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4982      rnmode = get_best_mode (end_bit - first_bit, first_bit,
4983			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4984			      volatilep);
4985      if (rnmode == VOIDmode)
4986	return 0;
4987
4988      rnbitsize = GET_MODE_BITSIZE (rnmode);
4989      rnbitpos = first_bit & ~ (rnbitsize - 1);
4990      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4991      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4992
4993      if (BYTES_BIG_ENDIAN)
4994	{
4995	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4996	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4997	}
4998
4999      lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5000			     size_int (xlr_bitpos), 0);
5001      rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5002			     size_int (xrr_bitpos), 0);
5003
5004      /* Make a mask that corresponds to both fields being compared.
5005	 Do this for both items being compared.  If the operands are the
5006	 same size and the bits being compared are in the same position
5007	 then we can do this by masking both and comparing the masked
5008	 results.  */
5009      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5010      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5011      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5012	{
5013	  lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5014				    ll_unsignedp || rl_unsignedp);
5015	  if (! all_ones_mask_p (ll_mask, lnbitsize))
5016	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5017
5018	  rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5019				    lr_unsignedp || rr_unsignedp);
5020	  if (! all_ones_mask_p (lr_mask, rnbitsize))
5021	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5022
5023	  return build2 (wanted_code, truth_type, lhs, rhs);
5024	}
5025
5026      /* There is still another way we can do something:  If both pairs of
5027	 fields being compared are adjacent, we may be able to make a wider
5028	 field containing them both.
5029
5030	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
5031	 the mask must be shifted to account for the shift done by
5032	 make_bit_field_ref.  */
5033      if ((ll_bitsize + ll_bitpos == rl_bitpos
5034	   && lr_bitsize + lr_bitpos == rr_bitpos)
5035	  || (ll_bitpos == rl_bitpos + rl_bitsize
5036	      && lr_bitpos == rr_bitpos + rr_bitsize))
5037	{
5038	  tree type;
5039
5040	  lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5041				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5042	  rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5043				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5044
5045	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5046				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5047	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5048				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5049
5050	  /* Convert to the smaller type before masking out unwanted bits.  */
5051	  type = lntype;
5052	  if (lntype != rntype)
5053	    {
5054	      if (lnbitsize > rnbitsize)
5055		{
5056		  lhs = fold_convert (rntype, lhs);
5057		  ll_mask = fold_convert (rntype, ll_mask);
5058		  type = rntype;
5059		}
5060	      else if (lnbitsize < rnbitsize)
5061		{
5062		  rhs = fold_convert (lntype, rhs);
5063		  lr_mask = fold_convert (lntype, lr_mask);
5064		  type = lntype;
5065		}
5066	    }
5067
5068	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5069	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5070
5071	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5072	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5073
5074	  return build2 (wanted_code, truth_type, lhs, rhs);
5075	}
5076
5077      return 0;
5078    }
5079
5080  /* Handle the case of comparisons with constants.  If there is something in
5081     common between the masks, those bits of the constants must be the same.
5082     If not, the condition is always false.  Test for this to avoid generating
5083     incorrect code below.  */
5084  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5085  if (! integer_zerop (result)
5086      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5087			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5088    {
5089      if (wanted_code == NE_EXPR)
5090	{
5091	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
5092	  return constant_boolean_node (true, truth_type);
5093	}
5094      else
5095	{
5096	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5097	  return constant_boolean_node (false, truth_type);
5098	}
5099    }
5100
5101  /* Construct the expression we will return.  First get the component
5102     reference we will make.  Unless the mask is all ones the width of
5103     that field, perform the mask operation.  Then compare with the
5104     merged constant.  */
5105  result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5106			       ll_unsignedp || rl_unsignedp);
5107
5108  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5109  if (! all_ones_mask_p (ll_mask, lnbitsize))
5110    result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5111
5112  return build2 (wanted_code, truth_type, result,
5113		 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5114}
5115
5116/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5117   constant.  */
5118
5119static tree
5120optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5121{
5122  tree arg0 = op0;
5123  enum tree_code op_code;
5124  tree comp_const = op1;
5125  tree minmax_const;
5126  int consts_equal, consts_lt;
5127  tree inner;
5128
5129  STRIP_SIGN_NOPS (arg0);
5130
5131  op_code = TREE_CODE (arg0);
5132  minmax_const = TREE_OPERAND (arg0, 1);
5133  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5134  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5135  inner = TREE_OPERAND (arg0, 0);
5136
5137  /* If something does not permit us to optimize, return the original tree.  */
5138  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5139      || TREE_CODE (comp_const) != INTEGER_CST
5140      || TREE_CONSTANT_OVERFLOW (comp_const)
5141      || TREE_CODE (minmax_const) != INTEGER_CST
5142      || TREE_CONSTANT_OVERFLOW (minmax_const))
5143    return NULL_TREE;
5144
5145  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
5146     and GT_EXPR, doing the rest with recursive calls using logical
5147     simplifications.  */
5148  switch (code)
5149    {
5150    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
5151      {
5152	/* FIXME: We should be able to invert code without building a
5153	   scratch tree node, but doing so would require us to
5154	   duplicate a part of invert_truthvalue here.  */
5155	tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5156	tem = optimize_minmax_comparison (TREE_CODE (tem),
5157					  TREE_TYPE (tem),
5158					  TREE_OPERAND (tem, 0),
5159					  TREE_OPERAND (tem, 1));
5160	return invert_truthvalue (tem);
5161      }
5162
5163    case GE_EXPR:
5164      return
5165	fold_build2 (TRUTH_ORIF_EXPR, type,
5166		     optimize_minmax_comparison
5167		     (EQ_EXPR, type, arg0, comp_const),
5168		     optimize_minmax_comparison
5169		     (GT_EXPR, type, arg0, comp_const));
5170
5171    case EQ_EXPR:
5172      if (op_code == MAX_EXPR && consts_equal)
5173	/* MAX (X, 0) == 0  ->  X <= 0  */
5174	return fold_build2 (LE_EXPR, type, inner, comp_const);
5175
5176      else if (op_code == MAX_EXPR && consts_lt)
5177	/* MAX (X, 0) == 5  ->  X == 5   */
5178	return fold_build2 (EQ_EXPR, type, inner, comp_const);
5179
5180      else if (op_code == MAX_EXPR)
5181	/* MAX (X, 0) == -1  ->  false  */
5182	return omit_one_operand (type, integer_zero_node, inner);
5183
5184      else if (consts_equal)
5185	/* MIN (X, 0) == 0  ->  X >= 0  */
5186	return fold_build2 (GE_EXPR, type, inner, comp_const);
5187
5188      else if (consts_lt)
5189	/* MIN (X, 0) == 5  ->  false  */
5190	return omit_one_operand (type, integer_zero_node, inner);
5191
5192      else
5193	/* MIN (X, 0) == -1  ->  X == -1  */
5194	return fold_build2 (EQ_EXPR, type, inner, comp_const);
5195
5196    case GT_EXPR:
5197      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5198	/* MAX (X, 0) > 0  ->  X > 0
5199	   MAX (X, 0) > 5  ->  X > 5  */
5200	return fold_build2 (GT_EXPR, type, inner, comp_const);
5201
5202      else if (op_code == MAX_EXPR)
5203	/* MAX (X, 0) > -1  ->  true  */
5204	return omit_one_operand (type, integer_one_node, inner);
5205
5206      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5207	/* MIN (X, 0) > 0  ->  false
5208	   MIN (X, 0) > 5  ->  false  */
5209	return omit_one_operand (type, integer_zero_node, inner);
5210
5211      else
5212	/* MIN (X, 0) > -1  ->  X > -1  */
5213	return fold_build2 (GT_EXPR, type, inner, comp_const);
5214
5215    default:
5216      return NULL_TREE;
5217    }
5218}
5219
5220/* T is an integer expression that is being multiplied, divided, or taken a
5221   modulus (CODE says which and what kind of divide or modulus) by a
5222   constant C.  See if we can eliminate that operation by folding it with
5223   other operations already in T.  WIDE_TYPE, if non-null, is a type that
5224   should be used for the computation if wider than our type.
5225
5226   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5227   (X * 2) + (Y * 4).  We must, however, be assured that either the original
5228   expression would not overflow or that overflow is undefined for the type
5229   in the language in question.
5230
5231   We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5232   the machine has a multiply-accumulate insn or that this is part of an
5233   addressing calculation.
5234
5235   If we return a non-null expression, it is an equivalent form of the
5236   original computation, but need not be in the original type.  */
5237
5238static tree
5239extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5240{
5241  /* To avoid exponential search depth, refuse to allow recursion past
5242     three levels.  Beyond that (1) it's highly unlikely that we'll find
5243     something interesting and (2) we've probably processed it before
5244     when we built the inner expression.  */
5245
5246  static int depth;
5247  tree ret;
5248
5249  if (depth > 3)
5250    return NULL;
5251
5252  depth++;
5253  ret = extract_muldiv_1 (t, c, code, wide_type);
5254  depth--;
5255
5256  return ret;
5257}
5258
5259static tree
5260extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5261{
5262  tree type = TREE_TYPE (t);
5263  enum tree_code tcode = TREE_CODE (t);
5264  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5265				   > GET_MODE_SIZE (TYPE_MODE (type)))
5266		? wide_type : type);
5267  tree t1, t2;
5268  int same_p = tcode == code;
5269  tree op0 = NULL_TREE, op1 = NULL_TREE;
5270
5271  /* Don't deal with constants of zero here; they confuse the code below.  */
5272  if (integer_zerop (c))
5273    return NULL_TREE;
5274
5275  if (TREE_CODE_CLASS (tcode) == tcc_unary)
5276    op0 = TREE_OPERAND (t, 0);
5277
5278  if (TREE_CODE_CLASS (tcode) == tcc_binary)
5279    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5280
5281  /* Note that we need not handle conditional operations here since fold
5282     already handles those cases.  So just do arithmetic here.  */
5283  switch (tcode)
5284    {
5285    case INTEGER_CST:
5286      /* For a constant, we can always simplify if we are a multiply
5287	 or (for divide and modulus) if it is a multiple of our constant.  */
5288      if (code == MULT_EXPR
5289	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5290	return const_binop (code, fold_convert (ctype, t),
5291			    fold_convert (ctype, c), 0);
5292      break;
5293
5294    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
5295      /* If op0 is an expression ...  */
5296      if ((COMPARISON_CLASS_P (op0)
5297	   || UNARY_CLASS_P (op0)
5298	   || BINARY_CLASS_P (op0)
5299	   || EXPRESSION_CLASS_P (op0))
5300	  /* ... and is unsigned, and its type is smaller than ctype,
5301	     then we cannot pass through as widening.  */
5302	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5303	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5304		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5305	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
5306	           > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5307	      /* ... or this is a truncation (t is narrower than op0),
5308		 then we cannot pass through this narrowing.  */
5309	      || (GET_MODE_SIZE (TYPE_MODE (type))
5310		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5311	      /* ... or signedness changes for division or modulus,
5312		 then we cannot pass through this conversion.  */
5313	      || (code != MULT_EXPR
5314		  && (TYPE_UNSIGNED (ctype)
5315		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5316	break;
5317
5318      /* Pass the constant down and see if we can make a simplification.  If
5319	 we can, replace this expression with the inner simplification for
5320	 possible later conversion to our or some other type.  */
5321      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5322	  && TREE_CODE (t2) == INTEGER_CST
5323	  && ! TREE_CONSTANT_OVERFLOW (t2)
5324	  && (0 != (t1 = extract_muldiv (op0, t2, code,
5325					 code == MULT_EXPR
5326					 ? ctype : NULL_TREE))))
5327	return t1;
5328      break;
5329
5330    case ABS_EXPR:
5331      /* If widening the type changes it from signed to unsigned, then we
5332         must avoid building ABS_EXPR itself as unsigned.  */
5333      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5334        {
5335          tree cstype = (*lang_hooks.types.signed_type) (ctype);
5336          if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5337            {
5338              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5339              return fold_convert (ctype, t1);
5340            }
5341          break;
5342        }
5343      /* If the constant is negative, we cannot simplify this.  */
5344      if (tree_int_cst_sgn (c) == -1)
5345        break;
5346      /* FALLTHROUGH */
5347    case NEGATE_EXPR:
5348      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5349	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5350      break;
5351
5352    case MIN_EXPR:  case MAX_EXPR:
5353      /* If widening the type changes the signedness, then we can't perform
5354	 this optimization as that changes the result.  */
5355      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5356	break;
5357
5358      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
5359      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5360	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5361	{
5362	  if (tree_int_cst_sgn (c) < 0)
5363	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5364
5365	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5366			      fold_convert (ctype, t2));
5367	}
5368      break;
5369
5370    case LSHIFT_EXPR:  case RSHIFT_EXPR:
5371      /* If the second operand is constant, this is a multiplication
5372	 or floor division, by a power of two, so we can treat it that
5373	 way unless the multiplier or divisor overflows.  Signed
5374	 left-shift overflow is implementation-defined rather than
5375	 undefined in C90, so do not convert signed left shift into
5376	 multiplication.  */
5377      if (TREE_CODE (op1) == INTEGER_CST
5378	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5379	  /* const_binop may not detect overflow correctly,
5380	     so check for it explicitly here.  */
5381	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5382	  && TREE_INT_CST_HIGH (op1) == 0
5383	  && 0 != (t1 = fold_convert (ctype,
5384				      const_binop (LSHIFT_EXPR,
5385						   size_one_node,
5386						   op1, 0)))
5387	  && ! TREE_OVERFLOW (t1))
5388	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5389				       ? MULT_EXPR : FLOOR_DIV_EXPR,
5390				       ctype, fold_convert (ctype, op0), t1),
5391			       c, code, wide_type);
5392      break;
5393
5394    case PLUS_EXPR:  case MINUS_EXPR:
5395      /* See if we can eliminate the operation on both sides.  If we can, we
5396	 can return a new PLUS or MINUS.  If we can't, the only remaining
5397	 cases where we can do anything are if the second operand is a
5398	 constant.  */
5399      t1 = extract_muldiv (op0, c, code, wide_type);
5400      t2 = extract_muldiv (op1, c, code, wide_type);
5401      if (t1 != 0 && t2 != 0
5402	  && (code == MULT_EXPR
5403	      /* If not multiplication, we can only do this if both operands
5404		 are divisible by c.  */
5405	      || (multiple_of_p (ctype, op0, c)
5406	          && multiple_of_p (ctype, op1, c))))
5407	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5408			    fold_convert (ctype, t2));
5409
5410      /* If this was a subtraction, negate OP1 and set it to be an addition.
5411	 This simplifies the logic below.  */
5412      if (tcode == MINUS_EXPR)
5413	tcode = PLUS_EXPR, op1 = negate_expr (op1);
5414
5415      if (TREE_CODE (op1) != INTEGER_CST)
5416	break;
5417
5418      /* If either OP1 or C are negative, this optimization is not safe for
5419	 some of the division and remainder types while for others we need
5420	 to change the code.  */
5421      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5422	{
5423	  if (code == CEIL_DIV_EXPR)
5424	    code = FLOOR_DIV_EXPR;
5425	  else if (code == FLOOR_DIV_EXPR)
5426	    code = CEIL_DIV_EXPR;
5427	  else if (code != MULT_EXPR
5428		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5429	    break;
5430	}
5431
5432      /* If it's a multiply or a division/modulus operation of a multiple
5433         of our constant, do the operation and verify it doesn't overflow.  */
5434      if (code == MULT_EXPR
5435	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5436	{
5437	  op1 = const_binop (code, fold_convert (ctype, op1),
5438			     fold_convert (ctype, c), 0);
5439	  /* We allow the constant to overflow with wrapping semantics.  */
5440	  if (op1 == 0
5441	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5442	    break;
5443	}
5444      else
5445	break;
5446
5447      /* If we have an unsigned type is not a sizetype, we cannot widen
5448	 the operation since it will change the result if the original
5449	 computation overflowed.  */
5450      if (TYPE_UNSIGNED (ctype)
5451	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5452	  && ctype != type)
5453	break;
5454
5455      /* If we were able to eliminate our operation from the first side,
5456	 apply our operation to the second side and reform the PLUS.  */
5457      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5458	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5459
5460      /* The last case is if we are a multiply.  In that case, we can
5461	 apply the distributive law to commute the multiply and addition
5462	 if the multiplication of the constants doesn't overflow.  */
5463      if (code == MULT_EXPR)
5464	return fold_build2 (tcode, ctype,
5465			    fold_build2 (code, ctype,
5466					 fold_convert (ctype, op0),
5467					 fold_convert (ctype, c)),
5468			    op1);
5469
5470      break;
5471
5472    case MULT_EXPR:
5473      /* We have a special case here if we are doing something like
5474	 (C * 8) % 4 since we know that's zero.  */
5475      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5476	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5477	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5478	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5479	return omit_one_operand (type, integer_zero_node, op0);
5480
5481      /* ... fall through ...  */
5482
5483    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
5484    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
5485      /* If we can extract our operation from the LHS, do so and return a
5486	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
5487	 do something only if the second operand is a constant.  */
5488      if (same_p
5489	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5490	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5491			    fold_convert (ctype, op1));
5492      else if (tcode == MULT_EXPR && code == MULT_EXPR
5493	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5494	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5495			    fold_convert (ctype, t1));
5496      else if (TREE_CODE (op1) != INTEGER_CST)
5497	return 0;
5498
5499      /* If these are the same operation types, we can associate them
5500	 assuming no overflow.  */
5501      if (tcode == code
5502	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5503				     fold_convert (ctype, c), 0))
5504	  && ! TREE_OVERFLOW (t1))
5505	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5506
5507      /* If these operations "cancel" each other, we have the main
5508	 optimizations of this pass, which occur when either constant is a
5509	 multiple of the other, in which case we replace this with either an
5510	 operation or CODE or TCODE.
5511
5512	 If we have an unsigned type that is not a sizetype, we cannot do
5513	 this since it will change the result if the original computation
5514	 overflowed.  */
5515      if ((! TYPE_UNSIGNED (ctype)
5516	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5517	  && ! flag_wrapv
5518	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5519	      || (tcode == MULT_EXPR
5520		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5521		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5522	{
5523	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5524	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5525				fold_convert (ctype,
5526					      const_binop (TRUNC_DIV_EXPR,
5527							   op1, c, 0)));
5528	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5529	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
5530				fold_convert (ctype,
5531					      const_binop (TRUNC_DIV_EXPR,
5532							   c, op1, 0)));
5533	}
5534      break;
5535
5536    default:
5537      break;
5538    }
5539
5540  return 0;
5541}
5542
5543/* Return a node which has the indicated constant VALUE (either 0 or
5544   1), and is of the indicated TYPE.  */
5545
5546tree
5547constant_boolean_node (int value, tree type)
5548{
5549  if (type == integer_type_node)
5550    return value ? integer_one_node : integer_zero_node;
5551  else if (type == boolean_type_node)
5552    return value ? boolean_true_node : boolean_false_node;
5553  else
5554    return build_int_cst (type, value);
5555}
5556
5557
5558/* Return true if expr looks like an ARRAY_REF and set base and
5559   offset to the appropriate trees.  If there is no offset,
5560   offset is set to NULL_TREE.  Base will be canonicalized to
5561   something you can get the element type from using
5562   TREE_TYPE (TREE_TYPE (base)).  Offset will be the offset
5563   in bytes to the base.  */
5564
5565static bool
5566extract_array_ref (tree expr, tree *base, tree *offset)
5567{
5568  /* One canonical form is a PLUS_EXPR with the first
5569     argument being an ADDR_EXPR with a possible NOP_EXPR
5570     attached.  */
5571  if (TREE_CODE (expr) == PLUS_EXPR)
5572    {
5573      tree op0 = TREE_OPERAND (expr, 0);
5574      tree inner_base, dummy1;
5575      /* Strip NOP_EXPRs here because the C frontends and/or
5576	 folders present us (int *)&x.a + 4B possibly.  */
5577      STRIP_NOPS (op0);
5578      if (extract_array_ref (op0, &inner_base, &dummy1))
5579	{
5580	  *base = inner_base;
5581	  if (dummy1 == NULL_TREE)
5582	    *offset = TREE_OPERAND (expr, 1);
5583	  else
5584	    *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5585				   dummy1, TREE_OPERAND (expr, 1));
5586	  return true;
5587	}
5588    }
5589  /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5590     which we transform into an ADDR_EXPR with appropriate
5591     offset.  For other arguments to the ADDR_EXPR we assume
5592     zero offset and as such do not care about the ADDR_EXPR
5593     type and strip possible nops from it.  */
5594  else if (TREE_CODE (expr) == ADDR_EXPR)
5595    {
5596      tree op0 = TREE_OPERAND (expr, 0);
5597      if (TREE_CODE (op0) == ARRAY_REF)
5598	{
5599	  tree idx = TREE_OPERAND (op0, 1);
5600	  *base = TREE_OPERAND (op0, 0);
5601	  *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5602				 array_ref_element_size (op0));
5603	}
5604      else
5605	{
5606	  /* Handle array-to-pointer decay as &a.  */
5607	  if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5608	    *base = TREE_OPERAND (expr, 0);
5609	  else
5610	    *base = expr;
5611	  *offset = NULL_TREE;
5612	}
5613      return true;
5614    }
5615  /* The next canonical form is a VAR_DECL with POINTER_TYPE.  */
5616  else if (SSA_VAR_P (expr)
5617	   && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5618    {
5619      *base = expr;
5620      *offset = NULL_TREE;
5621      return true;
5622    }
5623
5624  return false;
5625}
5626
5627
5628/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5629   Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
5630   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5631   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
5632   COND is the first argument to CODE; otherwise (as in the example
5633   given here), it is the second argument.  TYPE is the type of the
5634   original expression.  Return NULL_TREE if no simplification is
5635   possible.  */
5636
5637static tree
5638fold_binary_op_with_conditional_arg (enum tree_code code,
5639				     tree type, tree op0, tree op1,
5640				     tree cond, tree arg, int cond_first_p)
5641{
5642  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5643  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5644  tree test, true_value, false_value;
5645  tree lhs = NULL_TREE;
5646  tree rhs = NULL_TREE;
5647
5648  /* This transformation is only worthwhile if we don't have to wrap
5649     arg in a SAVE_EXPR, and the operation can be simplified on at least
5650     one of the branches once its pushed inside the COND_EXPR.  */
5651  if (!TREE_CONSTANT (arg))
5652    return NULL_TREE;
5653
5654  if (TREE_CODE (cond) == COND_EXPR)
5655    {
5656      test = TREE_OPERAND (cond, 0);
5657      true_value = TREE_OPERAND (cond, 1);
5658      false_value = TREE_OPERAND (cond, 2);
5659      /* If this operand throws an expression, then it does not make
5660	 sense to try to perform a logical or arithmetic operation
5661	 involving it.  */
5662      if (VOID_TYPE_P (TREE_TYPE (true_value)))
5663	lhs = true_value;
5664      if (VOID_TYPE_P (TREE_TYPE (false_value)))
5665	rhs = false_value;
5666    }
5667  else
5668    {
5669      tree testtype = TREE_TYPE (cond);
5670      test = cond;
5671      true_value = constant_boolean_node (true, testtype);
5672      false_value = constant_boolean_node (false, testtype);
5673    }
5674
5675  arg = fold_convert (arg_type, arg);
5676  if (lhs == 0)
5677    {
5678      true_value = fold_convert (cond_type, true_value);
5679      if (cond_first_p)
5680	lhs = fold_build2 (code, type, true_value, arg);
5681      else
5682	lhs = fold_build2 (code, type, arg, true_value);
5683    }
5684  if (rhs == 0)
5685    {
5686      false_value = fold_convert (cond_type, false_value);
5687      if (cond_first_p)
5688	rhs = fold_build2 (code, type, false_value, arg);
5689      else
5690	rhs = fold_build2 (code, type, arg, false_value);
5691    }
5692
5693  test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5694  return fold_convert (type, test);
5695}
5696
5697
5698/* Subroutine of fold() that checks for the addition of +/- 0.0.
5699
5700   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5701   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
5702   ADDEND is the same as X.
5703
5704   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5705   and finite.  The problematic cases are when X is zero, and its mode
5706   has signed zeros.  In the case of rounding towards -infinity,
5707   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
5708   modes, X + 0 is not the same as X because -0 + 0 is 0.  */
5709
5710static bool
5711fold_real_zero_addition_p (tree type, tree addend, int negate)
5712{
5713  if (!real_zerop (addend))
5714    return false;
5715
5716  /* Don't allow the fold with -fsignaling-nans.  */
5717  if (HONOR_SNANS (TYPE_MODE (type)))
5718    return false;
5719
5720  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
5721  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5722    return true;
5723
5724  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
5725  if (TREE_CODE (addend) == REAL_CST
5726      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5727    negate = !negate;
5728
5729  /* The mode has signed zeros, and we have to honor their sign.
5730     In this situation, there is only one case we can return true for.
5731     X - 0 is the same as X unless rounding towards -infinity is
5732     supported.  */
5733  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5734}
5735
5736/* Subroutine of fold() that checks comparisons of built-in math
5737   functions against real constants.
5738
5739   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5740   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
5741   is the type of the result and ARG0 and ARG1 are the operands of the
5742   comparison.  ARG1 must be a TREE_REAL_CST.
5743
5744   The function returns the constant folded tree if a simplification
5745   can be made, and NULL_TREE otherwise.  */
5746
5747static tree
5748fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5749		     tree type, tree arg0, tree arg1)
5750{
5751  REAL_VALUE_TYPE c;
5752
5753  if (BUILTIN_SQRT_P (fcode))
5754    {
5755      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5756      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5757
5758      c = TREE_REAL_CST (arg1);
5759      if (REAL_VALUE_NEGATIVE (c))
5760	{
5761	  /* sqrt(x) < y is always false, if y is negative.  */
5762	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5763	    return omit_one_operand (type, integer_zero_node, arg);
5764
5765	  /* sqrt(x) > y is always true, if y is negative and we
5766	     don't care about NaNs, i.e. negative values of x.  */
5767	  if (code == NE_EXPR || !HONOR_NANS (mode))
5768	    return omit_one_operand (type, integer_one_node, arg);
5769
5770	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
5771	  return fold_build2 (GE_EXPR, type, arg,
5772			      build_real (TREE_TYPE (arg), dconst0));
5773	}
5774      else if (code == GT_EXPR || code == GE_EXPR)
5775	{
5776	  REAL_VALUE_TYPE c2;
5777
5778	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5779	  real_convert (&c2, mode, &c2);
5780
5781	  if (REAL_VALUE_ISINF (c2))
5782	    {
5783	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
5784	      if (HONOR_INFINITIES (mode))
5785		return fold_build2 (EQ_EXPR, type, arg,
5786				    build_real (TREE_TYPE (arg), c2));
5787
5788	      /* sqrt(x) > y is always false, when y is very large
5789		 and we don't care about infinities.  */
5790	      return omit_one_operand (type, integer_zero_node, arg);
5791	    }
5792
5793	  /* sqrt(x) > c is the same as x > c*c.  */
5794	  return fold_build2 (code, type, arg,
5795			      build_real (TREE_TYPE (arg), c2));
5796	}
5797      else if (code == LT_EXPR || code == LE_EXPR)
5798	{
5799	  REAL_VALUE_TYPE c2;
5800
5801	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5802	  real_convert (&c2, mode, &c2);
5803
5804	  if (REAL_VALUE_ISINF (c2))
5805	    {
5806	      /* sqrt(x) < y is always true, when y is a very large
5807		 value and we don't care about NaNs or Infinities.  */
5808	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5809		return omit_one_operand (type, integer_one_node, arg);
5810
5811	      /* sqrt(x) < y is x != +Inf when y is very large and we
5812		 don't care about NaNs.  */
5813	      if (! HONOR_NANS (mode))
5814		return fold_build2 (NE_EXPR, type, arg,
5815				    build_real (TREE_TYPE (arg), c2));
5816
5817	      /* sqrt(x) < y is x >= 0 when y is very large and we
5818		 don't care about Infinities.  */
5819	      if (! HONOR_INFINITIES (mode))
5820		return fold_build2 (GE_EXPR, type, arg,
5821				    build_real (TREE_TYPE (arg), dconst0));
5822
5823	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
5824	      if (lang_hooks.decls.global_bindings_p () != 0
5825		  || CONTAINS_PLACEHOLDER_P (arg))
5826		return NULL_TREE;
5827
5828	      arg = save_expr (arg);
5829	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
5830				  fold_build2 (GE_EXPR, type, arg,
5831					       build_real (TREE_TYPE (arg),
5832							   dconst0)),
5833				  fold_build2 (NE_EXPR, type, arg,
5834					       build_real (TREE_TYPE (arg),
5835							   c2)));
5836	    }
5837
5838	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
5839	  if (! HONOR_NANS (mode))
5840	    return fold_build2 (code, type, arg,
5841				build_real (TREE_TYPE (arg), c2));
5842
5843	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
5844	  if (lang_hooks.decls.global_bindings_p () == 0
5845	      && ! CONTAINS_PLACEHOLDER_P (arg))
5846	    {
5847	      arg = save_expr (arg);
5848	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
5849				  fold_build2 (GE_EXPR, type, arg,
5850					       build_real (TREE_TYPE (arg),
5851							   dconst0)),
5852				  fold_build2 (code, type, arg,
5853					       build_real (TREE_TYPE (arg),
5854							   c2)));
5855	    }
5856	}
5857    }
5858
5859  return NULL_TREE;
5860}
5861
5862/* Subroutine of fold() that optimizes comparisons against Infinities,
5863   either +Inf or -Inf.
5864
5865   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5866   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
5867   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
5868
5869   The function returns the constant folded tree if a simplification
5870   can be made, and NULL_TREE otherwise.  */
5871
5872static tree
5873fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5874{
5875  enum machine_mode mode;
5876  REAL_VALUE_TYPE max;
5877  tree temp;
5878  bool neg;
5879
5880  mode = TYPE_MODE (TREE_TYPE (arg0));
5881
5882  /* For negative infinity swap the sense of the comparison.  */
5883  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5884  if (neg)
5885    code = swap_tree_comparison (code);
5886
5887  switch (code)
5888    {
5889    case GT_EXPR:
5890      /* x > +Inf is always false, if with ignore sNANs.  */
5891      if (HONOR_SNANS (mode))
5892        return NULL_TREE;
5893      return omit_one_operand (type, integer_zero_node, arg0);
5894
5895    case LE_EXPR:
5896      /* x <= +Inf is always true, if we don't case about NaNs.  */
5897      if (! HONOR_NANS (mode))
5898	return omit_one_operand (type, integer_one_node, arg0);
5899
5900      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
5901      if (lang_hooks.decls.global_bindings_p () == 0
5902	  && ! CONTAINS_PLACEHOLDER_P (arg0))
5903	{
5904	  arg0 = save_expr (arg0);
5905	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
5906	}
5907      break;
5908
5909    case EQ_EXPR:
5910    case GE_EXPR:
5911      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
5912      real_maxval (&max, neg, mode);
5913      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5914			  arg0, build_real (TREE_TYPE (arg0), max));
5915
5916    case LT_EXPR:
5917      /* x < +Inf is always equal to x <= DBL_MAX.  */
5918      real_maxval (&max, neg, mode);
5919      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5920			  arg0, build_real (TREE_TYPE (arg0), max));
5921
5922    case NE_EXPR:
5923      /* x != +Inf is always equal to !(x > DBL_MAX).  */
5924      real_maxval (&max, neg, mode);
5925      if (! HONOR_NANS (mode))
5926	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5927			    arg0, build_real (TREE_TYPE (arg0), max));
5928
5929      /* The transformation below creates non-gimple code and thus is
5930	 not appropriate if we are in gimple form.  */
5931      if (in_gimple_form)
5932	return NULL_TREE;
5933
5934      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5935			  arg0, build_real (TREE_TYPE (arg0), max));
5936      return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5937
5938    default:
5939      break;
5940    }
5941
5942  return NULL_TREE;
5943}
5944
5945/* Subroutine of fold() that optimizes comparisons of a division by
5946   a nonzero integer constant against an integer constant, i.e.
5947   X/C1 op C2.
5948
5949   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5950   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
5951   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
5952
5953   The function returns the constant folded tree if a simplification
5954   can be made, and NULL_TREE otherwise.  */
5955
5956static tree
5957fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5958{
5959  tree prod, tmp, hi, lo;
5960  tree arg00 = TREE_OPERAND (arg0, 0);
5961  tree arg01 = TREE_OPERAND (arg0, 1);
5962  unsigned HOST_WIDE_INT lpart;
5963  HOST_WIDE_INT hpart;
5964  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
5965  int overflow;
5966
5967  /* We have to do this the hard way to detect unsigned overflow.
5968     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
5969  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
5970				   TREE_INT_CST_HIGH (arg01),
5971				   TREE_INT_CST_LOW (arg1),
5972				   TREE_INT_CST_HIGH (arg1),
5973				   &lpart, &hpart, unsigned_p);
5974  prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5975  prod = force_fit_type (prod, -1, overflow, false);
5976
5977  if (unsigned_p)
5978    {
5979      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5980      lo = prod;
5981
5982      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
5983      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
5984				       TREE_INT_CST_HIGH (prod),
5985				       TREE_INT_CST_LOW (tmp),
5986				       TREE_INT_CST_HIGH (tmp),
5987				       &lpart, &hpart, unsigned_p);
5988      hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5989      hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5990			   TREE_CONSTANT_OVERFLOW (prod));
5991    }
5992  else if (tree_int_cst_sgn (arg01) >= 0)
5993    {
5994      tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5995      switch (tree_int_cst_sgn (arg1))
5996	{
5997	case -1:
5998	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
5999	  hi = prod;
6000	  break;
6001
6002	case  0:
6003	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6004	  hi = tmp;
6005	  break;
6006
6007	case  1:
6008          hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6009	  lo = prod;
6010	  break;
6011
6012	default:
6013	  gcc_unreachable ();
6014	}
6015    }
6016  else
6017    {
6018      /* A negative divisor reverses the relational operators.  */
6019      code = swap_tree_comparison (code);
6020
6021      tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6022      switch (tree_int_cst_sgn (arg1))
6023	{
6024	case -1:
6025	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6026	  lo = prod;
6027	  break;
6028
6029	case  0:
6030	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6031	  lo = tmp;
6032	  break;
6033
6034	case  1:
6035          lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6036	  hi = prod;
6037	  break;
6038
6039	default:
6040	  gcc_unreachable ();
6041	}
6042    }
6043
6044  switch (code)
6045    {
6046    case EQ_EXPR:
6047      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6048	return omit_one_operand (type, integer_zero_node, arg00);
6049      if (TREE_OVERFLOW (hi))
6050	return fold_build2 (GE_EXPR, type, arg00, lo);
6051      if (TREE_OVERFLOW (lo))
6052	return fold_build2 (LE_EXPR, type, arg00, hi);
6053      return build_range_check (type, arg00, 1, lo, hi);
6054
6055    case NE_EXPR:
6056      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6057	return omit_one_operand (type, integer_one_node, arg00);
6058      if (TREE_OVERFLOW (hi))
6059	return fold_build2 (LT_EXPR, type, arg00, lo);
6060      if (TREE_OVERFLOW (lo))
6061	return fold_build2 (GT_EXPR, type, arg00, hi);
6062      return build_range_check (type, arg00, 0, lo, hi);
6063
6064    case LT_EXPR:
6065      if (TREE_OVERFLOW (lo))
6066	return omit_one_operand (type, integer_zero_node, arg00);
6067      return fold_build2 (LT_EXPR, type, arg00, lo);
6068
6069    case LE_EXPR:
6070      if (TREE_OVERFLOW (hi))
6071	return omit_one_operand (type, integer_one_node, arg00);
6072      return fold_build2 (LE_EXPR, type, arg00, hi);
6073
6074    case GT_EXPR:
6075      if (TREE_OVERFLOW (hi))
6076	return omit_one_operand (type, integer_zero_node, arg00);
6077      return fold_build2 (GT_EXPR, type, arg00, hi);
6078
6079    case GE_EXPR:
6080      if (TREE_OVERFLOW (lo))
6081	return omit_one_operand (type, integer_one_node, arg00);
6082      return fold_build2 (GE_EXPR, type, arg00, lo);
6083
6084    default:
6085      break;
6086    }
6087
6088  return NULL_TREE;
6089}
6090
6091
6092/* If CODE with arguments ARG0 and ARG1 represents a single bit
6093   equality/inequality test, then return a simplified form of the test
6094   using a sign testing.  Otherwise return NULL.  TYPE is the desired
6095   result type.  */
6096
6097static tree
6098fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6099				     tree result_type)
6100{
6101  /* If this is testing a single bit, we can optimize the test.  */
6102  if ((code == NE_EXPR || code == EQ_EXPR)
6103      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6104      && integer_pow2p (TREE_OPERAND (arg0, 1)))
6105    {
6106      /* If we have (A & C) != 0 where C is the sign bit of A, convert
6107	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
6108      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6109
6110      if (arg00 != NULL_TREE
6111	  /* This is only a win if casting to a signed type is cheap,
6112	     i.e. when arg00's type is not a partial mode.  */
6113	  && TYPE_PRECISION (TREE_TYPE (arg00))
6114	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6115	{
6116	  tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6117	  return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6118			      result_type, fold_convert (stype, arg00),
6119			      fold_convert (stype, integer_zero_node));
6120	}
6121    }
6122
6123  return NULL_TREE;
6124}
6125
6126/* If CODE with arguments ARG0 and ARG1 represents a single bit
6127   equality/inequality test, then return a simplified form of
6128   the test using shifts and logical operations.  Otherwise return
6129   NULL.  TYPE is the desired result type.  */
6130
6131tree
6132fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6133		      tree result_type)
6134{
6135  /* If this is testing a single bit, we can optimize the test.  */
6136  if ((code == NE_EXPR || code == EQ_EXPR)
6137      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6138      && integer_pow2p (TREE_OPERAND (arg0, 1)))
6139    {
6140      tree inner = TREE_OPERAND (arg0, 0);
6141      tree type = TREE_TYPE (arg0);
6142      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6143      enum machine_mode operand_mode = TYPE_MODE (type);
6144      int ops_unsigned;
6145      tree signed_type, unsigned_type, intermediate_type;
6146      tree tem;
6147
6148      /* First, see if we can fold the single bit test into a sign-bit
6149	 test.  */
6150      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6151						 result_type);
6152      if (tem)
6153	return tem;
6154
6155      /* Otherwise we have (A & C) != 0 where C is a single bit,
6156	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
6157	 Similarly for (A & C) == 0.  */
6158
6159      /* If INNER is a right shift of a constant and it plus BITNUM does
6160	 not overflow, adjust BITNUM and INNER.  */
6161      if (TREE_CODE (inner) == RSHIFT_EXPR
6162	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6163	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6164	  && bitnum < TYPE_PRECISION (type)
6165	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6166				   bitnum - TYPE_PRECISION (type)))
6167	{
6168	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6169	  inner = TREE_OPERAND (inner, 0);
6170	}
6171
6172      /* If we are going to be able to omit the AND below, we must do our
6173	 operations as unsigned.  If we must use the AND, we have a choice.
6174	 Normally unsigned is faster, but for some machines signed is.  */
6175#ifdef LOAD_EXTEND_OP
6176      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6177		      && !flag_syntax_only) ? 0 : 1;
6178#else
6179      ops_unsigned = 1;
6180#endif
6181
6182      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6183      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6184      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6185      inner = fold_convert (intermediate_type, inner);
6186
6187      if (bitnum != 0)
6188	inner = build2 (RSHIFT_EXPR, intermediate_type,
6189			inner, size_int (bitnum));
6190
6191      if (code == EQ_EXPR)
6192	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6193			     inner, integer_one_node);
6194
6195      /* Put the AND last so it can combine with more things.  */
6196      inner = build2 (BIT_AND_EXPR, intermediate_type,
6197		      inner, integer_one_node);
6198
6199      /* Make sure to return the proper type.  */
6200      inner = fold_convert (result_type, inner);
6201
6202      return inner;
6203    }
6204  return NULL_TREE;
6205}
6206
6207/* Check whether we are allowed to reorder operands arg0 and arg1,
6208   such that the evaluation of arg1 occurs before arg0.  */
6209
6210static bool
6211reorder_operands_p (tree arg0, tree arg1)
6212{
6213  if (! flag_evaluation_order)
6214      return true;
6215  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6216    return true;
6217  return ! TREE_SIDE_EFFECTS (arg0)
6218	 && ! TREE_SIDE_EFFECTS (arg1);
6219}
6220
6221/* Test whether it is preferable two swap two operands, ARG0 and
6222   ARG1, for example because ARG0 is an integer constant and ARG1
6223   isn't.  If REORDER is true, only recommend swapping if we can
6224   evaluate the operands in reverse order.  */
6225
6226bool
6227tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6228{
6229  STRIP_SIGN_NOPS (arg0);
6230  STRIP_SIGN_NOPS (arg1);
6231
6232  if (TREE_CODE (arg1) == INTEGER_CST)
6233    return 0;
6234  if (TREE_CODE (arg0) == INTEGER_CST)
6235    return 1;
6236
6237  if (TREE_CODE (arg1) == REAL_CST)
6238    return 0;
6239  if (TREE_CODE (arg0) == REAL_CST)
6240    return 1;
6241
6242  if (TREE_CODE (arg1) == COMPLEX_CST)
6243    return 0;
6244  if (TREE_CODE (arg0) == COMPLEX_CST)
6245    return 1;
6246
6247  if (TREE_CONSTANT (arg1))
6248    return 0;
6249  if (TREE_CONSTANT (arg0))
6250    return 1;
6251
6252  if (optimize_size)
6253    return 0;
6254
6255  if (reorder && flag_evaluation_order
6256      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6257    return 0;
6258
6259  if (DECL_P (arg1))
6260    return 0;
6261  if (DECL_P (arg0))
6262    return 1;
6263
6264  /* It is preferable to swap two SSA_NAME to ensure a canonical form
6265     for commutative and comparison operators.  Ensuring a canonical
6266     form allows the optimizers to find additional redundancies without
6267     having to explicitly check for both orderings.  */
6268  if (TREE_CODE (arg0) == SSA_NAME
6269      && TREE_CODE (arg1) == SSA_NAME
6270      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6271    return 1;
6272
6273  return 0;
6274}
6275
6276/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6277   ARG0 is extended to a wider type.  */
6278
6279static tree
6280fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6281{
6282  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6283  tree arg1_unw;
6284  tree shorter_type, outer_type;
6285  tree min, max;
6286  bool above, below;
6287
6288  if (arg0_unw == arg0)
6289    return NULL_TREE;
6290  shorter_type = TREE_TYPE (arg0_unw);
6291
6292#ifdef HAVE_canonicalize_funcptr_for_compare
6293  /* Disable this optimization if we're casting a function pointer
6294     type on targets that require function pointer canonicalization.  */
6295  if (HAVE_canonicalize_funcptr_for_compare
6296      && TREE_CODE (shorter_type) == POINTER_TYPE
6297      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6298    return NULL_TREE;
6299#endif
6300
6301  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6302    return NULL_TREE;
6303
6304  arg1_unw = get_unwidened (arg1, NULL_TREE);
6305
6306  /* If possible, express the comparison in the shorter mode.  */
6307  if ((code == EQ_EXPR || code == NE_EXPR
6308       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6309      && (TREE_TYPE (arg1_unw) == shorter_type
6310	  || (TYPE_PRECISION (shorter_type)
6311	      > TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6312	  || ((TYPE_PRECISION (shorter_type)
6313	       == TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6314	      && (TYPE_UNSIGNED (shorter_type)
6315		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6316	  || (TREE_CODE (arg1_unw) == INTEGER_CST
6317	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
6318		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6319	      && int_fits_type_p (arg1_unw, shorter_type))))
6320    return fold_build2 (code, type, arg0_unw,
6321		       fold_convert (shorter_type, arg1_unw));
6322
6323  if (TREE_CODE (arg1_unw) != INTEGER_CST
6324      || TREE_CODE (shorter_type) != INTEGER_TYPE
6325      || !int_fits_type_p (arg1_unw, shorter_type))
6326    return NULL_TREE;
6327
6328  /* If we are comparing with the integer that does not fit into the range
6329     of the shorter type, the result is known.  */
6330  outer_type = TREE_TYPE (arg1_unw);
6331  min = lower_bound_in_type (outer_type, shorter_type);
6332  max = upper_bound_in_type (outer_type, shorter_type);
6333
6334  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6335						   max, arg1_unw));
6336  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6337						   arg1_unw, min));
6338
6339  switch (code)
6340    {
6341    case EQ_EXPR:
6342      if (above || below)
6343	return omit_one_operand (type, integer_zero_node, arg0);
6344      break;
6345
6346    case NE_EXPR:
6347      if (above || below)
6348	return omit_one_operand (type, integer_one_node, arg0);
6349      break;
6350
6351    case LT_EXPR:
6352    case LE_EXPR:
6353      if (above)
6354	return omit_one_operand (type, integer_one_node, arg0);
6355      else if (below)
6356	return omit_one_operand (type, integer_zero_node, arg0);
6357
6358    case GT_EXPR:
6359    case GE_EXPR:
6360      if (above)
6361	return omit_one_operand (type, integer_zero_node, arg0);
6362      else if (below)
6363	return omit_one_operand (type, integer_one_node, arg0);
6364
6365    default:
6366      break;
6367    }
6368
6369  return NULL_TREE;
6370}
6371
6372/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6373   ARG0 just the signedness is changed.  */
6374
6375static tree
6376fold_sign_changed_comparison (enum tree_code code, tree type,
6377			      tree arg0, tree arg1)
6378{
6379  tree arg0_inner, tmp;
6380  tree inner_type, outer_type;
6381
6382  if (TREE_CODE (arg0) != NOP_EXPR
6383      && TREE_CODE (arg0) != CONVERT_EXPR)
6384    return NULL_TREE;
6385
6386  outer_type = TREE_TYPE (arg0);
6387  arg0_inner = TREE_OPERAND (arg0, 0);
6388  inner_type = TREE_TYPE (arg0_inner);
6389
6390#ifdef HAVE_canonicalize_funcptr_for_compare
6391  /* Disable this optimization if we're casting a function pointer
6392     type on targets that require function pointer canonicalization.  */
6393  if (HAVE_canonicalize_funcptr_for_compare
6394      && TREE_CODE (inner_type) == POINTER_TYPE
6395      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6396    return NULL_TREE;
6397#endif
6398
6399  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6400    return NULL_TREE;
6401
6402  if (TREE_CODE (arg1) != INTEGER_CST
6403      && !((TREE_CODE (arg1) == NOP_EXPR
6404	    || TREE_CODE (arg1) == CONVERT_EXPR)
6405	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6406    return NULL_TREE;
6407
6408  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6409      && code != NE_EXPR
6410      && code != EQ_EXPR)
6411    return NULL_TREE;
6412
6413  if (TREE_CODE (arg1) == INTEGER_CST)
6414    {
6415      tmp = build_int_cst_wide (inner_type,
6416				TREE_INT_CST_LOW (arg1),
6417				TREE_INT_CST_HIGH (arg1));
6418      arg1 = force_fit_type (tmp, 0,
6419			     TREE_OVERFLOW (arg1),
6420			     TREE_CONSTANT_OVERFLOW (arg1));
6421    }
6422  else
6423    arg1 = fold_convert (inner_type, arg1);
6424
6425  return fold_build2 (code, type, arg0_inner, arg1);
6426}
6427
6428/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6429   step of the array.  Reconstructs s and delta in the case of s * delta
6430   being an integer constant (and thus already folded).
6431   ADDR is the address. MULT is the multiplicative expression.
6432   If the function succeeds, the new address expression is returned.  Otherwise
6433   NULL_TREE is returned.  */
6434
6435static tree
6436try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6437{
6438  tree s, delta, step;
6439  tree ref = TREE_OPERAND (addr, 0), pref;
6440  tree ret, pos;
6441  tree itype;
6442
6443  /* Canonicalize op1 into a possibly non-constant delta
6444     and an INTEGER_CST s.  */
6445  if (TREE_CODE (op1) == MULT_EXPR)
6446    {
6447      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6448
6449      STRIP_NOPS (arg0);
6450      STRIP_NOPS (arg1);
6451
6452      if (TREE_CODE (arg0) == INTEGER_CST)
6453        {
6454          s = arg0;
6455          delta = arg1;
6456        }
6457      else if (TREE_CODE (arg1) == INTEGER_CST)
6458        {
6459          s = arg1;
6460          delta = arg0;
6461        }
6462      else
6463        return NULL_TREE;
6464    }
6465  else if (TREE_CODE (op1) == INTEGER_CST)
6466    {
6467      delta = op1;
6468      s = NULL_TREE;
6469    }
6470  else
6471    {
6472      /* Simulate we are delta * 1.  */
6473      delta = op1;
6474      s = integer_one_node;
6475    }
6476
6477  for (;; ref = TREE_OPERAND (ref, 0))
6478    {
6479      if (TREE_CODE (ref) == ARRAY_REF)
6480	{
6481	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6482	  if (! itype)
6483	    continue;
6484
6485	  step = array_ref_element_size (ref);
6486	  if (TREE_CODE (step) != INTEGER_CST)
6487	    continue;
6488
6489	  if (s)
6490	    {
6491	      if (! tree_int_cst_equal (step, s))
6492                continue;
6493	    }
6494	  else
6495	    {
6496	      /* Try if delta is a multiple of step.  */
6497	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6498	      if (! tmp)
6499		continue;
6500	      delta = tmp;
6501	    }
6502
6503	  break;
6504	}
6505
6506      if (!handled_component_p (ref))
6507	return NULL_TREE;
6508    }
6509
6510  /* We found the suitable array reference.  So copy everything up to it,
6511     and replace the index.  */
6512
6513  pref = TREE_OPERAND (addr, 0);
6514  ret = copy_node (pref);
6515  pos = ret;
6516
6517  while (pref != ref)
6518    {
6519      pref = TREE_OPERAND (pref, 0);
6520      TREE_OPERAND (pos, 0) = copy_node (pref);
6521      pos = TREE_OPERAND (pos, 0);
6522    }
6523
6524  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6525				       fold_convert (itype,
6526						     TREE_OPERAND (pos, 1)),
6527				       fold_convert (itype, delta));
6528
6529  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6530}
6531
6532
6533/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
6534   means A >= Y && A != MAX, but in this case we know that
6535   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */
6536
6537static tree
6538fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6539{
6540  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6541
6542  if (TREE_CODE (bound) == LT_EXPR)
6543    a = TREE_OPERAND (bound, 0);
6544  else if (TREE_CODE (bound) == GT_EXPR)
6545    a = TREE_OPERAND (bound, 1);
6546  else
6547    return NULL_TREE;
6548
6549  typea = TREE_TYPE (a);
6550  if (!INTEGRAL_TYPE_P (typea)
6551      && !POINTER_TYPE_P (typea))
6552    return NULL_TREE;
6553
6554  if (TREE_CODE (ineq) == LT_EXPR)
6555    {
6556      a1 = TREE_OPERAND (ineq, 1);
6557      y = TREE_OPERAND (ineq, 0);
6558    }
6559  else if (TREE_CODE (ineq) == GT_EXPR)
6560    {
6561      a1 = TREE_OPERAND (ineq, 0);
6562      y = TREE_OPERAND (ineq, 1);
6563    }
6564  else
6565    return NULL_TREE;
6566
6567  if (TREE_TYPE (a1) != typea)
6568    return NULL_TREE;
6569
6570  diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6571  if (!integer_onep (diff))
6572    return NULL_TREE;
6573
6574  return fold_build2 (GE_EXPR, type, a, y);
6575}
6576
6577/* Fold a unary expression of code CODE and type TYPE with operand
6578   OP0.  Return the folded expression if folding is successful.
6579   Otherwise, return NULL_TREE.  */
6580
6581tree
6582fold_unary (enum tree_code code, tree type, tree op0)
6583{
6584  tree tem;
6585  tree arg0;
6586  enum tree_code_class kind = TREE_CODE_CLASS (code);
6587
6588  gcc_assert (IS_EXPR_CODE_CLASS (kind)
6589	      && TREE_CODE_LENGTH (code) == 1);
6590
6591  arg0 = op0;
6592  if (arg0)
6593    {
6594      if (code == NOP_EXPR || code == CONVERT_EXPR
6595	  || code == FLOAT_EXPR || code == ABS_EXPR)
6596	{
6597	  /* Don't use STRIP_NOPS, because signedness of argument type
6598	     matters.  */
6599	  STRIP_SIGN_NOPS (arg0);
6600	}
6601      else
6602	{
6603	  /* Strip any conversions that don't change the mode.  This
6604	     is safe for every expression, except for a comparison
6605	     expression because its signedness is derived from its
6606	     operands.
6607
6608	     Note that this is done as an internal manipulation within
6609	     the constant folder, in order to find the simplest
6610	     representation of the arguments so that their form can be
6611	     studied.  In any cases, the appropriate type conversions
6612	     should be put back in the tree that will get out of the
6613	     constant folder.  */
6614	  STRIP_NOPS (arg0);
6615	}
6616    }
6617
6618  if (TREE_CODE_CLASS (code) == tcc_unary)
6619    {
6620      if (TREE_CODE (arg0) == COMPOUND_EXPR)
6621	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6622		       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6623      else if (TREE_CODE (arg0) == COND_EXPR)
6624	{
6625	  tree arg01 = TREE_OPERAND (arg0, 1);
6626	  tree arg02 = TREE_OPERAND (arg0, 2);
6627	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6628	    arg01 = fold_build1 (code, type, arg01);
6629	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6630	    arg02 = fold_build1 (code, type, arg02);
6631	  tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6632			     arg01, arg02);
6633
6634	  /* If this was a conversion, and all we did was to move into
6635	     inside the COND_EXPR, bring it back out.  But leave it if
6636	     it is a conversion from integer to integer and the
6637	     result precision is no wider than a word since such a
6638	     conversion is cheap and may be optimized away by combine,
6639	     while it couldn't if it were outside the COND_EXPR.  Then return
6640	     so we don't get into an infinite recursion loop taking the
6641	     conversion out and then back in.  */
6642
6643	  if ((code == NOP_EXPR || code == CONVERT_EXPR
6644	       || code == NON_LVALUE_EXPR)
6645	      && TREE_CODE (tem) == COND_EXPR
6646	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6647	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6648	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6649	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6650	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6651		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6652	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6653		     && (INTEGRAL_TYPE_P
6654			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6655		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6656		  || flag_syntax_only))
6657	    tem = build1 (code, type,
6658			  build3 (COND_EXPR,
6659				  TREE_TYPE (TREE_OPERAND
6660					     (TREE_OPERAND (tem, 1), 0)),
6661				  TREE_OPERAND (tem, 0),
6662				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6663				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6664	  return tem;
6665	}
6666      else if (COMPARISON_CLASS_P (arg0))
6667	{
6668	  if (TREE_CODE (type) == BOOLEAN_TYPE)
6669	    {
6670	      arg0 = copy_node (arg0);
6671	      TREE_TYPE (arg0) = type;
6672	      return arg0;
6673	    }
6674	  else if (TREE_CODE (type) != INTEGER_TYPE)
6675	    return fold_build3 (COND_EXPR, type, arg0,
6676				fold_build1 (code, type,
6677					     integer_one_node),
6678				fold_build1 (code, type,
6679					     integer_zero_node));
6680	}
6681   }
6682
6683  switch (code)
6684    {
6685    case NOP_EXPR:
6686    case FLOAT_EXPR:
6687    case CONVERT_EXPR:
6688    case FIX_TRUNC_EXPR:
6689    case FIX_CEIL_EXPR:
6690    case FIX_FLOOR_EXPR:
6691    case FIX_ROUND_EXPR:
6692      if (TREE_TYPE (op0) == type)
6693	return op0;
6694
6695      /* Handle cases of two conversions in a row.  */
6696      if (TREE_CODE (op0) == NOP_EXPR
6697	  || TREE_CODE (op0) == CONVERT_EXPR)
6698	{
6699	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6700	  tree inter_type = TREE_TYPE (op0);
6701	  int inside_int = INTEGRAL_TYPE_P (inside_type);
6702	  int inside_ptr = POINTER_TYPE_P (inside_type);
6703	  int inside_float = FLOAT_TYPE_P (inside_type);
6704	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6705	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
6706	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6707	  int inter_int = INTEGRAL_TYPE_P (inter_type);
6708	  int inter_ptr = POINTER_TYPE_P (inter_type);
6709	  int inter_float = FLOAT_TYPE_P (inter_type);
6710	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6711	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
6712	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6713	  int final_int = INTEGRAL_TYPE_P (type);
6714	  int final_ptr = POINTER_TYPE_P (type);
6715	  int final_float = FLOAT_TYPE_P (type);
6716	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6717	  unsigned int final_prec = TYPE_PRECISION (type);
6718	  int final_unsignedp = TYPE_UNSIGNED (type);
6719
6720	  /* In addition to the cases of two conversions in a row
6721	     handled below, if we are converting something to its own
6722	     type via an object of identical or wider precision, neither
6723	     conversion is needed.  */
6724	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6725	      && ((inter_int && final_int) || (inter_float && final_float))
6726	      && inter_prec >= final_prec)
6727	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6728
6729	  /* Likewise, if the intermediate and final types are either both
6730	     float or both integer, we don't need the middle conversion if
6731	     it is wider than the final type and doesn't change the signedness
6732	     (for integers).  Avoid this if the final type is a pointer
6733	     since then we sometimes need the inner conversion.  Likewise if
6734	     the outer has a precision not equal to the size of its mode.  */
6735	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6736	       || (inter_float && inside_float)
6737	       || (inter_vec && inside_vec))
6738	      && inter_prec >= inside_prec
6739	      && (inter_float || inter_vec
6740		  || inter_unsignedp == inside_unsignedp)
6741	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6742		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
6743	      && ! final_ptr
6744	      && (! final_vec || inter_prec == inside_prec))
6745	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6746
6747	  /* If we have a sign-extension of a zero-extended value, we can
6748	     replace that by a single zero-extension.  */
6749	  if (inside_int && inter_int && final_int
6750	      && inside_prec < inter_prec && inter_prec < final_prec
6751	      && inside_unsignedp && !inter_unsignedp)
6752	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6753
6754	  /* Two conversions in a row are not needed unless:
6755	     - some conversion is floating-point (overstrict for now), or
6756	     - some conversion is a vector (overstrict for now), or
6757	     - the intermediate type is narrower than both initial and
6758	       final, or
6759	     - the intermediate type and innermost type differ in signedness,
6760	       and the outermost type is wider than the intermediate, or
6761	     - the initial type is a pointer type and the precisions of the
6762	       intermediate and final types differ, or
6763	     - the final type is a pointer type and the precisions of the
6764	       initial and intermediate types differ.  */
6765	  if (! inside_float && ! inter_float && ! final_float
6766	      && ! inside_vec && ! inter_vec && ! final_vec
6767	      && (inter_prec > inside_prec || inter_prec > final_prec)
6768	      && ! (inside_int && inter_int
6769		    && inter_unsignedp != inside_unsignedp
6770		    && inter_prec < final_prec)
6771	      && ((inter_unsignedp && inter_prec > inside_prec)
6772		  == (final_unsignedp && final_prec > inter_prec))
6773	      && ! (inside_ptr && inter_prec != final_prec)
6774	      && ! (final_ptr && inside_prec != inter_prec)
6775	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6776		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
6777	      && ! final_ptr)
6778	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6779	}
6780
6781      /* Handle (T *)&A.B.C for A being of type T and B and C
6782	 living at offset zero.  This occurs frequently in
6783	 C++ upcasting and then accessing the base.  */
6784      if (TREE_CODE (op0) == ADDR_EXPR
6785	  && POINTER_TYPE_P (type)
6786	  && handled_component_p (TREE_OPERAND (op0, 0)))
6787        {
6788	  HOST_WIDE_INT bitsize, bitpos;
6789	  tree offset;
6790	  enum machine_mode mode;
6791	  int unsignedp, volatilep;
6792          tree base = TREE_OPERAND (op0, 0);
6793	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6794				      &mode, &unsignedp, &volatilep, false);
6795	  /* If the reference was to a (constant) zero offset, we can use
6796	     the address of the base if it has the same base type
6797	     as the result type.  */
6798	  if (! offset && bitpos == 0
6799	      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6800		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6801	    return fold_convert (type, build_fold_addr_expr (base));
6802        }
6803
6804      if (TREE_CODE (op0) == MODIFY_EXPR
6805	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6806	  /* Detect assigning a bitfield.  */
6807	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6808	       && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6809	{
6810	  /* Don't leave an assignment inside a conversion
6811	     unless assigning a bitfield.  */
6812	  tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6813	  /* First do the assignment, then return converted constant.  */
6814	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6815	  TREE_NO_WARNING (tem) = 1;
6816	  TREE_USED (tem) = 1;
6817	  return tem;
6818	}
6819
6820      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6821	 constants (if x has signed type, the sign bit cannot be set
6822	 in c).  This folds extension into the BIT_AND_EXPR.  */
6823      if (INTEGRAL_TYPE_P (type)
6824	  && TREE_CODE (type) != BOOLEAN_TYPE
6825	  && TREE_CODE (op0) == BIT_AND_EXPR
6826	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6827	{
6828	  tree and = op0;
6829	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6830	  int change = 0;
6831
6832	  if (TYPE_UNSIGNED (TREE_TYPE (and))
6833	      || (TYPE_PRECISION (type)
6834		  <= TYPE_PRECISION (TREE_TYPE (and))))
6835	    change = 1;
6836	  else if (TYPE_PRECISION (TREE_TYPE (and1))
6837		   <= HOST_BITS_PER_WIDE_INT
6838		   && host_integerp (and1, 1))
6839	    {
6840	      unsigned HOST_WIDE_INT cst;
6841
6842	      cst = tree_low_cst (and1, 1);
6843	      cst &= (HOST_WIDE_INT) -1
6844		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6845	      change = (cst == 0);
6846#ifdef LOAD_EXTEND_OP
6847	      if (change
6848		  && !flag_syntax_only
6849		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6850		      == ZERO_EXTEND))
6851		{
6852		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6853		  and0 = fold_convert (uns, and0);
6854		  and1 = fold_convert (uns, and1);
6855		}
6856#endif
6857	    }
6858	  if (change)
6859	    {
6860	      tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6861					TREE_INT_CST_HIGH (and1));
6862	      tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6863				    TREE_CONSTANT_OVERFLOW (and1));
6864	      return fold_build2 (BIT_AND_EXPR, type,
6865				  fold_convert (type, and0), tem);
6866	    }
6867	}
6868
6869      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6870	 T2 being pointers to types of the same size.  */
6871      if (POINTER_TYPE_P (type)
6872	  && BINARY_CLASS_P (arg0)
6873	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6874	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6875	{
6876	  tree arg00 = TREE_OPERAND (arg0, 0);
6877	  tree t0 = type;
6878	  tree t1 = TREE_TYPE (arg00);
6879	  tree tt0 = TREE_TYPE (t0);
6880	  tree tt1 = TREE_TYPE (t1);
6881	  tree s0 = TYPE_SIZE (tt0);
6882	  tree s1 = TYPE_SIZE (tt1);
6883
6884	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6885	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6886			   TREE_OPERAND (arg0, 1));
6887	}
6888
6889      tem = fold_convert_const (code, type, arg0);
6890      return tem ? tem : NULL_TREE;
6891
6892    case VIEW_CONVERT_EXPR:
6893      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6894	return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6895      return NULL_TREE;
6896
6897    case NEGATE_EXPR:
6898      if (negate_expr_p (arg0))
6899	return fold_convert (type, negate_expr (arg0));
6900      /* Convert - (~A) to A + 1.  */
6901      if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
6902	return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
6903			    build_int_cst (type, 1));
6904      return NULL_TREE;
6905
6906    case ABS_EXPR:
6907      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
6908	return fold_abs_const (arg0, type);
6909      else if (TREE_CODE (arg0) == NEGATE_EXPR)
6910	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
6911      /* Convert fabs((double)float) into (double)fabsf(float).  */
6912      else if (TREE_CODE (arg0) == NOP_EXPR
6913	       && TREE_CODE (type) == REAL_TYPE)
6914	{
6915	  tree targ0 = strip_float_extensions (arg0);
6916	  if (targ0 != arg0)
6917	    return fold_convert (type, fold_build1 (ABS_EXPR,
6918						    TREE_TYPE (targ0),
6919						    targ0));
6920	}
6921      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
6922      else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
6923	return arg0;
6924
6925      /* Strip sign ops from argument.  */
6926      if (TREE_CODE (type) == REAL_TYPE)
6927	{
6928	  tem = fold_strip_sign_ops (arg0);
6929	  if (tem)
6930	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
6931	}
6932      return NULL_TREE;
6933
6934    case CONJ_EXPR:
6935      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
6936	return fold_convert (type, arg0);
6937      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
6938	return build2 (COMPLEX_EXPR, type,
6939		       TREE_OPERAND (arg0, 0),
6940		       negate_expr (TREE_OPERAND (arg0, 1)));
6941      else if (TREE_CODE (arg0) == COMPLEX_CST)
6942	return build_complex (type, TREE_REALPART (arg0),
6943			      negate_expr (TREE_IMAGPART (arg0)));
6944      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
6945	return fold_build2 (TREE_CODE (arg0), type,
6946			    fold_build1 (CONJ_EXPR, type,
6947					 TREE_OPERAND (arg0, 0)),
6948			    fold_build1 (CONJ_EXPR, type,
6949					 TREE_OPERAND (arg0, 1)));
6950      else if (TREE_CODE (arg0) == CONJ_EXPR)
6951	return TREE_OPERAND (arg0, 0);
6952      return NULL_TREE;
6953
6954    case BIT_NOT_EXPR:
6955      if (TREE_CODE (arg0) == INTEGER_CST)
6956        return fold_not_const (arg0, type);
6957      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
6958	return TREE_OPERAND (arg0, 0);
6959      /* Convert ~ (-A) to A - 1.  */
6960      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
6961	return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
6962			    build_int_cst (type, 1));
6963      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
6964      else if (INTEGRAL_TYPE_P (type)
6965	       && ((TREE_CODE (arg0) == MINUS_EXPR
6966		    && integer_onep (TREE_OPERAND (arg0, 1)))
6967		   || (TREE_CODE (arg0) == PLUS_EXPR
6968		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
6969	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
6970      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
6971      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6972	       && (tem = fold_unary (BIT_NOT_EXPR, type,
6973			       	     fold_convert (type,
6974					     	   TREE_OPERAND (arg0, 0)))))
6975	return fold_build2 (BIT_XOR_EXPR, type, tem,
6976			    fold_convert (type, TREE_OPERAND (arg0, 1)));
6977      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
6978	       && (tem = fold_unary (BIT_NOT_EXPR, type,
6979			       	     fold_convert (type,
6980					     	   TREE_OPERAND (arg0, 1)))))
6981	return fold_build2 (BIT_XOR_EXPR, type,
6982			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
6983
6984      return NULL_TREE;
6985
6986    case TRUTH_NOT_EXPR:
6987      /* The argument to invert_truthvalue must have Boolean type.  */
6988      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
6989          arg0 = fold_convert (boolean_type_node, arg0);
6990
6991      /* Note that the operand of this must be an int
6992	 and its values must be 0 or 1.
6993	 ("true" is a fixed value perhaps depending on the language,
6994	 but we don't handle values other than 1 correctly yet.)  */
6995      tem = invert_truthvalue (arg0);
6996      /* Avoid infinite recursion.  */
6997      if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6998	return NULL_TREE;
6999      return fold_convert (type, tem);
7000
7001    case REALPART_EXPR:
7002      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7003	return NULL_TREE;
7004      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7005	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7006				 TREE_OPERAND (arg0, 1));
7007      else if (TREE_CODE (arg0) == COMPLEX_CST)
7008	return TREE_REALPART (arg0);
7009      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7010	return fold_build2 (TREE_CODE (arg0), type,
7011			    fold_build1 (REALPART_EXPR, type,
7012					 TREE_OPERAND (arg0, 0)),
7013			    fold_build1 (REALPART_EXPR, type,
7014					 TREE_OPERAND (arg0, 1)));
7015      return NULL_TREE;
7016
7017    case IMAGPART_EXPR:
7018      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7019	return fold_convert (type, integer_zero_node);
7020      else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7021	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7022				 TREE_OPERAND (arg0, 0));
7023      else if (TREE_CODE (arg0) == COMPLEX_CST)
7024	return TREE_IMAGPART (arg0);
7025      else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7026	return fold_build2 (TREE_CODE (arg0), type,
7027			    fold_build1 (IMAGPART_EXPR, type,
7028					 TREE_OPERAND (arg0, 0)),
7029			    fold_build1 (IMAGPART_EXPR, type,
7030					 TREE_OPERAND (arg0, 1)));
7031      return NULL_TREE;
7032
7033    default:
7034      return NULL_TREE;
7035    } /* switch (code) */
7036}
7037
7038/* Fold a binary expression of code CODE and type TYPE with operands
7039   OP0 and OP1.  Return the folded expression if folding is
7040   successful.  Otherwise, return NULL_TREE.  */
7041
7042tree
7043fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7044{
7045  tree t1 = NULL_TREE;
7046  tree tem;
7047  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7048  enum tree_code_class kind = TREE_CODE_CLASS (code);
7049
7050  /* WINS will be nonzero when the switch is done
7051     if all operands are constant.  */
7052  int wins = 1;
7053
7054  gcc_assert (IS_EXPR_CODE_CLASS (kind)
7055	      && TREE_CODE_LENGTH (code) == 2);
7056
7057  arg0 = op0;
7058  arg1 = op1;
7059
7060  if (arg0)
7061    {
7062      tree subop;
7063
7064      /* Strip any conversions that don't change the mode.  This is
7065	 safe for every expression, except for a comparison expression
7066	 because its signedness is derived from its operands.  So, in
7067	 the latter case, only strip conversions that don't change the
7068	 signedness.
7069
7070	 Note that this is done as an internal manipulation within the
7071	 constant folder, in order to find the simplest representation
7072	 of the arguments so that their form can be studied.  In any
7073	 cases, the appropriate type conversions should be put back in
7074	 the tree that will get out of the constant folder.  */
7075      if (kind == tcc_comparison)
7076	STRIP_SIGN_NOPS (arg0);
7077      else
7078	STRIP_NOPS (arg0);
7079
7080      if (TREE_CODE (arg0) == COMPLEX_CST)
7081	subop = TREE_REALPART (arg0);
7082      else
7083	subop = arg0;
7084
7085      if (TREE_CODE (subop) != INTEGER_CST
7086	  && TREE_CODE (subop) != REAL_CST)
7087	/* Note that TREE_CONSTANT isn't enough:
7088	   static var addresses are constant but we can't
7089	   do arithmetic on them.  */
7090	wins = 0;
7091    }
7092
7093  if (arg1)
7094    {
7095      tree subop;
7096
7097      /* Strip any conversions that don't change the mode.  This is
7098	 safe for every expression, except for a comparison expression
7099	 because its signedness is derived from its operands.  So, in
7100	 the latter case, only strip conversions that don't change the
7101	 signedness.
7102
7103	 Note that this is done as an internal manipulation within the
7104	 constant folder, in order to find the simplest representation
7105	 of the arguments so that their form can be studied.  In any
7106	 cases, the appropriate type conversions should be put back in
7107	 the tree that will get out of the constant folder.  */
7108      if (kind == tcc_comparison)
7109	STRIP_SIGN_NOPS (arg1);
7110      else
7111	STRIP_NOPS (arg1);
7112
7113      if (TREE_CODE (arg1) == COMPLEX_CST)
7114	subop = TREE_REALPART (arg1);
7115      else
7116	subop = arg1;
7117
7118      if (TREE_CODE (subop) != INTEGER_CST
7119	  && TREE_CODE (subop) != REAL_CST)
7120	/* Note that TREE_CONSTANT isn't enough:
7121	   static var addresses are constant but we can't
7122	   do arithmetic on them.  */
7123	wins = 0;
7124    }
7125
7126  /* If this is a commutative operation, and ARG0 is a constant, move it
7127     to ARG1 to reduce the number of tests below.  */
7128  if (commutative_tree_code (code)
7129      && tree_swap_operands_p (arg0, arg1, true))
7130    return fold_build2 (code, type, op1, op0);
7131
7132  /* Now WINS is set as described above,
7133     ARG0 is the first operand of EXPR,
7134     and ARG1 is the second operand (if it has more than one operand).
7135
7136     First check for cases where an arithmetic operation is applied to a
7137     compound, conditional, or comparison operation.  Push the arithmetic
7138     operation inside the compound or conditional to see if any folding
7139     can then be done.  Convert comparison to conditional for this purpose.
7140     The also optimizes non-constant cases that used to be done in
7141     expand_expr.
7142
7143     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7144     one of the operands is a comparison and the other is a comparison, a
7145     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
7146     code below would make the expression more complex.  Change it to a
7147     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
7148     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
7149
7150  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7151       || code == EQ_EXPR || code == NE_EXPR)
7152      && ((truth_value_p (TREE_CODE (arg0))
7153	   && (truth_value_p (TREE_CODE (arg1))
7154	       || (TREE_CODE (arg1) == BIT_AND_EXPR
7155		   && integer_onep (TREE_OPERAND (arg1, 1)))))
7156	  || (truth_value_p (TREE_CODE (arg1))
7157	      && (truth_value_p (TREE_CODE (arg0))
7158		  || (TREE_CODE (arg0) == BIT_AND_EXPR
7159		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
7160    {
7161      tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7162			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7163			 : TRUTH_XOR_EXPR,
7164			 boolean_type_node,
7165			 fold_convert (boolean_type_node, arg0),
7166			 fold_convert (boolean_type_node, arg1));
7167
7168      if (code == EQ_EXPR)
7169	tem = invert_truthvalue (tem);
7170
7171      return fold_convert (type, tem);
7172    }
7173
7174  if (TREE_CODE_CLASS (code) == tcc_binary
7175      || TREE_CODE_CLASS (code) == tcc_comparison)
7176    {
7177      if (TREE_CODE (arg0) == COMPOUND_EXPR)
7178	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7179		       fold_build2 (code, type,
7180				    TREE_OPERAND (arg0, 1), op1));
7181      if (TREE_CODE (arg1) == COMPOUND_EXPR
7182	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7183	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7184		       fold_build2 (code, type,
7185				    op0, TREE_OPERAND (arg1, 1)));
7186
7187      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7188	{
7189	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7190						     arg0, arg1,
7191						     /*cond_first_p=*/1);
7192	  if (tem != NULL_TREE)
7193	    return tem;
7194	}
7195
7196      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7197	{
7198	  tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7199						     arg1, arg0,
7200					             /*cond_first_p=*/0);
7201	  if (tem != NULL_TREE)
7202	    return tem;
7203	}
7204    }
7205
7206  switch (code)
7207    {
7208    case PLUS_EXPR:
7209      /* A + (-B) -> A - B */
7210      if (TREE_CODE (arg1) == NEGATE_EXPR)
7211	return fold_build2 (MINUS_EXPR, type,
7212			    fold_convert (type, arg0),
7213			    fold_convert (type, TREE_OPERAND (arg1, 0)));
7214      /* (-A) + B -> B - A */
7215      if (TREE_CODE (arg0) == NEGATE_EXPR
7216	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7217	return fold_build2 (MINUS_EXPR, type,
7218			    fold_convert (type, arg1),
7219			    fold_convert (type, TREE_OPERAND (arg0, 0)));
7220      /* Convert ~A + 1 to -A.  */
7221      if (INTEGRAL_TYPE_P (type)
7222	  && TREE_CODE (arg0) == BIT_NOT_EXPR
7223	  && integer_onep (arg1))
7224	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7225
7226      if (! FLOAT_TYPE_P (type))
7227	{
7228	  if (integer_zerop (arg1))
7229	    return non_lvalue (fold_convert (type, arg0));
7230
7231	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7232	     with a constant, and the two constants have no bits in common,
7233	     we should treat this as a BIT_IOR_EXPR since this may produce more
7234	     simplifications.  */
7235	  if (TREE_CODE (arg0) == BIT_AND_EXPR
7236	      && TREE_CODE (arg1) == BIT_AND_EXPR
7237	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7238	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7239	      && integer_zerop (const_binop (BIT_AND_EXPR,
7240					     TREE_OPERAND (arg0, 1),
7241					     TREE_OPERAND (arg1, 1), 0)))
7242	    {
7243	      code = BIT_IOR_EXPR;
7244	      goto bit_ior;
7245	    }
7246
7247	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7248	     (plus (plus (mult) (mult)) (foo)) so that we can
7249	     take advantage of the factoring cases below.  */
7250	  if (((TREE_CODE (arg0) == PLUS_EXPR
7251		|| TREE_CODE (arg0) == MINUS_EXPR)
7252	       && TREE_CODE (arg1) == MULT_EXPR)
7253	      || ((TREE_CODE (arg1) == PLUS_EXPR
7254		   || TREE_CODE (arg1) == MINUS_EXPR)
7255		  && TREE_CODE (arg0) == MULT_EXPR))
7256	    {
7257	      tree parg0, parg1, parg, marg;
7258	      enum tree_code pcode;
7259
7260	      if (TREE_CODE (arg1) == MULT_EXPR)
7261		parg = arg0, marg = arg1;
7262	      else
7263		parg = arg1, marg = arg0;
7264	      pcode = TREE_CODE (parg);
7265	      parg0 = TREE_OPERAND (parg, 0);
7266	      parg1 = TREE_OPERAND (parg, 1);
7267	      STRIP_NOPS (parg0);
7268	      STRIP_NOPS (parg1);
7269
7270	      if (TREE_CODE (parg0) == MULT_EXPR
7271		  && TREE_CODE (parg1) != MULT_EXPR)
7272		return fold_build2 (pcode, type,
7273				    fold_build2 (PLUS_EXPR, type,
7274						 fold_convert (type, parg0),
7275						 fold_convert (type, marg)),
7276				    fold_convert (type, parg1));
7277	      if (TREE_CODE (parg0) != MULT_EXPR
7278		  && TREE_CODE (parg1) == MULT_EXPR)
7279		return fold_build2 (PLUS_EXPR, type,
7280				    fold_convert (type, parg0),
7281				    fold_build2 (pcode, type,
7282						 fold_convert (type, marg),
7283						 fold_convert (type,
7284							       parg1)));
7285	    }
7286
7287	  if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
7288	    {
7289	      tree arg00, arg01, arg10, arg11;
7290	      tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7291
7292	      /* (A * C) + (B * C) -> (A+B) * C.
7293		 We are most concerned about the case where C is a constant,
7294		 but other combinations show up during loop reduction.  Since
7295		 it is not difficult, try all four possibilities.  */
7296
7297	      arg00 = TREE_OPERAND (arg0, 0);
7298	      arg01 = TREE_OPERAND (arg0, 1);
7299	      arg10 = TREE_OPERAND (arg1, 0);
7300	      arg11 = TREE_OPERAND (arg1, 1);
7301	      same = NULL_TREE;
7302
7303	      if (operand_equal_p (arg01, arg11, 0))
7304		same = arg01, alt0 = arg00, alt1 = arg10;
7305	      else if (operand_equal_p (arg00, arg10, 0))
7306		same = arg00, alt0 = arg01, alt1 = arg11;
7307	      else if (operand_equal_p (arg00, arg11, 0))
7308		same = arg00, alt0 = arg01, alt1 = arg10;
7309	      else if (operand_equal_p (arg01, arg10, 0))
7310		same = arg01, alt0 = arg00, alt1 = arg11;
7311
7312	      /* No identical multiplicands; see if we can find a common
7313		 power-of-two factor in non-power-of-two multiplies.  This
7314		 can help in multi-dimensional array access.  */
7315	      else if (TREE_CODE (arg01) == INTEGER_CST
7316		       && TREE_CODE (arg11) == INTEGER_CST
7317		       && TREE_INT_CST_HIGH (arg01) == 0
7318		       && TREE_INT_CST_HIGH (arg11) == 0)
7319		{
7320		  HOST_WIDE_INT int01, int11, tmp;
7321		  int01 = TREE_INT_CST_LOW (arg01);
7322		  int11 = TREE_INT_CST_LOW (arg11);
7323
7324		  /* Move min of absolute values to int11.  */
7325		  if ((int01 >= 0 ? int01 : -int01)
7326		      < (int11 >= 0 ? int11 : -int11))
7327		    {
7328		      tmp = int01, int01 = int11, int11 = tmp;
7329		      alt0 = arg00, arg00 = arg10, arg10 = alt0;
7330		      alt0 = arg01, arg01 = arg11, arg11 = alt0;
7331		    }
7332
7333		  if (exact_log2 (int11) > 0 && int01 % int11 == 0)
7334		    {
7335		      alt0 = fold_build2 (MULT_EXPR, type, arg00,
7336					  build_int_cst (NULL_TREE,
7337							 int01 / int11));
7338		      alt1 = arg10;
7339		      same = arg11;
7340		    }
7341		}
7342
7343	      if (same)
7344		return fold_build2 (MULT_EXPR, type,
7345				    fold_build2 (PLUS_EXPR, type,
7346						 fold_convert (type, alt0),
7347						 fold_convert (type, alt1)),
7348				    fold_convert (type, same));
7349	    }
7350
7351	  /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
7352	     of the array.  Loop optimizer sometimes produce this type of
7353	     expressions.  */
7354	  if (TREE_CODE (arg0) == ADDR_EXPR)
7355	    {
7356	      tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7357	      if (tem)
7358		return fold_convert (type, tem);
7359	    }
7360	  else if (TREE_CODE (arg1) == ADDR_EXPR)
7361	    {
7362	      tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7363	      if (tem)
7364		return fold_convert (type, tem);
7365	    }
7366	}
7367      else
7368	{
7369	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
7370	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7371	    return non_lvalue (fold_convert (type, arg0));
7372
7373	  /* Likewise if the operands are reversed.  */
7374	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7375	    return non_lvalue (fold_convert (type, arg1));
7376
7377	  /* Convert X + -C into X - C.  */
7378	  if (TREE_CODE (arg1) == REAL_CST
7379	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7380	    {
7381	      tem = fold_negate_const (arg1, type);
7382	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7383		return fold_build2 (MINUS_EXPR, type,
7384				    fold_convert (type, arg0),
7385				    fold_convert (type, tem));
7386	    }
7387
7388          if (flag_unsafe_math_optimizations
7389	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7390	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7391	      && (tem = distribute_real_division (code, type, arg0, arg1)))
7392	    return tem;
7393
7394	  /* Convert x+x into x*2.0.  */
7395	  if (operand_equal_p (arg0, arg1, 0)
7396	      && SCALAR_FLOAT_TYPE_P (type))
7397	    return fold_build2 (MULT_EXPR, type, arg0,
7398				build_real (type, dconst2));
7399
7400	  /* Convert x*c+x into x*(c+1).  */
7401	  if (flag_unsafe_math_optimizations
7402	      && TREE_CODE (arg0) == MULT_EXPR
7403	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7404	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7405	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
7406	    {
7407	      REAL_VALUE_TYPE c;
7408
7409	      c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7410	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7411	      return fold_build2 (MULT_EXPR, type, arg1,
7412				  build_real (type, c));
7413	    }
7414
7415	  /* Convert x+x*c into x*(c+1).  */
7416	  if (flag_unsafe_math_optimizations
7417	      && TREE_CODE (arg1) == MULT_EXPR
7418	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7419	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7420	      && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
7421	    {
7422	      REAL_VALUE_TYPE c;
7423
7424	      c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7425	      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7426	      return fold_build2 (MULT_EXPR, type, arg0,
7427				  build_real (type, c));
7428	    }
7429
7430	  /* Convert x*c1+x*c2 into x*(c1+c2).  */
7431	  if (flag_unsafe_math_optimizations
7432	      && TREE_CODE (arg0) == MULT_EXPR
7433	      && TREE_CODE (arg1) == MULT_EXPR
7434	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7435	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
7436	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
7437	      && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
7438	      && operand_equal_p (TREE_OPERAND (arg0, 0),
7439				  TREE_OPERAND (arg1, 0), 0))
7440	    {
7441	      REAL_VALUE_TYPE c1, c2;
7442
7443	      c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
7444	      c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
7445	      real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
7446	      return fold_build2 (MULT_EXPR, type,
7447				  TREE_OPERAND (arg0, 0),
7448				  build_real (type, c1));
7449	    }
7450          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.  */
7451          if (flag_unsafe_math_optimizations
7452              && TREE_CODE (arg1) == PLUS_EXPR
7453              && TREE_CODE (arg0) != MULT_EXPR)
7454            {
7455              tree tree10 = TREE_OPERAND (arg1, 0);
7456              tree tree11 = TREE_OPERAND (arg1, 1);
7457              if (TREE_CODE (tree11) == MULT_EXPR
7458		  && TREE_CODE (tree10) == MULT_EXPR)
7459                {
7460                  tree tree0;
7461                  tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7462                  return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7463                }
7464            }
7465          /* Convert (b*c + d*e) + a into b*c + (d*e +a).  */
7466          if (flag_unsafe_math_optimizations
7467              && TREE_CODE (arg0) == PLUS_EXPR
7468              && TREE_CODE (arg1) != MULT_EXPR)
7469            {
7470              tree tree00 = TREE_OPERAND (arg0, 0);
7471              tree tree01 = TREE_OPERAND (arg0, 1);
7472              if (TREE_CODE (tree01) == MULT_EXPR
7473		  && TREE_CODE (tree00) == MULT_EXPR)
7474                {
7475                  tree tree0;
7476                  tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7477                  return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7478                }
7479            }
7480	}
7481
7482     bit_rotate:
7483      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
7484	 is a rotate of A by C1 bits.  */
7485      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
7486	 is a rotate of A by B bits.  */
7487      {
7488	enum tree_code code0, code1;
7489	code0 = TREE_CODE (arg0);
7490	code1 = TREE_CODE (arg1);
7491	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7492	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7493	    && operand_equal_p (TREE_OPERAND (arg0, 0),
7494			        TREE_OPERAND (arg1, 0), 0)
7495	    && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7496	  {
7497	    tree tree01, tree11;
7498	    enum tree_code code01, code11;
7499
7500	    tree01 = TREE_OPERAND (arg0, 1);
7501	    tree11 = TREE_OPERAND (arg1, 1);
7502	    STRIP_NOPS (tree01);
7503	    STRIP_NOPS (tree11);
7504	    code01 = TREE_CODE (tree01);
7505	    code11 = TREE_CODE (tree11);
7506	    if (code01 == INTEGER_CST
7507		&& code11 == INTEGER_CST
7508		&& TREE_INT_CST_HIGH (tree01) == 0
7509		&& TREE_INT_CST_HIGH (tree11) == 0
7510		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7511		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7512	      return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7513			     code0 == LSHIFT_EXPR ? tree01 : tree11);
7514	    else if (code11 == MINUS_EXPR)
7515	      {
7516		tree tree110, tree111;
7517		tree110 = TREE_OPERAND (tree11, 0);
7518		tree111 = TREE_OPERAND (tree11, 1);
7519		STRIP_NOPS (tree110);
7520		STRIP_NOPS (tree111);
7521		if (TREE_CODE (tree110) == INTEGER_CST
7522		    && 0 == compare_tree_int (tree110,
7523					      TYPE_PRECISION
7524					      (TREE_TYPE (TREE_OPERAND
7525							  (arg0, 0))))
7526		    && operand_equal_p (tree01, tree111, 0))
7527		  return build2 ((code0 == LSHIFT_EXPR
7528				  ? LROTATE_EXPR
7529				  : RROTATE_EXPR),
7530				 type, TREE_OPERAND (arg0, 0), tree01);
7531	      }
7532	    else if (code01 == MINUS_EXPR)
7533	      {
7534		tree tree010, tree011;
7535		tree010 = TREE_OPERAND (tree01, 0);
7536		tree011 = TREE_OPERAND (tree01, 1);
7537		STRIP_NOPS (tree010);
7538		STRIP_NOPS (tree011);
7539		if (TREE_CODE (tree010) == INTEGER_CST
7540		    && 0 == compare_tree_int (tree010,
7541					      TYPE_PRECISION
7542					      (TREE_TYPE (TREE_OPERAND
7543							  (arg0, 0))))
7544		    && operand_equal_p (tree11, tree011, 0))
7545		  return build2 ((code0 != LSHIFT_EXPR
7546				  ? LROTATE_EXPR
7547				  : RROTATE_EXPR),
7548				 type, TREE_OPERAND (arg0, 0), tree11);
7549	      }
7550	  }
7551      }
7552
7553    associate:
7554      /* In most languages, can't associate operations on floats through
7555	 parentheses.  Rather than remember where the parentheses were, we
7556	 don't associate floats at all, unless the user has specified
7557	 -funsafe-math-optimizations.  */
7558
7559      if (! wins
7560	  && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7561	{
7562	  tree var0, con0, lit0, minus_lit0;
7563	  tree var1, con1, lit1, minus_lit1;
7564	  bool ok = true;
7565
7566	  /* Split both trees into variables, constants, and literals.  Then
7567	     associate each group together, the constants with literals,
7568	     then the result with variables.  This increases the chances of
7569	     literals being recombined later and of generating relocatable
7570	     expressions for the sum of a constant and literal.  */
7571	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7572	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7573			     code == MINUS_EXPR);
7574
7575	  /* With undefined overflow we can only associate constants
7576	     with one variable.  */
7577	  if ((POINTER_TYPE_P (type)
7578	       || (INTEGRAL_TYPE_P (type)
7579		   && !(TYPE_UNSIGNED (type) || flag_wrapv)))
7580	      && var0 && var1)
7581	    {
7582	      tree tmp0 = var0;
7583	      tree tmp1 = var1;
7584
7585	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
7586	        tmp0 = TREE_OPERAND (tmp0, 0);
7587	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
7588	        tmp1 = TREE_OPERAND (tmp1, 0);
7589	      /* The only case we can still associate with two variables
7590		 is if they are the same, modulo negation.  */
7591	      if (!operand_equal_p (tmp0, tmp1, 0))
7592	        ok = false;
7593	    }
7594
7595	  /* Only do something if we found more than two objects.  Otherwise,
7596	     nothing has changed and we risk infinite recursion.  */
7597	  if (ok
7598	      && (2 < ((var0 != 0) + (var1 != 0)
7599		       + (con0 != 0) + (con1 != 0)
7600		       + (lit0 != 0) + (lit1 != 0)
7601		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
7602	    {
7603	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
7604	      if (code == MINUS_EXPR)
7605		code = PLUS_EXPR;
7606
7607	      var0 = associate_trees (var0, var1, code, type);
7608	      con0 = associate_trees (con0, con1, code, type);
7609	      lit0 = associate_trees (lit0, lit1, code, type);
7610	      minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7611
7612	      /* Preserve the MINUS_EXPR if the negative part of the literal is
7613		 greater than the positive part.  Otherwise, the multiplicative
7614		 folding code (i.e extract_muldiv) may be fooled in case
7615		 unsigned constants are subtracted, like in the following
7616		 example: ((X*2 + 4) - 8U)/2.  */
7617	      if (minus_lit0 && lit0)
7618		{
7619		  if (TREE_CODE (lit0) == INTEGER_CST
7620		      && TREE_CODE (minus_lit0) == INTEGER_CST
7621		      && tree_int_cst_lt (lit0, minus_lit0))
7622		    {
7623		      minus_lit0 = associate_trees (minus_lit0, lit0,
7624						    MINUS_EXPR, type);
7625		      lit0 = 0;
7626		    }
7627		  else
7628		    {
7629		      lit0 = associate_trees (lit0, minus_lit0,
7630					      MINUS_EXPR, type);
7631		      minus_lit0 = 0;
7632		    }
7633		}
7634	      if (minus_lit0)
7635		{
7636		  if (con0 == 0)
7637		    return fold_convert (type,
7638					 associate_trees (var0, minus_lit0,
7639							  MINUS_EXPR, type));
7640		  else
7641		    {
7642		      con0 = associate_trees (con0, minus_lit0,
7643					      MINUS_EXPR, type);
7644		      return fold_convert (type,
7645					   associate_trees (var0, con0,
7646							    PLUS_EXPR, type));
7647		    }
7648		}
7649
7650	      con0 = associate_trees (con0, lit0, code, type);
7651	      return fold_convert (type, associate_trees (var0, con0,
7652							  code, type));
7653	    }
7654	}
7655
7656    binary:
7657      if (wins)
7658	t1 = const_binop (code, arg0, arg1, 0);
7659      if (t1 != NULL_TREE)
7660	{
7661	  /* The return value should always have
7662	     the same type as the original expression.  */
7663	  if (TREE_TYPE (t1) != type)
7664	    t1 = fold_convert (type, t1);
7665
7666	  return t1;
7667	}
7668      return NULL_TREE;
7669
7670    case MINUS_EXPR:
7671      /* A - (-B) -> A + B */
7672      if (TREE_CODE (arg1) == NEGATE_EXPR)
7673	return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7674      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
7675      if (TREE_CODE (arg0) == NEGATE_EXPR
7676	  && (FLOAT_TYPE_P (type)
7677	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7678	  && negate_expr_p (arg1)
7679	  && reorder_operands_p (arg0, arg1))
7680	return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7681			    TREE_OPERAND (arg0, 0));
7682      /* Convert -A - 1 to ~A.  */
7683      if (INTEGRAL_TYPE_P (type)
7684	  && TREE_CODE (arg0) == NEGATE_EXPR
7685	  && integer_onep (arg1))
7686	return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7687
7688      /* Convert -1 - A to ~A.  */
7689      if (INTEGRAL_TYPE_P (type)
7690	  && integer_all_onesp (arg0))
7691	return fold_build1 (BIT_NOT_EXPR, type, arg1);
7692
7693      if (! FLOAT_TYPE_P (type))
7694	{
7695	  if (! wins && integer_zerop (arg0))
7696	    return negate_expr (fold_convert (type, arg1));
7697	  if (integer_zerop (arg1))
7698	    return non_lvalue (fold_convert (type, arg0));
7699
7700	  /* Fold A - (A & B) into ~B & A.  */
7701	  if (!TREE_SIDE_EFFECTS (arg0)
7702	      && TREE_CODE (arg1) == BIT_AND_EXPR)
7703	    {
7704	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7705		return fold_build2 (BIT_AND_EXPR, type,
7706				    fold_build1 (BIT_NOT_EXPR, type,
7707						 TREE_OPERAND (arg1, 0)),
7708				    arg0);
7709	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7710		return fold_build2 (BIT_AND_EXPR, type,
7711				    fold_build1 (BIT_NOT_EXPR, type,
7712						 TREE_OPERAND (arg1, 1)),
7713				    arg0);
7714	    }
7715
7716	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7717	     any power of 2 minus 1.  */
7718	  if (TREE_CODE (arg0) == BIT_AND_EXPR
7719	      && TREE_CODE (arg1) == BIT_AND_EXPR
7720	      && operand_equal_p (TREE_OPERAND (arg0, 0),
7721				  TREE_OPERAND (arg1, 0), 0))
7722	    {
7723	      tree mask0 = TREE_OPERAND (arg0, 1);
7724	      tree mask1 = TREE_OPERAND (arg1, 1);
7725	      tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7726
7727	      if (operand_equal_p (tem, mask1, 0))
7728		{
7729		  tem = fold_build2 (BIT_XOR_EXPR, type,
7730				     TREE_OPERAND (arg0, 0), mask1);
7731		  return fold_build2 (MINUS_EXPR, type, tem, mask1);
7732		}
7733	    }
7734	}
7735
7736      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
7737      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7738	return non_lvalue (fold_convert (type, arg0));
7739
7740      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
7741	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7742	 (-ARG1 + ARG0) reduces to -ARG1.  */
7743      else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7744	return negate_expr (fold_convert (type, arg1));
7745
7746      /* Fold &x - &x.  This can happen from &x.foo - &x.
7747	 This is unsafe for certain floats even in non-IEEE formats.
7748	 In IEEE, it is unsafe because it does wrong for NaNs.
7749	 Also note that operand_equal_p is always false if an operand
7750	 is volatile.  */
7751
7752      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7753	  && operand_equal_p (arg0, arg1, 0))
7754	return fold_convert (type, integer_zero_node);
7755
7756      /* A - B -> A + (-B) if B is easily negatable.  */
7757      if (!wins && negate_expr_p (arg1)
7758	  && ((FLOAT_TYPE_P (type)
7759               /* Avoid this transformation if B is a positive REAL_CST.  */
7760	       && (TREE_CODE (arg1) != REAL_CST
7761		   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7762	      || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7763	return fold_build2 (PLUS_EXPR, type,
7764			    fold_convert (type, arg0),
7765			    fold_convert (type, negate_expr (arg1)));
7766
7767      /* Try folding difference of addresses.  */
7768      {
7769	HOST_WIDE_INT diff;
7770
7771	if ((TREE_CODE (arg0) == ADDR_EXPR
7772	     || TREE_CODE (arg1) == ADDR_EXPR)
7773	    && ptr_difference_const (arg0, arg1, &diff))
7774	  return build_int_cst_type (type, diff);
7775      }
7776
7777      /* Fold &a[i] - &a[j] to i-j.  */
7778      if (TREE_CODE (arg0) == ADDR_EXPR
7779	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7780	  && TREE_CODE (arg1) == ADDR_EXPR
7781	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7782        {
7783	  tree aref0 = TREE_OPERAND (arg0, 0);
7784	  tree aref1 = TREE_OPERAND (arg1, 0);
7785	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
7786			       TREE_OPERAND (aref1, 0), 0))
7787	    {
7788	      tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7789	      tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7790	      tree esz = array_ref_element_size (aref0);
7791	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
7792	      return fold_build2 (MULT_EXPR, type, diff,
7793			          fold_convert (type, esz));
7794
7795	    }
7796	}
7797
7798      /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
7799	 of the array.  Loop optimizer sometimes produce this type of
7800	 expressions.  */
7801      if (TREE_CODE (arg0) == ADDR_EXPR)
7802	{
7803	  tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7804	  if (tem)
7805	    return fold_convert (type, tem);
7806	}
7807
7808      if (flag_unsafe_math_optimizations
7809	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7810	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7811	  && (tem = distribute_real_division (code, type, arg0, arg1)))
7812	return tem;
7813
7814      if (TREE_CODE (arg0) == MULT_EXPR
7815	  && TREE_CODE (arg1) == MULT_EXPR
7816	  && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7817	{
7818          /* (A * C) - (B * C) -> (A-B) * C.  */
7819	  if (operand_equal_p (TREE_OPERAND (arg0, 1),
7820			       TREE_OPERAND (arg1, 1), 0))
7821	    return fold_build2 (MULT_EXPR, type,
7822				fold_build2 (MINUS_EXPR, type,
7823					     TREE_OPERAND (arg0, 0),
7824					     TREE_OPERAND (arg1, 0)),
7825				TREE_OPERAND (arg0, 1));
7826          /* (A * C1) - (A * C2) -> A * (C1-C2).  */
7827	  if (operand_equal_p (TREE_OPERAND (arg0, 0),
7828			       TREE_OPERAND (arg1, 0), 0))
7829	    return fold_build2 (MULT_EXPR, type,
7830				TREE_OPERAND (arg0, 0),
7831				fold_build2 (MINUS_EXPR, type,
7832					     TREE_OPERAND (arg0, 1),
7833					     TREE_OPERAND (arg1, 1)));
7834	}
7835
7836      goto associate;
7837
7838    case MULT_EXPR:
7839      /* (-A) * (-B) -> A * B  */
7840      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7841	return fold_build2 (MULT_EXPR, type,
7842			    fold_convert (type, TREE_OPERAND (arg0, 0)),
7843			    fold_convert (type, negate_expr (arg1)));
7844      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7845	return fold_build2 (MULT_EXPR, type,
7846			    fold_convert (type, negate_expr (arg0)),
7847			    fold_convert (type, TREE_OPERAND (arg1, 0)));
7848
7849      if (! FLOAT_TYPE_P (type))
7850	{
7851	  if (integer_zerop (arg1))
7852	    return omit_one_operand (type, arg1, arg0);
7853	  if (integer_onep (arg1))
7854	    return non_lvalue (fold_convert (type, arg0));
7855	  /* Transform x * -1 into -x.  */
7856	  if (integer_all_onesp (arg1))
7857	    return fold_convert (type, negate_expr (arg0));
7858
7859	  /* (a * (1 << b)) is (a << b)  */
7860	  if (TREE_CODE (arg1) == LSHIFT_EXPR
7861	      && integer_onep (TREE_OPERAND (arg1, 0)))
7862	    return fold_build2 (LSHIFT_EXPR, type, arg0,
7863				TREE_OPERAND (arg1, 1));
7864	  if (TREE_CODE (arg0) == LSHIFT_EXPR
7865	      && integer_onep (TREE_OPERAND (arg0, 0)))
7866	    return fold_build2 (LSHIFT_EXPR, type, arg1,
7867				TREE_OPERAND (arg0, 1));
7868
7869	  if (TREE_CODE (arg1) == INTEGER_CST
7870	      && 0 != (tem = extract_muldiv (op0,
7871					     fold_convert (type, arg1),
7872					     code, NULL_TREE)))
7873	    return fold_convert (type, tem);
7874
7875	}
7876      else
7877	{
7878	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
7879	     when x is NaN, since x * 0 is also NaN.  Nor are they the
7880	     same in modes with signed zeros, since multiplying a
7881	     negative value by 0 gives -0, not +0.  */
7882	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7883	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7884	      && real_zerop (arg1))
7885	    return omit_one_operand (type, arg1, arg0);
7886	  /* In IEEE floating point, x*1 is not equivalent to x for snans.  */
7887	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7888	      && real_onep (arg1))
7889	    return non_lvalue (fold_convert (type, arg0));
7890
7891	  /* Transform x * -1.0 into -x.  */
7892	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7893	      && real_minus_onep (arg1))
7894	    return fold_convert (type, negate_expr (arg0));
7895
7896	  /* Convert (C1/X)*C2 into (C1*C2)/X.  */
7897	  if (flag_unsafe_math_optimizations
7898	      && TREE_CODE (arg0) == RDIV_EXPR
7899	      && TREE_CODE (arg1) == REAL_CST
7900	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7901	    {
7902	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7903				      arg1, 0);
7904	      if (tem)
7905		return fold_build2 (RDIV_EXPR, type, tem,
7906				    TREE_OPERAND (arg0, 1));
7907	    }
7908
7909          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
7910	  if (operand_equal_p (arg0, arg1, 0))
7911	    {
7912	      tree tem = fold_strip_sign_ops (arg0);
7913	      if (tem != NULL_TREE)
7914		{
7915		  tem = fold_convert (type, tem);
7916		  return fold_build2 (MULT_EXPR, type, tem, tem);
7917		}
7918	    }
7919
7920	  if (flag_unsafe_math_optimizations)
7921	    {
7922	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7923	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7924
7925	      /* Optimizations of root(...)*root(...).  */
7926	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7927		{
7928		  tree rootfn, arg, arglist;
7929		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7930		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7931
7932		  /* Optimize sqrt(x)*sqrt(x) as x.  */
7933		  if (BUILTIN_SQRT_P (fcode0)
7934		      && operand_equal_p (arg00, arg10, 0)
7935		      && ! HONOR_SNANS (TYPE_MODE (type)))
7936		    return arg00;
7937
7938	          /* Optimize root(x)*root(y) as root(x*y).  */
7939		  rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7940		  arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7941		  arglist = build_tree_list (NULL_TREE, arg);
7942		  return build_function_call_expr (rootfn, arglist);
7943		}
7944
7945	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
7946	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7947		{
7948		  tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7949		  tree arg = fold_build2 (PLUS_EXPR, type,
7950					  TREE_VALUE (TREE_OPERAND (arg0, 1)),
7951					  TREE_VALUE (TREE_OPERAND (arg1, 1)));
7952		  tree arglist = build_tree_list (NULL_TREE, arg);
7953		  return build_function_call_expr (expfn, arglist);
7954		}
7955
7956	      /* Optimizations of pow(...)*pow(...).  */
7957	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7958		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7959		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7960		{
7961		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7962		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7963								     1)));
7964		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7965		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7966								     1)));
7967
7968		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
7969		  if (operand_equal_p (arg01, arg11, 0))
7970		    {
7971		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7972		      tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7973		      tree arglist = tree_cons (NULL_TREE, arg,
7974						build_tree_list (NULL_TREE,
7975								 arg01));
7976		      return build_function_call_expr (powfn, arglist);
7977		    }
7978
7979		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
7980		  if (operand_equal_p (arg00, arg10, 0))
7981		    {
7982		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7983		      tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7984		      tree arglist = tree_cons (NULL_TREE, arg00,
7985						build_tree_list (NULL_TREE,
7986								 arg));
7987		      return build_function_call_expr (powfn, arglist);
7988		    }
7989		}
7990
7991	      /* Optimize tan(x)*cos(x) as sin(x).  */
7992	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7993		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7994		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7995		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7996		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7997		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7998		  && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7999				      TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8000		{
8001		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
8002
8003		  if (sinfn != NULL_TREE)
8004		    return build_function_call_expr (sinfn,
8005						     TREE_OPERAND (arg0, 1));
8006		}
8007
8008	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
8009	      if (fcode1 == BUILT_IN_POW
8010		  || fcode1 == BUILT_IN_POWF
8011		  || fcode1 == BUILT_IN_POWL)
8012		{
8013		  tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8014		  tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
8015								     1)));
8016		  if (TREE_CODE (arg11) == REAL_CST
8017		      && ! TREE_CONSTANT_OVERFLOW (arg11)
8018		      && operand_equal_p (arg0, arg10, 0))
8019		    {
8020		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8021		      REAL_VALUE_TYPE c;
8022		      tree arg, arglist;
8023
8024		      c = TREE_REAL_CST (arg11);
8025		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8026		      arg = build_real (type, c);
8027		      arglist = build_tree_list (NULL_TREE, arg);
8028		      arglist = tree_cons (NULL_TREE, arg0, arglist);
8029		      return build_function_call_expr (powfn, arglist);
8030		    }
8031		}
8032
8033	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
8034	      if (fcode0 == BUILT_IN_POW
8035		  || fcode0 == BUILT_IN_POWF
8036		  || fcode0 == BUILT_IN_POWL)
8037		{
8038		  tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8039		  tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8040								     1)));
8041		  if (TREE_CODE (arg01) == REAL_CST
8042		      && ! TREE_CONSTANT_OVERFLOW (arg01)
8043		      && operand_equal_p (arg1, arg00, 0))
8044		    {
8045		      tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8046		      REAL_VALUE_TYPE c;
8047		      tree arg, arglist;
8048
8049		      c = TREE_REAL_CST (arg01);
8050		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8051		      arg = build_real (type, c);
8052		      arglist = build_tree_list (NULL_TREE, arg);
8053		      arglist = tree_cons (NULL_TREE, arg1, arglist);
8054		      return build_function_call_expr (powfn, arglist);
8055		    }
8056		}
8057
8058	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
8059	      if (! optimize_size
8060		  && operand_equal_p (arg0, arg1, 0))
8061		{
8062		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8063
8064		  if (powfn)
8065		    {
8066		      tree arg = build_real (type, dconst2);
8067		      tree arglist = build_tree_list (NULL_TREE, arg);
8068		      arglist = tree_cons (NULL_TREE, arg0, arglist);
8069		      return build_function_call_expr (powfn, arglist);
8070		    }
8071		}
8072	    }
8073	}
8074      goto associate;
8075
8076    case BIT_IOR_EXPR:
8077    bit_ior:
8078      if (integer_all_onesp (arg1))
8079	return omit_one_operand (type, arg1, arg0);
8080      if (integer_zerop (arg1))
8081	return non_lvalue (fold_convert (type, arg0));
8082      if (operand_equal_p (arg0, arg1, 0))
8083	return non_lvalue (fold_convert (type, arg0));
8084
8085      /* ~X | X is -1.  */
8086      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8087	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8088	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8089	{
8090	  t1 = build_int_cst (type, -1);
8091	  t1 = force_fit_type (t1, 0, false, false);
8092	  return omit_one_operand (type, t1, arg1);
8093	}
8094
8095      /* X | ~X is -1.  */
8096      if (TREE_CODE (arg1) == BIT_NOT_EXPR
8097	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8098	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8099	{
8100	  t1 = build_int_cst (type, -1);
8101	  t1 = force_fit_type (t1, 0, false, false);
8102	  return omit_one_operand (type, t1, arg0);
8103	}
8104
8105      /* Canonicalize (X & C1) | C2.  */
8106      if (TREE_CODE (arg0) == BIT_AND_EXPR
8107	  && TREE_CODE (arg1) == INTEGER_CST
8108	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8109	{
8110	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
8111	  int width = TYPE_PRECISION (type);
8112	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
8113	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8114	  hi2 = TREE_INT_CST_HIGH (arg1);
8115	  lo2 = TREE_INT_CST_LOW (arg1);
8116
8117	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
8118	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
8119	    return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
8120
8121	  if (width > HOST_BITS_PER_WIDE_INT)
8122	    {
8123	      mhi = (unsigned HOST_WIDE_INT) -1
8124		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
8125	      mlo = -1;
8126	    }
8127	  else
8128	    {
8129	      mhi = 0;
8130	      mlo = (unsigned HOST_WIDE_INT) -1
8131		    >> (HOST_BITS_PER_WIDE_INT - width);
8132	    }
8133
8134	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
8135	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
8136	    return fold_build2 (BIT_IOR_EXPR, type,
8137				TREE_OPERAND (arg0, 0), arg1);
8138
8139	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2.  */
8140	  hi1 &= mhi;
8141	  lo1 &= mlo;
8142	  if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
8143	    return fold_build2 (BIT_IOR_EXPR, type,
8144				fold_build2 (BIT_AND_EXPR, type,
8145					     TREE_OPERAND (arg0, 0),
8146					     build_int_cst_wide (type,
8147								 lo1 & ~lo2,
8148								 hi1 & ~hi2)),
8149				arg1);
8150	}
8151
8152      t1 = distribute_bit_expr (code, type, arg0, arg1);
8153      if (t1 != NULL_TREE)
8154	return t1;
8155
8156      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8157
8158	 This results in more efficient code for machines without a NAND
8159	 instruction.  Combine will canonicalize to the first form
8160	 which will allow use of NAND instructions provided by the
8161	 backend if they exist.  */
8162      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8163	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
8164	{
8165	  return fold_build1 (BIT_NOT_EXPR, type,
8166			      build2 (BIT_AND_EXPR, type,
8167				      TREE_OPERAND (arg0, 0),
8168				      TREE_OPERAND (arg1, 0)));
8169	}
8170
8171      /* See if this can be simplified into a rotate first.  If that
8172	 is unsuccessful continue in the association code.  */
8173      goto bit_rotate;
8174
8175    case BIT_XOR_EXPR:
8176      if (integer_zerop (arg1))
8177	return non_lvalue (fold_convert (type, arg0));
8178      if (integer_all_onesp (arg1))
8179	return fold_build1 (BIT_NOT_EXPR, type, arg0);
8180      if (operand_equal_p (arg0, arg1, 0))
8181	return omit_one_operand (type, integer_zero_node, arg0);
8182
8183      /* ~X ^ X is -1.  */
8184      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8185	  && INTEGRAL_TYPE_P (TREE_TYPE (arg1))
8186	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8187	{
8188	  t1 = build_int_cst (type, -1);
8189	  t1 = force_fit_type (t1, 0, false, false);
8190	  return omit_one_operand (type, t1, arg1);
8191	}
8192
8193      /* X ^ ~X is -1.  */
8194      if (TREE_CODE (arg1) == BIT_NOT_EXPR
8195	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8196	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8197	{
8198	  t1 = build_int_cst (type, -1);
8199	  t1 = force_fit_type (t1, 0, false, false);
8200	  return omit_one_operand (type, t1, arg0);
8201	}
8202
8203      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8204         with a constant, and the two constants have no bits in common,
8205	 we should treat this as a BIT_IOR_EXPR since this may produce more
8206	 simplifications.  */
8207      if (TREE_CODE (arg0) == BIT_AND_EXPR
8208	  && TREE_CODE (arg1) == BIT_AND_EXPR
8209	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8210	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8211	  && integer_zerop (const_binop (BIT_AND_EXPR,
8212					 TREE_OPERAND (arg0, 1),
8213					 TREE_OPERAND (arg1, 1), 0)))
8214	{
8215	  code = BIT_IOR_EXPR;
8216	  goto bit_ior;
8217	}
8218
8219      /* (X | Y) ^ X -> Y & ~ X*/
8220      if (TREE_CODE (arg0) == BIT_IOR_EXPR
8221          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8222        {
8223	  tree t2 = TREE_OPERAND (arg0, 1);
8224	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8225			    arg1);
8226	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8227			    fold_convert (type, t1));
8228	  return t1;
8229	}
8230
8231      /* (Y | X) ^ X -> Y & ~ X*/
8232      if (TREE_CODE (arg0) == BIT_IOR_EXPR
8233          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8234        {
8235	  tree t2 = TREE_OPERAND (arg0, 0);
8236	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8237			    arg1);
8238	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8239			    fold_convert (type, t1));
8240	  return t1;
8241	}
8242
8243      /* X ^ (X | Y) -> Y & ~ X*/
8244      if (TREE_CODE (arg1) == BIT_IOR_EXPR
8245          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8246        {
8247	  tree t2 = TREE_OPERAND (arg1, 1);
8248	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8249			    arg0);
8250	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8251			    fold_convert (type, t1));
8252	  return t1;
8253	}
8254
8255      /* X ^ (Y | X) -> Y & ~ X*/
8256      if (TREE_CODE (arg1) == BIT_IOR_EXPR
8257          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8258        {
8259	  tree t2 = TREE_OPERAND (arg1, 0);
8260	  t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8261			    arg0);
8262	  t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8263			    fold_convert (type, t1));
8264	  return t1;
8265	}
8266
8267      /* Convert ~X ^ ~Y to X ^ Y.  */
8268      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8269	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
8270	return fold_build2 (code, type,
8271			    fold_convert (type, TREE_OPERAND (arg0, 0)),
8272			    fold_convert (type, TREE_OPERAND (arg1, 0)));
8273
8274      /* See if this can be simplified into a rotate first.  If that
8275	 is unsuccessful continue in the association code.  */
8276      goto bit_rotate;
8277
8278    case BIT_AND_EXPR:
8279      if (integer_all_onesp (arg1))
8280	return non_lvalue (fold_convert (type, arg0));
8281      if (integer_zerop (arg1))
8282	return omit_one_operand (type, arg1, arg0);
8283      if (operand_equal_p (arg0, arg1, 0))
8284	return non_lvalue (fold_convert (type, arg0));
8285
8286      /* ~X & X is always zero.  */
8287      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8288	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8289	return omit_one_operand (type, integer_zero_node, arg1);
8290
8291      /* X & ~X is always zero.  */
8292      if (TREE_CODE (arg1) == BIT_NOT_EXPR
8293	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8294	return omit_one_operand (type, integer_zero_node, arg0);
8295
8296      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
8297      if (TREE_CODE (arg0) == BIT_IOR_EXPR
8298	  && TREE_CODE (arg1) == INTEGER_CST
8299	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8300	return fold_build2 (BIT_IOR_EXPR, type,
8301			    fold_build2 (BIT_AND_EXPR, type,
8302					 TREE_OPERAND (arg0, 0), arg1),
8303			    fold_build2 (BIT_AND_EXPR, type,
8304					 TREE_OPERAND (arg0, 1), arg1));
8305
8306      t1 = distribute_bit_expr (code, type, arg0, arg1);
8307      if (t1 != NULL_TREE)
8308	return t1;
8309      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
8310      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8311	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8312	{
8313	  unsigned int prec
8314	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8315
8316	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8317	      && (~TREE_INT_CST_LOW (arg1)
8318		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8319	    return fold_convert (type, TREE_OPERAND (arg0, 0));
8320	}
8321
8322      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8323
8324	 This results in more efficient code for machines without a NOR
8325	 instruction.  Combine will canonicalize to the first form
8326	 which will allow use of NOR instructions provided by the
8327	 backend if they exist.  */
8328      if (TREE_CODE (arg0) == BIT_NOT_EXPR
8329	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
8330	{
8331	  return fold_build1 (BIT_NOT_EXPR, type,
8332			      build2 (BIT_IOR_EXPR, type,
8333				      TREE_OPERAND (arg0, 0),
8334				      TREE_OPERAND (arg1, 0)));
8335	}
8336
8337      goto associate;
8338
8339    case RDIV_EXPR:
8340      /* Don't touch a floating-point divide by zero unless the mode
8341	 of the constant can represent infinity.  */
8342      if (TREE_CODE (arg1) == REAL_CST
8343	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8344	  && real_zerop (arg1))
8345	return NULL_TREE;
8346
8347      /* (-A) / (-B) -> A / B  */
8348      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8349	return fold_build2 (RDIV_EXPR, type,
8350			    TREE_OPERAND (arg0, 0),
8351			    negate_expr (arg1));
8352      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8353	return fold_build2 (RDIV_EXPR, type,
8354			    negate_expr (arg0),
8355			    TREE_OPERAND (arg1, 0));
8356
8357      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
8358      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8359	  && real_onep (arg1))
8360	return non_lvalue (fold_convert (type, arg0));
8361
8362      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
8363      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8364	  && real_minus_onep (arg1))
8365	return non_lvalue (fold_convert (type, negate_expr (arg0)));
8366
8367      /* If ARG1 is a constant, we can convert this to a multiply by the
8368	 reciprocal.  This does not have the same rounding properties,
8369	 so only do this if -funsafe-math-optimizations.  We can actually
8370	 always safely do it if ARG1 is a power of two, but it's hard to
8371	 tell if it is or not in a portable manner.  */
8372      if (TREE_CODE (arg1) == REAL_CST)
8373	{
8374	  if (flag_unsafe_math_optimizations
8375	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
8376					  arg1, 0)))
8377	    return fold_build2 (MULT_EXPR, type, arg0, tem);
8378	  /* Find the reciprocal if optimizing and the result is exact.  */
8379	  if (optimize)
8380	    {
8381	      REAL_VALUE_TYPE r;
8382	      r = TREE_REAL_CST (arg1);
8383	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
8384		{
8385		  tem = build_real (type, r);
8386		  return fold_build2 (MULT_EXPR, type,
8387				      fold_convert (type, arg0), tem);
8388		}
8389	    }
8390	}
8391      /* Convert A/B/C to A/(B*C).  */
8392      if (flag_unsafe_math_optimizations
8393	  && TREE_CODE (arg0) == RDIV_EXPR)
8394	return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8395			    fold_build2 (MULT_EXPR, type,
8396					 TREE_OPERAND (arg0, 1), arg1));
8397
8398      /* Convert A/(B/C) to (A/B)*C.  */
8399      if (flag_unsafe_math_optimizations
8400	  && TREE_CODE (arg1) == RDIV_EXPR)
8401	return fold_build2 (MULT_EXPR, type,
8402			    fold_build2 (RDIV_EXPR, type, arg0,
8403					 TREE_OPERAND (arg1, 0)),
8404			    TREE_OPERAND (arg1, 1));
8405
8406      /* Convert C1/(X*C2) into (C1/C2)/X.  */
8407      if (flag_unsafe_math_optimizations
8408	  && TREE_CODE (arg1) == MULT_EXPR
8409	  && TREE_CODE (arg0) == REAL_CST
8410	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8411	{
8412	  tree tem = const_binop (RDIV_EXPR, arg0,
8413				  TREE_OPERAND (arg1, 1), 0);
8414	  if (tem)
8415	    return fold_build2 (RDIV_EXPR, type, tem,
8416				TREE_OPERAND (arg1, 0));
8417	}
8418
8419      if (flag_unsafe_math_optimizations)
8420	{
8421	  enum built_in_function fcode = builtin_mathfn_code (arg1);
8422	  /* Optimize x/expN(y) into x*expN(-y).  */
8423	  if (BUILTIN_EXPONENT_P (fcode))
8424	    {
8425	      tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8426	      tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8427	      tree arglist = build_tree_list (NULL_TREE,
8428					      fold_convert (type, arg));
8429	      arg1 = build_function_call_expr (expfn, arglist);
8430	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
8431	    }
8432
8433	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
8434	  if (fcode == BUILT_IN_POW
8435	      || fcode == BUILT_IN_POWF
8436	      || fcode == BUILT_IN_POWL)
8437	    {
8438	      tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8439	      tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8440	      tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8441	      tree neg11 = fold_convert (type, negate_expr (arg11));
8442	      tree arglist = tree_cons(NULL_TREE, arg10,
8443				       build_tree_list (NULL_TREE, neg11));
8444	      arg1 = build_function_call_expr (powfn, arglist);
8445	      return fold_build2 (MULT_EXPR, type, arg0, arg1);
8446	    }
8447	}
8448
8449      if (flag_unsafe_math_optimizations)
8450	{
8451	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8452	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8453
8454	  /* Optimize sin(x)/cos(x) as tan(x).  */
8455	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8456	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8457	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8458	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8459				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8460	    {
8461	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8462
8463	      if (tanfn != NULL_TREE)
8464		return build_function_call_expr (tanfn,
8465						 TREE_OPERAND (arg0, 1));
8466	    }
8467
8468	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
8469	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8470	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8471	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8472	      && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8473				  TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8474	    {
8475	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8476
8477	      if (tanfn != NULL_TREE)
8478		{
8479		  tree tmp = TREE_OPERAND (arg0, 1);
8480		  tmp = build_function_call_expr (tanfn, tmp);
8481		  return fold_build2 (RDIV_EXPR, type,
8482				      build_real (type, dconst1), tmp);
8483		}
8484	    }
8485
8486	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
8487	  if (fcode0 == BUILT_IN_POW
8488	      || fcode0 == BUILT_IN_POWF
8489	      || fcode0 == BUILT_IN_POWL)
8490	    {
8491	      tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8492	      tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8493	      if (TREE_CODE (arg01) == REAL_CST
8494		  && ! TREE_CONSTANT_OVERFLOW (arg01)
8495		  && operand_equal_p (arg1, arg00, 0))
8496		{
8497		  tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8498		  REAL_VALUE_TYPE c;
8499		  tree arg, arglist;
8500
8501		  c = TREE_REAL_CST (arg01);
8502		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8503		  arg = build_real (type, c);
8504		  arglist = build_tree_list (NULL_TREE, arg);
8505		  arglist = tree_cons (NULL_TREE, arg1, arglist);
8506		  return build_function_call_expr (powfn, arglist);
8507		}
8508	    }
8509	}
8510      goto binary;
8511
8512    case TRUNC_DIV_EXPR:
8513    case ROUND_DIV_EXPR:
8514    case FLOOR_DIV_EXPR:
8515    case CEIL_DIV_EXPR:
8516    case EXACT_DIV_EXPR:
8517      if (integer_onep (arg1))
8518	return non_lvalue (fold_convert (type, arg0));
8519      if (integer_zerop (arg1))
8520	return NULL_TREE;
8521      /* X / -1 is -X.  */
8522      if (!TYPE_UNSIGNED (type)
8523	  && TREE_CODE (arg1) == INTEGER_CST
8524	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8525	  && TREE_INT_CST_HIGH (arg1) == -1)
8526	return fold_convert (type, negate_expr (arg0));
8527
8528      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8529	 operation, EXACT_DIV_EXPR.
8530
8531	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8532	 At one time others generated faster code, it's not clear if they do
8533	 after the last round to changes to the DIV code in expmed.c.  */
8534      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8535	  && multiple_of_p (type, arg0, arg1))
8536	return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8537
8538      if (TREE_CODE (arg1) == INTEGER_CST
8539	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8540	return fold_convert (type, tem);
8541
8542      goto binary;
8543
8544    case CEIL_MOD_EXPR:
8545    case FLOOR_MOD_EXPR:
8546    case ROUND_MOD_EXPR:
8547    case TRUNC_MOD_EXPR:
8548      /* X % 1 is always zero, but be sure to preserve any side
8549	 effects in X.  */
8550      if (integer_onep (arg1))
8551	return omit_one_operand (type, integer_zero_node, arg0);
8552
8553      /* X % 0, return X % 0 unchanged so that we can get the
8554	 proper warnings and errors.  */
8555      if (integer_zerop (arg1))
8556	return NULL_TREE;
8557
8558      /* 0 % X is always zero, but be sure to preserve any side
8559	 effects in X.  Place this after checking for X == 0.  */
8560      if (integer_zerop (arg0))
8561	return omit_one_operand (type, integer_zero_node, arg1);
8562
8563      /* X % -1 is zero.  */
8564      if (!TYPE_UNSIGNED (type)
8565	  && TREE_CODE (arg1) == INTEGER_CST
8566	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8567	  && TREE_INT_CST_HIGH (arg1) == -1)
8568	return omit_one_operand (type, integer_zero_node, arg0);
8569
8570      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8571         i.e. "X % C" into "X & C2", if X and C are positive.  */
8572      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8573	  && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8574	  && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8575	{
8576	  unsigned HOST_WIDE_INT high, low;
8577	  tree mask;
8578	  int l;
8579
8580	  l = tree_log2 (arg1);
8581	  if (l >= HOST_BITS_PER_WIDE_INT)
8582	    {
8583	      high = ((unsigned HOST_WIDE_INT) 1
8584		      << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8585	      low = -1;
8586	    }
8587	  else
8588	    {
8589	      high = 0;
8590	      low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8591	    }
8592
8593	  mask = build_int_cst_wide (type, low, high);
8594	  return fold_build2 (BIT_AND_EXPR, type,
8595			      fold_convert (type, arg0), mask);
8596	}
8597
8598      /* X % -C is the same as X % C.  */
8599      if (code == TRUNC_MOD_EXPR
8600	  && !TYPE_UNSIGNED (type)
8601	  && TREE_CODE (arg1) == INTEGER_CST
8602	  && !TREE_CONSTANT_OVERFLOW (arg1)
8603	  && TREE_INT_CST_HIGH (arg1) < 0
8604	  && !flag_trapv
8605	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
8606	  && !sign_bit_p (arg1, arg1))
8607	return fold_build2 (code, type, fold_convert (type, arg0),
8608			    fold_convert (type, negate_expr (arg1)));
8609
8610      /* X % -Y is the same as X % Y.  */
8611      if (code == TRUNC_MOD_EXPR
8612	  && !TYPE_UNSIGNED (type)
8613	  && TREE_CODE (arg1) == NEGATE_EXPR
8614	  && !flag_trapv)
8615	return fold_build2 (code, type, fold_convert (type, arg0),
8616			    fold_convert (type, TREE_OPERAND (arg1, 0)));
8617
8618      if (TREE_CODE (arg1) == INTEGER_CST
8619	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8620	return fold_convert (type, tem);
8621
8622      goto binary;
8623
8624    case LROTATE_EXPR:
8625    case RROTATE_EXPR:
8626      if (integer_all_onesp (arg0))
8627	return omit_one_operand (type, arg0, arg1);
8628      goto shift;
8629
8630    case RSHIFT_EXPR:
8631      /* Optimize -1 >> x for arithmetic right shifts.  */
8632      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8633	return omit_one_operand (type, arg0, arg1);
8634      /* ... fall through ...  */
8635
8636    case LSHIFT_EXPR:
8637    shift:
8638      if (integer_zerop (arg1))
8639	return non_lvalue (fold_convert (type, arg0));
8640      if (integer_zerop (arg0))
8641	return omit_one_operand (type, arg0, arg1);
8642
8643      /* Since negative shift count is not well-defined,
8644	 don't try to compute it in the compiler.  */
8645      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8646	return NULL_TREE;
8647
8648      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
8649      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
8650	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8651	  && host_integerp (TREE_OPERAND (arg0, 1), false)
8652	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8653	{
8654	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8655			       + TREE_INT_CST_LOW (arg1));
8656
8657	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8658	     being well defined.  */
8659	  if (low >= TYPE_PRECISION (type))
8660	    {
8661	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8662	        low = low % TYPE_PRECISION (type);
8663	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8664	        return build_int_cst (type, 0);
8665	      else
8666		low = TYPE_PRECISION (type) - 1;
8667	    }
8668
8669	  return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8670			      build_int_cst (type, low));
8671	}
8672
8673      /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8674         into x & ((unsigned)-1 >> c) for unsigned types.  */
8675      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8676           || (TYPE_UNSIGNED (type)
8677	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8678	  && host_integerp (arg1, false)
8679	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8680	  && host_integerp (TREE_OPERAND (arg0, 1), false)
8681	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8682	{
8683	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8684	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8685	  tree lshift;
8686	  tree arg00;
8687
8688	  if (low0 == low1)
8689	    {
8690	      arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8691
8692	      lshift = build_int_cst (type, -1);
8693	      lshift = int_const_binop (code, lshift, arg1, 0);
8694
8695	      return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8696	    }
8697	}
8698
8699      /* Rewrite an LROTATE_EXPR by a constant into an
8700	 RROTATE_EXPR by a new constant.  */
8701      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8702	{
8703	  tree tem = build_int_cst (NULL_TREE,
8704				    GET_MODE_BITSIZE (TYPE_MODE (type)));
8705	  tem = fold_convert (TREE_TYPE (arg1), tem);
8706	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8707	  return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8708	}
8709
8710      /* If we have a rotate of a bit operation with the rotate count and
8711	 the second operand of the bit operation both constant,
8712	 permute the two operations.  */
8713      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8714	  && (TREE_CODE (arg0) == BIT_AND_EXPR
8715	      || TREE_CODE (arg0) == BIT_IOR_EXPR
8716	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
8717	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8718	return fold_build2 (TREE_CODE (arg0), type,
8719			    fold_build2 (code, type,
8720					 TREE_OPERAND (arg0, 0), arg1),
8721			    fold_build2 (code, type,
8722					 TREE_OPERAND (arg0, 1), arg1));
8723
8724      /* Two consecutive rotates adding up to the width of the mode can
8725	 be ignored.  */
8726      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8727	  && TREE_CODE (arg0) == RROTATE_EXPR
8728	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8729	  && TREE_INT_CST_HIGH (arg1) == 0
8730	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8731	  && ((TREE_INT_CST_LOW (arg1)
8732	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8733	      == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8734	return TREE_OPERAND (arg0, 0);
8735
8736      goto binary;
8737
8738    case MIN_EXPR:
8739      if (operand_equal_p (arg0, arg1, 0))
8740	return omit_one_operand (type, arg0, arg1);
8741      if (INTEGRAL_TYPE_P (type)
8742	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8743	return omit_one_operand (type, arg1, arg0);
8744      goto associate;
8745
8746    case MAX_EXPR:
8747      if (operand_equal_p (arg0, arg1, 0))
8748	return omit_one_operand (type, arg0, arg1);
8749      if (INTEGRAL_TYPE_P (type)
8750	  && TYPE_MAX_VALUE (type)
8751	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8752	return omit_one_operand (type, arg1, arg0);
8753      goto associate;
8754
8755    case TRUTH_ANDIF_EXPR:
8756      /* Note that the operands of this must be ints
8757	 and their values must be 0 or 1.
8758	 ("true" is a fixed value perhaps depending on the language.)  */
8759      /* If first arg is constant zero, return it.  */
8760      if (integer_zerop (arg0))
8761	return fold_convert (type, arg0);
8762    case TRUTH_AND_EXPR:
8763      /* If either arg is constant true, drop it.  */
8764      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8765	return non_lvalue (fold_convert (type, arg1));
8766      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8767	  /* Preserve sequence points.  */
8768	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8769	return non_lvalue (fold_convert (type, arg0));
8770      /* If second arg is constant zero, result is zero, but first arg
8771	 must be evaluated.  */
8772      if (integer_zerop (arg1))
8773	return omit_one_operand (type, arg1, arg0);
8774      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8775	 case will be handled here.  */
8776      if (integer_zerop (arg0))
8777	return omit_one_operand (type, arg0, arg1);
8778
8779      /* !X && X is always false.  */
8780      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8781	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8782	return omit_one_operand (type, integer_zero_node, arg1);
8783      /* X && !X is always false.  */
8784      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8785	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8786	return omit_one_operand (type, integer_zero_node, arg0);
8787
8788      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
8789	 means A >= Y && A != MAX, but in this case we know that
8790	 A < X <= MAX.  */
8791
8792      if (!TREE_SIDE_EFFECTS (arg0)
8793	  && !TREE_SIDE_EFFECTS (arg1))
8794	{
8795	  tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8796	  if (tem && !operand_equal_p (tem, arg0, 0))
8797	    return fold_build2 (code, type, tem, arg1);
8798
8799	  tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8800	  if (tem && !operand_equal_p (tem, arg1, 0))
8801	    return fold_build2 (code, type, arg0, tem);
8802	}
8803
8804    truth_andor:
8805      /* We only do these simplifications if we are optimizing.  */
8806      if (!optimize)
8807	return NULL_TREE;
8808
8809      /* Check for things like (A || B) && (A || C).  We can convert this
8810	 to A || (B && C).  Note that either operator can be any of the four
8811	 truth and/or operations and the transformation will still be
8812	 valid.   Also note that we only care about order for the
8813	 ANDIF and ORIF operators.  If B contains side effects, this
8814	 might change the truth-value of A.  */
8815      if (TREE_CODE (arg0) == TREE_CODE (arg1)
8816	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8817	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8818	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
8819	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8820	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8821	{
8822	  tree a00 = TREE_OPERAND (arg0, 0);
8823	  tree a01 = TREE_OPERAND (arg0, 1);
8824	  tree a10 = TREE_OPERAND (arg1, 0);
8825	  tree a11 = TREE_OPERAND (arg1, 1);
8826	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8827			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8828			     && (code == TRUTH_AND_EXPR
8829				 || code == TRUTH_OR_EXPR));
8830
8831	  if (operand_equal_p (a00, a10, 0))
8832	    return fold_build2 (TREE_CODE (arg0), type, a00,
8833				fold_build2 (code, type, a01, a11));
8834	  else if (commutative && operand_equal_p (a00, a11, 0))
8835	    return fold_build2 (TREE_CODE (arg0), type, a00,
8836				fold_build2 (code, type, a01, a10));
8837	  else if (commutative && operand_equal_p (a01, a10, 0))
8838	    return fold_build2 (TREE_CODE (arg0), type, a01,
8839				fold_build2 (code, type, a00, a11));
8840
8841	  /* This case if tricky because we must either have commutative
8842	     operators or else A10 must not have side-effects.  */
8843
8844	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8845		   && operand_equal_p (a01, a11, 0))
8846	    return fold_build2 (TREE_CODE (arg0), type,
8847				fold_build2 (code, type, a00, a10),
8848				a01);
8849	}
8850
8851      /* See if we can build a range comparison.  */
8852      if (0 != (tem = fold_range_test (code, type, op0, op1)))
8853	return tem;
8854
8855      /* Check for the possibility of merging component references.  If our
8856	 lhs is another similar operation, try to merge its rhs with our
8857	 rhs.  Then try to merge our lhs and rhs.  */
8858      if (TREE_CODE (arg0) == code
8859	  && 0 != (tem = fold_truthop (code, type,
8860				       TREE_OPERAND (arg0, 1), arg1)))
8861	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8862
8863      if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8864	return tem;
8865
8866      return NULL_TREE;
8867
8868    case TRUTH_ORIF_EXPR:
8869      /* Note that the operands of this must be ints
8870	 and their values must be 0 or true.
8871	 ("true" is a fixed value perhaps depending on the language.)  */
8872      /* If first arg is constant true, return it.  */
8873      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8874	return fold_convert (type, arg0);
8875    case TRUTH_OR_EXPR:
8876      /* If either arg is constant zero, drop it.  */
8877      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8878	return non_lvalue (fold_convert (type, arg1));
8879      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8880	  /* Preserve sequence points.  */
8881	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8882	return non_lvalue (fold_convert (type, arg0));
8883      /* If second arg is constant true, result is true, but we must
8884	 evaluate first arg.  */
8885      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8886	return omit_one_operand (type, arg1, arg0);
8887      /* Likewise for first arg, but note this only occurs here for
8888	 TRUTH_OR_EXPR.  */
8889      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8890	return omit_one_operand (type, arg0, arg1);
8891
8892      /* !X || X is always true.  */
8893      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8894	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8895	return omit_one_operand (type, integer_one_node, arg1);
8896      /* X || !X is always true.  */
8897      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8898	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8899	return omit_one_operand (type, integer_one_node, arg0);
8900
8901      goto truth_andor;
8902
8903    case TRUTH_XOR_EXPR:
8904      /* If the second arg is constant zero, drop it.  */
8905      if (integer_zerop (arg1))
8906	return non_lvalue (fold_convert (type, arg0));
8907      /* If the second arg is constant true, this is a logical inversion.  */
8908      if (integer_onep (arg1))
8909	{
8910	  /* Only call invert_truthvalue if operand is a truth value.  */
8911	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8912	    tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8913	  else
8914	    tem = invert_truthvalue (arg0);
8915	  return non_lvalue (fold_convert (type, tem));
8916	}
8917      /* Identical arguments cancel to zero.  */
8918      if (operand_equal_p (arg0, arg1, 0))
8919	return omit_one_operand (type, integer_zero_node, arg0);
8920
8921      /* !X ^ X is always true.  */
8922      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8923	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8924	return omit_one_operand (type, integer_one_node, arg1);
8925
8926      /* X ^ !X is always true.  */
8927      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8928	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8929	return omit_one_operand (type, integer_one_node, arg0);
8930
8931      return NULL_TREE;
8932
8933    case EQ_EXPR:
8934    case NE_EXPR:
8935    case LT_EXPR:
8936    case GT_EXPR:
8937    case LE_EXPR:
8938    case GE_EXPR:
8939      /* If one arg is a real or integer constant, put it last.  */
8940      if (tree_swap_operands_p (arg0, arg1, true))
8941	return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8942
8943      /* bool_var != 0 becomes bool_var. */
8944      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8945          && code == NE_EXPR)
8946        return non_lvalue (fold_convert (type, arg0));
8947
8948      /* bool_var == 1 becomes bool_var. */
8949      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8950          && code == EQ_EXPR)
8951        return non_lvalue (fold_convert (type, arg0));
8952
8953      /* If this is an equality comparison of the address of a non-weak
8954	 object against zero, then we know the result.  */
8955      if ((code == EQ_EXPR || code == NE_EXPR)
8956	  && TREE_CODE (arg0) == ADDR_EXPR
8957	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8958	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8959	  && integer_zerop (arg1))
8960	return constant_boolean_node (code != EQ_EXPR, type);
8961
8962      /* If this is an equality comparison of the address of two non-weak,
8963	 unaliased symbols neither of which are extern (since we do not
8964	 have access to attributes for externs), then we know the result.  */
8965      if ((code == EQ_EXPR || code == NE_EXPR)
8966	  && TREE_CODE (arg0) == ADDR_EXPR
8967	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8968	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8969	  && ! lookup_attribute ("alias",
8970				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8971	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8972	  && TREE_CODE (arg1) == ADDR_EXPR
8973	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8974	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8975	  && ! lookup_attribute ("alias",
8976				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8977	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8978	{
8979	  /* We know that we're looking at the address of two
8980	     non-weak, unaliased, static _DECL nodes.
8981
8982	     It is both wasteful and incorrect to call operand_equal_p
8983	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
8984	     all we need to do is test pointer equality for the arguments
8985	     to the two ADDR_EXPR nodes.  It is incorrect to use
8986	     operand_equal_p as that function is NOT equivalent to a
8987	     C equality test.  It can in fact return false for two
8988	     objects which would test as equal using the C equality
8989	     operator.  */
8990	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8991	  return constant_boolean_node (equal
8992				        ? code == EQ_EXPR : code != EQ_EXPR,
8993				        type);
8994	}
8995
8996      /* If this is a comparison of two exprs that look like an
8997	 ARRAY_REF of the same object, then we can fold this to a
8998	 comparison of the two offsets.  This is only safe for
8999	 EQ_EXPR and NE_EXPR because of overflow issues.  */
9000      if (code == EQ_EXPR || code == NE_EXPR)
9001	{
9002	  tree base0, offset0, base1, offset1;
9003
9004	  if (extract_array_ref (arg0, &base0, &offset0)
9005	      && extract_array_ref (arg1, &base1, &offset1)
9006	      && operand_equal_p (base0, base1, 0))
9007	    {
9008	      /* Handle no offsets on both sides specially.  */
9009	      if (offset0 == NULL_TREE
9010		  && offset1 == NULL_TREE)
9011		return fold_build2 (code, type, integer_zero_node,
9012				    integer_zero_node);
9013
9014	      if (!offset0 || !offset1
9015		  || TREE_TYPE (offset0) == TREE_TYPE (offset1))
9016		{
9017		  if (offset0 == NULL_TREE)
9018		    offset0 = build_int_cst (TREE_TYPE (offset1), 0);
9019		  if (offset1 == NULL_TREE)
9020		    offset1 = build_int_cst (TREE_TYPE (offset0), 0);
9021		  return fold_build2 (code, type, offset0, offset1);
9022		}
9023	    }
9024	}
9025
9026      /* Transform comparisons of the form X +- C CMP X.  */
9027      if ((code != EQ_EXPR && code != NE_EXPR)
9028	  && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9029	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9030	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9031	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
9032	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9033	          && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9034		  && !(flag_wrapv || flag_trapv))))
9035	{
9036	  tree arg01 = TREE_OPERAND (arg0, 1);
9037	  enum tree_code code0 = TREE_CODE (arg0);
9038	  int is_positive;
9039
9040	  if (TREE_CODE (arg01) == REAL_CST)
9041	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
9042	  else
9043	    is_positive = tree_int_cst_sgn (arg01);
9044
9045	  /* (X - c) > X becomes false.  */
9046	  if (code == GT_EXPR
9047	      && ((code0 == MINUS_EXPR && is_positive >= 0)
9048		  || (code0 == PLUS_EXPR && is_positive <= 0)))
9049	    return constant_boolean_node (0, type);
9050
9051	  /* Likewise (X + c) < X becomes false.  */
9052	  if (code == LT_EXPR
9053	      && ((code0 == PLUS_EXPR && is_positive >= 0)
9054		  || (code0 == MINUS_EXPR && is_positive <= 0)))
9055	    return constant_boolean_node (0, type);
9056
9057	  /* Convert (X - c) <= X to true.  */
9058	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9059	      && code == LE_EXPR
9060	      && ((code0 == MINUS_EXPR && is_positive >= 0)
9061		  || (code0 == PLUS_EXPR && is_positive <= 0)))
9062	    return constant_boolean_node (1, type);
9063
9064	  /* Convert (X + c) >= X to true.  */
9065	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
9066	      && code == GE_EXPR
9067	      && ((code0 == PLUS_EXPR && is_positive >= 0)
9068		  || (code0 == MINUS_EXPR && is_positive <= 0)))
9069	    return constant_boolean_node (1, type);
9070
9071	  if (TREE_CODE (arg01) == INTEGER_CST)
9072	    {
9073	      /* Convert X + c > X and X - c < X to true for integers.  */
9074	      if (code == GT_EXPR
9075	          && ((code0 == PLUS_EXPR && is_positive > 0)
9076		      || (code0 == MINUS_EXPR && is_positive < 0)))
9077		return constant_boolean_node (1, type);
9078
9079	      if (code == LT_EXPR
9080	          && ((code0 == MINUS_EXPR && is_positive > 0)
9081		      || (code0 == PLUS_EXPR && is_positive < 0)))
9082		return constant_boolean_node (1, type);
9083
9084	      /* Convert X + c <= X and X - c >= X to false for integers.  */
9085	      if (code == LE_EXPR
9086	          && ((code0 == PLUS_EXPR && is_positive > 0)
9087		      || (code0 == MINUS_EXPR && is_positive < 0)))
9088		return constant_boolean_node (0, type);
9089
9090	      if (code == GE_EXPR
9091	          && ((code0 == MINUS_EXPR && is_positive > 0)
9092		      || (code0 == PLUS_EXPR && is_positive < 0)))
9093		return constant_boolean_node (0, type);
9094	    }
9095	}
9096
9097      /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
9098      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9099	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9100	      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9101	      && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9102	      && !(flag_wrapv || flag_trapv))
9103	  && (TREE_CODE (arg1) == INTEGER_CST
9104	      && !TREE_OVERFLOW (arg1)))
9105	{
9106	  tree const1 = TREE_OPERAND (arg0, 1);
9107	  tree const2 = arg1;
9108	  tree variable = TREE_OPERAND (arg0, 0);
9109	  tree lhs;
9110	  int lhs_add;
9111	  lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9112
9113	  lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9114			     TREE_TYPE (arg1), const2, const1);
9115	  if (TREE_CODE (lhs) == TREE_CODE (arg1)
9116	      && (TREE_CODE (lhs) != INTEGER_CST
9117	          || !TREE_OVERFLOW (lhs)))
9118	    return fold_build2 (code, type, variable, lhs);
9119	}
9120
9121      if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9122	{
9123	  tree targ0 = strip_float_extensions (arg0);
9124	  tree targ1 = strip_float_extensions (arg1);
9125	  tree newtype = TREE_TYPE (targ0);
9126
9127	  if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9128	    newtype = TREE_TYPE (targ1);
9129
9130	  /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9131	  if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9132	    return fold_build2 (code, type, fold_convert (newtype, targ0),
9133				fold_convert (newtype, targ1));
9134
9135	  /* (-a) CMP (-b) -> b CMP a  */
9136	  if (TREE_CODE (arg0) == NEGATE_EXPR
9137	      && TREE_CODE (arg1) == NEGATE_EXPR)
9138	    return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9139				TREE_OPERAND (arg0, 0));
9140
9141	  if (TREE_CODE (arg1) == REAL_CST)
9142	  {
9143	    REAL_VALUE_TYPE cst;
9144	    cst = TREE_REAL_CST (arg1);
9145
9146	    /* (-a) CMP CST -> a swap(CMP) (-CST)  */
9147	    if (TREE_CODE (arg0) == NEGATE_EXPR)
9148	      return
9149		fold_build2 (swap_tree_comparison (code), type,
9150			     TREE_OPERAND (arg0, 0),
9151			     build_real (TREE_TYPE (arg1),
9152					 REAL_VALUE_NEGATE (cst)));
9153
9154	    /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
9155	    /* a CMP (-0) -> a CMP 0  */
9156	    if (REAL_VALUE_MINUS_ZERO (cst))
9157	      return fold_build2 (code, type, arg0,
9158				  build_real (TREE_TYPE (arg1), dconst0));
9159
9160	    /* x != NaN is always true, other ops are always false.  */
9161	    if (REAL_VALUE_ISNAN (cst)
9162		&& ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9163	      {
9164		tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9165		return omit_one_operand (type, tem, arg0);
9166	      }
9167
9168	    /* Fold comparisons against infinity.  */
9169	    if (REAL_VALUE_ISINF (cst))
9170	      {
9171		tem = fold_inf_compare (code, type, arg0, arg1);
9172		if (tem != NULL_TREE)
9173		  return tem;
9174	      }
9175	  }
9176
9177	  /* If this is a comparison of a real constant with a PLUS_EXPR
9178	     or a MINUS_EXPR of a real constant, we can convert it into a
9179	     comparison with a revised real constant as long as no overflow
9180	     occurs when unsafe_math_optimizations are enabled.  */
9181	  if (flag_unsafe_math_optimizations
9182	      && TREE_CODE (arg1) == REAL_CST
9183	      && (TREE_CODE (arg0) == PLUS_EXPR
9184		  || TREE_CODE (arg0) == MINUS_EXPR)
9185	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9186	      && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9187					  ? MINUS_EXPR : PLUS_EXPR,
9188					  arg1, TREE_OPERAND (arg0, 1), 0))
9189	      && ! TREE_CONSTANT_OVERFLOW (tem))
9190	    return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9191
9192	  /* Likewise, we can simplify a comparison of a real constant with
9193	     a MINUS_EXPR whose first operand is also a real constant, i.e.
9194	     (c1 - x) < c2 becomes x > c1-c2.  */
9195	  if (flag_unsafe_math_optimizations
9196	      && TREE_CODE (arg1) == REAL_CST
9197	      && TREE_CODE (arg0) == MINUS_EXPR
9198	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9199	      && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9200					  arg1, 0))
9201	      && ! TREE_CONSTANT_OVERFLOW (tem))
9202	    return fold_build2 (swap_tree_comparison (code), type,
9203				TREE_OPERAND (arg0, 1), tem);
9204
9205	  /* Fold comparisons against built-in math functions.  */
9206	  if (TREE_CODE (arg1) == REAL_CST
9207	      && flag_unsafe_math_optimizations
9208	      && ! flag_errno_math)
9209	    {
9210	      enum built_in_function fcode = builtin_mathfn_code (arg0);
9211
9212	      if (fcode != END_BUILTINS)
9213		{
9214		  tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9215		  if (tem != NULL_TREE)
9216		    return tem;
9217		}
9218	    }
9219	}
9220
9221      /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
9222      if (TREE_CONSTANT (arg1)
9223	  && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9224	      || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9225	  /* This optimization is invalid for ordered comparisons
9226	     if CONST+INCR overflows or if foo+incr might overflow.
9227	     This optimization is invalid for floating point due to rounding.
9228	     For pointer types we assume overflow doesn't happen.  */
9229	  && (POINTER_TYPE_P (TREE_TYPE (arg0))
9230	      || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9231		  && (code == EQ_EXPR || code == NE_EXPR))))
9232	{
9233	  tree varop, newconst;
9234
9235	  if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9236	    {
9237	      newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9238				      arg1, TREE_OPERAND (arg0, 1));
9239	      varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9240			      TREE_OPERAND (arg0, 0),
9241			      TREE_OPERAND (arg0, 1));
9242	    }
9243	  else
9244	    {
9245	      newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9246				      arg1, TREE_OPERAND (arg0, 1));
9247	      varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9248			      TREE_OPERAND (arg0, 0),
9249			      TREE_OPERAND (arg0, 1));
9250	    }
9251
9252
9253	  /* If VAROP is a reference to a bitfield, we must mask
9254	     the constant by the width of the field.  */
9255	  if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9256	      && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9257	      && host_integerp (DECL_SIZE (TREE_OPERAND
9258					   (TREE_OPERAND (varop, 0), 1)), 1))
9259	    {
9260	      tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9261	      HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9262	      tree folded_compare, shift;
9263
9264	      /* First check whether the comparison would come out
9265		 always the same.  If we don't do that we would
9266		 change the meaning with the masking.  */
9267	      folded_compare = fold_build2 (code, type,
9268					    TREE_OPERAND (varop, 0), arg1);
9269	      if (integer_zerop (folded_compare)
9270		  || integer_onep (folded_compare))
9271		return omit_one_operand (type, folded_compare, varop);
9272
9273	      shift = build_int_cst (NULL_TREE,
9274				     TYPE_PRECISION (TREE_TYPE (varop)) - size);
9275	      shift = fold_convert (TREE_TYPE (varop), shift);
9276	      newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9277				      newconst, shift);
9278	      newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9279				      newconst, shift);
9280	    }
9281
9282	  return fold_build2 (code, type, varop, newconst);
9283	}
9284
9285      /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9286	 This transformation affects the cases which are handled in later
9287	 optimizations involving comparisons with non-negative constants.  */
9288      if (TREE_CODE (arg1) == INTEGER_CST
9289	  && TREE_CODE (arg0) != INTEGER_CST
9290	  && tree_int_cst_sgn (arg1) > 0)
9291	{
9292	  switch (code)
9293	    {
9294	    case GE_EXPR:
9295	      arg1 = const_binop (MINUS_EXPR, arg1,
9296			          build_int_cst (TREE_TYPE (arg1), 1), 0);
9297	      return fold_build2 (GT_EXPR, type, arg0,
9298				  fold_convert (TREE_TYPE (arg0), arg1));
9299
9300	    case LT_EXPR:
9301	      arg1 = const_binop (MINUS_EXPR, arg1,
9302			          build_int_cst (TREE_TYPE (arg1), 1), 0);
9303	      return fold_build2 (LE_EXPR, type, arg0,
9304				  fold_convert (TREE_TYPE (arg0), arg1));
9305
9306	    default:
9307	      break;
9308	    }
9309	}
9310
9311      /* Comparisons with the highest or lowest possible integer of
9312	 the specified size will have known values.  */
9313      {
9314	int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9315
9316	if (TREE_CODE (arg1) == INTEGER_CST
9317	    && ! TREE_CONSTANT_OVERFLOW (arg1)
9318	    && width <= 2 * HOST_BITS_PER_WIDE_INT
9319	    && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9320		|| POINTER_TYPE_P (TREE_TYPE (arg1))))
9321	  {
9322	    HOST_WIDE_INT signed_max_hi;
9323	    unsigned HOST_WIDE_INT signed_max_lo;
9324	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9325
9326	    if (width <= HOST_BITS_PER_WIDE_INT)
9327	      {
9328		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9329				- 1;
9330		signed_max_hi = 0;
9331		max_hi = 0;
9332
9333		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9334		  {
9335		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9336		    min_lo = 0;
9337		    min_hi = 0;
9338		  }
9339		else
9340		  {
9341		    max_lo = signed_max_lo;
9342		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9343		    min_hi = -1;
9344		  }
9345	      }
9346	    else
9347	      {
9348		width -= HOST_BITS_PER_WIDE_INT;
9349		signed_max_lo = -1;
9350		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9351				- 1;
9352		max_lo = -1;
9353		min_lo = 0;
9354
9355		if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9356		  {
9357		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9358		    min_hi = 0;
9359		  }
9360		else
9361		  {
9362		    max_hi = signed_max_hi;
9363		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9364		  }
9365	      }
9366
9367	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9368		&& TREE_INT_CST_LOW (arg1) == max_lo)
9369	      switch (code)
9370		{
9371		case GT_EXPR:
9372		  return omit_one_operand (type, integer_zero_node, arg0);
9373
9374		case GE_EXPR:
9375		  return fold_build2 (EQ_EXPR, type, op0, op1);
9376
9377		case LE_EXPR:
9378		  return omit_one_operand (type, integer_one_node, arg0);
9379
9380		case LT_EXPR:
9381		  return fold_build2 (NE_EXPR, type, op0, op1);
9382
9383		/* The GE_EXPR and LT_EXPR cases above are not normally
9384		   reached because of previous transformations.  */
9385
9386		default:
9387		  break;
9388		}
9389	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9390		     == max_hi
9391		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9392	      switch (code)
9393		{
9394		case GT_EXPR:
9395		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9396		  return fold_build2 (EQ_EXPR, type,
9397				      fold_convert (TREE_TYPE (arg1), arg0),
9398				      arg1);
9399		case LE_EXPR:
9400		  arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9401		  return fold_build2 (NE_EXPR, type,
9402				      fold_convert (TREE_TYPE (arg1), arg0),
9403				      arg1);
9404		default:
9405		  break;
9406		}
9407	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9408		     == min_hi
9409		     && TREE_INT_CST_LOW (arg1) == min_lo)
9410	      switch (code)
9411		{
9412		case LT_EXPR:
9413		  return omit_one_operand (type, integer_zero_node, arg0);
9414
9415		case LE_EXPR:
9416		  return fold_build2 (EQ_EXPR, type, op0, op1);
9417
9418		case GE_EXPR:
9419		  return omit_one_operand (type, integer_one_node, arg0);
9420
9421		case GT_EXPR:
9422		  return fold_build2 (NE_EXPR, type, op0, op1);
9423
9424		default:
9425		  break;
9426		}
9427	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9428		     == min_hi
9429		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9430	      switch (code)
9431		{
9432		case GE_EXPR:
9433		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9434		  return fold_build2 (NE_EXPR, type,
9435				      fold_convert (TREE_TYPE (arg1), arg0),
9436				      arg1);
9437		case LT_EXPR:
9438		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9439		  return fold_build2 (EQ_EXPR, type,
9440				      fold_convert (TREE_TYPE (arg1), arg0),
9441				      arg1);
9442		default:
9443		  break;
9444		}
9445
9446	    else if (!in_gimple_form
9447		     && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9448		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
9449		     && TYPE_UNSIGNED (TREE_TYPE (arg1))
9450		     /* signed_type does not work on pointer types.  */
9451		     && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9452	      {
9453		/* The following case also applies to X < signed_max+1
9454		   and X >= signed_max+1 because previous transformations.  */
9455		if (code == LE_EXPR || code == GT_EXPR)
9456		  {
9457		    tree st;
9458		    st = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9459		    return fold_build2 (code == LE_EXPR ? GE_EXPR : LT_EXPR,
9460					type, fold_convert (st, arg0),
9461					build_int_cst (st, 0));
9462		  }
9463	      }
9464	  }
9465      }
9466
9467      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9468	 a MINUS_EXPR of a constant, we can convert it into a comparison with
9469	 a revised constant as long as no overflow occurs.  */
9470      if ((code == EQ_EXPR || code == NE_EXPR)
9471	  && TREE_CODE (arg1) == INTEGER_CST
9472	  && (TREE_CODE (arg0) == PLUS_EXPR
9473	      || TREE_CODE (arg0) == MINUS_EXPR)
9474	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9475	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9476				      ? MINUS_EXPR : PLUS_EXPR,
9477				      fold_convert (TREE_TYPE (arg0), arg1),
9478				      TREE_OPERAND (arg0, 1), 0))
9479	  && ! TREE_CONSTANT_OVERFLOW (tem))
9480	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9481
9482      /* Similarly for a NEGATE_EXPR.  */
9483      else if ((code == EQ_EXPR || code == NE_EXPR)
9484	       && TREE_CODE (arg0) == NEGATE_EXPR
9485	       && TREE_CODE (arg1) == INTEGER_CST
9486	       && 0 != (tem = negate_expr (arg1))
9487	       && TREE_CODE (tem) == INTEGER_CST
9488	       && ! TREE_CONSTANT_OVERFLOW (tem))
9489	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9490
9491      /* If we have X - Y == 0, we can convert that to X == Y and similarly
9492	 for !=.  Don't do this for ordered comparisons due to overflow.  */
9493      else if ((code == NE_EXPR || code == EQ_EXPR)
9494	       && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9495	return fold_build2 (code, type,
9496			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9497
9498      else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9499	       && (TREE_CODE (arg0) == NOP_EXPR
9500		   || TREE_CODE (arg0) == CONVERT_EXPR))
9501	{
9502	  /* If we are widening one operand of an integer comparison,
9503	     see if the other operand is similarly being widened.  Perhaps we
9504	     can do the comparison in the narrower type.  */
9505	  tem = fold_widened_comparison (code, type, arg0, arg1);
9506	  if (tem)
9507	    return tem;
9508
9509	  /* Or if we are changing signedness.  */
9510	  tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9511	  if (tem)
9512	    return tem;
9513	}
9514
9515      /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9516	 constant, we can simplify it.  */
9517      else if (TREE_CODE (arg1) == INTEGER_CST
9518	       && (TREE_CODE (arg0) == MIN_EXPR
9519		   || TREE_CODE (arg0) == MAX_EXPR)
9520	       && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9521	{
9522	  tem = optimize_minmax_comparison (code, type, op0, op1);
9523	  if (tem)
9524	    return tem;
9525
9526	  return NULL_TREE;
9527	}
9528
9529      /* If we are comparing an ABS_EXPR with a constant, we can
9530	 convert all the cases into explicit comparisons, but they may
9531	 well not be faster than doing the ABS and one comparison.
9532	 But ABS (X) <= C is a range comparison, which becomes a subtraction
9533	 and a comparison, and is probably faster.  */
9534      else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9535	       && TREE_CODE (arg0) == ABS_EXPR
9536	       && ! TREE_SIDE_EFFECTS (arg0)
9537	       && (0 != (tem = negate_expr (arg1)))
9538	       && TREE_CODE (tem) == INTEGER_CST
9539	       && ! TREE_CONSTANT_OVERFLOW (tem))
9540	return fold_build2 (TRUTH_ANDIF_EXPR, type,
9541			    build2 (GE_EXPR, type,
9542				    TREE_OPERAND (arg0, 0), tem),
9543			    build2 (LE_EXPR, type,
9544				    TREE_OPERAND (arg0, 0), arg1));
9545
9546      /* Convert ABS_EXPR<x> >= 0 to true.  */
9547      else if (code == GE_EXPR
9548	       && tree_expr_nonnegative_p (arg0)
9549	       && (integer_zerop (arg1)
9550		   || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9551                       && real_zerop (arg1))))
9552	return omit_one_operand (type, integer_one_node, arg0);
9553
9554      /* Convert ABS_EXPR<x> < 0 to false.  */
9555      else if (code == LT_EXPR
9556	       && tree_expr_nonnegative_p (arg0)
9557	       && (integer_zerop (arg1) || real_zerop (arg1)))
9558	return omit_one_operand (type, integer_zero_node, arg0);
9559
9560      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
9561      else if ((code == EQ_EXPR || code == NE_EXPR)
9562	       && TREE_CODE (arg0) == ABS_EXPR
9563	       && (integer_zerop (arg1) || real_zerop (arg1)))
9564	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9565
9566      /* If this is an EQ or NE comparison with zero and ARG0 is
9567	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
9568	 two operations, but the latter can be done in one less insn
9569	 on machines that have only two-operand insns or on which a
9570	 constant cannot be the first operand.  */
9571      if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9572	  && TREE_CODE (arg0) == BIT_AND_EXPR)
9573	{
9574	  tree arg00 = TREE_OPERAND (arg0, 0);
9575	  tree arg01 = TREE_OPERAND (arg0, 1);
9576	  if (TREE_CODE (arg00) == LSHIFT_EXPR
9577	      && integer_onep (TREE_OPERAND (arg00, 0)))
9578	    {
9579	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9580				      arg01, TREE_OPERAND (arg00, 1));
9581	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
9582				 build_int_cst (TREE_TYPE (arg0), 1));
9583	      return fold_build2 (code, type,
9584				  fold_convert (TREE_TYPE (arg1), tem), arg1);
9585	    }
9586	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
9587		   && integer_onep (TREE_OPERAND (arg01, 0)))
9588	    {
9589	      tree tem = fold_build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9590				      arg00, TREE_OPERAND (arg01, 1));
9591	      tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0), tem,
9592				 build_int_cst (TREE_TYPE (arg0), 1));
9593	      return fold_build2 (code, type,
9594				  fold_convert (TREE_TYPE (arg1), tem), arg1);
9595	    }
9596	}
9597
9598      /* If this is an NE or EQ comparison of zero against the result of a
9599	 signed MOD operation whose second operand is a power of 2, make
9600	 the MOD operation unsigned since it is simpler and equivalent.  */
9601      if ((code == NE_EXPR || code == EQ_EXPR)
9602	  && integer_zerop (arg1)
9603	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9604	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9605	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
9606	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9607	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9608	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
9609	{
9610	  tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9611	  tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9612				     fold_convert (newtype,
9613						   TREE_OPERAND (arg0, 0)),
9614				     fold_convert (newtype,
9615						   TREE_OPERAND (arg0, 1)));
9616
9617	  return fold_build2 (code, type, newmod,
9618			      fold_convert (newtype, arg1));
9619	}
9620
9621      /* If this is an NE comparison of zero with an AND of one, remove the
9622	 comparison since the AND will give the correct value.  */
9623      if (code == NE_EXPR && integer_zerop (arg1)
9624	  && TREE_CODE (arg0) == BIT_AND_EXPR
9625	  && integer_onep (TREE_OPERAND (arg0, 1)))
9626	return fold_convert (type, arg0);
9627
9628      /* If we have (A & C) == C where C is a power of 2, convert this into
9629	 (A & C) != 0.  Similarly for NE_EXPR.  */
9630      if ((code == EQ_EXPR || code == NE_EXPR)
9631	  && TREE_CODE (arg0) == BIT_AND_EXPR
9632	  && integer_pow2p (TREE_OPERAND (arg0, 1))
9633	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9634	return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9635			    arg0, fold_convert (TREE_TYPE (arg0),
9636						integer_zero_node));
9637
9638      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9639	 bit, then fold the expression into A < 0 or A >= 0.  */
9640      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9641      if (tem)
9642	return tem;
9643
9644      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9645	 Similarly for NE_EXPR.  */
9646      if ((code == EQ_EXPR || code == NE_EXPR)
9647	  && TREE_CODE (arg0) == BIT_AND_EXPR
9648	  && TREE_CODE (arg1) == INTEGER_CST
9649	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9650	{
9651	  tree notc = fold_build1 (BIT_NOT_EXPR,
9652				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
9653				   TREE_OPERAND (arg0, 1));
9654	  tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9655				       arg1, notc);
9656	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9657	  if (integer_nonzerop (dandnotc))
9658	    return omit_one_operand (type, rslt, arg0);
9659	}
9660
9661      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9662	 Similarly for NE_EXPR.  */
9663      if ((code == EQ_EXPR || code == NE_EXPR)
9664	  && TREE_CODE (arg0) == BIT_IOR_EXPR
9665	  && TREE_CODE (arg1) == INTEGER_CST
9666	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9667	{
9668	  tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9669	  tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9670				       TREE_OPERAND (arg0, 1), notd);
9671	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9672	  if (integer_nonzerop (candnotd))
9673	    return omit_one_operand (type, rslt, arg0);
9674	}
9675
9676      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9677	 and similarly for >= into !=.  */
9678      if ((code == LT_EXPR || code == GE_EXPR)
9679	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
9680	  && TREE_CODE (arg1) == LSHIFT_EXPR
9681	  && integer_onep (TREE_OPERAND (arg1, 0)))
9682	return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9683		       build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9684			       TREE_OPERAND (arg1, 1)),
9685		       fold_convert (TREE_TYPE (arg0), integer_zero_node));
9686
9687      else if ((code == LT_EXPR || code == GE_EXPR)
9688	       && TYPE_UNSIGNED (TREE_TYPE (arg0))
9689	       && (TREE_CODE (arg1) == NOP_EXPR
9690		   || TREE_CODE (arg1) == CONVERT_EXPR)
9691	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9692	       && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9693	return
9694	  build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9695		  fold_convert (TREE_TYPE (arg0),
9696				build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9697					TREE_OPERAND (TREE_OPERAND (arg1, 0),
9698						      1))),
9699		  fold_convert (TREE_TYPE (arg0), integer_zero_node));
9700
9701      /* Simplify comparison of something with itself.  (For IEEE
9702	 floating-point, we can only do some of these simplifications.)  */
9703      if (operand_equal_p (arg0, arg1, 0))
9704	{
9705	  switch (code)
9706	    {
9707	    case EQ_EXPR:
9708	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9709		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9710		return constant_boolean_node (1, type);
9711	      break;
9712
9713	    case GE_EXPR:
9714	    case LE_EXPR:
9715	      if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9716		  || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9717		return constant_boolean_node (1, type);
9718	      return fold_build2 (EQ_EXPR, type, arg0, arg1);
9719
9720	    case NE_EXPR:
9721	      /* For NE, we can only do this simplification if integer
9722		 or we don't honor IEEE floating point NaNs.  */
9723	      if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9724		  && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9725		break;
9726	      /* ... fall through ...  */
9727	    case GT_EXPR:
9728	    case LT_EXPR:
9729	      return constant_boolean_node (0, type);
9730	    default:
9731	      gcc_unreachable ();
9732	    }
9733	}
9734
9735      /* If we are comparing an expression that just has comparisons
9736	 of two integer values, arithmetic expressions of those comparisons,
9737	 and constants, we can simplify it.  There are only three cases
9738	 to check: the two values can either be equal, the first can be
9739	 greater, or the second can be greater.  Fold the expression for
9740	 those three values.  Since each value must be 0 or 1, we have
9741	 eight possibilities, each of which corresponds to the constant 0
9742	 or 1 or one of the six possible comparisons.
9743
9744	 This handles common cases like (a > b) == 0 but also handles
9745	 expressions like  ((x > y) - (y > x)) > 0, which supposedly
9746	 occur in macroized code.  */
9747
9748      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9749	{
9750	  tree cval1 = 0, cval2 = 0;
9751	  int save_p = 0;
9752
9753	  if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9754	      /* Don't handle degenerate cases here; they should already
9755		 have been handled anyway.  */
9756	      && cval1 != 0 && cval2 != 0
9757	      && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9758	      && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9759	      && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9760	      && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9761	      && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9762	      && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9763				    TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9764	    {
9765	      tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9766	      tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9767
9768	      /* We can't just pass T to eval_subst in case cval1 or cval2
9769		 was the same as ARG1.  */
9770
9771	      tree high_result
9772		= fold_build2 (code, type,
9773			       eval_subst (arg0, cval1, maxval,
9774					   cval2, minval),
9775			       arg1);
9776	      tree equal_result
9777		= fold_build2 (code, type,
9778			       eval_subst (arg0, cval1, maxval,
9779					   cval2, maxval),
9780			       arg1);
9781	      tree low_result
9782		= fold_build2 (code, type,
9783			       eval_subst (arg0, cval1, minval,
9784					   cval2, maxval),
9785			       arg1);
9786
9787	      /* All three of these results should be 0 or 1.  Confirm they
9788		 are.  Then use those values to select the proper code
9789		 to use.  */
9790
9791	      if ((integer_zerop (high_result)
9792		   || integer_onep (high_result))
9793		  && (integer_zerop (equal_result)
9794		      || integer_onep (equal_result))
9795		  && (integer_zerop (low_result)
9796		      || integer_onep (low_result)))
9797		{
9798		  /* Make a 3-bit mask with the high-order bit being the
9799		     value for `>', the next for '=', and the low for '<'.  */
9800		  switch ((integer_onep (high_result) * 4)
9801			  + (integer_onep (equal_result) * 2)
9802			  + integer_onep (low_result))
9803		    {
9804		    case 0:
9805		      /* Always false.  */
9806		      return omit_one_operand (type, integer_zero_node, arg0);
9807		    case 1:
9808		      code = LT_EXPR;
9809		      break;
9810		    case 2:
9811		      code = EQ_EXPR;
9812		      break;
9813		    case 3:
9814		      code = LE_EXPR;
9815		      break;
9816		    case 4:
9817		      code = GT_EXPR;
9818		      break;
9819		    case 5:
9820		      code = NE_EXPR;
9821		      break;
9822		    case 6:
9823		      code = GE_EXPR;
9824		      break;
9825		    case 7:
9826		      /* Always true.  */
9827		      return omit_one_operand (type, integer_one_node, arg0);
9828		    }
9829
9830		  if (save_p)
9831		    return save_expr (build2 (code, type, cval1, cval2));
9832		  else
9833		    return fold_build2 (code, type, cval1, cval2);
9834		}
9835	    }
9836	}
9837
9838      /* If this is a comparison of a field, we may be able to simplify it.  */
9839      if (((TREE_CODE (arg0) == COMPONENT_REF
9840	    && lang_hooks.can_use_bit_fields_p ())
9841	   || TREE_CODE (arg0) == BIT_FIELD_REF)
9842	  && (code == EQ_EXPR || code == NE_EXPR)
9843	  /* Handle the constant case even without -O
9844	     to make sure the warnings are given.  */
9845	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9846	{
9847	  t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9848	  if (t1)
9849	    return t1;
9850	}
9851
9852      /* Fold a comparison of the address of COMPONENT_REFs with the same
9853         type and component to a comparison of the address of the base
9854	 object.  In short, &x->a OP &y->a to x OP y and
9855         &x->a OP &y.a to x OP &y  */
9856      if (TREE_CODE (arg0) == ADDR_EXPR
9857	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9858	  && TREE_CODE (arg1) == ADDR_EXPR
9859	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9860        {
9861	  tree cref0 = TREE_OPERAND (arg0, 0);
9862	  tree cref1 = TREE_OPERAND (arg1, 0);
9863	  if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9864	    {
9865	      tree op0 = TREE_OPERAND (cref0, 0);
9866	      tree op1 = TREE_OPERAND (cref1, 0);
9867	      return fold_build2 (code, type,
9868			          build_fold_addr_expr (op0),
9869				  build_fold_addr_expr (op1));
9870	    }
9871	}
9872
9873      /* Optimize comparisons of strlen vs zero to a compare of the
9874	 first character of the string vs zero.  To wit,
9875		strlen(ptr) == 0   =>  *ptr == 0
9876		strlen(ptr) != 0   =>  *ptr != 0
9877	 Other cases should reduce to one of these two (or a constant)
9878	 due to the return value of strlen being unsigned.  */
9879      if ((code == EQ_EXPR || code == NE_EXPR)
9880	  && integer_zerop (arg1)
9881	  && TREE_CODE (arg0) == CALL_EXPR)
9882	{
9883	  tree fndecl = get_callee_fndecl (arg0);
9884	  tree arglist;
9885
9886	  if (fndecl
9887	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9888	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9889	      && (arglist = TREE_OPERAND (arg0, 1))
9890	      && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9891	      && ! TREE_CHAIN (arglist))
9892	    {
9893	      tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9894	      return fold_build2 (code, type, iref,
9895				  build_int_cst (TREE_TYPE (iref), 0));
9896	    }
9897	}
9898
9899      /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9900	 into a single range test.  */
9901      if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9902	   || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9903	  && TREE_CODE (arg1) == INTEGER_CST
9904	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9905	  && !integer_zerop (TREE_OPERAND (arg0, 1))
9906	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9907	  && !TREE_OVERFLOW (arg1))
9908	{
9909	  t1 = fold_div_compare (code, type, arg0, arg1);
9910	  if (t1 != NULL_TREE)
9911	    return t1;
9912	}
9913
9914      if ((code == EQ_EXPR || code == NE_EXPR)
9915	  && integer_zerop (arg1)
9916	  && tree_expr_nonzero_p (arg0))
9917        {
9918	  tree res = constant_boolean_node (code==NE_EXPR, type);
9919	  return omit_one_operand (type, res, arg0);
9920	}
9921
9922      t1 = fold_relational_const (code, type, arg0, arg1);
9923      return t1 == NULL_TREE ? NULL_TREE : t1;
9924
9925    case UNORDERED_EXPR:
9926    case ORDERED_EXPR:
9927    case UNLT_EXPR:
9928    case UNLE_EXPR:
9929    case UNGT_EXPR:
9930    case UNGE_EXPR:
9931    case UNEQ_EXPR:
9932    case LTGT_EXPR:
9933      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9934	{
9935	  t1 = fold_relational_const (code, type, arg0, arg1);
9936	  if (t1 != NULL_TREE)
9937	    return t1;
9938	}
9939
9940      /* If the first operand is NaN, the result is constant.  */
9941      if (TREE_CODE (arg0) == REAL_CST
9942	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9943	  && (code != LTGT_EXPR || ! flag_trapping_math))
9944	{
9945	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9946	       ? integer_zero_node
9947	       : integer_one_node;
9948	  return omit_one_operand (type, t1, arg1);
9949	}
9950
9951      /* If the second operand is NaN, the result is constant.  */
9952      if (TREE_CODE (arg1) == REAL_CST
9953	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9954	  && (code != LTGT_EXPR || ! flag_trapping_math))
9955	{
9956	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9957	       ? integer_zero_node
9958	       : integer_one_node;
9959	  return omit_one_operand (type, t1, arg0);
9960	}
9961
9962      /* Simplify unordered comparison of something with itself.  */
9963      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9964	  && operand_equal_p (arg0, arg1, 0))
9965	return constant_boolean_node (1, type);
9966
9967      if (code == LTGT_EXPR
9968	  && !flag_trapping_math
9969	  && operand_equal_p (arg0, arg1, 0))
9970	return constant_boolean_node (0, type);
9971
9972      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
9973      {
9974	tree targ0 = strip_float_extensions (arg0);
9975	tree targ1 = strip_float_extensions (arg1);
9976	tree newtype = TREE_TYPE (targ0);
9977
9978	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9979	  newtype = TREE_TYPE (targ1);
9980
9981	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9982	  return fold_build2 (code, type, fold_convert (newtype, targ0),
9983			      fold_convert (newtype, targ1));
9984      }
9985
9986      return NULL_TREE;
9987
9988    case COMPOUND_EXPR:
9989      /* When pedantic, a compound expression can be neither an lvalue
9990	 nor an integer constant expression.  */
9991      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9992	return NULL_TREE;
9993      /* Don't let (0, 0) be null pointer constant.  */
9994      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9995				 : fold_convert (type, arg1);
9996      return pedantic_non_lvalue (tem);
9997
9998    case COMPLEX_EXPR:
9999      if (wins)
10000	return build_complex (type, arg0, arg1);
10001      return NULL_TREE;
10002
10003    case ASSERT_EXPR:
10004      /* An ASSERT_EXPR should never be passed to fold_binary.  */
10005      gcc_unreachable ();
10006
10007    default:
10008      return NULL_TREE;
10009    } /* switch (code) */
10010}
10011
10012/* Callback for walk_tree, looking for LABEL_EXPR.
10013   Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
10014   Do not check the sub-tree of GOTO_EXPR.  */
10015
10016static tree
10017contains_label_1 (tree *tp,
10018                  int *walk_subtrees,
10019                  void *data ATTRIBUTE_UNUSED)
10020{
10021  switch (TREE_CODE (*tp))
10022    {
10023    case LABEL_EXPR:
10024      return *tp;
10025    case GOTO_EXPR:
10026      *walk_subtrees = 0;
10027    /* no break */
10028    default:
10029      return NULL_TREE;
10030    }
10031}
10032
10033/* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
10034   accessible from outside the sub-tree. Returns NULL_TREE if no
10035   addressable label is found.  */
10036
10037static bool
10038contains_label_p (tree st)
10039{
10040  return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
10041}
10042
10043/* Fold a ternary expression of code CODE and type TYPE with operands
10044   OP0, OP1, and OP2.  Return the folded expression if folding is
10045   successful.  Otherwise, return NULL_TREE.  */
10046
10047tree
10048fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
10049{
10050  tree tem;
10051  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
10052  enum tree_code_class kind = TREE_CODE_CLASS (code);
10053
10054  gcc_assert (IS_EXPR_CODE_CLASS (kind)
10055	      && TREE_CODE_LENGTH (code) == 3);
10056
10057  /* Strip any conversions that don't change the mode.  This is safe
10058     for every expression, except for a comparison expression because
10059     its signedness is derived from its operands.  So, in the latter
10060     case, only strip conversions that don't change the signedness.
10061
10062     Note that this is done as an internal manipulation within the
10063     constant folder, in order to find the simplest representation of
10064     the arguments so that their form can be studied.  In any cases,
10065     the appropriate type conversions should be put back in the tree
10066     that will get out of the constant folder.  */
10067  if (op0)
10068    {
10069      arg0 = op0;
10070      STRIP_NOPS (arg0);
10071    }
10072
10073  if (op1)
10074    {
10075      arg1 = op1;
10076      STRIP_NOPS (arg1);
10077    }
10078
10079  switch (code)
10080    {
10081    case COMPONENT_REF:
10082      if (TREE_CODE (arg0) == CONSTRUCTOR
10083	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10084	{
10085	  unsigned HOST_WIDE_INT idx;
10086	  tree field, value;
10087	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10088	    if (field == arg1)
10089	      return value;
10090	}
10091      return NULL_TREE;
10092
10093    case COND_EXPR:
10094      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10095	 so all simple results must be passed through pedantic_non_lvalue.  */
10096      if (TREE_CODE (arg0) == INTEGER_CST)
10097	{
10098	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
10099	  tem = integer_zerop (arg0) ? op2 : op1;
10100	  /* Only optimize constant conditions when the selected branch
10101	     has the same type as the COND_EXPR.  This avoids optimizing
10102             away "c ? x : throw", where the throw has a void type.
10103             Avoid throwing away that operand which contains label.  */
10104          if ((!TREE_SIDE_EFFECTS (unused_op)
10105               || !contains_label_p (unused_op))
10106              && (! VOID_TYPE_P (TREE_TYPE (tem))
10107                  || VOID_TYPE_P (type)))
10108	    return pedantic_non_lvalue (tem);
10109	  return NULL_TREE;
10110	}
10111      if (operand_equal_p (arg1, op2, 0))
10112	return pedantic_omit_one_operand (type, arg1, arg0);
10113
10114      /* If we have A op B ? A : C, we may be able to convert this to a
10115	 simpler expression, depending on the operation and the values
10116	 of B and C.  Signed zeros prevent all of these transformations,
10117	 for reasons given above each one.
10118
10119         Also try swapping the arguments and inverting the conditional.  */
10120      if (COMPARISON_CLASS_P (arg0)
10121	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10122					     arg1, TREE_OPERAND (arg0, 1))
10123	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10124	{
10125	  tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10126	  if (tem)
10127	    return tem;
10128	}
10129
10130      if (COMPARISON_CLASS_P (arg0)
10131	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10132					     op2,
10133					     TREE_OPERAND (arg0, 1))
10134	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10135	{
10136	  tem = invert_truthvalue (arg0);
10137	  if (COMPARISON_CLASS_P (tem))
10138	    {
10139	      tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10140	      if (tem)
10141		return tem;
10142	    }
10143	}
10144
10145      /* If the second operand is simpler than the third, swap them
10146	 since that produces better jump optimization results.  */
10147      if (truth_value_p (TREE_CODE (arg0))
10148	  && tree_swap_operands_p (op1, op2, false))
10149	{
10150	  /* See if this can be inverted.  If it can't, possibly because
10151	     it was a floating-point inequality comparison, don't do
10152	     anything.  */
10153	  tem = invert_truthvalue (arg0);
10154
10155	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10156	    return fold_build3 (code, type, tem, op2, op1);
10157	}
10158
10159      /* Convert A ? 1 : 0 to simply A.  */
10160      if (integer_onep (op1)
10161	  && integer_zerop (op2)
10162	  /* If we try to convert OP0 to our type, the
10163	     call to fold will try to move the conversion inside
10164	     a COND, which will recurse.  In that case, the COND_EXPR
10165	     is probably the best choice, so leave it alone.  */
10166	  && type == TREE_TYPE (arg0))
10167	return pedantic_non_lvalue (arg0);
10168
10169      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
10170	 over COND_EXPR in cases such as floating point comparisons.  */
10171      if (integer_zerop (op1)
10172	  && integer_onep (op2)
10173	  && truth_value_p (TREE_CODE (arg0)))
10174	return pedantic_non_lvalue (fold_convert (type,
10175						  invert_truthvalue (arg0)));
10176
10177      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
10178      if (TREE_CODE (arg0) == LT_EXPR
10179	  && integer_zerop (TREE_OPERAND (arg0, 1))
10180	  && integer_zerop (op2)
10181	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10182	{
10183	  /* sign_bit_p only checks ARG1 bits within A's precision.
10184	     If <sign bit of A> has wider type than A, bits outside
10185	     of A's precision in <sign bit of A> need to be checked.
10186	     If they are all 0, this optimization needs to be done
10187	     in unsigned A's type, if they are all 1 in signed A's type,
10188	     otherwise this can't be done.  */
10189	  if (TYPE_PRECISION (TREE_TYPE (tem))
10190	      < TYPE_PRECISION (TREE_TYPE (arg1))
10191	      && TYPE_PRECISION (TREE_TYPE (tem))
10192		 < TYPE_PRECISION (type))
10193	    {
10194	      unsigned HOST_WIDE_INT mask_lo;
10195	      HOST_WIDE_INT mask_hi;
10196	      int inner_width, outer_width;
10197	      tree tem_type;
10198
10199	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
10200	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
10201	      if (outer_width > TYPE_PRECISION (type))
10202		outer_width = TYPE_PRECISION (type);
10203
10204	      if (outer_width > HOST_BITS_PER_WIDE_INT)
10205		{
10206		  mask_hi = ((unsigned HOST_WIDE_INT) -1
10207			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
10208		  mask_lo = -1;
10209		}
10210	      else
10211		{
10212		  mask_hi = 0;
10213		  mask_lo = ((unsigned HOST_WIDE_INT) -1
10214			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
10215		}
10216	      if (inner_width > HOST_BITS_PER_WIDE_INT)
10217		{
10218		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
10219			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
10220		  mask_lo = 0;
10221		}
10222	      else
10223		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
10224			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
10225
10226	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
10227		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
10228		{
10229		  tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
10230		  tem = fold_convert (tem_type, tem);
10231		}
10232	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
10233		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
10234		{
10235		  tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
10236		  tem = fold_convert (tem_type, tem);
10237		}
10238	      else
10239		tem = NULL;
10240	    }
10241
10242	  if (tem)
10243	    return fold_convert (type,
10244				 fold_build2 (BIT_AND_EXPR,
10245					      TREE_TYPE (tem), tem,
10246					      fold_convert (TREE_TYPE (tem),
10247							    arg1)));
10248	}
10249
10250      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
10251	 already handled above.  */
10252      if (TREE_CODE (arg0) == BIT_AND_EXPR
10253	  && integer_onep (TREE_OPERAND (arg0, 1))
10254	  && integer_zerop (op2)
10255	  && integer_pow2p (arg1))
10256	{
10257	  tree tem = TREE_OPERAND (arg0, 0);
10258	  STRIP_NOPS (tem);
10259	  if (TREE_CODE (tem) == RSHIFT_EXPR
10260              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10261              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10262	         TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10263	    return fold_build2 (BIT_AND_EXPR, type,
10264				TREE_OPERAND (tem, 0), arg1);
10265	}
10266
10267      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
10268	 is probably obsolete because the first operand should be a
10269	 truth value (that's why we have the two cases above), but let's
10270	 leave it in until we can confirm this for all front-ends.  */
10271      if (integer_zerop (op2)
10272	  && TREE_CODE (arg0) == NE_EXPR
10273	  && integer_zerop (TREE_OPERAND (arg0, 1))
10274	  && integer_pow2p (arg1)
10275	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10276	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10277			      arg1, OEP_ONLY_CONST))
10278	return pedantic_non_lvalue (fold_convert (type,
10279						  TREE_OPERAND (arg0, 0)));
10280
10281      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
10282      if (integer_zerop (op2)
10283	  && truth_value_p (TREE_CODE (arg0))
10284	  && truth_value_p (TREE_CODE (arg1)))
10285	return fold_build2 (TRUTH_ANDIF_EXPR, type,
10286			    fold_convert (type, arg0),
10287			    arg1);
10288
10289      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
10290      if (integer_onep (op2)
10291	  && truth_value_p (TREE_CODE (arg0))
10292	  && truth_value_p (TREE_CODE (arg1)))
10293	{
10294	  /* Only perform transformation if ARG0 is easily inverted.  */
10295	  tem = invert_truthvalue (arg0);
10296	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10297	    return fold_build2 (TRUTH_ORIF_EXPR, type,
10298				fold_convert (type, tem),
10299				arg1);
10300	}
10301
10302      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
10303      if (integer_zerop (arg1)
10304	  && truth_value_p (TREE_CODE (arg0))
10305	  && truth_value_p (TREE_CODE (op2)))
10306	{
10307	  /* Only perform transformation if ARG0 is easily inverted.  */
10308	  tem = invert_truthvalue (arg0);
10309	  if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10310	    return fold_build2 (TRUTH_ANDIF_EXPR, type,
10311				fold_convert (type, tem),
10312				op2);
10313	}
10314
10315      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
10316      if (integer_onep (arg1)
10317	  && truth_value_p (TREE_CODE (arg0))
10318	  && truth_value_p (TREE_CODE (op2)))
10319	return fold_build2 (TRUTH_ORIF_EXPR, type,
10320			    fold_convert (type, arg0),
10321			    op2);
10322
10323      return NULL_TREE;
10324
10325    case CALL_EXPR:
10326      /* Check for a built-in function.  */
10327      if (TREE_CODE (op0) == ADDR_EXPR
10328	  && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10329	  && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10330	return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10331      return NULL_TREE;
10332
10333    case BIT_FIELD_REF:
10334      if (TREE_CODE (arg0) == VECTOR_CST
10335	  && type == TREE_TYPE (TREE_TYPE (arg0))
10336	  && host_integerp (arg1, 1)
10337	  && host_integerp (op2, 1))
10338	{
10339	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10340	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10341
10342	  if (width != 0
10343	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10344	      && (idx % width) == 0
10345	      && (idx = idx / width)
10346		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10347	    {
10348	      tree elements = TREE_VECTOR_CST_ELTS (arg0);
10349	      while (idx-- > 0 && elements)
10350		elements = TREE_CHAIN (elements);
10351	      if (elements)
10352		return TREE_VALUE (elements);
10353	      else
10354		return fold_convert (type, integer_zero_node);
10355	    }
10356	}
10357      return NULL_TREE;
10358
10359    default:
10360      return NULL_TREE;
10361    } /* switch (code) */
10362}
10363
10364/* Perform constant folding and related simplification of EXPR.
10365   The related simplifications include x*1 => x, x*0 => 0, etc.,
10366   and application of the associative law.
10367   NOP_EXPR conversions may be removed freely (as long as we
10368   are careful not to change the type of the overall expression).
10369   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10370   but we can constant-fold them if they have constant operands.  */
10371
10372#ifdef ENABLE_FOLD_CHECKING
10373# define fold(x) fold_1 (x)
10374static tree fold_1 (tree);
10375static
10376#endif
10377tree
10378fold (tree expr)
10379{
10380  const tree t = expr;
10381  enum tree_code code = TREE_CODE (t);
10382  enum tree_code_class kind = TREE_CODE_CLASS (code);
10383  tree tem;
10384
10385  /* Return right away if a constant.  */
10386  if (kind == tcc_constant)
10387    return t;
10388
10389  if (IS_EXPR_CODE_CLASS (kind))
10390    {
10391      tree type = TREE_TYPE (t);
10392      tree op0, op1, op2;
10393
10394      switch (TREE_CODE_LENGTH (code))
10395	{
10396	case 1:
10397	  op0 = TREE_OPERAND (t, 0);
10398	  tem = fold_unary (code, type, op0);
10399	  return tem ? tem : expr;
10400	case 2:
10401	  op0 = TREE_OPERAND (t, 0);
10402	  op1 = TREE_OPERAND (t, 1);
10403	  tem = fold_binary (code, type, op0, op1);
10404	  return tem ? tem : expr;
10405	case 3:
10406	  op0 = TREE_OPERAND (t, 0);
10407	  op1 = TREE_OPERAND (t, 1);
10408	  op2 = TREE_OPERAND (t, 2);
10409	  tem = fold_ternary (code, type, op0, op1, op2);
10410	  return tem ? tem : expr;
10411	default:
10412	  break;
10413	}
10414    }
10415
10416  switch (code)
10417    {
10418    case CONST_DECL:
10419      return fold (DECL_INITIAL (t));
10420
10421    default:
10422      return t;
10423    } /* switch (code) */
10424}
10425
10426#ifdef ENABLE_FOLD_CHECKING
10427#undef fold
10428
10429static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10430static void fold_check_failed (tree, tree);
10431void print_fold_checksum (tree);
10432
10433/* When --enable-checking=fold, compute a digest of expr before
10434   and after actual fold call to see if fold did not accidentally
10435   change original expr.  */
10436
10437tree
10438fold (tree expr)
10439{
10440  tree ret;
10441  struct md5_ctx ctx;
10442  unsigned char checksum_before[16], checksum_after[16];
10443  htab_t ht;
10444
10445  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10446  md5_init_ctx (&ctx);
10447  fold_checksum_tree (expr, &ctx, ht);
10448  md5_finish_ctx (&ctx, checksum_before);
10449  htab_empty (ht);
10450
10451  ret = fold_1 (expr);
10452
10453  md5_init_ctx (&ctx);
10454  fold_checksum_tree (expr, &ctx, ht);
10455  md5_finish_ctx (&ctx, checksum_after);
10456  htab_delete (ht);
10457
10458  if (memcmp (checksum_before, checksum_after, 16))
10459    fold_check_failed (expr, ret);
10460
10461  return ret;
10462}
10463
10464void
10465print_fold_checksum (tree expr)
10466{
10467  struct md5_ctx ctx;
10468  unsigned char checksum[16], cnt;
10469  htab_t ht;
10470
10471  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10472  md5_init_ctx (&ctx);
10473  fold_checksum_tree (expr, &ctx, ht);
10474  md5_finish_ctx (&ctx, checksum);
10475  htab_delete (ht);
10476  for (cnt = 0; cnt < 16; ++cnt)
10477    fprintf (stderr, "%02x", checksum[cnt]);
10478  putc ('\n', stderr);
10479}
10480
10481static void
10482fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10483{
10484  internal_error ("fold check: original tree changed by fold");
10485}
10486
10487static void
10488fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10489{
10490  void **slot;
10491  enum tree_code code;
10492  struct tree_function_decl buf;
10493  int i, len;
10494
10495recursive_label:
10496
10497  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10498	       <= sizeof (struct tree_function_decl))
10499	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10500  if (expr == NULL)
10501    return;
10502  slot = htab_find_slot (ht, expr, INSERT);
10503  if (*slot != NULL)
10504    return;
10505  *slot = expr;
10506  code = TREE_CODE (expr);
10507  if (TREE_CODE_CLASS (code) == tcc_declaration
10508      && DECL_ASSEMBLER_NAME_SET_P (expr))
10509    {
10510      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
10511      memcpy ((char *) &buf, expr, tree_size (expr));
10512      expr = (tree) &buf;
10513      SET_DECL_ASSEMBLER_NAME (expr, NULL);
10514    }
10515  else if (TREE_CODE_CLASS (code) == tcc_type
10516	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10517	       || TYPE_CACHED_VALUES_P (expr)
10518	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10519    {
10520      /* Allow these fields to be modified.  */
10521      memcpy ((char *) &buf, expr, tree_size (expr));
10522      expr = (tree) &buf;
10523      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10524      TYPE_POINTER_TO (expr) = NULL;
10525      TYPE_REFERENCE_TO (expr) = NULL;
10526      if (TYPE_CACHED_VALUES_P (expr))
10527	{
10528	  TYPE_CACHED_VALUES_P (expr) = 0;
10529	  TYPE_CACHED_VALUES (expr) = NULL;
10530	}
10531    }
10532  md5_process_bytes (expr, tree_size (expr), ctx);
10533  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10534  if (TREE_CODE_CLASS (code) != tcc_type
10535      && TREE_CODE_CLASS (code) != tcc_declaration
10536      && code != TREE_LIST)
10537    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10538  switch (TREE_CODE_CLASS (code))
10539    {
10540    case tcc_constant:
10541      switch (code)
10542	{
10543	case STRING_CST:
10544	  md5_process_bytes (TREE_STRING_POINTER (expr),
10545			     TREE_STRING_LENGTH (expr), ctx);
10546	  break;
10547	case COMPLEX_CST:
10548	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10549	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10550	  break;
10551	case VECTOR_CST:
10552	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10553	  break;
10554	default:
10555	  break;
10556	}
10557      break;
10558    case tcc_exceptional:
10559      switch (code)
10560	{
10561	case TREE_LIST:
10562	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10563	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10564	  expr = TREE_CHAIN (expr);
10565	  goto recursive_label;
10566	  break;
10567	case TREE_VEC:
10568	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10569	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10570	  break;
10571	default:
10572	  break;
10573	}
10574      break;
10575    case tcc_expression:
10576    case tcc_reference:
10577    case tcc_comparison:
10578    case tcc_unary:
10579    case tcc_binary:
10580    case tcc_statement:
10581      len = TREE_CODE_LENGTH (code);
10582      for (i = 0; i < len; ++i)
10583	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10584      break;
10585    case tcc_declaration:
10586      fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10587      fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10588      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10589      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10590      fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10591      fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10592      fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10593      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10594	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10595
10596      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10597	{
10598	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10599	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10600	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10601	}
10602      break;
10603    case tcc_type:
10604      if (TREE_CODE (expr) == ENUMERAL_TYPE)
10605        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10606      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10607      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10608      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10609      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10610      if (INTEGRAL_TYPE_P (expr)
10611          || SCALAR_FLOAT_TYPE_P (expr))
10612	{
10613	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10614	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10615	}
10616      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10617      if (TREE_CODE (expr) == RECORD_TYPE
10618	  || TREE_CODE (expr) == UNION_TYPE
10619	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
10620	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10621      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10622      break;
10623    default:
10624      break;
10625    }
10626}
10627
10628#endif
10629
10630/* Fold a unary tree expression with code CODE of type TYPE with an
10631   operand OP0.  Return a folded expression if successful.  Otherwise,
10632   return a tree expression with code CODE of type TYPE with an
10633   operand OP0.  */
10634
10635tree
10636fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10637{
10638  tree tem;
10639#ifdef ENABLE_FOLD_CHECKING
10640  unsigned char checksum_before[16], checksum_after[16];
10641  struct md5_ctx ctx;
10642  htab_t ht;
10643
10644  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10645  md5_init_ctx (&ctx);
10646  fold_checksum_tree (op0, &ctx, ht);
10647  md5_finish_ctx (&ctx, checksum_before);
10648  htab_empty (ht);
10649#endif
10650
10651  tem = fold_unary (code, type, op0);
10652  if (!tem)
10653    tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10654
10655#ifdef ENABLE_FOLD_CHECKING
10656  md5_init_ctx (&ctx);
10657  fold_checksum_tree (op0, &ctx, ht);
10658  md5_finish_ctx (&ctx, checksum_after);
10659  htab_delete (ht);
10660
10661  if (memcmp (checksum_before, checksum_after, 16))
10662    fold_check_failed (op0, tem);
10663#endif
10664  return tem;
10665}
10666
10667/* Fold a binary tree expression with code CODE of type TYPE with
10668   operands OP0 and OP1.  Return a folded expression if successful.
10669   Otherwise, return a tree expression with code CODE of type TYPE
10670   with operands OP0 and OP1.  */
10671
10672tree
10673fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10674		  MEM_STAT_DECL)
10675{
10676  tree tem;
10677#ifdef ENABLE_FOLD_CHECKING
10678  unsigned char checksum_before_op0[16],
10679                checksum_before_op1[16],
10680		checksum_after_op0[16],
10681		checksum_after_op1[16];
10682  struct md5_ctx ctx;
10683  htab_t ht;
10684
10685  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10686  md5_init_ctx (&ctx);
10687  fold_checksum_tree (op0, &ctx, ht);
10688  md5_finish_ctx (&ctx, checksum_before_op0);
10689  htab_empty (ht);
10690
10691  md5_init_ctx (&ctx);
10692  fold_checksum_tree (op1, &ctx, ht);
10693  md5_finish_ctx (&ctx, checksum_before_op1);
10694  htab_empty (ht);
10695#endif
10696
10697  tem = fold_binary (code, type, op0, op1);
10698  if (!tem)
10699    tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10700
10701#ifdef ENABLE_FOLD_CHECKING
10702  md5_init_ctx (&ctx);
10703  fold_checksum_tree (op0, &ctx, ht);
10704  md5_finish_ctx (&ctx, checksum_after_op0);
10705  htab_empty (ht);
10706
10707  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10708    fold_check_failed (op0, tem);
10709
10710  md5_init_ctx (&ctx);
10711  fold_checksum_tree (op1, &ctx, ht);
10712  md5_finish_ctx (&ctx, checksum_after_op1);
10713  htab_delete (ht);
10714
10715  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10716    fold_check_failed (op1, tem);
10717#endif
10718  return tem;
10719}
10720
10721/* Fold a ternary tree expression with code CODE of type TYPE with
10722   operands OP0, OP1, and OP2.  Return a folded expression if
10723   successful.  Otherwise, return a tree expression with code CODE of
10724   type TYPE with operands OP0, OP1, and OP2.  */
10725
10726tree
10727fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10728	     MEM_STAT_DECL)
10729{
10730  tree tem;
10731#ifdef ENABLE_FOLD_CHECKING
10732  unsigned char checksum_before_op0[16],
10733                checksum_before_op1[16],
10734                checksum_before_op2[16],
10735		checksum_after_op0[16],
10736		checksum_after_op1[16],
10737		checksum_after_op2[16];
10738  struct md5_ctx ctx;
10739  htab_t ht;
10740
10741  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10742  md5_init_ctx (&ctx);
10743  fold_checksum_tree (op0, &ctx, ht);
10744  md5_finish_ctx (&ctx, checksum_before_op0);
10745  htab_empty (ht);
10746
10747  md5_init_ctx (&ctx);
10748  fold_checksum_tree (op1, &ctx, ht);
10749  md5_finish_ctx (&ctx, checksum_before_op1);
10750  htab_empty (ht);
10751
10752  md5_init_ctx (&ctx);
10753  fold_checksum_tree (op2, &ctx, ht);
10754  md5_finish_ctx (&ctx, checksum_before_op2);
10755  htab_empty (ht);
10756#endif
10757
10758  tem = fold_ternary (code, type, op0, op1, op2);
10759  if (!tem)
10760    tem =  build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10761
10762#ifdef ENABLE_FOLD_CHECKING
10763  md5_init_ctx (&ctx);
10764  fold_checksum_tree (op0, &ctx, ht);
10765  md5_finish_ctx (&ctx, checksum_after_op0);
10766  htab_empty (ht);
10767
10768  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10769    fold_check_failed (op0, tem);
10770
10771  md5_init_ctx (&ctx);
10772  fold_checksum_tree (op1, &ctx, ht);
10773  md5_finish_ctx (&ctx, checksum_after_op1);
10774  htab_empty (ht);
10775
10776  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10777    fold_check_failed (op1, tem);
10778
10779  md5_init_ctx (&ctx);
10780  fold_checksum_tree (op2, &ctx, ht);
10781  md5_finish_ctx (&ctx, checksum_after_op2);
10782  htab_delete (ht);
10783
10784  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10785    fold_check_failed (op2, tem);
10786#endif
10787  return tem;
10788}
10789
10790/* Perform constant folding and related simplification of initializer
10791   expression EXPR.  These behave identically to "fold_buildN" but ignore
10792   potential run-time traps and exceptions that fold must preserve.  */
10793
10794#define START_FOLD_INIT \
10795  int saved_signaling_nans = flag_signaling_nans;\
10796  int saved_trapping_math = flag_trapping_math;\
10797  int saved_rounding_math = flag_rounding_math;\
10798  int saved_trapv = flag_trapv;\
10799  flag_signaling_nans = 0;\
10800  flag_trapping_math = 0;\
10801  flag_rounding_math = 0;\
10802  flag_trapv = 0
10803
10804#define END_FOLD_INIT \
10805  flag_signaling_nans = saved_signaling_nans;\
10806  flag_trapping_math = saved_trapping_math;\
10807  flag_rounding_math = saved_rounding_math;\
10808  flag_trapv = saved_trapv
10809
10810tree
10811fold_build1_initializer (enum tree_code code, tree type, tree op)
10812{
10813  tree result;
10814  START_FOLD_INIT;
10815
10816  result = fold_build1 (code, type, op);
10817
10818  END_FOLD_INIT;
10819  return result;
10820}
10821
10822tree
10823fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10824{
10825  tree result;
10826  START_FOLD_INIT;
10827
10828  result = fold_build2 (code, type, op0, op1);
10829
10830  END_FOLD_INIT;
10831  return result;
10832}
10833
10834tree
10835fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10836			 tree op2)
10837{
10838  tree result;
10839  START_FOLD_INIT;
10840
10841  result = fold_build3 (code, type, op0, op1, op2);
10842
10843  END_FOLD_INIT;
10844  return result;
10845}
10846
10847#undef START_FOLD_INIT
10848#undef END_FOLD_INIT
10849
10850/* Determine if first argument is a multiple of second argument.  Return 0 if
10851   it is not, or we cannot easily determined it to be.
10852
10853   An example of the sort of thing we care about (at this point; this routine
10854   could surely be made more general, and expanded to do what the *_DIV_EXPR's
10855   fold cases do now) is discovering that
10856
10857     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10858
10859   is a multiple of
10860
10861     SAVE_EXPR (J * 8)
10862
10863   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10864
10865   This code also handles discovering that
10866
10867     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10868
10869   is a multiple of 8 so we don't have to worry about dealing with a
10870   possible remainder.
10871
10872   Note that we *look* inside a SAVE_EXPR only to determine how it was
10873   calculated; it is not safe for fold to do much of anything else with the
10874   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10875   at run time.  For example, the latter example above *cannot* be implemented
10876   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10877   evaluation time of the original SAVE_EXPR is not necessarily the same at
10878   the time the new expression is evaluated.  The only optimization of this
10879   sort that would be valid is changing
10880
10881     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10882
10883   divided by 8 to
10884
10885     SAVE_EXPR (I) * SAVE_EXPR (J)
10886
10887   (where the same SAVE_EXPR (J) is used in the original and the
10888   transformed version).  */
10889
10890static int
10891multiple_of_p (tree type, tree top, tree bottom)
10892{
10893  if (operand_equal_p (top, bottom, 0))
10894    return 1;
10895
10896  if (TREE_CODE (type) != INTEGER_TYPE)
10897    return 0;
10898
10899  switch (TREE_CODE (top))
10900    {
10901    case BIT_AND_EXPR:
10902      /* Bitwise and provides a power of two multiple.  If the mask is
10903	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
10904      if (!integer_pow2p (bottom))
10905	return 0;
10906      /* FALLTHRU */
10907
10908    case MULT_EXPR:
10909      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10910	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10911
10912    case PLUS_EXPR:
10913    case MINUS_EXPR:
10914      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10915	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10916
10917    case LSHIFT_EXPR:
10918      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10919	{
10920	  tree op1, t1;
10921
10922	  op1 = TREE_OPERAND (top, 1);
10923	  /* const_binop may not detect overflow correctly,
10924	     so check for it explicitly here.  */
10925	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10926	      > TREE_INT_CST_LOW (op1)
10927	      && TREE_INT_CST_HIGH (op1) == 0
10928	      && 0 != (t1 = fold_convert (type,
10929					  const_binop (LSHIFT_EXPR,
10930						       size_one_node,
10931						       op1, 0)))
10932	      && ! TREE_OVERFLOW (t1))
10933	    return multiple_of_p (type, t1, bottom);
10934	}
10935      return 0;
10936
10937    case NOP_EXPR:
10938      /* Can't handle conversions from non-integral or wider integral type.  */
10939      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10940	  || (TYPE_PRECISION (type)
10941	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10942	return 0;
10943
10944      /* .. fall through ...  */
10945
10946    case SAVE_EXPR:
10947      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10948
10949    case INTEGER_CST:
10950      if (TREE_CODE (bottom) != INTEGER_CST
10951	  || (TYPE_UNSIGNED (type)
10952	      && (tree_int_cst_sgn (top) < 0
10953		  || tree_int_cst_sgn (bottom) < 0)))
10954	return 0;
10955      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10956					 top, bottom, 0));
10957
10958    default:
10959      return 0;
10960    }
10961}
10962
10963/* Return true if `t' is known to be non-negative.  */
10964
10965int
10966tree_expr_nonnegative_p (tree t)
10967{
10968  if (t == error_mark_node)
10969    return 0;
10970
10971  if (TYPE_UNSIGNED (TREE_TYPE (t)))
10972    return 1;
10973
10974  switch (TREE_CODE (t))
10975    {
10976    case ABS_EXPR:
10977      /* We can't return 1 if flag_wrapv is set because
10978	 ABS_EXPR<INT_MIN> = INT_MIN.  */
10979      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10980        return 1;
10981      break;
10982
10983    case INTEGER_CST:
10984      return tree_int_cst_sgn (t) >= 0;
10985
10986    case REAL_CST:
10987      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10988
10989    case PLUS_EXPR:
10990      if (FLOAT_TYPE_P (TREE_TYPE (t)))
10991	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10992	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10993
10994      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10995	 both unsigned and at least 2 bits shorter than the result.  */
10996      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10997	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10998	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10999	{
11000	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11001	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11002	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11003	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11004	    {
11005	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
11006				       TYPE_PRECISION (inner2)) + 1;
11007	      return prec < TYPE_PRECISION (TREE_TYPE (t));
11008	    }
11009	}
11010      break;
11011
11012    case MULT_EXPR:
11013      if (FLOAT_TYPE_P (TREE_TYPE (t)))
11014	{
11015	  /* x * x for floating point x is always non-negative.  */
11016	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
11017	    return 1;
11018	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11019		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11020	}
11021
11022      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
11023	 both unsigned and their total bits is shorter than the result.  */
11024      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
11025	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
11026	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
11027	{
11028	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
11029	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
11030	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
11031	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
11032	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
11033		   < TYPE_PRECISION (TREE_TYPE (t));
11034	}
11035      return 0;
11036
11037    case BIT_AND_EXPR:
11038    case MAX_EXPR:
11039      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11040	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11041
11042    case BIT_IOR_EXPR:
11043    case BIT_XOR_EXPR:
11044    case MIN_EXPR:
11045    case RDIV_EXPR:
11046    case TRUNC_DIV_EXPR:
11047    case CEIL_DIV_EXPR:
11048    case FLOOR_DIV_EXPR:
11049    case ROUND_DIV_EXPR:
11050      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11051	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11052
11053    case TRUNC_MOD_EXPR:
11054    case CEIL_MOD_EXPR:
11055    case FLOOR_MOD_EXPR:
11056    case ROUND_MOD_EXPR:
11057    case SAVE_EXPR:
11058    case NON_LVALUE_EXPR:
11059    case FLOAT_EXPR:
11060      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11061
11062    case COMPOUND_EXPR:
11063    case MODIFY_EXPR:
11064      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11065
11066    case BIND_EXPR:
11067      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
11068
11069    case COND_EXPR:
11070      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
11071	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
11072
11073    case NOP_EXPR:
11074      {
11075	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11076	tree outer_type = TREE_TYPE (t);
11077
11078	if (TREE_CODE (outer_type) == REAL_TYPE)
11079	  {
11080	    if (TREE_CODE (inner_type) == REAL_TYPE)
11081	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11082	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
11083	      {
11084		if (TYPE_UNSIGNED (inner_type))
11085		  return 1;
11086		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11087	      }
11088	  }
11089	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
11090	  {
11091	    if (TREE_CODE (inner_type) == REAL_TYPE)
11092	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
11093	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
11094	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
11095		      && TYPE_UNSIGNED (inner_type);
11096	  }
11097      }
11098      break;
11099
11100    case TARGET_EXPR:
11101      {
11102	tree temp = TARGET_EXPR_SLOT (t);
11103	t = TARGET_EXPR_INITIAL (t);
11104
11105	/* If the initializer is non-void, then it's a normal expression
11106	   that will be assigned to the slot.  */
11107	if (!VOID_TYPE_P (t))
11108	  return tree_expr_nonnegative_p (t);
11109
11110	/* Otherwise, the initializer sets the slot in some way.  One common
11111	   way is an assignment statement at the end of the initializer.  */
11112	while (1)
11113	  {
11114	    if (TREE_CODE (t) == BIND_EXPR)
11115	      t = expr_last (BIND_EXPR_BODY (t));
11116	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
11117		     || TREE_CODE (t) == TRY_CATCH_EXPR)
11118	      t = expr_last (TREE_OPERAND (t, 0));
11119	    else if (TREE_CODE (t) == STATEMENT_LIST)
11120	      t = expr_last (t);
11121	    else
11122	      break;
11123	  }
11124	if (TREE_CODE (t) == MODIFY_EXPR
11125	    && TREE_OPERAND (t, 0) == temp)
11126	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
11127
11128	return 0;
11129      }
11130
11131    case CALL_EXPR:
11132      {
11133	tree fndecl = get_callee_fndecl (t);
11134	tree arglist = TREE_OPERAND (t, 1);
11135	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
11136	  switch (DECL_FUNCTION_CODE (fndecl))
11137	    {
11138#define CASE_BUILTIN_F(BUILT_IN_FN) \
11139  case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
11140#define CASE_BUILTIN_I(BUILT_IN_FN) \
11141  case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
11142
11143	    CASE_BUILTIN_F (BUILT_IN_ACOS)
11144	    CASE_BUILTIN_F (BUILT_IN_ACOSH)
11145	    CASE_BUILTIN_F (BUILT_IN_CABS)
11146	    CASE_BUILTIN_F (BUILT_IN_COSH)
11147	    CASE_BUILTIN_F (BUILT_IN_ERFC)
11148	    CASE_BUILTIN_F (BUILT_IN_EXP)
11149	    CASE_BUILTIN_F (BUILT_IN_EXP10)
11150	    CASE_BUILTIN_F (BUILT_IN_EXP2)
11151	    CASE_BUILTIN_F (BUILT_IN_FABS)
11152	    CASE_BUILTIN_F (BUILT_IN_FDIM)
11153	    CASE_BUILTIN_F (BUILT_IN_HYPOT)
11154	    CASE_BUILTIN_F (BUILT_IN_POW10)
11155	    CASE_BUILTIN_I (BUILT_IN_FFS)
11156	    CASE_BUILTIN_I (BUILT_IN_PARITY)
11157	    CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
11158	      /* Always true.  */
11159	      return 1;
11160
11161	    CASE_BUILTIN_F (BUILT_IN_SQRT)
11162	      /* sqrt(-0.0) is -0.0.  */
11163	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11164		return 1;
11165	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11166
11167	    CASE_BUILTIN_F (BUILT_IN_ASINH)
11168	    CASE_BUILTIN_F (BUILT_IN_ATAN)
11169	    CASE_BUILTIN_F (BUILT_IN_ATANH)
11170	    CASE_BUILTIN_F (BUILT_IN_CBRT)
11171	    CASE_BUILTIN_F (BUILT_IN_CEIL)
11172	    CASE_BUILTIN_F (BUILT_IN_ERF)
11173	    CASE_BUILTIN_F (BUILT_IN_EXPM1)
11174	    CASE_BUILTIN_F (BUILT_IN_FLOOR)
11175	    CASE_BUILTIN_F (BUILT_IN_FMOD)
11176	    CASE_BUILTIN_F (BUILT_IN_FREXP)
11177	    CASE_BUILTIN_F (BUILT_IN_LCEIL)
11178	    CASE_BUILTIN_F (BUILT_IN_LDEXP)
11179	    CASE_BUILTIN_F (BUILT_IN_LFLOOR)
11180	    CASE_BUILTIN_F (BUILT_IN_LLCEIL)
11181	    CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
11182	    CASE_BUILTIN_F (BUILT_IN_LLRINT)
11183	    CASE_BUILTIN_F (BUILT_IN_LLROUND)
11184	    CASE_BUILTIN_F (BUILT_IN_LRINT)
11185	    CASE_BUILTIN_F (BUILT_IN_LROUND)
11186	    CASE_BUILTIN_F (BUILT_IN_MODF)
11187	    CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
11188	    CASE_BUILTIN_F (BUILT_IN_POW)
11189	    CASE_BUILTIN_F (BUILT_IN_RINT)
11190	    CASE_BUILTIN_F (BUILT_IN_ROUND)
11191	    CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
11192	    CASE_BUILTIN_F (BUILT_IN_SINH)
11193	    CASE_BUILTIN_F (BUILT_IN_TANH)
11194	    CASE_BUILTIN_F (BUILT_IN_TRUNC)
11195	      /* True if the 1st argument is nonnegative.  */
11196	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11197
11198	    CASE_BUILTIN_F (BUILT_IN_FMAX)
11199	      /* True if the 1st OR 2nd arguments are nonnegative.  */
11200	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11201	        || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11202
11203	    CASE_BUILTIN_F (BUILT_IN_FMIN)
11204	      /* True if the 1st AND 2nd arguments are nonnegative.  */
11205	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11206	        && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11207
11208	    CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11209	      /* True if the 2nd argument is nonnegative.  */
11210	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11211
11212	    default:
11213	      break;
11214#undef CASE_BUILTIN_F
11215#undef CASE_BUILTIN_I
11216	    }
11217      }
11218
11219      /* ... fall through ...  */
11220
11221    default:
11222      if (truth_value_p (TREE_CODE (t)))
11223	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
11224	return 1;
11225    }
11226
11227  /* We don't know sign of `t', so be conservative and return false.  */
11228  return 0;
11229}
11230
11231/* Return true when T is an address and is known to be nonzero.
11232   For floating point we further ensure that T is not denormal.
11233   Similar logic is present in nonzero_address in rtlanal.h.  */
11234
11235bool
11236tree_expr_nonzero_p (tree t)
11237{
11238  tree type = TREE_TYPE (t);
11239
11240  /* Doing something useful for floating point would need more work.  */
11241  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11242    return false;
11243
11244  switch (TREE_CODE (t))
11245    {
11246    case ABS_EXPR:
11247      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11248
11249    case INTEGER_CST:
11250      /* We used to test for !integer_zerop here.  This does not work correctly
11251	 if TREE_CONSTANT_OVERFLOW (t).  */
11252      return (TREE_INT_CST_LOW (t) != 0
11253	      || TREE_INT_CST_HIGH (t) != 0);
11254
11255    case PLUS_EXPR:
11256      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11257	{
11258	  /* With the presence of negative values it is hard
11259	     to say something.  */
11260	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11261	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11262	    return false;
11263	  /* One of operands must be positive and the other non-negative.  */
11264	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11265	          || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11266	}
11267      break;
11268
11269    case MULT_EXPR:
11270      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11271	{
11272	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11273	          && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11274	}
11275      break;
11276
11277    case NOP_EXPR:
11278      {
11279	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11280	tree outer_type = TREE_TYPE (t);
11281
11282	return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11283		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11284      }
11285      break;
11286
11287   case ADDR_EXPR:
11288      {
11289	tree base = get_base_address (TREE_OPERAND (t, 0));
11290
11291	if (!base)
11292	  return false;
11293
11294	/* Weak declarations may link to NULL.  */
11295	if (VAR_OR_FUNCTION_DECL_P (base))
11296	  return !DECL_WEAK (base);
11297
11298	/* Constants are never weak.  */
11299	if (CONSTANT_CLASS_P (base))
11300	  return true;
11301
11302	return false;
11303      }
11304
11305    case COND_EXPR:
11306      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11307	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11308
11309    case MIN_EXPR:
11310      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11311	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11312
11313    case MAX_EXPR:
11314      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11315	{
11316	  /* When both operands are nonzero, then MAX must be too.  */
11317	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11318	    return true;
11319
11320	  /* MAX where operand 0 is positive is positive.  */
11321	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11322	}
11323      /* MAX where operand 1 is positive is positive.  */
11324      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11325	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11326	return true;
11327      break;
11328
11329    case COMPOUND_EXPR:
11330    case MODIFY_EXPR:
11331    case BIND_EXPR:
11332      return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11333
11334    case SAVE_EXPR:
11335    case NON_LVALUE_EXPR:
11336      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11337
11338    case BIT_IOR_EXPR:
11339      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11340	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11341
11342    case CALL_EXPR:
11343      return alloca_call_p (t);
11344
11345    default:
11346      break;
11347    }
11348  return false;
11349}
11350
11351/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11352   attempt to fold the expression to a constant without modifying TYPE,
11353   OP0 or OP1.
11354
11355   If the expression could be simplified to a constant, then return
11356   the constant.  If the expression would not be simplified to a
11357   constant, then return NULL_TREE.  */
11358
11359tree
11360fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11361{
11362  tree tem = fold_binary (code, type, op0, op1);
11363  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11364}
11365
11366/* Given the components of a unary expression CODE, TYPE and OP0,
11367   attempt to fold the expression to a constant without modifying
11368   TYPE or OP0.
11369
11370   If the expression could be simplified to a constant, then return
11371   the constant.  If the expression would not be simplified to a
11372   constant, then return NULL_TREE.  */
11373
11374tree
11375fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11376{
11377  tree tem = fold_unary (code, type, op0);
11378  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11379}
11380
11381/* If EXP represents referencing an element in a constant string
11382   (either via pointer arithmetic or array indexing), return the
11383   tree representing the value accessed, otherwise return NULL.  */
11384
11385tree
11386fold_read_from_constant_string (tree exp)
11387{
11388  if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11389    {
11390      tree exp1 = TREE_OPERAND (exp, 0);
11391      tree index;
11392      tree string;
11393
11394      if (TREE_CODE (exp) == INDIRECT_REF)
11395	string = string_constant (exp1, &index);
11396      else
11397	{
11398	  tree low_bound = array_ref_low_bound (exp);
11399	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11400
11401	  /* Optimize the special-case of a zero lower bound.
11402
11403	     We convert the low_bound to sizetype to avoid some problems
11404	     with constant folding.  (E.g. suppose the lower bound is 1,
11405	     and its mode is QI.  Without the conversion,l (ARRAY
11406	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11407	     +INDEX), which becomes (ARRAY+255+INDEX).  Opps!)  */
11408	  if (! integer_zerop (low_bound))
11409	    index = size_diffop (index, fold_convert (sizetype, low_bound));
11410
11411	  string = exp1;
11412	}
11413
11414      if (string
11415	  && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11416	  && TREE_CODE (string) == STRING_CST
11417	  && TREE_CODE (index) == INTEGER_CST
11418	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11419	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11420	      == MODE_INT)
11421	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11422	return fold_convert (TREE_TYPE (exp),
11423			     build_int_cst (NULL_TREE,
11424					    (TREE_STRING_POINTER (string)
11425					     [TREE_INT_CST_LOW (index)])));
11426    }
11427  return NULL;
11428}
11429
11430/* Return the tree for neg (ARG0) when ARG0 is known to be either
11431   an integer constant or real constant.
11432
11433   TYPE is the type of the result.  */
11434
11435static tree
11436fold_negate_const (tree arg0, tree type)
11437{
11438  tree t = NULL_TREE;
11439
11440  switch (TREE_CODE (arg0))
11441    {
11442    case INTEGER_CST:
11443      {
11444	unsigned HOST_WIDE_INT low;
11445	HOST_WIDE_INT high;
11446	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11447				   TREE_INT_CST_HIGH (arg0),
11448				   &low, &high);
11449	t = build_int_cst_wide (type, low, high);
11450	t = force_fit_type (t, 1,
11451			    (overflow | TREE_OVERFLOW (arg0))
11452			    && !TYPE_UNSIGNED (type),
11453			    TREE_CONSTANT_OVERFLOW (arg0));
11454	break;
11455      }
11456
11457    case REAL_CST:
11458      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11459      break;
11460
11461    default:
11462      gcc_unreachable ();
11463    }
11464
11465  return t;
11466}
11467
11468/* Return the tree for abs (ARG0) when ARG0 is known to be either
11469   an integer constant or real constant.
11470
11471   TYPE is the type of the result.  */
11472
11473tree
11474fold_abs_const (tree arg0, tree type)
11475{
11476  tree t = NULL_TREE;
11477
11478  switch (TREE_CODE (arg0))
11479    {
11480    case INTEGER_CST:
11481      /* If the value is unsigned, then the absolute value is
11482	 the same as the ordinary value.  */
11483      if (TYPE_UNSIGNED (type))
11484	t = arg0;
11485      /* Similarly, if the value is non-negative.  */
11486      else if (INT_CST_LT (integer_minus_one_node, arg0))
11487	t = arg0;
11488      /* If the value is negative, then the absolute value is
11489	 its negation.  */
11490      else
11491	{
11492	  unsigned HOST_WIDE_INT low;
11493	  HOST_WIDE_INT high;
11494	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11495				     TREE_INT_CST_HIGH (arg0),
11496				     &low, &high);
11497	  t = build_int_cst_wide (type, low, high);
11498	  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11499			      TREE_CONSTANT_OVERFLOW (arg0));
11500	}
11501      break;
11502
11503    case REAL_CST:
11504      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11505	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11506      else
11507	t =  arg0;
11508      break;
11509
11510    default:
11511      gcc_unreachable ();
11512    }
11513
11514  return t;
11515}
11516
11517/* Return the tree for not (ARG0) when ARG0 is known to be an integer
11518   constant.  TYPE is the type of the result.  */
11519
11520static tree
11521fold_not_const (tree arg0, tree type)
11522{
11523  tree t = NULL_TREE;
11524
11525  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11526
11527  t = build_int_cst_wide (type,
11528			  ~ TREE_INT_CST_LOW (arg0),
11529			  ~ TREE_INT_CST_HIGH (arg0));
11530  t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11531		      TREE_CONSTANT_OVERFLOW (arg0));
11532
11533  return t;
11534}
11535
11536/* Given CODE, a relational operator, the target type, TYPE and two
11537   constant operands OP0 and OP1, return the result of the
11538   relational operation.  If the result is not a compile time
11539   constant, then return NULL_TREE.  */
11540
11541static tree
11542fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11543{
11544  int result, invert;
11545
11546  /* From here on, the only cases we handle are when the result is
11547     known to be a constant.  */
11548
11549  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11550    {
11551      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11552      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11553
11554      /* Handle the cases where either operand is a NaN.  */
11555      if (real_isnan (c0) || real_isnan (c1))
11556	{
11557	  switch (code)
11558	    {
11559	    case EQ_EXPR:
11560	    case ORDERED_EXPR:
11561	      result = 0;
11562	      break;
11563
11564	    case NE_EXPR:
11565	    case UNORDERED_EXPR:
11566	    case UNLT_EXPR:
11567	    case UNLE_EXPR:
11568	    case UNGT_EXPR:
11569	    case UNGE_EXPR:
11570	    case UNEQ_EXPR:
11571              result = 1;
11572	      break;
11573
11574	    case LT_EXPR:
11575	    case LE_EXPR:
11576	    case GT_EXPR:
11577	    case GE_EXPR:
11578	    case LTGT_EXPR:
11579	      if (flag_trapping_math)
11580		return NULL_TREE;
11581	      result = 0;
11582	      break;
11583
11584	    default:
11585	      gcc_unreachable ();
11586	    }
11587
11588	  return constant_boolean_node (result, type);
11589	}
11590
11591      return constant_boolean_node (real_compare (code, c0, c1), type);
11592    }
11593
11594  /* Handle equality/inequality of complex constants.  */
11595  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
11596    {
11597      tree rcond = fold_relational_const (code, type,
11598					  TREE_REALPART (op0),
11599					  TREE_REALPART (op1));
11600      tree icond = fold_relational_const (code, type,
11601					  TREE_IMAGPART (op0),
11602					  TREE_IMAGPART (op1));
11603      if (code == EQ_EXPR)
11604	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
11605      else if (code == NE_EXPR)
11606	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
11607      else
11608	return NULL_TREE;
11609    }
11610
11611  /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11612
11613     To compute GT, swap the arguments and do LT.
11614     To compute GE, do LT and invert the result.
11615     To compute LE, swap the arguments, do LT and invert the result.
11616     To compute NE, do EQ and invert the result.
11617
11618     Therefore, the code below must handle only EQ and LT.  */
11619
11620  if (code == LE_EXPR || code == GT_EXPR)
11621    {
11622      tree tem = op0;
11623      op0 = op1;
11624      op1 = tem;
11625      code = swap_tree_comparison (code);
11626    }
11627
11628  /* Note that it is safe to invert for real values here because we
11629     have already handled the one case that it matters.  */
11630
11631  invert = 0;
11632  if (code == NE_EXPR || code == GE_EXPR)
11633    {
11634      invert = 1;
11635      code = invert_tree_comparison (code, false);
11636    }
11637
11638  /* Compute a result for LT or EQ if args permit;
11639     Otherwise return T.  */
11640  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11641    {
11642      if (code == EQ_EXPR)
11643	result = tree_int_cst_equal (op0, op1);
11644      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11645	result = INT_CST_LT_UNSIGNED (op0, op1);
11646      else
11647	result = INT_CST_LT (op0, op1);
11648    }
11649  else
11650    return NULL_TREE;
11651
11652  if (invert)
11653    result ^= 1;
11654  return constant_boolean_node (result, type);
11655}
11656
11657/* Build an expression for the a clean point containing EXPR with type TYPE.
11658   Don't build a cleanup point expression for EXPR which don't have side
11659   effects.  */
11660
11661tree
11662fold_build_cleanup_point_expr (tree type, tree expr)
11663{
11664  /* If the expression does not have side effects then we don't have to wrap
11665     it with a cleanup point expression.  */
11666  if (!TREE_SIDE_EFFECTS (expr))
11667    return expr;
11668
11669  /* If the expression is a return, check to see if the expression inside the
11670     return has no side effects or the right hand side of the modify expression
11671     inside the return. If either don't have side effects set we don't need to
11672     wrap the expression in a cleanup point expression.  Note we don't check the
11673     left hand side of the modify because it should always be a return decl.  */
11674  if (TREE_CODE (expr) == RETURN_EXPR)
11675    {
11676      tree op = TREE_OPERAND (expr, 0);
11677      if (!op || !TREE_SIDE_EFFECTS (op))
11678        return expr;
11679      op = TREE_OPERAND (op, 1);
11680      if (!TREE_SIDE_EFFECTS (op))
11681        return expr;
11682    }
11683
11684  return build1 (CLEANUP_POINT_EXPR, type, expr);
11685}
11686
11687/* Build an expression for the address of T.  Folds away INDIRECT_REF to
11688   avoid confusing the gimplify process.  */
11689
11690tree
11691build_fold_addr_expr_with_type (tree t, tree ptrtype)
11692{
11693  /* The size of the object is not relevant when talking about its address.  */
11694  if (TREE_CODE (t) == WITH_SIZE_EXPR)
11695    t = TREE_OPERAND (t, 0);
11696
11697  /* Note: doesn't apply to ALIGN_INDIRECT_REF */
11698  if (TREE_CODE (t) == INDIRECT_REF
11699      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11700    {
11701      t = TREE_OPERAND (t, 0);
11702      if (TREE_TYPE (t) != ptrtype)
11703	t = build1 (NOP_EXPR, ptrtype, t);
11704    }
11705  else
11706    {
11707      tree base = t;
11708
11709      while (handled_component_p (base))
11710	base = TREE_OPERAND (base, 0);
11711      if (DECL_P (base))
11712	TREE_ADDRESSABLE (base) = 1;
11713
11714      t = build1 (ADDR_EXPR, ptrtype, t);
11715    }
11716
11717  return t;
11718}
11719
11720tree
11721build_fold_addr_expr (tree t)
11722{
11723  return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11724}
11725
11726/* Given a pointer value OP0 and a type TYPE, return a simplified version
11727   of an indirection through OP0, or NULL_TREE if no simplification is
11728   possible.  */
11729
11730tree
11731fold_indirect_ref_1 (tree type, tree op0)
11732{
11733  tree sub = op0;
11734  tree subtype;
11735
11736  STRIP_NOPS (sub);
11737  subtype = TREE_TYPE (sub);
11738  if (!POINTER_TYPE_P (subtype))
11739    return NULL_TREE;
11740
11741  if (TREE_CODE (sub) == ADDR_EXPR)
11742    {
11743      tree op = TREE_OPERAND (sub, 0);
11744      tree optype = TREE_TYPE (op);
11745      /* *&CONST_DECL -> to the value of the const decl.  */
11746      if (TREE_CODE (op) == CONST_DECL)
11747	return DECL_INITIAL (op);
11748      /* *&p => p */
11749      if (type == optype)
11750	return op;
11751      /* *(foo *)&fooarray => fooarray[0] */
11752      else if (TREE_CODE (optype) == ARRAY_TYPE
11753	       && type == TREE_TYPE (optype))
11754	{
11755	  tree type_domain = TYPE_DOMAIN (optype);
11756	  tree min_val = size_zero_node;
11757	  if (type_domain && TYPE_MIN_VALUE (type_domain))
11758	    min_val = TYPE_MIN_VALUE (type_domain);
11759	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11760	}
11761    }
11762
11763  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11764  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11765      && type == TREE_TYPE (TREE_TYPE (subtype)))
11766    {
11767      tree type_domain;
11768      tree min_val = size_zero_node;
11769      sub = build_fold_indirect_ref (sub);
11770      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11771      if (type_domain && TYPE_MIN_VALUE (type_domain))
11772	min_val = TYPE_MIN_VALUE (type_domain);
11773      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11774    }
11775
11776  return NULL_TREE;
11777}
11778
11779/* Builds an expression for an indirection through T, simplifying some
11780   cases.  */
11781
11782tree
11783build_fold_indirect_ref (tree t)
11784{
11785  tree type = TREE_TYPE (TREE_TYPE (t));
11786  tree sub = fold_indirect_ref_1 (type, t);
11787
11788  if (sub)
11789    return sub;
11790  else
11791    return build1 (INDIRECT_REF, type, t);
11792}
11793
11794/* Given an INDIRECT_REF T, return either T or a simplified version.  */
11795
11796tree
11797fold_indirect_ref (tree t)
11798{
11799  tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11800
11801  if (sub)
11802    return sub;
11803  else
11804    return t;
11805}
11806
11807/* Strip non-trapping, non-side-effecting tree nodes from an expression
11808   whose result is ignored.  The type of the returned tree need not be
11809   the same as the original expression.  */
11810
11811tree
11812fold_ignored_result (tree t)
11813{
11814  if (!TREE_SIDE_EFFECTS (t))
11815    return integer_zero_node;
11816
11817  for (;;)
11818    switch (TREE_CODE_CLASS (TREE_CODE (t)))
11819      {
11820      case tcc_unary:
11821	t = TREE_OPERAND (t, 0);
11822	break;
11823
11824      case tcc_binary:
11825      case tcc_comparison:
11826	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11827	  t = TREE_OPERAND (t, 0);
11828	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11829	  t = TREE_OPERAND (t, 1);
11830	else
11831	  return t;
11832	break;
11833
11834      case tcc_expression:
11835	switch (TREE_CODE (t))
11836	  {
11837	  case COMPOUND_EXPR:
11838	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11839	      return t;
11840	    t = TREE_OPERAND (t, 0);
11841	    break;
11842
11843	  case COND_EXPR:
11844	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11845		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11846	      return t;
11847	    t = TREE_OPERAND (t, 0);
11848	    break;
11849
11850	  default:
11851	    return t;
11852	  }
11853	break;
11854
11855      default:
11856	return t;
11857      }
11858}
11859
11860/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11861   This can only be applied to objects of a sizetype.  */
11862
11863tree
11864round_up (tree value, int divisor)
11865{
11866  tree div = NULL_TREE;
11867
11868  gcc_assert (divisor > 0);
11869  if (divisor == 1)
11870    return value;
11871
11872  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
11873     have to do anything.  Only do this when we are not given a const,
11874     because in that case, this check is more expensive than just
11875     doing it.  */
11876  if (TREE_CODE (value) != INTEGER_CST)
11877    {
11878      div = build_int_cst (TREE_TYPE (value), divisor);
11879
11880      if (multiple_of_p (TREE_TYPE (value), value, div))
11881	return value;
11882    }
11883
11884  /* If divisor is a power of two, simplify this to bit manipulation.  */
11885  if (divisor == (divisor & -divisor))
11886    {
11887      tree t;
11888
11889      t = build_int_cst (TREE_TYPE (value), divisor - 1);
11890      value = size_binop (PLUS_EXPR, value, t);
11891      t = build_int_cst (TREE_TYPE (value), -divisor);
11892      value = size_binop (BIT_AND_EXPR, value, t);
11893    }
11894  else
11895    {
11896      if (!div)
11897	div = build_int_cst (TREE_TYPE (value), divisor);
11898      value = size_binop (CEIL_DIV_EXPR, value, div);
11899      value = size_binop (MULT_EXPR, value, div);
11900    }
11901
11902  return value;
11903}
11904
11905/* Likewise, but round down.  */
11906
11907tree
11908round_down (tree value, int divisor)
11909{
11910  tree div = NULL_TREE;
11911
11912  gcc_assert (divisor > 0);
11913  if (divisor == 1)
11914    return value;
11915
11916  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
11917     have to do anything.  Only do this when we are not given a const,
11918     because in that case, this check is more expensive than just
11919     doing it.  */
11920  if (TREE_CODE (value) != INTEGER_CST)
11921    {
11922      div = build_int_cst (TREE_TYPE (value), divisor);
11923
11924      if (multiple_of_p (TREE_TYPE (value), value, div))
11925	return value;
11926    }
11927
11928  /* If divisor is a power of two, simplify this to bit manipulation.  */
11929  if (divisor == (divisor & -divisor))
11930    {
11931      tree t;
11932
11933      t = build_int_cst (TREE_TYPE (value), -divisor);
11934      value = size_binop (BIT_AND_EXPR, value, t);
11935    }
11936  else
11937    {
11938      if (!div)
11939	div = build_int_cst (TREE_TYPE (value), divisor);
11940      value = size_binop (FLOOR_DIV_EXPR, value, div);
11941      value = size_binop (MULT_EXPR, value, div);
11942    }
11943
11944  return value;
11945}
11946
11947/* Returns the pointer to the base of the object addressed by EXP and
11948   extracts the information about the offset of the access, storing it
11949   to PBITPOS and POFFSET.  */
11950
11951static tree
11952split_address_to_core_and_offset (tree exp,
11953				  HOST_WIDE_INT *pbitpos, tree *poffset)
11954{
11955  tree core;
11956  enum machine_mode mode;
11957  int unsignedp, volatilep;
11958  HOST_WIDE_INT bitsize;
11959
11960  if (TREE_CODE (exp) == ADDR_EXPR)
11961    {
11962      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11963				  poffset, &mode, &unsignedp, &volatilep,
11964				  false);
11965      core = build_fold_addr_expr (core);
11966    }
11967  else
11968    {
11969      core = exp;
11970      *pbitpos = 0;
11971      *poffset = NULL_TREE;
11972    }
11973
11974  return core;
11975}
11976
11977/* Returns true if addresses of E1 and E2 differ by a constant, false
11978   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
11979
11980bool
11981ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11982{
11983  tree core1, core2;
11984  HOST_WIDE_INT bitpos1, bitpos2;
11985  tree toffset1, toffset2, tdiff, type;
11986
11987  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11988  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11989
11990  if (bitpos1 % BITS_PER_UNIT != 0
11991      || bitpos2 % BITS_PER_UNIT != 0
11992      || !operand_equal_p (core1, core2, 0))
11993    return false;
11994
11995  if (toffset1 && toffset2)
11996    {
11997      type = TREE_TYPE (toffset1);
11998      if (type != TREE_TYPE (toffset2))
11999	toffset2 = fold_convert (type, toffset2);
12000
12001      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
12002      if (!cst_and_fits_in_hwi (tdiff))
12003	return false;
12004
12005      *diff = int_cst_value (tdiff);
12006    }
12007  else if (toffset1 || toffset2)
12008    {
12009      /* If only one of the offsets is non-constant, the difference cannot
12010	 be a constant.  */
12011      return false;
12012    }
12013  else
12014    *diff = 0;
12015
12016  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
12017  return true;
12018}
12019
12020/* Simplify the floating point expression EXP when the sign of the
12021   result is not significant.  Return NULL_TREE if no simplification
12022   is possible.  */
12023
12024tree
12025fold_strip_sign_ops (tree exp)
12026{
12027  tree arg0, arg1;
12028
12029  switch (TREE_CODE (exp))
12030    {
12031    case ABS_EXPR:
12032    case NEGATE_EXPR:
12033      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12034      return arg0 ? arg0 : TREE_OPERAND (exp, 0);
12035
12036    case MULT_EXPR:
12037    case RDIV_EXPR:
12038      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
12039	return NULL_TREE;
12040      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
12041      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
12042      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
12043	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
12044			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
12045			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
12046      break;
12047
12048    default:
12049      break;
12050    }
12051  return NULL_TREE;
12052}
12053
12054