/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type_double.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type_double takes a constant, an overflowable flag and a
   prior overflow indicator.  It forces the value to fit the type and
   sets TREE_OVERFLOW.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "fixed-value.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "toplev.h"
#include "intl.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit-based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
				HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
					tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
				    HOST_WIDE_INT *,
				    enum machine_mode *, int *, int *,
				    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
extern tree make_range (tree, int *, tree *, tree *, bool *);
extern bool merge_ranges (int *, tree *, tree *, int, tree, tree, int,
			  tree, tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (location_t, enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
					tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
						 enum tree_code, tree,
						 tree, tree,
						 tree, tree, int);
static tree fold_mathfn_compare (location_t,
				 enum built_in_function, enum tree_code,
				 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);


/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
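
/* An illustrative sketch of the macro, using 8-bit values instead of
   HOST_WIDE_INT for brevity: for a = 100, b = 100 the wrapped sum is
   -56.  Here a ^ b == 0, so ~(a ^ b) has the sign bit set, and a ^ sum
   also has the sign bit set since a and sum differ in sign; their
   conjunction is negative, so OVERFLOW_SUM_SIGN reports the overflow.  */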

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
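
/* A worked example of this encoding, assuming a 32-bit HOST_WIDE_INT
   (so BASE == 0x10000): for x == 0x12345678 we get
   LOWPART (x) == 0x5678 and HIGHPART (x) == 0x1234, and indeed
   0x5678 + 0x1234 * 0x10000 == 0x12345678.  */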
172
173/* Unpack a two-word integer into 4 words.
174   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
175   WORDS points to the array of HOST_WIDE_INTs.  */
176
177static void
178encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
179{
180  words[0] = LOWPART (low);
181  words[1] = HIGHPART (low);
182  words[2] = LOWPART (hi);
183  words[3] = HIGHPART (hi);
184}
185
186/* Pack an array of 4 words into a two-word integer.
187   WORDS points to the array of words.
188   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */
189
190static void
191decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
192	HOST_WIDE_INT *hi)
193{
194  *low = words[0] + words[1] * BASE;
195  *hi = words[2] + words[3] * BASE;
196}
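
/* encode and decode are inverses of each other.  Continuing the
   32-bit HOST_WIDE_INT example above, encoding low == 0x12345678,
   hi == 0x0BADF00D yields words {0x5678, 0x1234, 0xF00D, 0x0BAD},
   and decoding those words restores the original pair.  */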

/* Force the double-word integer L1, H1 to be within the range of the
   integer type TYPE.  Stores the properly truncated and sign-extended
   double-word integer in *LV, *HV.  Returns true if the operation
   overflows, that is, argument and result are different.  */

int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, const_tree type)
{
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec = TYPE_PRECISION (type);
  int sign_extended_type;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
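
/* As a hypothetical example, fitting l1 == 0x1ff, h1 == 0 to an
   unsigned 8-bit type masks off everything above bit 7, leaving
   *lv == 0xff and *hv == 0, and returns nonzero because the value
   changed.  */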

/* We force the double-word integer HIGH:LOW to the range of the type
   TYPE by sign- or zero-extending it.
   OVERFLOWABLE indicates whether we are interested in overflow of the
   value: when > 0 we are only interested in signed overflow, and when
   < 0 we are interested in any overflow.  OVERFLOWED indicates
   whether overflow has already occurred.  We force the value to be
   within range of TYPE (by setting to 0 or 1 all the bits outside the
   type's range).  We set TREE_OVERFLOW if
	OVERFLOWED is nonzero,
	or OVERFLOWABLE is > 0 and signed overflow occurs,
	or OVERFLOWABLE is < 0 and any overflow occurs.
   We return a new tree node for the extended double-word integer.
   The node is shared if no overflow flags are set.  */

tree
force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
		       HOST_WIDE_INT high, int overflowable,
		       bool overflowed)
{
  int sign_extended_type;
  bool overflow;

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  overflow = fit_double_type (low, high, &low, &high, type);

  /* If we need to set overflow flags, return a new unshared node.  */
  if (overflowed || overflow)
    {
      if (overflowed
	  || overflowable < 0
	  || (overflowable > 0 && sign_extended_type))
	{
	  tree t = make_node (INTEGER_CST);
	  TREE_INT_CST_LOW (t) = low;
	  TREE_INT_CST_HIGH (t) = high;
	  TREE_TYPE (t) = type;
	  TREE_OVERFLOW (t) = 1;
	  return t;
	}
    }

  /* Else build a shared node.  */
  return build_int_cst_wide (type, low, high);
}

/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) h1
		       + (unsigned HOST_WIDE_INT) h2
		       + (l < l1));

  *lv = l;
  *hv = h;

  if (unsigned_p)
    return ((unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1
	    || (h == h1
		&& l < l1));
  else
    return OVERFLOW_SUM_SIGN (h1, h2, h);
}
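
/* A small unsigned sketch: adding l1 == ~(unsigned HOST_WIDE_INT) 0,
   h1 == 0 to l2 == 1, h2 == 0 wraps the low word to zero and carries
   into the high word, giving *lv == 0, *hv == 1 with no overflow.
   Had h1 also been all ones, the carry out of the high word would
   have signalled unsigned overflow.  */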

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	    unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
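
/* The only input whose negation overflows is the most negative
   double-word integer: with l1 == 0 and h1 equal to the minimum
   HOST_WIDE_INT, negation gives *hv == h1 again, (*hv & h1) is
   negative, and neg_double returns nonzero.  */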

/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows according to UNSIGNED_P.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}

/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}

/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
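
/* Both rotates are built from the two complementary logical shifts,
   using the identity (x rotl n) == (x shl n) | (x lshr (prec - n))
   for 0 < n < prec.  For instance, rotating the 8-bit value 0x81
   left by 1 with prec == 8 gives (0x02 | 0x01) == 0x03.  */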

/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Ensure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{		/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)), adjust the quotient.  */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den <= ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1;  */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
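
/* To illustrate the rounding modes on 7 / 2 and -7 / 2:

     TRUNC_DIV_EXPR   7 / 2 ==  3,   -7 / 2 == -3
     FLOOR_DIV_EXPR   7 / 2 ==  3,   -7 / 2 == -4
     CEIL_DIV_EXPR    7 / 2 ==  4,   -7 / 2 == -3
     ROUND_DIV_EXPR   7 / 2 ==  4,   -7 / 2 == -4

   with the remainder recomputed in each case so that
   quo * den + rem == num still holds.  */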

/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  int uns;

  /* The sign of the division is according to operand two, which
     does the correct thing for POINTER_PLUS_EXPR where we want
     a signed division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
  if (TREE_CODE (TREE_TYPE (arg2)) == INTEGER_TYPE
      && TYPE_IS_SIZETYPE (TREE_TYPE (arg2)))
    uns = false;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
			&quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (TREE_TYPE (arg1), quol, quoh);
}

/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
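
/* An illustrative use of the deferral machinery, from a caller that
   only conditionally keeps a folded result (want_result is a
   placeholder condition, not an identifier from this file):

	fold_defer_overflow_warnings ();
	tem = fold_binary (code, type, op0, op1);
	fold_undefer_overflow_warnings (tem != NULL_TREE && want_result,
					stmt, 0);

   so a strict-overflow warning is emitted only if the folded result
   is actually used.  */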

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}

/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
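
/* For instance, for a 16-bit signed type the only rejected constant
   is -32768: its masked low bits are 0x8000, which equals
   (unsigned HOST_WIDE_INT) 1 << (prec - 1), so negating it would
   overflow back to itself.  */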

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
	  && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
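
/* For example, negate_expr_p holds for the integral expression x + 1,
   since the constant operand can absorb the sign: -(x + 1) becomes
   (-1) - x.  It does not hold for x / y, because neither variable
   operand can itself be cheaply negated.  */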

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || !TYPE_OVERFLOW_TRAPS (type))
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
	return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)),
			    fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
			    fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				  TREE_OPERAND (t, 0), negate_expr (tem));
	    }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				  negate_expr (tem), TREE_OPERAND (t, 1));
	    }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T cannot be
   negated in a simpler way.  Also allow for T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    {
      tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
      SET_EXPR_LOCATION (tem, loc);
    }
  return fold_convert_loc (loc, type, tem);
}

/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
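
/* A sketch of the decomposition: splitting x + 3 with code == PLUS_EXPR
   yields *litp == 3, *conp == NULL and returns x as the variable part;
   splitting x - 3 the same way yields *minus_litp == 3 instead, since
   the literal was subtracted.  */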
1562
1563/* Re-associate trees split by the above function.  T1 and T2 are
1564   either expressions to associate or null.  Return the new
1565   expression, if any.  LOC is the location of the new expression.  If
1566   we build an operation, do it in TYPE and with CODE.  */
1567
1568static tree
1569associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
1570{
1571  tree tem;
1572
1573  if (t1 == 0)
1574    return t2;
1575  else if (t2 == 0)
1576    return t1;
1577
1578  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1579     try to fold this since we will have infinite recursion.  But do
1580     deal with any NEGATE_EXPRs.  */
1581  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1582      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1583    {
1584      if (code == PLUS_EXPR)
1585	{
1586	  if (TREE_CODE (t1) == NEGATE_EXPR)
1587	    tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t2),
1588			  fold_convert_loc (loc, type, TREE_OPERAND (t1, 0)));
1589	  else if (TREE_CODE (t2) == NEGATE_EXPR)
1590	    tem = build2 (MINUS_EXPR, type, fold_convert_loc (loc, type, t1),
1591			  fold_convert_loc (loc, type, TREE_OPERAND (t2, 0)));
1592	  else if (integer_zerop (t2))
1593	    return fold_convert_loc (loc, type, t1);
1594	}
1595      else if (code == MINUS_EXPR)
1596	{
1597	  if (integer_zerop (t2))
1598	    return fold_convert_loc (loc, type, t1);
1599	}
1600
1601      tem = build2 (code, type, fold_convert_loc (loc, type, t1),
1602		    fold_convert_loc (loc, type, t2));
1603      goto associate_trees_exit;
1604    }
1605
1606  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
1607		      fold_convert_loc (loc, type, t2));
1608 associate_trees_exit:
1609  protected_set_expr_location (tem, loc);
1610  return tem;
1611}
1612
1613/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1614   for use in int_const_binop, size_binop and size_diffop.  */
1615
1616static bool
1617int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
1618{
1619  if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1620    return false;
1621  if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1622    return false;
1623
1624  switch (code)
1625    {
1626    case LSHIFT_EXPR:
1627    case RSHIFT_EXPR:
1628    case LROTATE_EXPR:
1629    case RROTATE_EXPR:
1630      return true;
1631
1632    default:
1633      break;
1634    }
1635
1636  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1637	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1638	 && TYPE_MODE (type1) == TYPE_MODE (type2);
1639}
1640
1641
1642/* Combine two integer constants ARG1 and ARG2 under operation CODE
1643   to produce a new constant.  Return NULL_TREE if we don't know how
1644   to evaluate CODE at compile-time.
1645
1646   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1647
1648tree
1649int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2, int notrunc)
1650{
1651  unsigned HOST_WIDE_INT int1l, int2l;
1652  HOST_WIDE_INT int1h, int2h;
1653  unsigned HOST_WIDE_INT low;
1654  HOST_WIDE_INT hi;
1655  unsigned HOST_WIDE_INT garbagel;
1656  HOST_WIDE_INT garbageh;
1657  tree t;
1658  tree type = TREE_TYPE (arg1);
1659  int uns = TYPE_UNSIGNED (type);
1660  int is_sizetype
1661    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
1662  int overflow = 0;
1663
1664  int1l = TREE_INT_CST_LOW (arg1);
1665  int1h = TREE_INT_CST_HIGH (arg1);
1666  int2l = TREE_INT_CST_LOW (arg2);
1667  int2h = TREE_INT_CST_HIGH (arg2);
1668
1669  switch (code)
1670    {
1671    case BIT_IOR_EXPR:
1672      low = int1l | int2l, hi = int1h | int2h;
1673      break;
1674
1675    case BIT_XOR_EXPR:
1676      low = int1l ^ int2l, hi = int1h ^ int2h;
1677      break;
1678
1679    case BIT_AND_EXPR:
1680      low = int1l & int2l, hi = int1h & int2h;
1681      break;
1682
1683    case RSHIFT_EXPR:
1684      int2l = -int2l;
1685    case LSHIFT_EXPR:
1686      /* It's unclear from the C standard whether shifts can overflow.
1687	 The following code ignores overflow; perhaps a C standard
1688	 interpretation ruling is needed.  */
1689      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1690		     &low, &hi, !uns);
1691      break;
1692
1693    case RROTATE_EXPR:
1694      int2l = - int2l;
1695    case LROTATE_EXPR:
1696      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
1697		      &low, &hi);
1698      break;
1699
1700    case PLUS_EXPR:
1701      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1702      break;
1703
1704    case MINUS_EXPR:
1705      neg_double (int2l, int2h, &low, &hi);
1706      add_double (int1l, int1h, low, hi, &low, &hi);
1707      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
1708      break;
1709
1710    case MULT_EXPR:
1711      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1712      break;
1713
1714    case TRUNC_DIV_EXPR:
1715    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1716    case EXACT_DIV_EXPR:
1717      /* This is a shortcut for a common special case.  */
1718      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1719	  && !TREE_OVERFLOW (arg1)
1720	  && !TREE_OVERFLOW (arg2)
1721	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1722	{
1723	  if (code == CEIL_DIV_EXPR)
1724	    int1l += int2l - 1;
1725
1726	  low = int1l / int2l, hi = 0;
1727	  break;
1728	}
1729
1730      /* ... fall through ...  */
1731
1732    case ROUND_DIV_EXPR:
1733      if (int2h == 0 && int2l == 0)
1734	return NULL_TREE;
1735      if (int2h == 0 && int2l == 1)
1736	{
1737	  low = int1l, hi = int1h;
1738	  break;
1739	}
1740      if (int1l == int2l && int1h == int2h
1741	  && ! (int1l == 0 && int1h == 0))
1742	{
1743	  low = 1, hi = 0;
1744	  break;
1745	}
1746      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1747				       &low, &hi, &garbagel, &garbageh);
1748      break;
1749
1750    case TRUNC_MOD_EXPR:
1751    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1752      /* This is a shortcut for a common special case.  */
1753      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
1754	  && !TREE_OVERFLOW (arg1)
1755	  && !TREE_OVERFLOW (arg2)
1756	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
1757	{
1758	  if (code == CEIL_MOD_EXPR)
1759	    int1l += int2l - 1;
1760	  low = int1l % int2l, hi = 0;
1761	  break;
1762	}
1763
1764      /* ... fall through ...  */
1765
1766    case ROUND_MOD_EXPR:
1767      if (int2h == 0 && int2l == 0)
1768	return NULL_TREE;
1769      overflow = div_and_round_double (code, uns,
1770				       int1l, int1h, int2l, int2h,
1771				       &garbagel, &garbageh, &low, &hi);
1772      break;
1773
1774    case MIN_EXPR:
1775    case MAX_EXPR:
1776      if (uns)
1777	low = (((unsigned HOST_WIDE_INT) int1h
1778		< (unsigned HOST_WIDE_INT) int2h)
1779	       || (((unsigned HOST_WIDE_INT) int1h
1780		    == (unsigned HOST_WIDE_INT) int2h)
1781		   && int1l < int2l));
1782      else
1783	low = (int1h < int2h
1784	       || (int1h == int2h && int1l < int2l));
1785
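      /* LOW is now nonzero iff the first operand is the smaller one;
	 keep it for MIN_EXPR and take the second one for MAX_EXPR.  */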
1786      if (low == (code == MIN_EXPR))
1787	low = int1l, hi = int1h;
1788      else
1789	low = int2l, hi = int2h;
1790      break;
1791
1792    default:
1793      return NULL_TREE;
1794    }
1795
1796  if (notrunc)
1797    {
1798      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);
1799
1800      /* Propagate overflow flags ourselves.  */
1801      if (((!uns || is_sizetype) && overflow)
1802	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1803	{
1804	  t = copy_node (t);
1805	  TREE_OVERFLOW (t) = 1;
1806	}
1807    }
1808  else
1809    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
1810			       ((!uns || is_sizetype) && overflow)
1811			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1812
1813  return t;
1814}
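/* A minimal usage sketch (the type and values are illustrative):

     tree a = build_int_cst (integer_type_node, 7);
     tree b = build_int_cst (integer_type_node, 3);
     tree q = int_const_binop (TRUNC_DIV_EXPR, a, b, 0);

   Q is then the INTEGER_CST 2, while a zero divisor would have made
   the call return NULL_TREE instead of folding.  */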
1815
1816/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1817   constant.  We assume ARG1 and ARG2 have the same data type, or at least
1818   are the same kind of constant and the same machine mode.  Return zero if
1819   combining the constants is not allowed in the current operating mode.
1820
1821   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */
1822
1823static tree
1824const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
1825{
1826  /* Sanity check for the recursive cases.  */
1827  if (!arg1 || !arg2)
1828    return NULL_TREE;
1829
1830  STRIP_NOPS (arg1);
1831  STRIP_NOPS (arg2);
1832
1833  if (TREE_CODE (arg1) == INTEGER_CST)
1834    return int_const_binop (code, arg1, arg2, notrunc);
1835
1836  if (TREE_CODE (arg1) == REAL_CST)
1837    {
1838      enum machine_mode mode;
1839      REAL_VALUE_TYPE d1;
1840      REAL_VALUE_TYPE d2;
1841      REAL_VALUE_TYPE value;
1842      REAL_VALUE_TYPE result;
1843      bool inexact;
1844      tree t, type;
1845
1846      /* The following codes are handled by real_arithmetic.  */
1847      switch (code)
1848	{
1849	case PLUS_EXPR:
1850	case MINUS_EXPR:
1851	case MULT_EXPR:
1852	case RDIV_EXPR:
1853	case MIN_EXPR:
1854	case MAX_EXPR:
1855	  break;
1856
1857	default:
1858	  return NULL_TREE;
1859	}
1860
1861      d1 = TREE_REAL_CST (arg1);
1862      d2 = TREE_REAL_CST (arg2);
1863
1864      type = TREE_TYPE (arg1);
1865      mode = TYPE_MODE (type);
1866
      /* Don't perform the operation if we honor signaling NaNs and
	 either operand is a NaN.  */
1869      if (HONOR_SNANS (mode)
1870	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1871	return NULL_TREE;
1872
      /* Don't perform the operation if it would raise a division
	 by zero exception.  */
1875      if (code == RDIV_EXPR
1876	  && REAL_VALUES_EQUAL (d2, dconst0)
1877	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1878	return NULL_TREE;
1879
      /* If either operand is a NaN, just return it.  */
1882      if (REAL_VALUE_ISNAN (d1))
1883	return arg1;
1884      else if (REAL_VALUE_ISNAN (d2))
1885	return arg2;
1886
1887      inexact = real_arithmetic (&value, code, &d1, &d2);
1888      real_convert (&result, mode, &value);
1889
      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math is set.  */
1892      if (flag_trapping_math
1893	  && MODE_HAS_INFINITIES (mode)
1894	  && REAL_VALUE_ISINF (result)
1895	  && !REAL_VALUE_ISINF (d1)
1896	  && !REAL_VALUE_ISINF (d2))
1897	return NULL_TREE;
1898
      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
1903      if ((flag_rounding_math
1904	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1905	  && (inexact || !real_identical (&result, &value)))
1906	return NULL_TREE;
1907
1908      t = build_real (type, result);
1909
1910      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1911      return t;
1912    }
1913
1914  if (TREE_CODE (arg1) == FIXED_CST)
1915    {
1916      FIXED_VALUE_TYPE f1;
1917      FIXED_VALUE_TYPE f2;
1918      FIXED_VALUE_TYPE result;
1919      tree t, type;
1920      int sat_p;
1921      bool overflow_p;
1922
1923      /* The following codes are handled by fixed_arithmetic.  */
1924      switch (code)
1925        {
1926	case PLUS_EXPR:
1927	case MINUS_EXPR:
1928	case MULT_EXPR:
1929	case TRUNC_DIV_EXPR:
1930	  f2 = TREE_FIXED_CST (arg2);
1931	  break;
1932
1933	case LSHIFT_EXPR:
1934	case RSHIFT_EXPR:
1935	  f2.data.high = TREE_INT_CST_HIGH (arg2);
1936	  f2.data.low = TREE_INT_CST_LOW (arg2);
1937	  f2.mode = SImode;
1938	  break;
1939
1940        default:
1941	  return NULL_TREE;
1942        }
1943
1944      f1 = TREE_FIXED_CST (arg1);
1945      type = TREE_TYPE (arg1);
1946      sat_p = TYPE_SATURATING (type);
1947      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1948      t = build_fixed (type, result);
1949      /* Propagate overflow flags.  */
1950      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1951	TREE_OVERFLOW (t) = 1;
1952      return t;
1953    }
1954
1955  if (TREE_CODE (arg1) == COMPLEX_CST)
1956    {
1957      tree type = TREE_TYPE (arg1);
1958      tree r1 = TREE_REALPART (arg1);
1959      tree i1 = TREE_IMAGPART (arg1);
1960      tree r2 = TREE_REALPART (arg2);
1961      tree i2 = TREE_IMAGPART (arg2);
1962      tree real, imag;
1963
1964      switch (code)
1965	{
1966	case PLUS_EXPR:
1967	case MINUS_EXPR:
1968	  real = const_binop (code, r1, r2, notrunc);
1969	  imag = const_binop (code, i1, i2, notrunc);
1970	  break;
1971
1972	case MULT_EXPR:
1973	  if (COMPLEX_FLOAT_TYPE_P (type))
1974	    return do_mpc_arg2 (arg1, arg2, type,
1975				/* do_nonfinite= */ folding_initializer,
1976				mpc_mul);
1977
1978	  real = const_binop (MINUS_EXPR,
1979			      const_binop (MULT_EXPR, r1, r2, notrunc),
1980			      const_binop (MULT_EXPR, i1, i2, notrunc),
1981			      notrunc);
1982	  imag = const_binop (PLUS_EXPR,
1983			      const_binop (MULT_EXPR, r1, i2, notrunc),
1984			      const_binop (MULT_EXPR, i1, r2, notrunc),
1985			      notrunc);
1986	  break;
1987
1988	case RDIV_EXPR:
1989	  if (COMPLEX_FLOAT_TYPE_P (type))
1990	    return do_mpc_arg2 (arg1, arg2, type,
1991                                /* do_nonfinite= */ folding_initializer,
1992				mpc_div);
	  /* ... fall through ...  */
1994	case TRUNC_DIV_EXPR:
1995	case CEIL_DIV_EXPR:
1996	case FLOOR_DIV_EXPR:
1997	case ROUND_DIV_EXPR:
1998	  if (flag_complex_method == 0)
1999	  {
2000	    /* Keep this algorithm in sync with
2001	       tree-complex.c:expand_complex_div_straight().
2002
2003	       Expand complex division to scalars, straightforward algorithm.
2004	       a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
2005	       t = br*br + bi*bi
2006	    */
2007	    tree magsquared
2008	      = const_binop (PLUS_EXPR,
2009			     const_binop (MULT_EXPR, r2, r2, notrunc),
2010			     const_binop (MULT_EXPR, i2, i2, notrunc),
2011			     notrunc);
2012	    tree t1
2013	      = const_binop (PLUS_EXPR,
2014			     const_binop (MULT_EXPR, r1, r2, notrunc),
2015			     const_binop (MULT_EXPR, i1, i2, notrunc),
2016			     notrunc);
2017	    tree t2
2018	      = const_binop (MINUS_EXPR,
2019			     const_binop (MULT_EXPR, i1, r2, notrunc),
2020			     const_binop (MULT_EXPR, r1, i2, notrunc),
2021			     notrunc);
2022
2023	    real = const_binop (code, t1, magsquared, notrunc);
2024	    imag = const_binop (code, t2, magsquared, notrunc);
2025	  }
2026	  else
2027	  {
2028	    /* Keep this algorithm in sync with
2029               tree-complex.c:expand_complex_div_wide().
2030
2031	       Expand complex division to scalars, modified algorithm to minimize
2032	       overflow with wide input ranges.  */
2033	    tree compare = fold_build2 (LT_EXPR, boolean_type_node,
2034					fold_abs_const (r2, TREE_TYPE (type)),
2035					fold_abs_const (i2, TREE_TYPE (type)));
2036
2037	    if (integer_nonzerop (compare))
2038	      {
2039		/* In the TRUE branch, we compute
2040		   ratio = br/bi;
2041		   div = (br * ratio) + bi;
2042		   tr = (ar * ratio) + ai;
2043		   ti = (ai * ratio) - ar;
2044		   tr = tr / div;
2045		   ti = ti / div;  */
2046		tree ratio = const_binop (code, r2, i2, notrunc);
2047		tree div = const_binop (PLUS_EXPR, i2,
2048					const_binop (MULT_EXPR, r2, ratio,
2049						     notrunc),
2050					notrunc);
2051		real = const_binop (MULT_EXPR, r1, ratio, notrunc);
2052		real = const_binop (PLUS_EXPR, real, i1, notrunc);
2053		real = const_binop (code, real, div, notrunc);
2054
2055		imag = const_binop (MULT_EXPR, i1, ratio, notrunc);
2056		imag = const_binop (MINUS_EXPR, imag, r1, notrunc);
2057		imag = const_binop (code, imag, div, notrunc);
2058	      }
2059	    else
2060	      {
		/* In the FALSE branch, we compute
		   ratio = bi/br;
		   div = (bi * ratio) + br;
		   tr = (ai * ratio) + ar;
		   ti = ai - (ar * ratio);
		   tr = tr / div;
		   ti = ti / div;  */
2068		tree ratio = const_binop (code, i2, r2, notrunc);
2069		tree div = const_binop (PLUS_EXPR, r2,
2070                                        const_binop (MULT_EXPR, i2, ratio,
2071						     notrunc),
2072					notrunc);
2073
2074		real = const_binop (MULT_EXPR, i1, ratio, notrunc);
2075		real = const_binop (PLUS_EXPR, real, r1, notrunc);
2076		real = const_binop (code, real, div, notrunc);
2077
2078		imag = const_binop (MULT_EXPR, r1, ratio, notrunc);
2079		imag = const_binop (MINUS_EXPR, i1, imag, notrunc);
2080		imag = const_binop (code, imag, div, notrunc);
2081	      }
2082	  }
2083	  break;
2084
2085	default:
2086	  return NULL_TREE;
2087	}
2088
2089      if (real && imag)
2090	return build_complex (type, real, imag);
2091    }
2092
  if (TREE_CODE (arg1) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree elements1, elements2, list = NULL_TREE;

      if (TREE_CODE (arg2) != VECTOR_CST)
	return NULL_TREE;

      elements1 = TREE_VECTOR_CST_ELTS (arg1);
      elements2 = TREE_VECTOR_CST_ELTS (arg2);

      for (i = 0; i < count; i++)
	{
	  tree elem1, elem2, elem;

	  /* Trailing elements can be missing and should be treated
	     as zero.  */
	  if (!elements1)
	    elem1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					integer_zero_node);
	  else
	    {
	      elem1 = TREE_VALUE (elements1);
	      elements1 = TREE_CHAIN (elements1);
	    }

	  if (!elements2)
	    elem2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type),
					integer_zero_node);
	  else
	    {
	      elem2 = TREE_VALUE (elements2);
	      elements2 = TREE_CHAIN (elements2);
	    }

	  elem = const_binop (code, elem1, elem2, notrunc);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elem == NULL_TREE)
	    return NULL_TREE;

	  list = tree_cons (NULL_TREE, elem, list);
	}
      return build_vector (type, nreverse (list));
    }
2137  return NULL_TREE;
2138}
2139
2140/* Create a size type INT_CST node with NUMBER sign extended.  KIND
2141   indicates which particular sizetype to create.  */
2142
2143tree
2144size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
2145{
2146  return build_int_cst (sizetype_tab[(int) kind], number);
2147}
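/* A usage sketch: size_int (4) is shorthand for
   size_int_kind (4, SIZETYPE) and builds the sizetype constant 4;
   bitsize_int (4) builds the same value in bitsizetype instead.  */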
2148
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, as tested by
   int_binop_types_match_p.  If the operands are constant, so is the
   result.  */
2153
2154tree
2155size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
2156{
2157  tree type = TREE_TYPE (arg0);
2158
2159  if (arg0 == error_mark_node || arg1 == error_mark_node)
2160    return error_mark_node;
2161
2162  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
2163                                       TREE_TYPE (arg1)));
2164
2165  /* Handle the special case of two integer constants faster.  */
2166  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2167    {
2168      /* And some specific cases even faster than that.  */
2169      if (code == PLUS_EXPR)
2170	{
2171	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
2172	    return arg1;
2173	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2174	    return arg0;
2175	}
2176      else if (code == MINUS_EXPR)
2177	{
2178	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
2179	    return arg0;
2180	}
2181      else if (code == MULT_EXPR)
2182	{
2183	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
2184	    return arg1;
2185	}
2186
2187      /* Handle general case of two integer constants.  */
2188      return int_const_binop (code, arg0, arg1, 0);
2189    }
2190
2191  return fold_build2_loc (loc, code, type, arg0, arg1);
2192}
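/* A minimal sketch of the constant path (the values are illustrative):

     tree n = size_binop (PLUS_EXPR, size_int (8), size_int (4));

   N is the sizetype constant 12; adding size_zero_node instead would
   simply return the other operand.  */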
2193
/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in the signed type corresponding to the type of the operands.  */
2197
2198tree
2199size_diffop_loc (location_t loc, tree arg0, tree arg1)
2200{
2201  tree type = TREE_TYPE (arg0);
2202  tree ctype;
2203
2204  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
2205				       TREE_TYPE (arg1)));
2206
2207  /* If the type is already signed, just do the simple thing.  */
2208  if (!TYPE_UNSIGNED (type))
2209    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
2210
2211  if (type == sizetype)
2212    ctype = ssizetype;
2213  else if (type == bitsizetype)
2214    ctype = sbitsizetype;
2215  else
2216    ctype = signed_type_for (type);
2217
2218  /* If either operand is not a constant, do the conversions to the signed
2219     type and subtract.  The hardware will do the right thing with any
2220     overflow in the subtraction.  */
2221  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
2222    return size_binop_loc (loc, MINUS_EXPR,
2223			   fold_convert_loc (loc, ctype, arg0),
2224			   fold_convert_loc (loc, ctype, arg1));
2225
2226  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
2227     Otherwise, subtract the other way, convert to CTYPE (we know that can't
2228     overflow) and negate (which can't either).  Special-case a result
2229     of zero while we're here.  */
2230  if (tree_int_cst_equal (arg0, arg1))
2231    return build_int_cst (ctype, 0);
2232  else if (tree_int_cst_lt (arg1, arg0))
2233    return fold_convert_loc (loc, ctype,
2234			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
2235  else
2236    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
2237			   fold_convert_loc (loc, ctype,
2238					     size_binop_loc (loc,
2239							     MINUS_EXPR,
2240							     arg1, arg0)));
2241}
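/* For example (a sketch): applied to the sizetype constants 2 and 5,
   size_diffop yields the ssizetype constant -3; the result is always
   expressed in the signed type corresponding to the operands' type.  */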
2242
2243/* A subroutine of fold_convert_const handling conversions of an
2244   INTEGER_CST to another integer type.  */
2245
2246static tree
2247fold_convert_const_int_from_int (tree type, const_tree arg1)
2248{
2249  tree t;
2250
2251  /* Given an integer constant, make new constant with new type,
2252     appropriately sign-extended or truncated.  */
2253  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
2254			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting from a pointer,  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1))
			     /* or to a sizetype with the same
				signedness and unchanged precision.
2260				???  sizetype is always sign-extended,
2261				but its signedness depends on the
2262				frontend.  Thus we see spurious overflows
2263				here if we do not check this.  */
2264			     && !((TYPE_PRECISION (TREE_TYPE (arg1))
2265				   == TYPE_PRECISION (type))
2266				  && (TYPE_UNSIGNED (TREE_TYPE (arg1))
2267				      == TYPE_UNSIGNED (type))
2268				  && ((TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
2269				       && TYPE_IS_SIZETYPE (TREE_TYPE (arg1)))
2270				      || (TREE_CODE (type) == INTEGER_TYPE
2271					  && TYPE_IS_SIZETYPE (type)))),
2272			     (TREE_INT_CST_HIGH (arg1) < 0
2273		 	      && (TYPE_UNSIGNED (type)
2274				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2275			     | TREE_OVERFLOW (arg1));
2276
2277  return t;
2278}
2279
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */
2282
2283static tree
2284fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
2285{
2286  int overflow = 0;
2287  tree t;
2288
2289  /* The following code implements the floating point to integer
2290     conversion rules required by the Java Language Specification,
2291     that IEEE NaNs are mapped to zero and values that overflow
2292     the target precision saturate, i.e. values greater than
2293     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
2294     are mapped to INT_MIN.  These semantics are allowed by the
2295     C and C++ standards that simply state that the behavior of
2296     FP-to-integer conversion is unspecified upon overflow.  */
2297
2298  HOST_WIDE_INT high, low;
2299  REAL_VALUE_TYPE r;
2300  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
2301
2302  switch (code)
2303    {
2304    case FIX_TRUNC_EXPR:
2305      real_trunc (&r, VOIDmode, &x);
2306      break;
2307
2308    default:
2309      gcc_unreachable ();
2310    }
2311
2312  /* If R is NaN, return zero and show we have an overflow.  */
2313  if (REAL_VALUE_ISNAN (r))
2314    {
2315      overflow = 1;
2316      high = 0;
2317      low = 0;
2318    }
2319
2320  /* See if R is less than the lower bound or greater than the
2321     upper bound.  */
2322
2323  if (! overflow)
2324    {
2325      tree lt = TYPE_MIN_VALUE (type);
2326      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
2327      if (REAL_VALUES_LESS (r, l))
2328	{
2329	  overflow = 1;
2330	  high = TREE_INT_CST_HIGH (lt);
2331	  low = TREE_INT_CST_LOW (lt);
2332	}
2333    }
2334
2335  if (! overflow)
2336    {
2337      tree ut = TYPE_MAX_VALUE (type);
2338      if (ut)
2339	{
2340	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
2341	  if (REAL_VALUES_LESS (u, r))
2342	    {
2343	      overflow = 1;
2344	      high = TREE_INT_CST_HIGH (ut);
2345	      low = TREE_INT_CST_LOW (ut);
2346	    }
2347	}
2348    }
2349
2350  if (! overflow)
2351    REAL_VALUE_TO_INT (&low, &high, r);
2352
2353  t = force_fit_type_double (type, low, high, -1,
2354			     overflow | TREE_OVERFLOW (arg1));
2355  return t;
2356}
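/* For example (a sketch, assuming a 32-bit "int"): truncating the
   REAL_CST 3.75 to integer_type_node yields the INTEGER_CST 3;
   truncating a NaN yields 0 and truncating 1e30 yields INT_MAX, in
   both of the latter cases with TREE_OVERFLOW set on the result.  */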
2357
2358/* A subroutine of fold_convert_const handling conversions of a
2359   FIXED_CST to an integer type.  */
2360
2361static tree
2362fold_convert_const_int_from_fixed (tree type, const_tree arg1)
2363{
2364  tree t;
2365  double_int temp, temp_trunc;
2366  unsigned int mode;
2367
2368  /* Right shift FIXED_CST to temp by fbit.  */
2369  temp = TREE_FIXED_CST (arg1).data;
2370  mode = TREE_FIXED_CST (arg1).mode;
2371  if (GET_MODE_FBIT (mode) < 2 * HOST_BITS_PER_WIDE_INT)
2372    {
2373      lshift_double (temp.low, temp.high,
2374		     - GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2375		     &temp.low, &temp.high, SIGNED_FIXED_POINT_MODE_P (mode));
2376
2377      /* Left shift temp to temp_trunc by fbit.  */
2378      lshift_double (temp.low, temp.high,
2379		     GET_MODE_FBIT (mode), 2 * HOST_BITS_PER_WIDE_INT,
2380		     &temp_trunc.low, &temp_trunc.high,
2381		     SIGNED_FIXED_POINT_MODE_P (mode));
2382    }
2383  else
2384    {
2385      temp.low = 0;
2386      temp.high = 0;
2387      temp_trunc.low = 0;
2388      temp_trunc.high = 0;
2389    }
2390
  /* If FIXED_CST is negative, we need to round the value toward 0.
     We do so by adding 1 to temp when the discarded fractional bits
     are not all zero.  */
2393  if (SIGNED_FIXED_POINT_MODE_P (mode) && temp_trunc.high < 0
2394      && !double_int_equal_p (TREE_FIXED_CST (arg1).data, temp_trunc))
2395    {
2396      double_int one;
2397      one.low = 1;
2398      one.high = 0;
2399      temp = double_int_add (temp, one);
2400    }
2401
2402  /* Given a fixed-point constant, make new constant with new type,
2403     appropriately sign-extended or truncated.  */
2404  t = force_fit_type_double (type, temp.low, temp.high, -1,
2405			     (temp.high < 0
2406		 	      && (TYPE_UNSIGNED (type)
2407				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
2408			     | TREE_OVERFLOW (arg1));
2409
2410  return t;
2411}
2412
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */
2415
2416static tree
2417fold_convert_const_real_from_real (tree type, const_tree arg1)
2418{
2419  REAL_VALUE_TYPE value;
2420  tree t;
2421
2422  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2423  t = build_real (type, value);
2424
2425  /* If converting an infinity or NAN to a representation that doesn't
2426     have one, set the overflow bit so that we can produce some kind of
2427     error message at the appropriate point if necessary.  It's not the
2428     most user-friendly message, but it's better than nothing.  */
2429  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2430      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2431    TREE_OVERFLOW (t) = 1;
2432  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2433	   && !MODE_HAS_NANS (TYPE_MODE (type)))
2434    TREE_OVERFLOW (t) = 1;
  /* Regular overflow: the conversion produced an infinity in a mode
     that can't represent infinities.  */
2437  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2438	   && REAL_VALUE_ISINF (value)
2439	   && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2440    TREE_OVERFLOW (t) = 1;
2441  else
2442    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2443  return t;
2444}
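/* For example (a sketch, assuming IEEE single and double): converting
   the double REAL_CST 1e300 to float quietly yields +Inf, since
   single precision has infinities; TREE_OVERFLOW is only set when the
   target mode cannot represent the infinity or NaN being produced.  */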
2445
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */
2448
2449static tree
2450fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2451{
2452  REAL_VALUE_TYPE value;
2453  tree t;
2454
2455  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2456  t = build_real (type, value);
2457
2458  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2459  return t;
2460}
2461
/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */
2464
2465static tree
2466fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2467{
2468  FIXED_VALUE_TYPE value;
2469  tree t;
2470  bool overflow_p;
2471
2472  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2473			      TYPE_SATURATING (type));
2474  t = build_fixed (type, value);
2475
2476  /* Propagate overflow flags.  */
2477  if (overflow_p | TREE_OVERFLOW (arg1))
2478    TREE_OVERFLOW (t) = 1;
2479  return t;
2480}
2481
/* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
   to a fixed-point type.  */
2484
2485static tree
2486fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2487{
2488  FIXED_VALUE_TYPE value;
2489  tree t;
2490  bool overflow_p;
2491
2492  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
2493				       TREE_INT_CST (arg1),
2494				       TYPE_UNSIGNED (TREE_TYPE (arg1)),
2495				       TYPE_SATURATING (type));
2496  t = build_fixed (type, value);
2497
2498  /* Propagate overflow flags.  */
2499  if (overflow_p | TREE_OVERFLOW (arg1))
2500    TREE_OVERFLOW (t) = 1;
2501  return t;
2502}
2503
/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */
2506
2507static tree
2508fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2509{
2510  FIXED_VALUE_TYPE value;
2511  tree t;
2512  bool overflow_p;
2513
2514  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2515					&TREE_REAL_CST (arg1),
2516					TYPE_SATURATING (type));
2517  t = build_fixed (type, value);
2518
2519  /* Propagate overflow flags.  */
2520  if (overflow_p | TREE_OVERFLOW (arg1))
2521    TREE_OVERFLOW (t) = 1;
2522  return t;
2523}
2524
2525/* Attempt to fold type conversion operation CODE of expression ARG1 to
2526   type TYPE.  If no simplification can be done return NULL_TREE.  */
2527
2528static tree
2529fold_convert_const (enum tree_code code, tree type, tree arg1)
2530{
2531  if (TREE_TYPE (arg1) == type)
2532    return arg1;
2533
2534  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2535      || TREE_CODE (type) == OFFSET_TYPE)
2536    {
2537      if (TREE_CODE (arg1) == INTEGER_CST)
2538	return fold_convert_const_int_from_int (type, arg1);
2539      else if (TREE_CODE (arg1) == REAL_CST)
2540	return fold_convert_const_int_from_real (code, type, arg1);
2541      else if (TREE_CODE (arg1) == FIXED_CST)
2542	return fold_convert_const_int_from_fixed (type, arg1);
2543    }
2544  else if (TREE_CODE (type) == REAL_TYPE)
2545    {
2546      if (TREE_CODE (arg1) == INTEGER_CST)
2547	return build_real_from_int_cst (type, arg1);
2548      else if (TREE_CODE (arg1) == REAL_CST)
2549	return fold_convert_const_real_from_real (type, arg1);
2550      else if (TREE_CODE (arg1) == FIXED_CST)
2551	return fold_convert_const_real_from_fixed (type, arg1);
2552    }
2553  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2554    {
2555      if (TREE_CODE (arg1) == FIXED_CST)
2556	return fold_convert_const_fixed_from_fixed (type, arg1);
2557      else if (TREE_CODE (arg1) == INTEGER_CST)
2558	return fold_convert_const_fixed_from_int (type, arg1);
2559      else if (TREE_CODE (arg1) == REAL_CST)
2560	return fold_convert_const_fixed_from_real (type, arg1);
2561    }
2562  return NULL_TREE;
2563}
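/* A usage sketch (the types are illustrative):

     tree t = fold_convert_const (FLOAT_EXPR, double_type_node,
				  build_int_cst (integer_type_node, 2));

   T is the REAL_CST 2.0.  When no folding applies, for instance when
   ARG1 is not a constant at all, NULL_TREE is returned.  */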
2564
2565/* Construct a vector of zero elements of vector type TYPE.  */
2566
2567static tree
2568build_zero_vector (tree type)
2569{
2570  tree elem, list;
2571  int i, units;
2572
2573  elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2574  units = TYPE_VECTOR_SUBPARTS (type);
2575
2576  list = NULL_TREE;
2577  for (i = 0; i < units; i++)
2578    list = tree_cons (NULL_TREE, elem, list);
2579  return build_vector (type, list);
2580}
2581
/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2583
2584bool
2585fold_convertible_p (const_tree type, const_tree arg)
2586{
2587  tree orig = TREE_TYPE (arg);
2588
2589  if (type == orig)
2590    return true;
2591
2592  if (TREE_CODE (arg) == ERROR_MARK
2593      || TREE_CODE (type) == ERROR_MARK
2594      || TREE_CODE (orig) == ERROR_MARK)
2595    return false;
2596
2597  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2598    return true;
2599
2600  switch (TREE_CODE (type))
2601    {
2602    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2603    case POINTER_TYPE: case REFERENCE_TYPE:
2604    case OFFSET_TYPE:
2605      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2606	  || TREE_CODE (orig) == OFFSET_TYPE)
2607        return true;
2608      return (TREE_CODE (orig) == VECTOR_TYPE
2609	      && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2610
2611    case REAL_TYPE:
2612    case FIXED_POINT_TYPE:
2613    case COMPLEX_TYPE:
2614    case VECTOR_TYPE:
2615    case VOID_TYPE:
2616      return TREE_CODE (type) == TREE_CODE (orig);
2617
2618    default:
2619      return false;
2620    }
2621}
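/* For example (a sketch): for a pointer-valued P,
   fold_convertible_p (integer_type_node, p) is true, since pointers
   convert to integers with a NOP_EXPR, while
   fold_convertible_p (double_type_node, p) is false because that is
   not a simple NOP conversion.  */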
2622
2623/* Convert expression ARG to type TYPE.  Used by the middle-end for
2624   simple conversions in preference to calling the front-end's convert.  */
2625
2626tree
2627fold_convert_loc (location_t loc, tree type, tree arg)
2628{
2629  tree orig = TREE_TYPE (arg);
2630  tree tem;
2631
2632  if (type == orig)
2633    return arg;
2634
2635  if (TREE_CODE (arg) == ERROR_MARK
2636      || TREE_CODE (type) == ERROR_MARK
2637      || TREE_CODE (orig) == ERROR_MARK)
2638    return error_mark_node;
2639
2640  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2641    return fold_build1_loc (loc, NOP_EXPR, type, arg);
2642
2643  switch (TREE_CODE (type))
2644    {
2645    case POINTER_TYPE:
2646    case REFERENCE_TYPE:
2647      /* Handle conversions between pointers to different address spaces.  */
2648      if (POINTER_TYPE_P (orig)
2649	  && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2650	      != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2651	return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2652      /* fall through */
2653
2654    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2655    case OFFSET_TYPE:
2656      if (TREE_CODE (arg) == INTEGER_CST)
2657	{
2658	  tem = fold_convert_const (NOP_EXPR, type, arg);
2659	  if (tem != NULL_TREE)
2660	    return tem;
2661	}
2662      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2663	  || TREE_CODE (orig) == OFFSET_TYPE)
2664	return fold_build1_loc (loc, NOP_EXPR, type, arg);
2665      if (TREE_CODE (orig) == COMPLEX_TYPE)
2666	return fold_convert_loc (loc, type,
2667			     fold_build1_loc (loc, REALPART_EXPR,
2668					  TREE_TYPE (orig), arg));
2669      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2670		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2671      return fold_build1_loc (loc, NOP_EXPR, type, arg);
2672
2673    case REAL_TYPE:
2674      if (TREE_CODE (arg) == INTEGER_CST)
2675	{
2676	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
2677	  if (tem != NULL_TREE)
2678	    return tem;
2679	}
2680      else if (TREE_CODE (arg) == REAL_CST)
2681	{
2682	  tem = fold_convert_const (NOP_EXPR, type, arg);
2683	  if (tem != NULL_TREE)
2684	    return tem;
2685	}
2686      else if (TREE_CODE (arg) == FIXED_CST)
2687	{
2688	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2689	  if (tem != NULL_TREE)
2690	    return tem;
2691	}
2692
2693      switch (TREE_CODE (orig))
2694	{
2695	case INTEGER_TYPE:
2696	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2697	case POINTER_TYPE: case REFERENCE_TYPE:
2698	  return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2699
2700	case REAL_TYPE:
2701	  return fold_build1_loc (loc, NOP_EXPR, type, arg);
2702
2703	case FIXED_POINT_TYPE:
2704	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2705
2706	case COMPLEX_TYPE:
2707	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2708	  return fold_convert_loc (loc, type, tem);
2709
2710	default:
2711	  gcc_unreachable ();
2712	}
2713
2714    case FIXED_POINT_TYPE:
2715      if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2716	  || TREE_CODE (arg) == REAL_CST)
2717	{
2718	  tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2719	  if (tem != NULL_TREE)
2720	    goto fold_convert_exit;
2721	}
2722
2723      switch (TREE_CODE (orig))
2724	{
2725	case FIXED_POINT_TYPE:
2726	case INTEGER_TYPE:
2727	case ENUMERAL_TYPE:
2728	case BOOLEAN_TYPE:
2729	case REAL_TYPE:
2730	  return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2731
2732	case COMPLEX_TYPE:
2733	  tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2734	  return fold_convert_loc (loc, type, tem);
2735
2736	default:
2737	  gcc_unreachable ();
2738	}
2739
2740    case COMPLEX_TYPE:
2741      switch (TREE_CODE (orig))
2742	{
2743	case INTEGER_TYPE:
2744	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2745	case POINTER_TYPE: case REFERENCE_TYPE:
2746	case REAL_TYPE:
2747	case FIXED_POINT_TYPE:
2748	  return fold_build2_loc (loc, COMPLEX_EXPR, type,
2749			      fold_convert_loc (loc, TREE_TYPE (type), arg),
2750			      fold_convert_loc (loc, TREE_TYPE (type),
2751					    integer_zero_node));
2752	case COMPLEX_TYPE:
2753	  {
2754	    tree rpart, ipart;
2755
2756	    if (TREE_CODE (arg) == COMPLEX_EXPR)
2757	      {
2758		rpart = fold_convert_loc (loc, TREE_TYPE (type),
2759				      TREE_OPERAND (arg, 0));
2760		ipart = fold_convert_loc (loc, TREE_TYPE (type),
2761				      TREE_OPERAND (arg, 1));
2762		return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2763	      }
2764
2765	    arg = save_expr (arg);
2766	    rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2767	    ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2768	    rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2769	    ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2770	    return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2771	  }
2772
2773	default:
2774	  gcc_unreachable ();
2775	}
2776
2777    case VECTOR_TYPE:
2778      if (integer_zerop (arg))
2779	return build_zero_vector (type);
2780      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2781      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2782		  || TREE_CODE (orig) == VECTOR_TYPE);
2783      return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2784
2785    case VOID_TYPE:
2786      tem = fold_ignored_result (arg);
2787      if (TREE_CODE (tem) == MODIFY_EXPR)
2788	goto fold_convert_exit;
2789      return fold_build1_loc (loc, NOP_EXPR, type, tem);
2790
2791    default:
2792      gcc_unreachable ();
2793    }
2794 fold_convert_exit:
2795  protected_set_expr_location (tem, loc);
2796  return tem;
2797}
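/* A usage sketch: fold_convert (double_type_node, integer_one_node)
   folds directly to the REAL_CST 1.0, whereas converting a
   non-constant integral expression builds a FLOAT_EXPR around it.  */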
2798
/* Return false if X can be assumed not to be an lvalue, true
   otherwise.  */
2801
2802static bool
2803maybe_lvalue_p (const_tree x)
2804{
2805  /* We only need to wrap lvalue tree codes.  */
2806  switch (TREE_CODE (x))
2807  {
2808  case VAR_DECL:
2809  case PARM_DECL:
2810  case RESULT_DECL:
2811  case LABEL_DECL:
2812  case FUNCTION_DECL:
2813  case SSA_NAME:
2814
2815  case COMPONENT_REF:
2816  case INDIRECT_REF:
2817  case ALIGN_INDIRECT_REF:
2818  case MISALIGNED_INDIRECT_REF:
2819  case ARRAY_REF:
2820  case ARRAY_RANGE_REF:
2821  case BIT_FIELD_REF:
2822  case OBJ_TYPE_REF:
2823
2824  case REALPART_EXPR:
2825  case IMAGPART_EXPR:
2826  case PREINCREMENT_EXPR:
2827  case PREDECREMENT_EXPR:
2828  case SAVE_EXPR:
2829  case TRY_CATCH_EXPR:
2830  case WITH_CLEANUP_EXPR:
2831  case COMPOUND_EXPR:
2832  case MODIFY_EXPR:
2833  case TARGET_EXPR:
2834  case COND_EXPR:
2835  case BIND_EXPR:
2836    break;
2837
2838  default:
2839    /* Assume the worst for front-end tree codes.  */
2840    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2841      break;
2842    return false;
2843  }
2844
2845  return true;
2846}
2847
2848/* Return an expr equal to X but certainly not valid as an lvalue.  */
2849
2850tree
2851non_lvalue_loc (location_t loc, tree x)
2852{
2853  /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2854     us.  */
2855  if (in_gimple_form)
2856    return x;
2857
2858  if (! maybe_lvalue_p (x))
2859    return x;
2860  x = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2861  SET_EXPR_LOCATION (x, loc);
2862  return x;
2863}
2864
2865/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2866   Zero means allow extended lvalues.  */
2867
2868int pedantic_lvalues;
2869
2870/* When pedantic, return an expr equal to X but certainly not valid as a
2871   pedantic lvalue.  Otherwise, return X.  */
2872
2873static tree
2874pedantic_non_lvalue_loc (location_t loc, tree x)
2875{
2876  if (pedantic_lvalues)
2877    return non_lvalue_loc (loc, x);
2878
2879  if (CAN_HAVE_LOCATION_P (x)
2880      && EXPR_LOCATION (x) != loc
2881      && !(TREE_CODE (x) == SAVE_EXPR
2882	   || TREE_CODE (x) == TARGET_EXPR
2883	   || TREE_CODE (x) == BIND_EXPR))
2884    {
2885      x = copy_node (x);
2886      SET_EXPR_LOCATION (x, loc);
2887    }
2888  return x;
2889}
2890
/* Given a tree comparison code, return the code that is the logical inverse
   of the given code.  It is not safe to do this for floating-point
   comparisons, except for NE_EXPR and EQ_EXPR, so we receive a flag that
   says whether NaNs must be honored: if reversing the comparison is
   unsafe, return ERROR_MARK.  */
2895
2896enum tree_code
2897invert_tree_comparison (enum tree_code code, bool honor_nans)
2898{
2899  if (honor_nans && flag_trapping_math)
2900    return ERROR_MARK;
2901
2902  switch (code)
2903    {
2904    case EQ_EXPR:
2905      return NE_EXPR;
2906    case NE_EXPR:
2907      return EQ_EXPR;
2908    case GT_EXPR:
2909      return honor_nans ? UNLE_EXPR : LE_EXPR;
2910    case GE_EXPR:
2911      return honor_nans ? UNLT_EXPR : LT_EXPR;
2912    case LT_EXPR:
2913      return honor_nans ? UNGE_EXPR : GE_EXPR;
2914    case LE_EXPR:
2915      return honor_nans ? UNGT_EXPR : GT_EXPR;
2916    case LTGT_EXPR:
2917      return UNEQ_EXPR;
2918    case UNEQ_EXPR:
2919      return LTGT_EXPR;
2920    case UNGT_EXPR:
2921      return LE_EXPR;
2922    case UNGE_EXPR:
2923      return LT_EXPR;
2924    case UNLT_EXPR:
2925      return GE_EXPR;
2926    case UNLE_EXPR:
2927      return GT_EXPR;
2928    case ORDERED_EXPR:
2929      return UNORDERED_EXPR;
2930    case UNORDERED_EXPR:
2931      return ORDERED_EXPR;
2932    default:
2933      gcc_unreachable ();
2934    }
2935}
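/* For example: invert_tree_comparison (LT_EXPR, false) is GE_EXPR,
   whereas with HONOR_NANS set it is UNGE_EXPR, so that a NaN operand
   still makes exactly one of the original and inverted comparisons
   true.  If NaNs are honored and flag_trapping_math is set,
   ERROR_MARK is returned instead.  */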
2936
2937/* Similar, but return the comparison that results if the operands are
2938   swapped.  This is safe for floating-point.  */
2939
2940enum tree_code
2941swap_tree_comparison (enum tree_code code)
2942{
2943  switch (code)
2944    {
2945    case EQ_EXPR:
2946    case NE_EXPR:
2947    case ORDERED_EXPR:
2948    case UNORDERED_EXPR:
2949    case LTGT_EXPR:
2950    case UNEQ_EXPR:
2951      return code;
2952    case GT_EXPR:
2953      return LT_EXPR;
2954    case GE_EXPR:
2955      return LE_EXPR;
2956    case LT_EXPR:
2957      return GT_EXPR;
2958    case LE_EXPR:
2959      return GE_EXPR;
2960    case UNGT_EXPR:
2961      return UNLT_EXPR;
2962    case UNGE_EXPR:
2963      return UNLE_EXPR;
2964    case UNLT_EXPR:
2965      return UNGT_EXPR;
2966    case UNLE_EXPR:
2967      return UNGE_EXPR;
2968    default:
2969      gcc_unreachable ();
2970    }
2971}
2972
2973
2974/* Convert a comparison tree code from an enum tree_code representation
2975   into a compcode bit-based encoding.  This function is the inverse of
2976   compcode_to_comparison.  */
2977
2978static enum comparison_code
2979comparison_to_compcode (enum tree_code code)
2980{
2981  switch (code)
2982    {
2983    case LT_EXPR:
2984      return COMPCODE_LT;
2985    case EQ_EXPR:
2986      return COMPCODE_EQ;
2987    case LE_EXPR:
2988      return COMPCODE_LE;
2989    case GT_EXPR:
2990      return COMPCODE_GT;
2991    case NE_EXPR:
2992      return COMPCODE_NE;
2993    case GE_EXPR:
2994      return COMPCODE_GE;
2995    case ORDERED_EXPR:
2996      return COMPCODE_ORD;
2997    case UNORDERED_EXPR:
2998      return COMPCODE_UNORD;
2999    case UNLT_EXPR:
3000      return COMPCODE_UNLT;
3001    case UNEQ_EXPR:
3002      return COMPCODE_UNEQ;
3003    case UNLE_EXPR:
3004      return COMPCODE_UNLE;
3005    case UNGT_EXPR:
3006      return COMPCODE_UNGT;
3007    case LTGT_EXPR:
3008      return COMPCODE_LTGT;
3009    case UNGE_EXPR:
3010      return COMPCODE_UNGE;
3011    default:
3012      gcc_unreachable ();
3013    }
3014}
3015
3016/* Convert a compcode bit-based encoding of a comparison operator back
3017   to GCC's enum tree_code representation.  This function is the
3018   inverse of comparison_to_compcode.  */
3019
3020static enum tree_code
3021compcode_to_comparison (enum comparison_code code)
3022{
3023  switch (code)
3024    {
3025    case COMPCODE_LT:
3026      return LT_EXPR;
3027    case COMPCODE_EQ:
3028      return EQ_EXPR;
3029    case COMPCODE_LE:
3030      return LE_EXPR;
3031    case COMPCODE_GT:
3032      return GT_EXPR;
3033    case COMPCODE_NE:
3034      return NE_EXPR;
3035    case COMPCODE_GE:
3036      return GE_EXPR;
3037    case COMPCODE_ORD:
3038      return ORDERED_EXPR;
3039    case COMPCODE_UNORD:
3040      return UNORDERED_EXPR;
3041    case COMPCODE_UNLT:
3042      return UNLT_EXPR;
3043    case COMPCODE_UNEQ:
3044      return UNEQ_EXPR;
3045    case COMPCODE_UNLE:
3046      return UNLE_EXPR;
3047    case COMPCODE_UNGT:
3048      return UNGT_EXPR;
3049    case COMPCODE_LTGT:
3050      return LTGT_EXPR;
3051    case COMPCODE_UNGE:
3052      return UNGE_EXPR;
3053    default:
3054      gcc_unreachable ();
3055    }
3056}
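/* The point of the bit-based encoding is that logical combinations of
   comparisons become bitwise operations on compcodes.  For example:

     COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE   (1 | 2 == 3)
     COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ   (3 & 6 == 2)

   This is exactly how combine_comparisons below merges two
   comparisons of the same operands.  */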
3057
3058/* Return a tree for the comparison which is the combination of
3059   doing the AND or OR (depending on CODE) of the two operations LCODE
3060   and RCODE on the identical operands LL_ARG and LR_ARG.  Take into account
3061   the possibility of trapping if the mode has NaNs, and return NULL_TREE
3062   if this makes the transformation invalid.  */
3063
3064tree
3065combine_comparisons (location_t loc,
3066		     enum tree_code code, enum tree_code lcode,
3067		     enum tree_code rcode, tree truth_type,
3068		     tree ll_arg, tree lr_arg)
3069{
3070  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
3071  enum comparison_code lcompcode = comparison_to_compcode (lcode);
3072  enum comparison_code rcompcode = comparison_to_compcode (rcode);
3073  int compcode;
3074
3075  switch (code)
3076    {
3077    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
3078      compcode = lcompcode & rcompcode;
3079      break;
3080
3081    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
3082      compcode = lcompcode | rcompcode;
3083      break;
3084
3085    default:
3086      return NULL_TREE;
3087    }
3088
3089  if (!honor_nans)
3090    {
3091      /* Eliminate unordered comparisons, as well as LTGT and ORD
3092	 which are not used unless the mode has NaNs.  */
3093      compcode &= ~COMPCODE_UNORD;
3094      if (compcode == COMPCODE_LTGT)
3095	compcode = COMPCODE_NE;
3096      else if (compcode == COMPCODE_ORD)
3097	compcode = COMPCODE_TRUE;
3098    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }
3133
3134  if (compcode == COMPCODE_TRUE)
3135    return constant_boolean_node (true, truth_type);
3136  else if (compcode == COMPCODE_FALSE)
3137    return constant_boolean_node (false, truth_type);
3138  else
3139    {
3140      enum tree_code tcode;
3141
3142      tcode = compcode_to_comparison ((enum comparison_code) compcode);
3143      return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
3144    }
3145}
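/* For example (a sketch, with integer operands so that NaNs are
   moot): combining (x < y) || (x == y) under TRUTH_ORIF_EXPR yields
   x <= y, since COMPCODE_LT | COMPCODE_EQ is COMPCODE_LE, while
   combining (x < y) && (x > y) yields constant false.  */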
3146
3147/* Return nonzero if two operands (typically of the same tree node)
3148   are necessarily equal.  If either argument has side-effects this
3149   function returns zero.  FLAGS modifies behavior as follows:
3150
3151   If OEP_ONLY_CONST is set, only return nonzero for constants.
3152   This function tests whether the operands are indistinguishable;
3153   it does not test whether they are equal using C's == operation.
3154   The distinction is important for IEEE floating point, because
3155   (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
3156   (2) two NaNs may be indistinguishable, but NaN!=NaN.
3157
3158   If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
3159   even though it may hold multiple values during a function.
3160   This is because a GCC tree node guarantees that nothing else is
3161   executed between the evaluation of its "operands" (which may often
3162   be evaluated in arbitrary order).  Hence if the operands themselves
3163   don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
3164   same value in each operand/subexpression.  Hence leaving OEP_ONLY_CONST
3165   unset means assuming isochronic (or instantaneous) tree equivalence.
3166   Unless comparing arbitrary expression trees, such as from different
3167   statements, this flag can usually be left unset.
3168
3169   If OEP_PURE_SAME is set, then pure functions with identical arguments
3170   are considered the same.  It is used when the caller has other ways
3171   to ensure that global memory is unchanged in between.  */
3172
3173int
3174operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
3175{
3176  /* If either is ERROR_MARK, they aren't equal.  */
3177  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
3178      || TREE_TYPE (arg0) == error_mark_node
3179      || TREE_TYPE (arg1) == error_mark_node)
3180    return 0;
3181
3182  /* Check equality of integer constants before bailing out due to
3183     precision differences.  */
3184  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3185    return tree_int_cst_equal (arg0, arg1);
3186
3187  /* If both types don't have the same signedness, then we can't consider
3188     them equal.  We must check this before the STRIP_NOPS calls
3189     because they may change the signedness of the arguments.  As pointers
3190     strictly don't have a signedness, require either two pointers or
3191     two non-pointers as well.  */
3192  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
3193      || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
3194    return 0;
3195
  /* We cannot consider pointers to different address spaces equal.  */
3197  if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
3198      && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
3199	  != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
3200    return 0;
3201
3202  /* If both types don't have the same precision, then it is not safe
3203     to strip NOPs.  */
3204  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
3205    return 0;
3206
3207  STRIP_NOPS (arg0);
3208  STRIP_NOPS (arg1);
3209
3210  /* In case both args are comparisons but with different comparison
3211     code, try to swap the comparison operands of one arg to produce
3212     a match and compare that variant.  */
3213  if (TREE_CODE (arg0) != TREE_CODE (arg1)
3214      && COMPARISON_CLASS_P (arg0)
3215      && COMPARISON_CLASS_P (arg1))
3216    {
3217      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
3218
3219      if (TREE_CODE (arg0) == swap_code)
3220	return operand_equal_p (TREE_OPERAND (arg0, 0),
3221			        TREE_OPERAND (arg1, 1), flags)
3222	       && operand_equal_p (TREE_OPERAND (arg0, 1),
3223				   TREE_OPERAND (arg1, 0), flags);
3224    }
3225
3226  if (TREE_CODE (arg0) != TREE_CODE (arg1)
3227      /* This is needed for conversions and for COMPONENT_REF.
3228	 Might as well play it safe and always test this.  */
3229      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
3230      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
3231      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
3232    return 0;
3233
3234  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
3235     We don't care about side effects in that case because the SAVE_EXPR
3236     takes care of that for us. In all other cases, two expressions are
3237     equal if they have no side effects.  If we have two identical
3238     expressions with side effects that should be treated the same due
3239     to the only side effects being identical SAVE_EXPR's, that will
3240     be detected in the recursive calls below.  */
3241  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
3242      && (TREE_CODE (arg0) == SAVE_EXPR
3243	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
3244    return 1;
3245
3246  /* Next handle constant cases, those for which we can return 1 even
3247     if ONLY_CONST is set.  */
3248  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
3249    switch (TREE_CODE (arg0))
3250      {
3251      case INTEGER_CST:
3252	return tree_int_cst_equal (arg0, arg1);
3253
3254      case FIXED_CST:
3255	return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
3256				       TREE_FIXED_CST (arg1));
3257
3258      case REAL_CST:
3259	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
3260				   TREE_REAL_CST (arg1)))
3261	  return 1;
3262
3263
3264	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
3265	  {
3266	    /* If we do not distinguish between signed and unsigned zero,
3267	       consider them equal.  */
3268	    if (real_zerop (arg0) && real_zerop (arg1))
3269	      return 1;
3270	  }
3271	return 0;
3272
3273      case VECTOR_CST:
3274	{
3275	  tree v1, v2;
3276
3277	  v1 = TREE_VECTOR_CST_ELTS (arg0);
3278	  v2 = TREE_VECTOR_CST_ELTS (arg1);
3279	  while (v1 && v2)
3280	    {
3281	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
3282				    flags))
3283		return 0;
3284	      v1 = TREE_CHAIN (v1);
3285	      v2 = TREE_CHAIN (v2);
3286	    }
3287
3288	  return v1 == v2;
3289	}
3290
3291      case COMPLEX_CST:
3292	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
3293				 flags)
3294		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
3295				    flags));
3296
3297      case STRING_CST:
3298	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
3299		&& ! memcmp (TREE_STRING_POINTER (arg0),
3300			      TREE_STRING_POINTER (arg1),
3301			      TREE_STRING_LENGTH (arg0)));
3302
3303      case ADDR_EXPR:
3304	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
3305				0);
3306      default:
3307	break;
3308      }
3309
3310  if (flags & OEP_ONLY_CONST)
3311    return 0;
3312
/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, then both
   must be; otherwise, do the normal comparison.  */
3317#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
3318				    TREE_OPERAND (arg1, N), flags)
3319
3320#define OP_SAME_WITH_NULL(N)				\
3321  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))	\
3322   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
3323
3324  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
3325    {
3326    case tcc_unary:
3327      /* Two conversions are equal only if signedness and modes match.  */
3328      switch (TREE_CODE (arg0))
3329        {
3330	CASE_CONVERT:
3331        case FIX_TRUNC_EXPR:
3332	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
3333	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
3334	    return 0;
3335	  break;
3336	default:
3337	  break;
3338	}
3339
3340      return OP_SAME (0);
3341
3342
3343    case tcc_comparison:
3344    case tcc_binary:
3345      if (OP_SAME (0) && OP_SAME (1))
3346	return 1;
3347
3348      /* For commutative ops, allow the other order.  */
3349      return (commutative_tree_code (TREE_CODE (arg0))
3350	      && operand_equal_p (TREE_OPERAND (arg0, 0),
3351				  TREE_OPERAND (arg1, 1), flags)
3352	      && operand_equal_p (TREE_OPERAND (arg0, 1),
3353				  TREE_OPERAND (arg1, 0), flags));
3354
3355    case tcc_reference:
3356      /* If either of the pointer (or reference) expressions we are
3357	 dereferencing contain a side effect, these cannot be equal.  */
3358      if (TREE_SIDE_EFFECTS (arg0)
3359	  || TREE_SIDE_EFFECTS (arg1))
3360	return 0;
3361
3362      switch (TREE_CODE (arg0))
3363	{
3364	case INDIRECT_REF:
3365	case ALIGN_INDIRECT_REF:
3366	case MISALIGNED_INDIRECT_REF:
3367	case REALPART_EXPR:
3368	case IMAGPART_EXPR:
3369	  return OP_SAME (0);
3370
3371	case ARRAY_REF:
3372	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.
	     Compare the array index by value first if it is constant, as
	     the indices may have different types but the same value.  */
3376	  return (OP_SAME (0)
3377		  && (tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3378					  TREE_OPERAND (arg1, 1))
3379		      || OP_SAME (1))
3380		  && OP_SAME_WITH_NULL (2)
3381		  && OP_SAME_WITH_NULL (3));
3382
3383	case COMPONENT_REF:
3384	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
3385	     may be NULL when we're called to compare MEM_EXPRs.  */
3386	  return OP_SAME_WITH_NULL (0)
3387		 && OP_SAME (1)
3388		 && OP_SAME_WITH_NULL (2);
3389
3390	case BIT_FIELD_REF:
3391	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3392
3393	default:
3394	  return 0;
3395	}
3396
3397    case tcc_expression:
3398      switch (TREE_CODE (arg0))
3399	{
3400	case ADDR_EXPR:
3401	case TRUTH_NOT_EXPR:
3402	  return OP_SAME (0);
3403
3404	case TRUTH_ANDIF_EXPR:
3405	case TRUTH_ORIF_EXPR:
3406	  return OP_SAME (0) && OP_SAME (1);
3407
3408	case TRUTH_AND_EXPR:
3409	case TRUTH_OR_EXPR:
3410	case TRUTH_XOR_EXPR:
3411	  if (OP_SAME (0) && OP_SAME (1))
3412	    return 1;
3413
3414	  /* Otherwise take into account this is a commutative operation.  */
3415	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
3416				   TREE_OPERAND (arg1, 1), flags)
3417		  && operand_equal_p (TREE_OPERAND (arg0, 1),
3418				      TREE_OPERAND (arg1, 0), flags));
3419
3420	case COND_EXPR:
3421	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3422
3423	default:
3424	  return 0;
3425	}
3426
3427    case tcc_vl_exp:
3428      switch (TREE_CODE (arg0))
3429	{
3430	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly cannot be equal.  */
3433	  if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3434				 flags))
3435	    return 0;
3436
3437	  {
3438	    unsigned int cef = call_expr_flags (arg0);
3439	    if (flags & OEP_PURE_SAME)
3440	      cef &= ECF_CONST | ECF_PURE;
3441	    else
3442	      cef &= ECF_CONST;
3443	    if (!cef)
3444	      return 0;
3445	  }
3446
3447	  /* Now see if all the arguments are the same.  */
3448	  {
3449	    const_call_expr_arg_iterator iter0, iter1;
3450	    const_tree a0, a1;
3451	    for (a0 = first_const_call_expr_arg (arg0, &iter0),
3452		   a1 = first_const_call_expr_arg (arg1, &iter1);
3453		 a0 && a1;
3454		 a0 = next_const_call_expr_arg (&iter0),
3455		   a1 = next_const_call_expr_arg (&iter1))
3456	      if (! operand_equal_p (a0, a1, flags))
3457		return 0;
3458
3459	    /* If we get here and both argument lists are exhausted
3460	       then the CALL_EXPRs are equal.  */
3461	    return ! (a0 || a1);
3462	  }
3463	default:
3464	  return 0;
3465	}
3466
3467    case tcc_declaration:
3468      /* Consider __builtin_sqrt equal to sqrt.  */
3469      return (TREE_CODE (arg0) == FUNCTION_DECL
3470	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3471	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3472	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3473
3474    default:
3475      return 0;
3476    }
3477
3478#undef OP_SAME
3479#undef OP_SAME_WITH_NULL
3480}
3481
3482/* Similar to operand_equal_p, but see if ARG0 might have been made by
3483   shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3484
3485   When in doubt, return 0.  */
3486
3487static int
3488operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3489{
3490  int unsignedp1, unsignedpo;
3491  tree primarg0, primarg1, primother;
3492  unsigned int correct_width;
3493
3494  if (operand_equal_p (arg0, arg1, 0))
3495    return 1;
3496
3497  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3498      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3499    return 0;
3500
3501  /* Discard any conversions that don't change the modes of ARG0 and ARG1
3502     and see if the inner values are the same.  This removes any
3503     signedness comparison, which doesn't matter here.  */
3504  primarg0 = arg0, primarg1 = arg1;
3505  STRIP_NOPS (primarg0);
3506  STRIP_NOPS (primarg1);
3507  if (operand_equal_p (primarg0, primarg1, 0))
3508    return 1;
3509
3510  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3511     actual comparison operand, ARG0.
3512
3513     First throw away any conversions to wider types
3514     already present in the operands.  */
3515
3516  primarg1 = get_narrower (arg1, &unsignedp1);
3517  primother = get_narrower (other, &unsignedpo);
3518
3519  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3520  if (unsignedp1 == unsignedpo
3521      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3522      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3523    {
3524      tree type = TREE_TYPE (arg0);
3525
3526      /* Make sure shorter operand is extended the right way
3527	 to match the longer operand.  */
3528      primarg1 = fold_convert (signed_or_unsigned_type_for
3529			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3530
3531      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3532	return 1;
3533    }
3534
3535  return 0;
3536}
3537
3538/* See if ARG is an expression that is either a comparison or is performing
3539   arithmetic on comparisons.  The comparisons must only be comparing
3540   two different values, which will be stored in *CVAL1 and *CVAL2; if
3541   they are nonzero it means that some operands have already been found.
3542   No variables may be used anywhere else in the expression except in the
3543   comparisons.  If SAVE_P is true it means we removed a SAVE_EXPR around
3544   the expression and save_expr needs to be called with CVAL1 and CVAL2.
3545
3546   If this is true, return 1.  Otherwise, return zero.  */
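
/* An illustrative example (not from the original sources): for
   ARG = `a < b || a == b', the walk records *CVAL1 = a and
   *CVAL2 = b and returns 1; for `a < b || c > d' it returns 0,
   because a third distinct value would be required.  */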

static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    tclass = tcc_binary;

  else if (tclass == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      tclass = tcc_unary;
      *save_p = 1;
    }

  switch (tclass)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */
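
/* For instance (illustrative only): with OLD0 = a, NEW0 = x,
   OLD1 = b and NEW1 = y, the tree for `a == b && a < c' is
   rewritten as `x == y && x < c'; the operand C is untouched
   because it matches neither OLD0 nor OLD1.  */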

static tree
eval_subst (location_t loc, tree arg, tree old0, tree new0,
	    tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class tclass = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here.  */
  if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
    tclass = tcc_unary;
  else if (tclass == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    tclass = tcc_binary;

  switch (tclass)
    {
    case tcc_unary:
      return fold_build1_loc (loc, code, type,
			  eval_subst (loc, TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2_loc (loc, code, type,
			  eval_subst (loc, TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (loc, TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
			     old1, new1);

	case COMPOUND_EXPR:
	  return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
			     old1, new1);

	case COND_EXPR:
	  return fold_build3_loc (loc, code, type,
			      eval_subst (loc, TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (loc, TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ???  */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2_loc (loc, code, type, arg0, arg1);
      }

    default:
      return arg;
    }
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */
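
/* As an illustration (not from the original sources): folding
   `f () * 0' with RESULT = 0 and OMITTED = `f ()' must keep the
   call for its side effects, so the result is the COMPOUND_EXPR
   `(f (), 0)'; folding `x * 0' simply yields `0'.  */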

tree
omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto omit_one_operand_exit;
    }

  return non_lvalue_loc (loc, t);

 omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
			       tree omitted)
{
  tree t = fold_convert_loc (loc, type, result);

  /* If the resulting operand is an empty statement, just return the omitted
     statement cast to void.  */
  if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
    {
      t = build1 (NOP_EXPR, void_type_node, fold_ignored_result (omitted));
      goto pedantic_omit_one_operand_exit;
    }

  if (TREE_SIDE_EFFECTS (omitted))
    {
      t = build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
      goto pedantic_omit_one_operand_exit;
    }

  return pedantic_non_lvalue_loc (loc, t);

 pedantic_omit_one_operand_exit:
  protected_set_expr_location (t, loc);
  return t;
}

/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED1 and OMITTED2 were previously operands
   of the expression but are now not needed.

   If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
   If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
   evaluated before OMITTED2.  Otherwise, if neither has side effects,
   just do the conversion of RESULT to TYPE.  */
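
/* As an illustration (not from the original sources): with
   RESULT = 0, OMITTED1 = `f ()' and OMITTED2 = `g ()', the result
   is `(f (), (g (), 0))', so f is evaluated before g as required
   by the comment above.  */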

tree
omit_two_operands_loc (location_t loc, tree type, tree result,
		   tree omitted1, tree omitted2)
{
  tree t = fold_convert_loc (loc, type, result);

  if (TREE_SIDE_EFFECTS (omitted2))
    {
      t = build2 (COMPOUND_EXPR, type, omitted2, t);
      SET_EXPR_LOCATION (t, loc);
    }
  if (TREE_SIDE_EFFECTS (omitted1))
    {
      t = build2 (COMPOUND_EXPR, type, omitted1, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */
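
/* For example (illustrative only): for integral operands the
   negation of `a < b' is the inverted comparison `a >= b', while
   under -ftrapping-math the floating-point test `a < b' is left
   alone (NULL_TREE is returned) because the inverse would be an
   unordered comparison with different trapping behavior.  */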

tree
fold_truth_not_expr (location_t loc, tree arg)
{
  tree t, type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  location_t loc1, loc2;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;

      code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
      if (code == ERROR_MARK)
	return NULL_TREE;

      t = build2 (code, type, TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
      SET_EXPR_LOCATION (t, loc);
      return t;
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_OR_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_OR_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_AND_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	t = build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		    TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	t = build2 (TRUTH_XOR_EXPR, type,
		    invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
		    TREE_OPERAND (arg, 1));
      break;

    case TRUTH_ANDIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ORIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_ORIF_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      if (loc2 == UNKNOWN_LOCATION)
	loc2 = loc;
      t = build2 (TRUTH_ANDIF_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
		  invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
      break;

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);

	loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
	loc2 = EXPR_LOCATION (TREE_OPERAND (arg, 2));
	if (loc1 == UNKNOWN_LOCATION)
	  loc1 = loc;
	if (loc2 == UNKNOWN_LOCATION)
	  loc2 = loc;

	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	t = build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    VOID_TYPE_P (TREE_TYPE (arg1))
		    ? arg1 : invert_truthvalue_loc (loc1, arg1),
		    VOID_TYPE_P (TREE_TYPE (arg2))
		    ? arg2 : invert_truthvalue_loc (loc2, arg2));
	break;
      }

    case COMPOUND_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 1));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build2 (COMPOUND_EXPR, type,
		  TREE_OPERAND (arg, 0),
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
      break;

    case NON_LVALUE_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));

    CASE_CONVERT:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	{
	  t = build1 (TRUTH_NOT_EXPR, type, arg);
	  break;
	}

      /* ... fall through ...  */

    case FLOAT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (TREE_CODE (arg), type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	return NULL_TREE;
      t = build2 (EQ_EXPR, type, arg, build_int_cst (type, 0));
      break;

    case SAVE_EXPR:
      t = build1 (TRUTH_NOT_EXPR, type, arg);
      break;

    case CLEANUP_POINT_EXPR:
      loc1 = EXPR_LOCATION (TREE_OPERAND (arg, 0));
      if (loc1 == UNKNOWN_LOCATION)
	loc1 = loc;
      t = build1 (CLEANUP_POINT_EXPR, type,
		  invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
      break;

    default:
      t = NULL_TREE;
      break;
    }

  if (t)
    SET_EXPR_LOCATION (t, loc);

  return t;
}

/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
invert_truthvalue_loc (location_t loc, tree arg)
{
  tree tem;

  if (TREE_CODE (arg) == ERROR_MARK)
    return arg;

  tem = fold_truth_not_expr (loc, arg);
  if (!tem)
    {
      tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
      SET_EXPR_LOCATION (tem, loc);
    }

  return tem;
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
	(A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */
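
/* A concrete instance (illustrative only): `(x | 3) & (x | 5)'
   becomes `x | (3 & 5)', which folds further to `x | 1' -- one
   BIT_AND_EXPR replaced by a constant.  */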

static tree
distribute_bit_expr (location_t loc, enum tree_code code, tree type,
		     tree arg0, tree arg1)
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
	  && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  common = fold_convert_loc (loc, type, common);
  left = fold_convert_loc (loc, type, left);
  right = fold_convert_loc (loc, type, right);
  return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
		      fold_build2_loc (loc, code, type, left, right));
}

/* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
   with code CODE.  This optimization is unsafe.  */
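
/* Illustrative examples (not from the original sources): the first
   pattern below rewrites `a / c + b / c' as `(a + b) / c'; the
   second rewrites `a / 2.0 + a / 4.0' as `a * 0.75'.  Both are
   unsafe because the intermediate roundings of the original
   expression are not preserved.  */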
static tree
distribute_real_division (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
		       TREE_OPERAND (arg1, 1), 0))
    return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2_loc (loc, code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
        real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2_loc (loc, MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is nonzero.  */

static tree
make_bit_field_ref (location_t loc, tree inner, tree type,
		    HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
{
  tree result, bftype;

  if (bitpos == 0)
    {
      tree size = TYPE_SIZE (TREE_TYPE (inner));
      if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
	   || POINTER_TYPE_P (TREE_TYPE (inner)))
	  && host_integerp (size, 0)
	  && tree_low_cst (size, 0) == bitsize)
	return fold_convert_loc (loc, type, inner);
    }

  bftype = type;
  if (TYPE_PRECISION (bftype) != bitsize
      || TYPE_UNSIGNED (bftype) == !unsignedp)
    bftype = build_nonstandard_integer_type (bitsize, 0);

  result = build3 (BIT_FIELD_REF, bftype, inner,
		   size_int (bitsize), bitsize_int (bitpos));
  SET_EXPR_LOCATION (result, loc);

  if (bftype != type)
    result = fold_convert_loc (loc, type, result);

  return result;
}

/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */
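
/* A sketch of the constant case (illustrative; the exact masks
   depend on endianness and the mode chosen): for a 3-bit field B
   at bit position 2 of a word W, the test `B == 3' becomes
   `(W & (7 << 2)) == (3 << 2)', avoiding the shift a bitfield
   extraction would need.  */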

static tree
optimize_bit_field_compare (location_t loc, enum tree_code code,
			    tree compare_type, tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, false);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2_loc (loc, code, compare_type,
			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
				     make_bit_field_ref (loc, linner,
							 unsigned_type,
							 nbitsize, nbitpos,
							 1),
				     mask),
			fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
				     make_bit_field_ref (loc, rinner,
							 unsigned_type,
							 nbitsize, nbitpos,
							 1),
				     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert_loc (loc,
							  unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR,
			      fold_convert_loc (loc, signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert_loc (loc, unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  lhs = build2 (code, compare_type,
		build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		rhs);
  SET_EXPR_LOCATION (lhs, loc);
  return lhs;
}

/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */
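
/* For example (illustrative only): for EXP = `s.b & 5' where s.b
   is an 8-bit field, the innermost reference to s is returned with
   *PBITSIZE = 8, *PAND_MASK = 5, and *PMASK = 5 masked to the
   8-bit field width.  */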

static tree
decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (CONVERT_EXPR_P (exp)
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
			fold_convert_loc (loc, unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}

/* Return nonzero if MASK represents a mask of SIZE ones in the low-order
   bit positions.  */

static int
all_ones_mask_p (const_tree mask, int size)
{
  tree type = TREE_TYPE (mask);
  unsigned int precision = TYPE_PRECISION (type);
  tree tmask;

  tmask = build_int_cst_type (signed_type_for (type), -1);

  return
    tree_int_cst_equal (mask,
			const_binop (RSHIFT_EXPR,
				     const_binop (LSHIFT_EXPR, tmask,
						  size_int (precision - size),
						  0),
				     size_int (precision - size), 0));
}

/* Subroutine for fold: determine if VAL is the INTEGER_CST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */
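
/* For instance (illustrative only): with EXP of type int on a
   target where int has 32 bits, VAL must be the constant with only
   bit 31 set, i.e. (unsigned int) 1 << 31; if EXP is an extension
   from short, 1 << 15 is also accepted via the recursion below.  */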

static tree
sign_bit_p (tree exp, const_tree val)
{
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}

/* Subroutine for fold_truthop: determine if an operand is simple enough
   to be evaluated unconditionally.  */

static int
simple_operand_p (const_tree exp)
{
  /* Strip any conversions that don't change the machine mode.  */
  STRIP_NOPS (exp);

  return (CONSTANT_CLASS_P (exp)
	  || TREE_CODE (exp) == SSA_NAME
	  || (DECL_P (exp)
	      && ! TREE_ADDRESSABLE (exp)
	      && ! TREE_THIS_VOLATILE (exp)
	      && ! DECL_NONLOCAL (exp)
	      /* Don't regard global variables as simple.  They may be
		 allocated in ways unknown to the compiler (shared memory,
		 #pragma weak, etc).  */
	      && ! TREE_PUBLIC (exp)
	      && ! DECL_EXTERNAL (exp)
	      /* Loading a static variable is unduly expensive, but global
		 registers aren't expensive.  */
	      && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
}

/* The following functions are subroutines to fold_range_test and allow it to
   try to change a logical combination of comparisons into a range test.

   For example, both
	X == 2 || X == 3 || X == 4 || X == 5
   and
	X >= 2 && X <= 5
   are converted to
	(unsigned) (X - 2) <= 3

   We describe each set of comparisons as being either inside or outside
   a range, using a variable named like IN_P, and then describe the
   range with a lower and upper bound.  If one of the bounds is omitted,
   it represents either the highest or lowest value of the type.

   In the comments below, we represent a range by two numbers in brackets
   preceded by a "+" to designate being inside that range, or a "-" to
   designate being outside that range, so the condition can be inverted by
   flipping the prefix.  An omitted bound is represented by a "-".  For
   example, "- [-, 10]" means being outside the range starting at the lowest
   possible value and ending at 10, in other words, being greater than 10.
   The range "+ [-, -]" is always true and hence the range "- [-, -]" is
   always false.

   We set up things so that the missing bounds are handled in a consistent
   manner so neither a missing bound nor "true" and "false" need to be
   handled using a special case.  */
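
/* Restating the notation above with two small examples: `x > 10'
   is "- [-, 10]", `x >= 2 && x <= 5' is "+ [2, 5]", and inverting
   a test merely flips the leading sign.  */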

/* Return the result of applying CODE to ARG0 and ARG1, but handle the case
   of ARG0 and/or ARG1 being omitted, meaning an unlimited range.  UPPER0_P
   and UPPER1_P are nonzero if the respective argument is an upper bound
   and zero for a lower.  TYPE, if nonzero, is the type of the result; it
   must be specified for a comparison.  ARG1 will be converted to ARG0's
   type if both are specified.  */

static tree
range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
	     tree arg1, int upper1_p)
{
  tree tem;
  int result;
  int sgn0, sgn1;

  /* If neither arg represents infinity, do the normal operation.
     Else, if not a comparison, return infinity.  Else handle the special
     comparison rules.  Note that most of the cases below won't occur, but
     are handled for consistency.  */

  if (arg0 != 0 && arg1 != 0)
    {
      tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
			 arg0, fold_convert (TREE_TYPE (arg0), arg1));
      STRIP_NOPS (tem);
      return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
    }

  if (TREE_CODE_CLASS (code) != tcc_comparison)
    return 0;

  /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
     for neither.  In real maths, we cannot assume open-ended ranges are
     the same.  But this is computer arithmetic, where numbers are finite.
     We can therefore stand in for any unbounded end of a range with a
     value Z, Z being greater than any representable number.  This permits
     us to treat unbounded ranges as equal.  */
  sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
  sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
  switch (code)
    {
    case EQ_EXPR:
      result = sgn0 == sgn1;
      break;
    case NE_EXPR:
      result = sgn0 != sgn1;
      break;
    case LT_EXPR:
      result = sgn0 < sgn1;
      break;
    case LE_EXPR:
      result = sgn0 <= sgn1;
      break;
    case GT_EXPR:
      result = sgn0 > sgn1;
      break;
    case GE_EXPR:
      result = sgn0 >= sgn1;
      break;
    default:
      gcc_unreachable ();
    }

  return constant_boolean_node (result, type);
}

/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same
   type as the returned expression.  If EXP is not a comparison, we
   will most likely not be returning a useful value and range.  Set
   *STRICT_OVERFLOW_P to true if the return value is only valid
   because signed overflow is undefined; otherwise, do not change
   *STRICT_OVERFLOW_P.  */
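
/* An illustrative example (not from the original sources): for
   unsigned X and EXP = `X - 2 <= 3', make_range returns X with
   *PIN_P = 1, *PLOW = 2 and *PHIGH = 5 -- the inverse of the
   range-test transformation described earlier in this file.  */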

tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
	    bool *strict_overflow_p)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;
  location_t loc = EXPR_LOCATION (exp);

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_OPERAND_LENGTH (exp) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_OPERAND_LENGTH (exp) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it is set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  if (n_high != 0 && TREE_OVERFLOW (n_high))
	    break;
	  goto normalize;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  SET_EXPR_LOCATION (exp, loc);
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (!TYPE_UNSIGNED (arg0_type)
	      && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
	    *strict_overflow_p = true;

	normalize:
	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	CASE_CONVERT: case NON_LVALUE_EXPR:
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert_loc (loc, arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert_loc (loc, arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type to the signed
	     type of exp, we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type;
	      /* For fixed-point modes, we need to pass the saturating flag
		 as the 2nd parameter.  */
	      if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type),
			      TYPE_SATURATING (arg0_type));
	      else
		equiv_type = lang_hooks.types.type_for_mode
			     (TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		: TYPE_MAX_VALUE (arg0_type);

	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
					     fold_convert_loc (loc, arg0_type,
							       high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert_loc (loc, arg0_type,
							integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert_loc (loc, arg0_type,
							integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}

/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.  */
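
/* As an illustration (not from the original sources): for an
   unsigned EXP with IN_P = 1, LOW = 2 and HIGH = 5, the check
   built below is `EXP - 2 <= 3', the single-comparison form of
   `EXP >= 2 && EXP <= 5'.  */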
4923
4924tree
4925build_range_check (location_t loc, tree type, tree exp, int in_p,
4926		   tree low, tree high)
4927{
4928  tree etype = TREE_TYPE (exp), value;
4929
4930#ifdef HAVE_canonicalize_funcptr_for_compare
4931  /* Disable this optimization for function pointer expressions
4932     on targets that require function pointer canonicalization.  */
4933  if (HAVE_canonicalize_funcptr_for_compare
4934      && TREE_CODE (etype) == POINTER_TYPE
4935      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4936    return NULL_TREE;
4937#endif
4938
4939  if (! in_p)
4940    {
4941      value = build_range_check (loc, type, exp, 1, low, high);
4942      if (value != 0)
4943        return invert_truthvalue_loc (loc, value);
4944
4945      return 0;
4946    }
4947
4948  if (low == 0 && high == 0)
4949    return build_int_cst (type, 1);
4950
4951  if (low == 0)
4952    return fold_build2_loc (loc, LE_EXPR, type, exp,
4953			fold_convert_loc (loc, etype, high));
4954
4955  if (high == 0)
4956    return fold_build2_loc (loc, GE_EXPR, type, exp,
4957			fold_convert_loc (loc, etype, low));
4958
4959  if (operand_equal_p (low, high, 0))
4960    return fold_build2_loc (loc, EQ_EXPR, type, exp,
4961			fold_convert_loc (loc, etype, low));
4962
4963  if (integer_zerop (low))
4964    {
4965      if (! TYPE_UNSIGNED (etype))
4966	{
4967	  etype = unsigned_type_for (etype);
4968	  high = fold_convert_loc (loc, etype, high);
4969	  exp = fold_convert_loc (loc, etype, exp);
4970	}
4971      return build_range_check (loc, type, exp, 1, 0, high);
4972    }
4973
4974  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
4975  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4976    {
4977      unsigned HOST_WIDE_INT lo;
4978      HOST_WIDE_INT hi;
4979      int prec;
4980
4981      prec = TYPE_PRECISION (etype);
4982      if (prec <= HOST_BITS_PER_WIDE_INT)
4983	{
4984	  hi = 0;
4985	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4986	}
4987      else
4988	{
4989	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4990	  lo = (unsigned HOST_WIDE_INT) -1;
4991	}
4992
4993      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4994	{
4995	  if (TYPE_UNSIGNED (etype))
4996	    {
4997	      tree signed_etype = signed_type_for (etype);
4998	      if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4999		etype
5000		  = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
5001	      else
5002		etype = signed_etype;
5003	      exp = fold_convert_loc (loc, etype, exp);
5004	    }
5005	  return fold_build2_loc (loc, GT_EXPR, type, exp,
5006			      build_int_cst (etype, 0));
5007	}
5008    }
5009
5010  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
5011     This requires wrap-around arithmetics for the type of the expression.
5012     First make sure that arithmetics in this type is valid, then make sure
5013     that it wraps around.  */
5014  if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
5015    etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
5016					    TYPE_UNSIGNED (etype));
5017
5018  if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
5019    {
5020      tree utype, minv, maxv;
5021
5022      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
5023	 for the type in question, as we rely on this here.  */
5024      utype = unsigned_type_for (etype);
5025      maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
5026      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
5027			  integer_one_node, 1);
5028      minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
5029
5030      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
5031				      minv, 1, maxv, 1)))
5032	etype = utype;
5033      else
5034	return 0;
5035    }
5036
5037  high = fold_convert_loc (loc, etype, high);
5038  low = fold_convert_loc (loc, etype, low);
5039  exp = fold_convert_loc (loc, etype, exp);
5040
  value = const_binop (MINUS_EXPR, high, low, 0);

5044  if (POINTER_TYPE_P (etype))
5045    {
5046      if (value != 0 && !TREE_OVERFLOW (value))
5047	{
5048	  low = fold_convert_loc (loc, sizetype, low);
5049	  low = fold_build1_loc (loc, NEGATE_EXPR, sizetype, low);
5050          return build_range_check (loc, type,
5051			     	    fold_build2_loc (loc, POINTER_PLUS_EXPR,
5052						 etype, exp, low),
5053			            1, build_int_cst (etype, 0), value);
5054	}
5055      return 0;
5056    }
5057
5058  if (value != 0 && !TREE_OVERFLOW (value))
5059    return build_range_check (loc, type,
5060			      fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
5061			      1, build_int_cst (etype, 0), value);
5062
5063  return 0;
5064}
5065
5066/* Return the predecessor of VAL in its type, handling the infinite case.  */
5067
5068static tree
5069range_predecessor (tree val)
5070{
5071  tree type = TREE_TYPE (val);
5072
5073  if (INTEGRAL_TYPE_P (type)
5074      && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
5075    return 0;
5076  else
5077    return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5078}
5079
5080/* Return the successor of VAL in its type, handling the infinite case.  */
5081
5082static tree
5083range_successor (tree val)
5084{
5085  tree type = TREE_TYPE (val);
5086
5087  if (INTEGRAL_TYPE_P (type)
5088      && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
5089    return 0;
5090  else
5091    return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
5092}
5093
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Store the resulting range in the output
   parameters.  */
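/* For example, merging the test C >= 1 && C <= 9, i.e. the range
   + [1, 9], with the test C >= 5, i.e. + [5, -], yields the single
   range + [5, 9].  */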
5096
5097bool
5098merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
5099	      tree high0, int in1_p, tree low1, tree high1)
5100{
5101  int no_overlap;
5102  int subset;
5103  int temp;
5104  tree tem;
5105  int in_p;
5106  tree low, high;
5107  int lowequal = ((low0 == 0 && low1 == 0)
5108		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5109						low0, 0, low1, 0)));
5110  int highequal = ((high0 == 0 && high1 == 0)
5111		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
5112						 high0, 1, high1, 1)));
5113
  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if that is not already the
     case.  */
5116  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
5117				 low0, 0, low1, 0))
5118      || (lowequal
5119	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
5120					high1, 1, high0, 1))))
5121    {
5122      temp = in0_p, in0_p = in1_p, in1_p = temp;
5123      tem = low0, low0 = low1, low1 = tem;
5124      tem = high0, high0 = high1, high1 = tem;
5125    }
5126
5127  /* Now flag two cases, whether the ranges are disjoint or whether the
5128     second range is totally subsumed in the first.  Note that the tests
5129     below are simplified by the ones above.  */
5130  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
5131					  high0, 1, low1, 0));
5132  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
5133				      high1, 1, high0, 1));
5134
5135  /* We now have four cases, depending on whether we are including or
5136     excluding the two ranges.  */
5137  if (in0_p && in1_p)
5138    {
5139      /* If they don't overlap, the result is false.  If the second range
5140	 is a subset it is the result.  Otherwise, the range is from the start
5141	 of the second to the end of the first.  */
5142      if (no_overlap)
5143	in_p = 0, low = high = 0;
5144      else if (subset)
5145	in_p = 1, low = low1, high = high1;
5146      else
5147	in_p = 1, low = low1, high = high0;
5148    }
5149
5150  else if (in0_p && ! in1_p)
5151    {
5152      /* If they don't overlap, the result is the first range.  If they are
5153	 equal, the result is false.  If the second range is a subset of the
5154	 first, and the ranges begin at the same place, we go from just after
5155	 the end of the second range to the end of the first.  If the second
5156	 range is not a subset of the first, or if it is a subset and both
5157	 ranges end at the same place, the range starts at the start of the
5158	 first range and ends just before the second range.
5159	 Otherwise, we can't describe this as a single range.  */
5160      if (no_overlap)
5161	in_p = 1, low = low0, high = high0;
5162      else if (lowequal && highequal)
5163	in_p = 0, low = high = 0;
5164      else if (subset && lowequal)
5165	{
5166	  low = range_successor (high1);
5167	  high = high0;
5168	  in_p = 1;
5169	  if (low == 0)
5170	    {
5171	      /* We are in the weird situation where high0 > high1 but
5172		 high1 has no successor.  Punt.  */
5173	      return 0;
5174	    }
5175	}
5176      else if (! subset || highequal)
5177	{
5178	  low = low0;
5179	  high = range_predecessor (low1);
5180	  in_p = 1;
5181	  if (high == 0)
5182	    {
5183	      /* low0 < low1 but low1 has no predecessor.  Punt.  */
5184	      return 0;
5185	    }
5186	}
5187      else
5188	return 0;
5189    }
5190
5191  else if (! in0_p && in1_p)
5192    {
5193      /* If they don't overlap, the result is the second range.  If the second
5194	 is a subset of the first, the result is false.  Otherwise,
5195	 the range starts just after the first range and ends at the
5196	 end of the second.  */
5197      if (no_overlap)
5198	in_p = 1, low = low1, high = high1;
5199      else if (subset || highequal)
5200	in_p = 0, low = high = 0;
5201      else
5202	{
5203	  low = range_successor (high0);
5204	  high = high1;
5205	  in_p = 1;
5206	  if (low == 0)
5207	    {
5208	      /* high1 > high0 but high0 has no successor.  Punt.  */
5209	      return 0;
5210	    }
5211	}
5212    }
5213
5214  else
5215    {
5216      /* The case where we are excluding both ranges.  Here the complex case
5217	 is if they don't overlap.  In that case, the only time we have a
5218	 range is if they are adjacent.  If the second is a subset of the
5219	 first, the result is the first.  Otherwise, the range to exclude
5220	 starts at the beginning of the first range and ends at the end of the
5221	 second.  */
5222      if (no_overlap)
5223	{
5224	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
5225					 range_successor (high0),
5226					 1, low1, 0)))
5227	    in_p = 0, low = low0, high = high1;
5228	  else
5229	    {
5230	      /* Canonicalize - [min, x] into - [-, x].  */
5231	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
5232		switch (TREE_CODE (TREE_TYPE (low0)))
5233		  {
5234		  case ENUMERAL_TYPE:
5235		    if (TYPE_PRECISION (TREE_TYPE (low0))
5236			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
5237		      break;
5238		    /* FALLTHROUGH */
5239		  case INTEGER_TYPE:
5240		    if (tree_int_cst_equal (low0,
5241					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
5242		      low0 = 0;
5243		    break;
5244		  case POINTER_TYPE:
5245		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
5246			&& integer_zerop (low0))
5247		      low0 = 0;
5248		    break;
5249		  default:
5250		    break;
5251		  }
5252
5253	      /* Canonicalize - [x, max] into - [x, -].  */
5254	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
5255		switch (TREE_CODE (TREE_TYPE (high1)))
5256		  {
5257		  case ENUMERAL_TYPE:
5258		    if (TYPE_PRECISION (TREE_TYPE (high1))
5259			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
5260		      break;
5261		    /* FALLTHROUGH */
5262		  case INTEGER_TYPE:
5263		    if (tree_int_cst_equal (high1,
5264					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
5265		      high1 = 0;
5266		    break;
5267		  case POINTER_TYPE:
5268		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
5269			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
5270						       high1, 1,
5271						       integer_one_node, 1)))
5272		      high1 = 0;
5273		    break;
5274		  default:
5275		    break;
5276		  }
5277
	      /* The ranges might also be adjacent between the maximum and
5279	         minimum values of the given type.  For
5280	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
5281	         return + [x + 1, y - 1].  */
5282	      if (low0 == 0 && high1 == 0)
5283	        {
5284		  low = range_successor (high0);
5285		  high = range_predecessor (low1);
5286		  if (low == 0 || high == 0)
5287		    return 0;
5288
5289		  in_p = 1;
5290		}
5291	      else
5292		return 0;
5293	    }
5294	}
5295      else if (subset)
5296	in_p = 0, low = low0, high = high0;
5297      else
5298	in_p = 0, low = low0, high = high1;
5299    }
5300
5301  *pin_p = in_p, *plow = low, *phigh = high;
5302  return 1;
5303}
5304
5305
5306/* Subroutine of fold, looking inside expressions of the form
5307   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5308   of the COND_EXPR.  This function is being used also to optimize
5309   A op B ? C : A, by reversing the comparison first.
5310
5311   Return a folded expression whose code is not a COND_EXPR
5312   anymore, or NULL_TREE if no folding opportunity is found.  */
5313
5314static tree
5315fold_cond_expr_with_comparison (location_t loc, tree type,
5316				tree arg0, tree arg1, tree arg2)
5317{
5318  enum tree_code comp_code = TREE_CODE (arg0);
5319  tree arg00 = TREE_OPERAND (arg0, 0);
5320  tree arg01 = TREE_OPERAND (arg0, 1);
5321  tree arg1_type = TREE_TYPE (arg1);
5322  tree tem;
5323
5324  STRIP_NOPS (arg1);
5325  STRIP_NOPS (arg2);
5326
5327  /* If we have A op 0 ? A : -A, consider applying the following
5328     transformations:
5329
5330     A == 0? A : -A    same as -A
5331     A != 0? A : -A    same as A
5332     A >= 0? A : -A    same as abs (A)
5333     A > 0?  A : -A    same as abs (A)
5334     A <= 0? A : -A    same as -abs (A)
5335     A < 0?  A : -A    same as -abs (A)
5336
5337     None of these transformations work for modes with signed
5338     zeros.  If A is +/-0, the first two transformations will
5339     change the sign of the result (from +0 to -0, or vice
5340     versa).  The last four will fix the sign of the result,
5341     even though the original expressions could be positive or
5342     negative, depending on the sign of A.
5343
5344     Note that all these transformations are correct if A is
5345     NaN, since the two alternatives (A and -A) are also NaNs.  */
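  /* Illustration of the signed-zero hazard: if A is -0.0, then
     A == 0 ? A : -A evaluates to -0.0, since -0.0 == 0.0 compares
     true, while the folded replacement -A yields +0.0.  */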
5346  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5347      && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5348	  ? real_zerop (arg01)
5349	  : integer_zerop (arg01))
5350      && ((TREE_CODE (arg2) == NEGATE_EXPR
5351	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X; check for that.  */
5354	  || (TREE_CODE (arg1) == MINUS_EXPR
5355	      && TREE_CODE (arg2) == MINUS_EXPR
5356	      && operand_equal_p (TREE_OPERAND (arg1, 0),
5357				  TREE_OPERAND (arg2, 1), 0)
5358	      && operand_equal_p (TREE_OPERAND (arg1, 1),
5359				  TREE_OPERAND (arg2, 0), 0))))
5360    switch (comp_code)
5361      {
5362      case EQ_EXPR:
5363      case UNEQ_EXPR:
5364	tem = fold_convert_loc (loc, arg1_type, arg1);
5365	return pedantic_non_lvalue_loc (loc,
5366				    fold_convert_loc (loc, type,
5367						  negate_expr (tem)));
5368      case NE_EXPR:
5369      case LTGT_EXPR:
5370	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5371      case UNGE_EXPR:
5372      case UNGT_EXPR:
5373	if (flag_trapping_math)
5374	  break;
5375	/* Fall through.  */
5376      case GE_EXPR:
5377      case GT_EXPR:
5378	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5379	  arg1 = fold_convert_loc (loc, signed_type_for
5380			       (TREE_TYPE (arg1)), arg1);
5381	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5382	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5383      case UNLE_EXPR:
5384      case UNLT_EXPR:
5385	if (flag_trapping_math)
5386	  break;
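	/* Fall through.  */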
5387      case LE_EXPR:
5388      case LT_EXPR:
5389	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5390	  arg1 = fold_convert_loc (loc, signed_type_for
5391			       (TREE_TYPE (arg1)), arg1);
5392	tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5393	return negate_expr (fold_convert_loc (loc, type, tem));
5394      default:
5395	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5396	break;
5397      }
5398
5399  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
5400     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
5401     both transformations are correct when A is NaN: A != 0
5402     is then true, and A == 0 is false.  */
5403
5404  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5405      && integer_zerop (arg01) && integer_zerop (arg2))
5406    {
5407      if (comp_code == NE_EXPR)
5408	return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5409      else if (comp_code == EQ_EXPR)
5410	return build_int_cst (type, 0);
5411    }
5412
5413  /* Try some transformations of A op B ? A : B.
5414
5415     A == B? A : B    same as B
5416     A != B? A : B    same as A
5417     A >= B? A : B    same as max (A, B)
5418     A > B?  A : B    same as max (B, A)
5419     A <= B? A : B    same as min (A, B)
5420     A < B?  A : B    same as min (B, A)
5421
5422     As above, these transformations don't work in the presence
5423     of signed zeros.  For example, if A and B are zeros of
5424     opposite sign, the first two transformations will change
5425     the sign of the result.  In the last four, the original
5426     expressions give different results for (A=+0, B=-0) and
5427     (A=-0, B=+0), but the transformed expressions do not.
5428
5429     The first two transformations are correct if either A or B
5430     is a NaN.  In the first transformation, the condition will
5431     be false, and B will indeed be chosen.  In the case of the
5432     second transformation, the condition A != B will be true,
5433     and A will be chosen.
5434
5435     The conversions to max() and min() are not correct if B is
5436     a number and A is not.  The conditions in the original
5437     expressions will be false, so all four give B.  The min()
5438     and max() versions would give a NaN instead.  */
5439  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
5440      && operand_equal_for_comparison_p (arg01, arg2, arg00)
5441      /* Avoid these transformations if the COND_EXPR may be used
5442	 as an lvalue in the C++ front-end.  PR c++/19199.  */
5443      && (in_gimple_form
5444	  || (strcmp (lang_hooks.name, "GNU C++") != 0
5445	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5446	  || ! maybe_lvalue_p (arg1)
5447	  || ! maybe_lvalue_p (arg2)))
5448    {
5449      tree comp_op0 = arg00;
5450      tree comp_op1 = arg01;
5451      tree comp_type = TREE_TYPE (comp_op0);
5452
5453      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
5454      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5455	{
5456	  comp_type = type;
5457	  comp_op0 = arg1;
5458	  comp_op1 = arg2;
5459	}
5460
5461      switch (comp_code)
5462	{
5463	case EQ_EXPR:
5464	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5465	case NE_EXPR:
5466	  return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5467	case LE_EXPR:
5468	case LT_EXPR:
5469	case UNLE_EXPR:
5470	case UNLT_EXPR:
5471	  /* In C++ a ?: expression can be an lvalue, so put the
5472	     operand which will be used if they are equal first
5473	     so that we can convert this back to the
5474	     corresponding COND_EXPR.  */
5475	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5476	    {
5477	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5478	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5479	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5480		    ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5481		    : fold_build2_loc (loc, MIN_EXPR, comp_type,
5482				   comp_op1, comp_op0);
5483	      return pedantic_non_lvalue_loc (loc,
5484					  fold_convert_loc (loc, type, tem));
5485	    }
5486	  break;
5487	case GE_EXPR:
5488	case GT_EXPR:
5489	case UNGE_EXPR:
5490	case UNGT_EXPR:
5491	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5492	    {
5493	      comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5494	      comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5495	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5496		    ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5497		    : fold_build2_loc (loc, MAX_EXPR, comp_type,
5498				   comp_op1, comp_op0);
5499	      return pedantic_non_lvalue_loc (loc,
5500					  fold_convert_loc (loc, type, tem));
5501	    }
5502	  break;
5503	case UNEQ_EXPR:
5504	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5505	    return pedantic_non_lvalue_loc (loc,
5506					fold_convert_loc (loc, type, arg2));
5507	  break;
5508	case LTGT_EXPR:
5509	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
5510	    return pedantic_non_lvalue_loc (loc,
5511					fold_convert_loc (loc, type, arg1));
5512	  break;
5513	default:
5514	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5515	  break;
5516	}
5517    }
5518
5519  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5520     we might still be able to simplify this.  For example,
5521     if C1 is one less or one more than C2, this might have started
5522     out as a MIN or MAX and been transformed by this function.
5523     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */
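  /* For example, A < 5 ? A : 4 is equivalent to MIN (A, 4), because
     for integers the test A < 5 is the same as A <= 4.  */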
5524
5525  if (INTEGRAL_TYPE_P (type)
5526      && TREE_CODE (arg01) == INTEGER_CST
5527      && TREE_CODE (arg2) == INTEGER_CST)
5528    switch (comp_code)
5529      {
5530      case EQ_EXPR:
5531	if (TREE_CODE (arg1) == INTEGER_CST)
5532	  break;
5533	/* We can replace A with C1 in this case.  */
5534	arg1 = fold_convert_loc (loc, type, arg01);
5535	return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5536
5537      case LT_EXPR:
5538	/* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5539	   MIN_EXPR, to preserve the signedness of the comparison.  */
5540	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5541			       OEP_ONLY_CONST)
5542	    && operand_equal_p (arg01,
5543				const_binop (PLUS_EXPR, arg2,
5544					     build_int_cst (type, 1), 0),
5545				OEP_ONLY_CONST))
5546	  {
5547	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5548				   fold_convert_loc (loc, TREE_TYPE (arg00),
5549						     arg2));
5550	    return pedantic_non_lvalue_loc (loc,
5551					    fold_convert_loc (loc, type, tem));
5552	  }
5553	break;
5554
5555      case LE_EXPR:
5556	/* If C1 is C2 - 1, this is min(A, C2), with the same care
5557	   as above.  */
5558	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5559			       OEP_ONLY_CONST)
5560	    && operand_equal_p (arg01,
5561				const_binop (MINUS_EXPR, arg2,
5562					     build_int_cst (type, 1), 0),
5563				OEP_ONLY_CONST))
5564	  {
5565	    tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5566				   fold_convert_loc (loc, TREE_TYPE (arg00),
5567						     arg2));
5568	    return pedantic_non_lvalue_loc (loc,
5569					    fold_convert_loc (loc, type, tem));
5570	  }
5571	break;
5572
5573      case GT_EXPR:
5574	/* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5575	   MAX_EXPR, to preserve the signedness of the comparison.  */
5576	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5577			       OEP_ONLY_CONST)
5578	    && operand_equal_p (arg01,
5579				const_binop (MINUS_EXPR, arg2,
5580					     build_int_cst (type, 1), 0),
5581				OEP_ONLY_CONST))
5582	  {
5583	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5584				   fold_convert_loc (loc, TREE_TYPE (arg00),
5585						     arg2));
5586	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5587	  }
5588	break;
5589
5590      case GE_EXPR:
5591	/* If C1 is C2 + 1, this is max(A, C2), with the same care as above.  */
5592	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5593			       OEP_ONLY_CONST)
5594	    && operand_equal_p (arg01,
5595				const_binop (PLUS_EXPR, arg2,
5596					     build_int_cst (type, 1), 0),
5597				OEP_ONLY_CONST))
5598	  {
5599	    tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5600				   fold_convert_loc (loc, TREE_TYPE (arg00),
5601						     arg2));
5602	    return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5603	  }
5604	break;
5605      case NE_EXPR:
5606	break;
5607      default:
5608	gcc_unreachable ();
5609      }
5610
5611  return NULL_TREE;
5612}
5613
5614
5615
5616#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5617#define LOGICAL_OP_NON_SHORT_CIRCUIT \
5618  (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5619		false) >= 2)
5620#endif
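/* When LOGICAL_OP_NON_SHORT_CIRCUIT is nonzero, it is deemed profitable
   to replace a short-circuit TRUTH_ANDIF_EXPR/TRUTH_ORIF_EXPR by its
   non-short-circuit form, which evaluates both operands but avoids a
   conditional branch; by default this is done when the branch cost is
   at least 2.  */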
5621
5622/* EXP is some logical combination of boolean tests.  See if we can
5623   merge it into some range test.  Return the new tree if so.  */
5624
5625static tree
5626fold_range_test (location_t loc, enum tree_code code, tree type,
5627		 tree op0, tree op1)
5628{
5629  int or_op = (code == TRUTH_ORIF_EXPR
5630	       || code == TRUTH_OR_EXPR);
5631  int in0_p, in1_p, in_p;
5632  tree low0, low1, low, high0, high1, high;
5633  bool strict_overflow_p = false;
5634  tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5635  tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5636  tree tem;
5637  const char * const warnmsg = G_("assuming signed overflow does not occur "
5638				  "when simplifying range test");
5639
5640  /* If this is an OR operation, invert both sides; we will invert
5641     again at the end.  */
5642  if (or_op)
5643    in0_p = ! in0_p, in1_p = ! in1_p;
5644
5645  /* If both expressions are the same, if we can merge the ranges, and we
5646     can build the range test, return it or it inverted.  If one of the
5647     ranges is always true or always false, consider it to be the same
5648     expression as the other.  */
5649  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5650      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5651		       in1_p, low1, high1)
5652      && 0 != (tem = (build_range_check (UNKNOWN_LOCATION, type,
5653					 lhs != 0 ? lhs
5654					 : rhs != 0 ? rhs : integer_zero_node,
5655					 in_p, low, high))))
5656    {
5657      if (strict_overflow_p)
5658	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5659      return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5660    }
5661
5662  /* On machines where the branch cost is expensive, if this is a
5663     short-circuited branch and the underlying object on both sides
5664     is the same, make a non-short-circuit operation.  */
5665  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5666	   && lhs != 0 && rhs != 0
5667	   && (code == TRUTH_ANDIF_EXPR
5668	       || code == TRUTH_ORIF_EXPR)
5669	   && operand_equal_p (lhs, rhs, 0))
5670    {
5671      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
5672	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5673	 which cases we can't do this.  */
5674      if (simple_operand_p (lhs))
5675	{
5676	  tem = build2 (code == TRUTH_ANDIF_EXPR
5677			? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5678			type, op0, op1);
5679	  SET_EXPR_LOCATION (tem, loc);
5680	  return tem;
5681	}
5682
5683      else if (lang_hooks.decls.global_bindings_p () == 0
5684	       && ! CONTAINS_PLACEHOLDER_P (lhs))
5685	{
5686	  tree common = save_expr (lhs);
5687
5688	  if (0 != (lhs = build_range_check (loc, type, common,
5689					     or_op ? ! in0_p : in0_p,
5690					     low0, high0))
5691	      && (0 != (rhs = build_range_check (loc, type, common,
5692						 or_op ? ! in1_p : in1_p,
5693						 low1, high1))))
5694	    {
5695	      if (strict_overflow_p)
5696		fold_overflow_warning (warnmsg,
5697				       WARN_STRICT_OVERFLOW_COMPARISON);
5698	      tem = build2 (code == TRUTH_ANDIF_EXPR
5699			    ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5700			    type, lhs, rhs);
5701	      SET_EXPR_LOCATION (tem, loc);
5702	      return tem;
5703	    }
5704	}
5705    }
5706
5707  return 0;
5708}
5709
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is sign-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */
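/* For instance, with P == 8 in a 32-bit mode and MASK zero, the
   sign-extended constant 0xffffff80 is mapped to 0x00000080 (extra bits
   cleared), while the zero-extended constant 0x00000080 is mapped to
   0xffffff80 (extra bits set).  */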
5714
5715static tree
5716unextend (tree c, int p, int unsignedp, tree mask)
5717{
5718  tree type = TREE_TYPE (c);
5719  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5720  tree temp;
5721
5722  if (p == modesize || unsignedp)
5723    return c;
5724
5725  /* We work by getting just the sign bit into the low-order bit, then
5726     into the high-order bit, then sign-extend.  We then XOR that value
5727     with C.  */
5728  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
5729  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
5730
5731  /* We must use a signed type in order to get an arithmetic right shift.
5732     However, we must also avoid introducing accidental overflows, so that
5733     a subsequent call to integer_zerop will work.  Hence we must
5734     do the type conversion here.  At this point, the constant is either
5735     zero or one, and the conversion to a signed type can never overflow.
5736     We could get an overflow if this conversion is done anywhere else.  */
5737  if (TYPE_UNSIGNED (type))
5738    temp = fold_convert (signed_type_for (type), temp);
5739
5740  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
5741  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
5742  if (mask != 0)
5743    temp = const_binop (BIT_AND_EXPR, temp,
5744			fold_convert (TREE_TYPE (c), mask),
5745			0);
5746  /* If necessary, convert the type back to match the type of C.  */
5747  if (TYPE_UNSIGNED (type))
5748    temp = fold_convert (type, temp);
5749
5750  return fold_convert (type,
5751		       const_binop (BIT_XOR_EXPR, c, temp, 0));
5752}
5753
5754/* Find ways of folding logical expressions of LHS and RHS:
5755   Try to merge two comparisons to the same innermost item.
5756   Look for range tests like "ch >= '0' && ch <= '9'".
5757   Look for combinations of simple terms on machines with expensive branches
5758   and evaluate the RHS unconditionally.
5759
5760   For example, if we have p->a == 2 && p->b == 4 and we can make an
5761   object large enough to span both A and B, we can do this with a comparison
5762   against the object ANDed with the a mask.
5763
5764   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5765   operations to do this with one comparison.
5766
   We check for both normal comparisons and the BIT_AND_EXPRs made by
   this function and the one above.
5769
5770   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
5771   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5772
5773   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5774   two operands.
5775
5776   We return the simplified tree or 0 if no optimization is possible.  */
5777
5778static tree
5779fold_truthop (location_t loc, enum tree_code code, tree truth_type,
5780	      tree lhs, tree rhs)
5781{
5782  /* If this is the "or" of two comparisons, we can do something if
5783     the comparisons are NE_EXPR.  If this is the "and", we can do something
5784     if the comparisons are EQ_EXPR.  I.e.,
5785	(a->b == 2 && a->c == 4) can become (a->new == NEW).
5786
5787     WANTED_CODE is this operation code.  For single bit fields, we can
5788     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5789     comparison for one-bit fields.  */
5790
5791  enum tree_code wanted_code;
5792  enum tree_code lcode, rcode;
5793  tree ll_arg, lr_arg, rl_arg, rr_arg;
5794  tree ll_inner, lr_inner, rl_inner, rr_inner;
5795  HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5796  HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5797  HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5798  HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5799  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5800  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5801  enum machine_mode lnmode, rnmode;
5802  tree ll_mask, lr_mask, rl_mask, rr_mask;
5803  tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5804  tree l_const, r_const;
5805  tree lntype, rntype, result;
5806  HOST_WIDE_INT first_bit, end_bit;
5807  int volatilep;
5808  tree orig_lhs = lhs, orig_rhs = rhs;
5809  enum tree_code orig_code = code;
5810
5811  /* Start by getting the comparison codes.  Fail if anything is volatile.
5812     If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5813     it were surrounded with a NE_EXPR.  */
5814
5815  if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5816    return 0;
5817
5818  lcode = TREE_CODE (lhs);
5819  rcode = TREE_CODE (rhs);
5820
5821  if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5822    {
5823      lhs = build2 (NE_EXPR, truth_type, lhs,
5824		    build_int_cst (TREE_TYPE (lhs), 0));
5825      lcode = NE_EXPR;
5826    }
5827
5828  if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5829    {
5830      rhs = build2 (NE_EXPR, truth_type, rhs,
5831		    build_int_cst (TREE_TYPE (rhs), 0));
5832      rcode = NE_EXPR;
5833    }
5834
5835  if (TREE_CODE_CLASS (lcode) != tcc_comparison
5836      || TREE_CODE_CLASS (rcode) != tcc_comparison)
5837    return 0;
5838
5839  ll_arg = TREE_OPERAND (lhs, 0);
5840  lr_arg = TREE_OPERAND (lhs, 1);
5841  rl_arg = TREE_OPERAND (rhs, 0);
5842  rr_arg = TREE_OPERAND (rhs, 1);
5843
5844  /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations.  */
5845  if (simple_operand_p (ll_arg)
5846      && simple_operand_p (lr_arg))
5847    {
5848      tree result;
5849      if (operand_equal_p (ll_arg, rl_arg, 0)
5850          && operand_equal_p (lr_arg, rr_arg, 0))
5851	{
5852          result = combine_comparisons (loc, code, lcode, rcode,
5853					truth_type, ll_arg, lr_arg);
5854	  if (result)
5855	    return result;
5856	}
5857      else if (operand_equal_p (ll_arg, rr_arg, 0)
5858               && operand_equal_p (lr_arg, rl_arg, 0))
5859	{
5860          result = combine_comparisons (loc, code, lcode,
5861					swap_tree_comparison (rcode),
5862					truth_type, ll_arg, lr_arg);
5863	  if (result)
5864	    return result;
5865	}
5866    }
5867
5868  code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5869	  ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5870
5871  /* If the RHS can be evaluated unconditionally and its operands are
5872     simple, it wins to evaluate the RHS unconditionally on machines
5873     with expensive branches.  In this case, this isn't a comparison
5874     that can be merged.  Avoid doing this if the RHS is a floating-point
5875     comparison since those can trap.  */
5876
5877  if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5878		   false) >= 2
5879      && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5880      && simple_operand_p (rl_arg)
5881      && simple_operand_p (rr_arg))
5882    {
5883      /* Convert (a != 0) || (b != 0) into (a | b) != 0.  */
5884      if (code == TRUTH_OR_EXPR
5885	  && lcode == NE_EXPR && integer_zerop (lr_arg)
5886	  && rcode == NE_EXPR && integer_zerop (rr_arg)
5887	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5888	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5889	{
5890	  result = build2 (NE_EXPR, truth_type,
5891			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5892				   ll_arg, rl_arg),
5893			   build_int_cst (TREE_TYPE (ll_arg), 0));
5894	  goto fold_truthop_exit;
5895	}
5896
5897      /* Convert (a == 0) && (b == 0) into (a | b) == 0.  */
5898      if (code == TRUTH_AND_EXPR
5899	  && lcode == EQ_EXPR && integer_zerop (lr_arg)
5900	  && rcode == EQ_EXPR && integer_zerop (rr_arg)
5901	  && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5902	  && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5903	{
5904	  result = build2 (EQ_EXPR, truth_type,
5905			   build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5906				   ll_arg, rl_arg),
5907			   build_int_cst (TREE_TYPE (ll_arg), 0));
5908	  goto fold_truthop_exit;
5909	}
5910
5911      if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5912	{
5913	  if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5914	    {
5915	      result = build2 (code, truth_type, lhs, rhs);
5916	      goto fold_truthop_exit;
5917	    }
5918	  return NULL_TREE;
5919	}
5920    }
5921
5922  /* See if the comparisons can be merged.  Then get all the parameters for
5923     each side.  */
5924
5925  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5926      || (rcode != EQ_EXPR && rcode != NE_EXPR))
5927    return 0;
5928
5929  volatilep = 0;
5930  ll_inner = decode_field_reference (loc, ll_arg,
5931				     &ll_bitsize, &ll_bitpos, &ll_mode,
5932				     &ll_unsignedp, &volatilep, &ll_mask,
5933				     &ll_and_mask);
5934  lr_inner = decode_field_reference (loc, lr_arg,
5935				     &lr_bitsize, &lr_bitpos, &lr_mode,
5936				     &lr_unsignedp, &volatilep, &lr_mask,
5937				     &lr_and_mask);
5938  rl_inner = decode_field_reference (loc, rl_arg,
5939				     &rl_bitsize, &rl_bitpos, &rl_mode,
5940				     &rl_unsignedp, &volatilep, &rl_mask,
5941				     &rl_and_mask);
5942  rr_inner = decode_field_reference (loc, rr_arg,
5943				     &rr_bitsize, &rr_bitpos, &rr_mode,
5944				     &rr_unsignedp, &volatilep, &rr_mask,
5945				     &rr_and_mask);
5946
  /* The inner operation on the lhs of each comparison must be the same
     if we are to be able to do anything.  Then see if we have constants.
     If not, the same must be true for the rhs's.  */
5951  if (volatilep || ll_inner == 0 || rl_inner == 0
5952      || ! operand_equal_p (ll_inner, rl_inner, 0))
5953    return 0;
5954
5955  if (TREE_CODE (lr_arg) == INTEGER_CST
5956      && TREE_CODE (rr_arg) == INTEGER_CST)
5957    l_const = lr_arg, r_const = rr_arg;
5958  else if (lr_inner == 0 || rr_inner == 0
5959	   || ! operand_equal_p (lr_inner, rr_inner, 0))
5960    return 0;
5961  else
5962    l_const = r_const = 0;
5963
5964  /* If either comparison code is not correct for our logical operation,
5965     fail.  However, we can convert a one-bit comparison against zero into
5966     the opposite comparison against that bit being set in the field.  */
5967
5968  wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5969  if (lcode != wanted_code)
5970    {
5971      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5972	{
5973	  /* Make the left operand unsigned, since we are only interested
5974	     in the value of one bit.  Otherwise we are doing the wrong
5975	     thing below.  */
5976	  ll_unsignedp = 1;
5977	  l_const = ll_mask;
5978	}
5979      else
5980	return 0;
5981    }
5982
5983  /* This is analogous to the code for l_const above.  */
5984  if (rcode != wanted_code)
5985    {
5986      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5987	{
5988	  rl_unsignedp = 1;
5989	  r_const = rl_mask;
5990	}
5991      else
5992	return 0;
5993    }
5994
5995  /* See if we can find a mode that contains both fields being compared on
5996     the left.  If we can't, fail.  Otherwise, update all constants and masks
5997     to be relative to a field of that size.  */
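  /* For instance, when the two left-hand fields are adjacent 8-bit
     bit-fields of the same word, get_best_mode may return HImode, so
     that both fields can be fetched and masked with a single 16-bit
     access.  */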
5998  first_bit = MIN (ll_bitpos, rl_bitpos);
5999  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
6000  lnmode = get_best_mode (end_bit - first_bit, first_bit,
6001			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
6002			  volatilep);
6003  if (lnmode == VOIDmode)
6004    return 0;
6005
6006  lnbitsize = GET_MODE_BITSIZE (lnmode);
6007  lnbitpos = first_bit & ~ (lnbitsize - 1);
6008  lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
6009  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
6010
6011  if (BYTES_BIG_ENDIAN)
6012    {
6013      xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
6014      xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
6015    }
6016
6017  ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
6018			 size_int (xll_bitpos), 0);
6019  rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
6020			 size_int (xrl_bitpos), 0);
6021
6022  if (l_const)
6023    {
6024      l_const = fold_convert_loc (loc, lntype, l_const);
6025      l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
6026      l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
6027      if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
6028					fold_build1_loc (loc, BIT_NOT_EXPR,
6029						     lntype, ll_mask),
6030					0)))
6031	{
6032	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6033
6034	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6035	}
6036    }
6037  if (r_const)
6038    {
6039      r_const = fold_convert_loc (loc, lntype, r_const);
6040      r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
6041      r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
6042      if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
6043					fold_build1_loc (loc, BIT_NOT_EXPR,
6044						     lntype, rl_mask),
6045					0)))
6046	{
6047	  warning (0, "comparison is always %d", wanted_code == NE_EXPR);
6048
6049	  return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
6050	}
6051    }
6052
  /* If the right sides are not constant, do the same for them.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
6056  if (l_const == 0)
6057    {
6058      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
6059	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
6060	  /* Make sure the two fields on the right
6061	     correspond to the left without being swapped.  */
6062	  || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
6063	return 0;
6064
6065      first_bit = MIN (lr_bitpos, rr_bitpos);
6066      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
6067      rnmode = get_best_mode (end_bit - first_bit, first_bit,
6068			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
6069			      volatilep);
6070      if (rnmode == VOIDmode)
6071	return 0;
6072
6073      rnbitsize = GET_MODE_BITSIZE (rnmode);
6074      rnbitpos = first_bit & ~ (rnbitsize - 1);
6075      rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
6076      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
6077
6078      if (BYTES_BIG_ENDIAN)
6079	{
6080	  xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
6081	  xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
6082	}
6083
6084      lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6085							    rntype, lr_mask),
6086			     size_int (xlr_bitpos), 0);
6087      rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
6088							    rntype, rr_mask),
6089			     size_int (xrr_bitpos), 0);
6090
6091      /* Make a mask that corresponds to both fields being compared.
6092	 Do this for both items being compared.  If the operands are the
6093	 same size and the bits being compared are in the same position
6094	 then we can do this by masking both and comparing the masked
6095	 results.  */
6096      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6097      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
6098      if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
6099	{
6100	  lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6101				    ll_unsignedp || rl_unsignedp);
6102	  if (! all_ones_mask_p (ll_mask, lnbitsize))
6103	    lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
6104
6105	  rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
6106				    lr_unsignedp || rr_unsignedp);
6107	  if (! all_ones_mask_p (lr_mask, rnbitsize))
6108	    rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
6109
6110	  result = build2 (wanted_code, truth_type, lhs, rhs);
6111	  goto fold_truthop_exit;
6112	}
6113
6114      /* There is still another way we can do something:  If both pairs of
6115	 fields being compared are adjacent, we may be able to make a wider
6116	 field containing them both.
6117
6118	 Note that we still must mask the lhs/rhs expressions.  Furthermore,
6119	 the mask must be shifted to account for the shift done by
6120	 make_bit_field_ref.  */
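      /* For instance, p->a == q->a && p->b == q->b, where a and b are
	 adjacent 8-bit fields, can become a single comparison of the two
	 combined 16-bit fields.  */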
6121      if ((ll_bitsize + ll_bitpos == rl_bitpos
6122	   && lr_bitsize + lr_bitpos == rr_bitpos)
6123	  || (ll_bitpos == rl_bitpos + rl_bitsize
6124	      && lr_bitpos == rr_bitpos + rr_bitsize))
6125	{
6126	  tree type;
6127
6128	  lhs = make_bit_field_ref (loc, ll_inner, lntype,
6129				    ll_bitsize + rl_bitsize,
6130				    MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
6131	  rhs = make_bit_field_ref (loc, lr_inner, rntype,
6132				    lr_bitsize + rr_bitsize,
6133				    MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
6134
6135	  ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
6136				 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
6137	  lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
6138				 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
6139
6140	  /* Convert to the smaller type before masking out unwanted bits.  */
6141	  type = lntype;
6142	  if (lntype != rntype)
6143	    {
6144	      if (lnbitsize > rnbitsize)
6145		{
6146		  lhs = fold_convert_loc (loc, rntype, lhs);
6147		  ll_mask = fold_convert_loc (loc, rntype, ll_mask);
6148		  type = rntype;
6149		}
6150	      else if (lnbitsize < rnbitsize)
6151		{
6152		  rhs = fold_convert_loc (loc, lntype, rhs);
6153		  lr_mask = fold_convert_loc (loc, lntype, lr_mask);
6154		  type = lntype;
6155		}
6156	    }
6157
6158	  if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
6159	    lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
6160
6161	  if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
6162	    rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
6163
6164	  result = build2 (wanted_code, truth_type, lhs, rhs);
6165	  goto fold_truthop_exit;
6166	}
6167
6168      return 0;
6169    }
6170
6171  /* Handle the case of comparisons with constants.  If there is something in
6172     common between the masks, those bits of the constants must be the same.
6173     If not, the condition is always false.  Test for this to avoid generating
6174     incorrect code below.  */
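  /* E.g. (X & 3) == 3 && (X & 5) == 4 would require bit 0 of X to be
     both 1 and 0, so the conjunction is always false.  */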
6175  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
6176  if (! integer_zerop (result)
6177      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
6178			   const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
6179    {
6180      if (wanted_code == NE_EXPR)
6181	{
6182	  warning (0, "%<or%> of unmatched not-equal tests is always 1");
6183	  return constant_boolean_node (true, truth_type);
6184	}
6185      else
6186	{
6187	  warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
6188	  return constant_boolean_node (false, truth_type);
6189	}
6190    }
6191
6192  /* Construct the expression we will return.  First get the component
6193     reference we will make.  Unless the mask is all ones the width of
6194     that field, perform the mask operation.  Then compare with the
6195     merged constant.  */
6196  result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
6197			       ll_unsignedp || rl_unsignedp);
6198
6199  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
6200  if (! all_ones_mask_p (ll_mask, lnbitsize))
6201    {
6202      result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
6203      SET_EXPR_LOCATION (result, loc);
6204    }
6205
6206  result = build2 (wanted_code, truth_type, result,
6207		   const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
6208
6209 fold_truthop_exit:
6210  SET_EXPR_LOCATION (result, loc);
6211  return result;
6212}
6213
6214/* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
6215   constant.  */
6216
6217static tree
6218optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
6219			    tree op0, tree op1)
6220{
6221  tree arg0 = op0;
6222  enum tree_code op_code;
6223  tree comp_const;
6224  tree minmax_const;
6225  int consts_equal, consts_lt;
6226  tree inner;
6227
6228  STRIP_SIGN_NOPS (arg0);
6229
6230  op_code = TREE_CODE (arg0);
6231  minmax_const = TREE_OPERAND (arg0, 1);
6232  comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
6233  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
6234  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
6235  inner = TREE_OPERAND (arg0, 0);
6236
6237  /* If something does not permit us to optimize, return the original tree.  */
6238  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
6239      || TREE_CODE (comp_const) != INTEGER_CST
6240      || TREE_OVERFLOW (comp_const)
6241      || TREE_CODE (minmax_const) != INTEGER_CST
6242      || TREE_OVERFLOW (minmax_const))
6243    return NULL_TREE;
6244
6245  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
6246     and GT_EXPR, doing the rest with recursive calls using logical
6247     simplifications.  */
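  /* For example, MAX (X, 0) <= 5 is handled by inverting MAX (X, 0) > 5;
     the GT_EXPR case yields X > 5, so the original comparison folds to
     X <= 5.  */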
6248  switch (code)
6249    {
6250    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
6251      {
6252	tree tem
6253	  = optimize_minmax_comparison (loc,
6254					invert_tree_comparison (code, false),
6255					type, op0, op1);
6256	if (tem)
6257	  return invert_truthvalue_loc (loc, tem);
6258	return NULL_TREE;
6259      }
6260
6261    case GE_EXPR:
6262      return
6263	fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6264		     optimize_minmax_comparison
6265		     (loc, EQ_EXPR, type, arg0, comp_const),
6266		     optimize_minmax_comparison
6267		     (loc, GT_EXPR, type, arg0, comp_const));
6268
6269    case EQ_EXPR:
6270      if (op_code == MAX_EXPR && consts_equal)
6271	/* MAX (X, 0) == 0  ->  X <= 0  */
6272	return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6273
6274      else if (op_code == MAX_EXPR && consts_lt)
6275	/* MAX (X, 0) == 5  ->  X == 5   */
6276	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6277
6278      else if (op_code == MAX_EXPR)
6279	/* MAX (X, 0) == -1  ->  false  */
6280	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6281
6282      else if (consts_equal)
6283	/* MIN (X, 0) == 0  ->  X >= 0  */
6284	return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6285
6286      else if (consts_lt)
6287	/* MIN (X, 0) == 5  ->  false  */
6288	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6289
6290      else
6291	/* MIN (X, 0) == -1  ->  X == -1  */
6292	return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6293
6294    case GT_EXPR:
6295      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6296	/* MAX (X, 0) > 0  ->  X > 0
6297	   MAX (X, 0) > 5  ->  X > 5  */
6298	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6299
6300      else if (op_code == MAX_EXPR)
6301	/* MAX (X, 0) > -1  ->  true  */
6302	return omit_one_operand_loc (loc, type, integer_one_node, inner);
6303
6304      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6305	/* MIN (X, 0) > 0  ->  false
6306	   MIN (X, 0) > 5  ->  false  */
6307	return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6308
6309      else
6310	/* MIN (X, 0) > -1  ->  X > -1  */
6311	return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6312
6313    default:
6314      return NULL_TREE;
6315    }
6316}
6317
/* T is an integer expression that is being multiplied, divided, or taken
   modulo a constant C (CODE says which operation, and what kind of
   division or modulus).  See if we can eliminate that operation by folding
   it with other operations already in T.  WIDE_TYPE, if non-null, is a type
   that should be used for the computation if wider than our type.
6323
6324   For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6325   (X * 2) + (Y * 4).  We must, however, be assured that either the original
6326   expression would not overflow or that overflow is undefined for the type
6327   in the language in question.
6328
6329   If we return a non-null expression, it is an equivalent form of the
6330   original computation, but need not be in the original type.
6331
   We set *STRICT_OVERFLOW_P to true if the return value depends on
   signed overflow being undefined.  Otherwise we do not change
   *STRICT_OVERFLOW_P.  */
6335
6336static tree
6337extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6338		bool *strict_overflow_p)
6339{
6340  /* To avoid exponential search depth, refuse to allow recursion past
6341     three levels.  Beyond that (1) it's highly unlikely that we'll find
6342     something interesting and (2) we've probably processed it before
6343     when we built the inner expression.  */
6344
6345  static int depth;
6346  tree ret;
6347
6348  if (depth > 3)
6349    return NULL;
6350
6351  depth++;
6352  ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6353  depth--;
6354
6355  return ret;
6356}
6357
6358static tree
6359extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6360		  bool *strict_overflow_p)
6361{
6362  tree type = TREE_TYPE (t);
6363  enum tree_code tcode = TREE_CODE (t);
6364  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6365				   > GET_MODE_SIZE (TYPE_MODE (type)))
6366		? wide_type : type);
6367  tree t1, t2;
6368  int same_p = tcode == code;
6369  tree op0 = NULL_TREE, op1 = NULL_TREE;
6370  bool sub_strict_overflow_p;
6371
6372  /* Don't deal with constants of zero here; they confuse the code below.  */
6373  if (integer_zerop (c))
6374    return NULL_TREE;
6375
6376  if (TREE_CODE_CLASS (tcode) == tcc_unary)
6377    op0 = TREE_OPERAND (t, 0);
6378
6379  if (TREE_CODE_CLASS (tcode) == tcc_binary)
6380    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6381
6382  /* Note that we need not handle conditional operations here since fold
6383     already handles those cases.  So just do arithmetic here.  */
6384  switch (tcode)
6385    {
6386    case INTEGER_CST:
6387      /* For a constant, we can always simplify if we are a multiply
6388	 or (for divide and modulus) if it is a multiple of our constant.  */
6389      if (code == MULT_EXPR
6390	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
6391	return const_binop (code, fold_convert (ctype, t),
6392			    fold_convert (ctype, c), 0);
6393      break;
6394
6395    CASE_CONVERT: case NON_LVALUE_EXPR:
6396      /* If op0 is an expression ...  */
6397      if ((COMPARISON_CLASS_P (op0)
6398	   || UNARY_CLASS_P (op0)
6399	   || BINARY_CLASS_P (op0)
6400	   || VL_EXP_CLASS_P (op0)
6401	   || EXPRESSION_CLASS_P (op0))
6402	  /* ... and has wrapping overflow, and its type is smaller
6403	     than ctype, then we cannot pass through as widening.  */
6404	  && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
6405	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
6406		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
6407	       && (TYPE_PRECISION (ctype)
6408	           > TYPE_PRECISION (TREE_TYPE (op0))))
6409	      /* ... or this is a truncation (t is narrower than op0),
6410		 then we cannot pass through this narrowing.  */
6411	      || (TYPE_PRECISION (type)
6412		  < TYPE_PRECISION (TREE_TYPE (op0)))
6413	      /* ... or signedness changes for division or modulus,
6414		 then we cannot pass through this conversion.  */
6415	      || (code != MULT_EXPR
6416		  && (TYPE_UNSIGNED (ctype)
6417		      != TYPE_UNSIGNED (TREE_TYPE (op0))))
6418	      /* ... or has undefined overflow while the converted to
6419		 type has not, we cannot do the operation in the inner type
6420		 as that would introduce undefined overflow.  */
6421	      || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
6422		  && !TYPE_OVERFLOW_UNDEFINED (type))))
6423	break;
6424
6425      /* Pass the constant down and see if we can make a simplification.  If
6426	 we can, replace this expression with the inner simplification for
6427	 possible later conversion to our or some other type.  */
6428      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6429	  && TREE_CODE (t2) == INTEGER_CST
6430	  && !TREE_OVERFLOW (t2)
6431	  && (0 != (t1 = extract_muldiv (op0, t2, code,
6432					 code == MULT_EXPR
6433					 ? ctype : NULL_TREE,
6434					 strict_overflow_p))))
6435	return t1;
6436      break;
6437
6438    case ABS_EXPR:
6439      /* If widening the type changes it from signed to unsigned, then we
6440         must avoid building ABS_EXPR itself as unsigned.  */
6441      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6442        {
6443          tree cstype = (*signed_type_for) (ctype);
6444          if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6445	      != 0)
6446            {
6447              t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6448              return fold_convert (ctype, t1);
6449            }
6450          break;
6451        }
6452      /* If the constant is negative, we cannot simplify this.  */
6453      if (tree_int_cst_sgn (c) == -1)
6454        break;
6455      /* FALLTHROUGH */
6456    case NEGATE_EXPR:
6457      if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6458	  != 0)
6459	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6460      break;
6461
6462    case MIN_EXPR:  case MAX_EXPR:
6463      /* If widening the type changes the signedness, then we can't perform
6464	 this optimization as that changes the result.  */
6465      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6466	break;
6467
6468      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
6469      sub_strict_overflow_p = false;
6470      if ((t1 = extract_muldiv (op0, c, code, wide_type,
6471				&sub_strict_overflow_p)) != 0
6472	  && (t2 = extract_muldiv (op1, c, code, wide_type,
6473				   &sub_strict_overflow_p)) != 0)
6474	{
6475	  if (tree_int_cst_sgn (c) < 0)
6476	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6477	  if (sub_strict_overflow_p)
6478	    *strict_overflow_p = true;
6479	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6480			      fold_convert (ctype, t2));
6481	}
6482      break;
6483
6484    case LSHIFT_EXPR:  case RSHIFT_EXPR:
6485      /* If the second operand is constant, this is a multiplication
	 or floor division by a power of two, so we can treat it that
6487	 way unless the multiplier or divisor overflows.  Signed
6488	 left-shift overflow is implementation-defined rather than
6489	 undefined in C90, so do not convert signed left shift into
6490	 multiplication.  */
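      /* For example, for unsigned X, dividing X << 3 by 4 recurses on
	 the equivalent X * 8, which then simplifies to X * 2.  */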
6491      if (TREE_CODE (op1) == INTEGER_CST
6492	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6493	  /* const_binop may not detect overflow correctly,
6494	     so check for it explicitly here.  */
6495	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
6496	  && TREE_INT_CST_HIGH (op1) == 0
6497	  && 0 != (t1 = fold_convert (ctype,
6498				      const_binop (LSHIFT_EXPR,
6499						   size_one_node,
6500						   op1, 0)))
6501	  && !TREE_OVERFLOW (t1))
6502	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6503				       ? MULT_EXPR : FLOOR_DIV_EXPR,
6504				       ctype,
6505				       fold_convert (ctype, op0),
6506				       t1),
6507			       c, code, wide_type, strict_overflow_p);
6508      break;
6509
6510    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 case where we can do anything is when the second operand is a
	 constant.  */
6515      sub_strict_overflow_p = false;
6516      t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6517      t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6518      if (t1 != 0 && t2 != 0
6519	  && (code == MULT_EXPR
6520	      /* If not multiplication, we can only do this if both operands
6521		 are divisible by c.  */
6522	      || (multiple_of_p (ctype, op0, c)
6523	          && multiple_of_p (ctype, op1, c))))
6524	{
6525	  if (sub_strict_overflow_p)
6526	    *strict_overflow_p = true;
6527	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6528			      fold_convert (ctype, t2));
6529	}
6530
6531      /* If this was a subtraction, negate OP1 and set it to be an addition.
6532	 This simplifies the logic below.  */
6533      if (tcode == MINUS_EXPR)
6534	{
6535	  tcode = PLUS_EXPR, op1 = negate_expr (op1);
6536	  /* If OP1 was not easily negatable, the constant may be OP0.  */
6537	  if (TREE_CODE (op0) == INTEGER_CST)
6538	    {
6539	      tree tem = op0;
6540	      op0 = op1;
6541	      op1 = tem;
6542	      tem = t1;
6543	      t1 = t2;
6544	      t2 = tem;
6545	    }
6546	}
6547
6548      if (TREE_CODE (op1) != INTEGER_CST)
6549	break;
6550
      /* If either OP1 or C is negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
         of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is when the operation is a multiply.  In that case,
	 we can apply the distributive law to commute the multiply and the
	 addition if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  /* If the multiplication can overflow we cannot optimize this.
	     ???  Until we can properly mark individual operations as
	     not overflowing we need to treat sizetype specially here as
	     stor-layout relies on this optimization to make
	     DECL_FIELD_BIT_OFFSET always a constant.  */
	  && (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
	      || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
		  && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  *strict_overflow_p = true;
	  return omit_one_operand (type, integer_zero_node, op0);
	}

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type,
				   strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type,
					strict_overflow_p)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = int_const_binop (MULT_EXPR,
					 fold_convert (ctype, op1),
					 fold_convert (ctype, c), 1))
	  && 0 != (t1 = force_fit_type_double (ctype, TREE_INT_CST_LOW (t1),
					       TREE_INT_CST_HIGH (t1),
					       (TYPE_UNSIGNED (ctype)
					        && tcode != MULT_EXPR) ? -1 : 1,
					       TREE_OVERFLOW (t1)))
	  && !TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with an
	 operation of either CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((TYPE_OVERFLOW_UNDEFINED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
		  && code != MULT_EXPR)))
	{
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     op1, c, 0)));
	    }
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    {
	      if (TYPE_OVERFLOW_UNDEFINED (ctype))
		*strict_overflow_p = true;
	      return fold_build2 (code, ctype, fold_convert (ctype, op0),
				  fold_convert (ctype,
						const_binop (TRUNC_DIV_EXPR,
							     c, op1, 0)));
	    }
	}
      break;

    default:
      break;
    }

  return 0;
}
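
/* For illustration, the kind of rewrite extract_muldiv performs:
   given source such as

     size_t f (size_t i) { return (i * 12) / 4; }

   the multiplication by 12 and the division by 4 cancel into

     size_t f (size_t i) { return i * 3; }

   which is valid because 12 is a multiple of 4 and sizetype
   arithmetic is assumed here not to overflow.  */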

/* Return a node which has the indicated constant VALUE (either 0 or
   1), and is of the indicated TYPE.  */

tree
constant_boolean_node (int value, tree type)
{
  if (type == integer_type_node)
    return value ? integer_one_node : integer_zero_node;
  else if (type == boolean_type_node)
    return value ? boolean_true_node : boolean_false_node;
  else
    return build_int_cst (type, value);
}


/* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
   Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'.  Here
   CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
   expression, and ARG to `a'.  If COND_FIRST_P is nonzero, then the
   COND is the first argument to CODE; otherwise (as in the example
   given here), it is the second argument.  TYPE is the type of the
   original expression.  Return NULL_TREE if no simplification is
   possible.  */

static tree
fold_binary_op_with_conditional_arg (location_t loc,
				     enum tree_code code,
				     tree type, tree op0, tree op1,
				     tree cond, tree arg, int cond_first_p)
{
  tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
  tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
  tree test, true_value, false_value;
  tree lhs = NULL_TREE;
  tree rhs = NULL_TREE;

  /* This transformation is only worthwhile if we don't have to wrap
     ARG in a SAVE_EXPR, and the operation can be simplified on at least
     one of the branches once it's pushed inside the COND_EXPR.  */
  if (!TREE_CONSTANT (arg))
    return NULL_TREE;

  if (TREE_CODE (cond) == COND_EXPR)
    {
      test = TREE_OPERAND (cond, 0);
      true_value = TREE_OPERAND (cond, 1);
      false_value = TREE_OPERAND (cond, 2);
      /* If this operand is a void expression (such as one that throws),
	 it does not make sense to try to perform a logical or arithmetic
	 operation involving it.  */
      if (VOID_TYPE_P (TREE_TYPE (true_value)))
	lhs = true_value;
      if (VOID_TYPE_P (TREE_TYPE (false_value)))
	rhs = false_value;
    }
  else
    {
      tree testtype = TREE_TYPE (cond);
      test = cond;
      true_value = constant_boolean_node (true, testtype);
      false_value = constant_boolean_node (false, testtype);
    }

  arg = fold_convert_loc (loc, arg_type, arg);
  if (lhs == 0)
    {
      true_value = fold_convert_loc (loc, cond_type, true_value);
      if (cond_first_p)
	lhs = fold_build2_loc (loc, code, type, true_value, arg);
      else
	lhs = fold_build2_loc (loc, code, type, arg, true_value);
    }
  if (rhs == 0)
    {
      false_value = fold_convert_loc (loc, cond_type, false_value);
      if (cond_first_p)
	rhs = fold_build2_loc (loc, code, type, false_value, arg);
      else
	rhs = fold_build2_loc (loc, code, type, arg, false_value);
    }

  test = fold_build3_loc (loc, COND_EXPR, type, test, lhs, rhs);
  return fold_convert_loc (loc, type, test);
}
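
/* For illustration, with a constant ARG the transformation above
   turns

     1 + (b ? x : y)   into   b ? (1 + x) : (1 + y)

   so that each arm can be folded further.  A non-constant ARG is
   rejected up front because it would have to be wrapped in a
   SAVE_EXPR to avoid evaluating it twice.  */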


/* Subroutine of fold() that checks for the addition of +/- 0.0.

   If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
   TYPE, X + ADDEND is the same as X.  If NEGATE, return true if X -
   ADDEND is the same as X.

   X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
   and finite.  The problematic cases are when X is zero, and its mode
   has signed zeros.  In the case of rounding towards -infinity,
   X - 0 is not the same as X because 0 - 0 is -0.  In other rounding
   modes, X + 0 is not the same as X because -0 + 0 is 0.  */

bool
fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
{
  if (!real_zerop (addend))
    return false;

  /* Don't allow the fold with -fsignaling-nans.  */
  if (HONOR_SNANS (TYPE_MODE (type)))
    return false;

  /* Allow the fold if zeros aren't signed, or their sign isn't important.  */
  if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
    return true;

  /* Treat x + -0 as x - 0 and x - -0 as x + 0.  */
  if (TREE_CODE (addend) == REAL_CST
      && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
    negate = !negate;

  /* The mode has signed zeros, and we have to honor their sign.
     In this situation, there is only one case we can return true for.
     X - 0 is the same as X unless rounding towards -infinity is
     supported.  */
  return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
}
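
/* For illustration, why the sign of zero blocks the fold: under the
   default rounding mode

     -0.0 + 0.0 == +0.0

   so X + 0.0 is not X when X is -0.0, and when rounding towards
   -infinity

     +0.0 - 0.0 == -0.0

   so X - 0.0 is not X when X is +0.0.  Only X - 0.0 without
   sign-dependent rounding survives as an identity.  */
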
6826
6827/* Subroutine of fold() that checks comparisons of built-in math
6828   functions against real constants.
6829
6830   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6831   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
6832   is the type of the result and ARG0 and ARG1 are the operands of the
6833   comparison.  ARG1 must be a TREE_REAL_CST.
6834
6835   The function returns the constant folded tree if a simplification
6836   can be made, and NULL_TREE otherwise.  */
6837
6838static tree
6839fold_mathfn_compare (location_t loc,
6840		     enum built_in_function fcode, enum tree_code code,
6841		     tree type, tree arg0, tree arg1)
6842{
6843  REAL_VALUE_TYPE c;
6844
6845  if (BUILTIN_SQRT_P (fcode))
6846    {
6847      tree arg = CALL_EXPR_ARG (arg0, 0);
6848      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6849
6850      c = TREE_REAL_CST (arg1);
6851      if (REAL_VALUE_NEGATIVE (c))
6852	{
	  /* sqrt(x) ==, < or <= y is always false if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand_loc (loc, type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand_loc (loc, type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2_loc (loc, GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand_loc (loc, type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2_loc (loc, code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand_loc (loc, type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2_loc (loc, NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2_loc (loc, GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2_loc (loc, NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2_loc (loc, code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
				  fold_build2_loc (loc, GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2_loc (loc, code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
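
/* A few illustrative instances of the sqrt comparison folds above,
   sketched for double operands:

     sqrt (x) > 2.0    becomes  x > 4.0
     sqrt (x) > -1.0   becomes  x >= 0.0
     sqrt (x) <= -3.0  becomes  false

   each of which avoids the call to sqrt at run time.  */
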
6953
6954/* Subroutine of fold() that optimizes comparisons against Infinities,
6955   either +Inf or -Inf.
6956
6957   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6958   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
6959   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.
6960
6961   The function returns the constant folded tree if a simplification
6962   can be made, and NULL_TREE otherwise.  */
6963
6964static tree
6965fold_inf_compare (location_t loc, enum tree_code code, tree type,
6966		  tree arg0, tree arg1)
6967{
6968  enum machine_mode mode;
6969  REAL_VALUE_TYPE max;
6970  tree temp;
6971  bool neg;
6972
6973  mode = TYPE_MODE (TREE_TYPE (arg0));
6974
6975  /* For negative infinity swap the sense of the comparison.  */
6976  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6977  if (neg)
6978    code = swap_tree_comparison (code);
6979
6980  switch (code)
6981    {
6982    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
        return NULL_TREE;
      return omit_one_operand_loc (loc, type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
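
/* A few illustrative instances of the folds above, for double:

     x >  +Inf   becomes  false          (when sNaNs can be ignored)
     x <  +Inf   becomes  x <= DBL_MAX
     x >= +Inf   becomes  x > DBL_MAX

   Comparisons against -Inf are first mirrored by swapping the sense
   of the comparison, so they reduce to the same cases.  */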

/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be an INTEGER_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (location_t loc,
		  enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				-1, overflow);
  neg_overflow = false;

  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
                             build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				  -1, overflow | TREE_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case  0:
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case  1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case  0:
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case  1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand_loc (loc, type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
      return build_range_check (loc, type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand_loc (loc, type, tmp, arg00);
	}
      return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}

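/* For illustration, the range check built above: for signed int x,

     x / 3 == 2

   holds exactly for x in { 6, 7, 8 }, so the comparison folds to the
   range test

     x >= 6 && x <= 8

   which build_range_check can in turn emit as a single unsigned
   comparison.  */
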
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of the test
   using a sign test.  Otherwise return NULL_TREE.  RESULT_TYPE is the
   desired result type.  */

static tree
fold_single_bit_test_into_sign_test (location_t loc,
				     enum tree_code code, tree arg0, tree arg1,
				     tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      /* If we have (A & C) != 0 where C is the sign bit of A, convert
	 this into A < 0.  Similarly for (A & C) == 0 into A >= 0.  */
      tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));

      if (arg00 != NULL_TREE
	  /* This is only a win if casting to a signed type is cheap,
	     i.e. when arg00's type is not a partial mode.  */
	  && TYPE_PRECISION (TREE_TYPE (arg00))
	     == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
	{
	  tree stype = signed_type_for (TREE_TYPE (arg00));
	  return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
			      result_type,
			      fold_convert_loc (loc, stype, arg00),
			      build_int_cst (stype, 0));
	}
    }

  return NULL_TREE;
}
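
/* For illustration, on a target where int is 32 bits wide:

     (a & 0x80000000) != 0   becomes   (int) a < 0
     (a & 0x80000000) == 0   becomes   (int) a >= 0

   once the operand is viewed as signed, since the masked bit is
   exactly the sign bit.  */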

/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL_TREE.  RESULT_TYPE is the desired result type.  */

tree
fold_single_bit_test (location_t loc, enum tree_code code,
		      tree arg0, tree arg1, tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert it into ((A >> C2) & 1), where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert_loc (loc, intermediate_type, inner);

      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      if (code == EQ_EXPR)
	inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert_loc (loc, result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
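
/* For illustration, when the tested bit is not the sign bit:

     (a & 8) != 0   becomes   (a >> 3) & 1
     (a & 8) == 0   becomes   ((a >> 3) ^ 1) & 1

   computed in an unsigned intermediate type so the right shift
   cannot replicate a sign bit.  */
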
7315
7316/* Check whether we are allowed to reorder operands arg0 and arg1,
7317   such that the evaluation of arg1 occurs before arg0.  */
7318
7319static bool
7320reorder_operands_p (const_tree arg0, const_tree arg1)
7321{
7322  if (! flag_evaluation_order)
7323      return true;
7324  if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
7325    return true;
7326  return ! TREE_SIDE_EFFECTS (arg0)
7327	 && ! TREE_SIDE_EFFECTS (arg1);
7328}
7329
/* Test whether it is preferable to swap two operands, ARG0 and
   ARG1, for example because ARG0 is an integer constant and ARG1
   isn't.  If REORDER is true, only recommend swapping if we can
   evaluate the operands in reverse order.  */

bool
tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
{
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return 0;
  if (TREE_CODE (arg0) == INTEGER_CST)
    return 1;

  if (TREE_CODE (arg1) == REAL_CST)
    return 0;
  if (TREE_CODE (arg0) == REAL_CST)
    return 1;

  if (TREE_CODE (arg1) == FIXED_CST)
    return 0;
  if (TREE_CODE (arg0) == FIXED_CST)
    return 1;

  if (TREE_CODE (arg1) == COMPLEX_CST)
    return 0;
  if (TREE_CODE (arg0) == COMPLEX_CST)
    return 1;

  if (TREE_CONSTANT (arg1))
    return 0;
  if (TREE_CONSTANT (arg0))
    return 1;

  if (optimize_function_for_size_p (cfun))
    return 0;

  if (reorder && flag_evaluation_order
      && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
    return 0;

  /* It is preferable to swap two SSA_NAME to ensure a canonical form
     for commutative and comparison operators.  Ensuring a canonical
     form allows the optimizers to find additional redundancies without
     having to explicitly check for both orderings.  */
  if (TREE_CODE (arg0) == SSA_NAME
      && TREE_CODE (arg1) == SSA_NAME
      && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
    return 1;

  /* Put SSA_NAMEs last.  */
  if (TREE_CODE (arg1) == SSA_NAME)
    return 0;
  if (TREE_CODE (arg0) == SSA_NAME)
    return 1;

  /* Put variables last.  */
  if (DECL_P (arg1))
    return 0;
  if (DECL_P (arg0))
    return 1;

  return 0;
}

/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
   ARG0 is extended to a wider type.  */

static tree
fold_widened_comparison (location_t loc, enum tree_code code,
			 tree type, tree arg0, tree arg1)
{
  tree arg0_unw = get_unwidened (arg0, NULL_TREE);
  tree arg1_unw;
  tree shorter_type, outer_type;
  tree min, max;
  bool above, below;

  if (arg0_unw == arg0)
    return NULL_TREE;
  shorter_type = TREE_TYPE (arg0_unw);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (shorter_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
    return NULL_TREE;

  arg1_unw = get_unwidened (arg1, NULL_TREE);

  /* If possible, express the comparison in the shorter mode.  */
  if ((code == EQ_EXPR || code == NE_EXPR
       || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
      && (TREE_TYPE (arg1_unw) == shorter_type
	  || ((TYPE_PRECISION (shorter_type)
	       >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
	      && (TYPE_UNSIGNED (shorter_type)
		  == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
	  || (TREE_CODE (arg1_unw) == INTEGER_CST
	      && (TREE_CODE (shorter_type) == INTEGER_TYPE
		  || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
	      && int_fits_type_p (arg1_unw, shorter_type))))
    return fold_build2_loc (loc, code, type, arg0_unw,
			fold_convert_loc (loc, shorter_type, arg1_unw));

  if (TREE_CODE (arg1_unw) != INTEGER_CST
      || TREE_CODE (shorter_type) != INTEGER_TYPE
      || !int_fits_type_p (arg1_unw, shorter_type))
    return NULL_TREE;

  /* If we are comparing with an integer that does not fit into the range
     of the shorter type, the result is known.  */
  outer_type = TREE_TYPE (arg1_unw);
  min = lower_bound_in_type (outer_type, shorter_type);
  max = upper_bound_in_type (outer_type, shorter_type);

  above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   max, arg1_unw));
  below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
						   arg1_unw, min));

  switch (code)
    {
    case EQ_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case NE_EXPR:
      if (above || below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    case LT_EXPR:
    case LE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      break;

    case GT_EXPR:
    case GE_EXPR:
      if (above)
	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
      else if (below)
	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
      break;

    default:
      break;
    }

  return NULL_TREE;
}
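
/* For illustration: given

     short s;  ...  (int) s == 70000

   the constant 70000 is outside short's range [-32768, 32767], so
   the equality folds to constant false, and (int) s < 70000 folds to
   constant true (assuming a 16-bit short).  */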

/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  if (!CONVERT_EXPR_P (arg0))
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  if (TREE_CODE (arg1) != INTEGER_CST
      && !(CONVERT_EXPR_P (arg1)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  if ((TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
       || POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
				  TREE_INT_CST_HIGH (arg1), 0,
				  TREE_OVERFLOW (arg1));
  else
    arg1 = fold_convert_loc (loc, inner_type, arg1);

  return fold_build2_loc (loc, code, type, arg0_inner, arg1);
}

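/* For illustration: since equality is unaffected by signedness, for
   int i

     (unsigned int) i == 5u   becomes   i == 5

   whereas an ordered comparison such as (unsigned int) i < 5u is
   left alone, because negative values of i order differently once
   reinterpreted as unsigned.  */
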
/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
   the step of the array.  Reconstructs s and delta in the case of s *
   delta being an integer constant (and thus already folded).  ADDR is
   the address and OP1 the multiplicative expression.  If the function
   succeeds, the new address expression is returned.  Otherwise
   NULL_TREE is returned.  LOC is the location of the resulting
   expression.  */

static tree
try_move_mult_to_index (location_t loc, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;
  bool mdim = false;

  /* Strip the nops that might be added when converting op1 to sizetype.  */
  STRIP_NOPS (op1);

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
        {
          s = arg0;
          delta = arg1;
        }
      else if (TREE_CODE (arg1) == INTEGER_CST)
        {
          s = arg1;
          delta = arg0;
        }
      else
        return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Treat OP1 as delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  tree domain;

	  /* Remember if this was a multi-dimensional array.  */
	  if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
	    mdim = true;

	  domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! domain)
	    continue;
	  itype = TREE_TYPE (domain);

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
                continue;
	    }
	  else
	    {
	      /* Check whether delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  /* Only fold here if we can verify we do not overflow one
	     dimension of a multi-dimensional array.  */
	  if (mdim)
	    {
	      tree tmp;

	      if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
		  || !TYPE_MAX_VALUE (domain)
		  || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
		continue;

	      tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
				     fold_convert_loc (loc, itype,
						       TREE_OPERAND (ref, 1)),
				     fold_convert_loc (loc, itype, delta));
	      if (!tmp
		  || TREE_CODE (tmp) != INTEGER_CST
		  || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
		continue;
	    }

	  break;
	}
      else
	mdim = false;

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found a suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  SET_EXPR_LOCATION (ret, loc);
  pos = ret;

  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  TREE_OPERAND (pos, 1) = fold_build2_loc (loc, PLUS_EXPR, itype,
				       fold_convert_loc (loc, itype,
							 TREE_OPERAND (pos, 1)),
				       fold_convert_loc (loc, itype, delta));

  return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
}
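
/* For illustration, assuming a target where int occupies 4 bytes:
   given

     int a[100];  ...  &a[3] p+ d * 4

   the multiplier 4 matches the element size of A, so the address
   folds to &a[3 + d] and the pointer arithmetic is absorbed into
   the array index.  */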


/* Fold A < X && A + 1 > Y to A < X && A >= Y.  Normally A + 1 > Y
   means A >= Y && A != MAX, but in this case we know that
   A < X <= MAX.  INEQ is A + 1 > Y, BOUND is A < X.  */

static tree
fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
{
  tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;

  if (TREE_CODE (bound) == LT_EXPR)
    a = TREE_OPERAND (bound, 0);
  else if (TREE_CODE (bound) == GT_EXPR)
    a = TREE_OPERAND (bound, 1);
  else
    return NULL_TREE;

  typea = TREE_TYPE (a);
  if (!INTEGRAL_TYPE_P (typea)
      && !POINTER_TYPE_P (typea))
    return NULL_TREE;

  if (TREE_CODE (ineq) == LT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 1);
      y = TREE_OPERAND (ineq, 0);
    }
  else if (TREE_CODE (ineq) == GT_EXPR)
    {
      a1 = TREE_OPERAND (ineq, 0);
      y = TREE_OPERAND (ineq, 1);
    }
  else
    return NULL_TREE;

  if (TREE_TYPE (a1) != typea)
    return NULL_TREE;

  if (POINTER_TYPE_P (typea))
    {
      /* Convert the pointer types into integers before taking the difference.  */
      tree ta = fold_convert_loc (loc, ssizetype, a);
      tree ta1 = fold_convert_loc (loc, ssizetype, a1);
      diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
    }
  else
    diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);

  if (!diff || !integer_onep (diff))
    return NULL_TREE;

  return fold_build2_loc (loc, GE_EXPR, type, a, y);
}

/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
			  tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else if (TREE_CODE (arg0) == INTEGER_CST)
    {
      arg00 = build_one_cst (type);
      arg01 = arg0;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else if (TREE_CODE (arg1) == INTEGER_CST)
    {
      arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2, get rid of that sign for
	 the purpose of this canonicalization.  */
      if (TREE_INT_CST_HIGH (arg1) == -1
	  && negate_expr_p (arg1)
	  && code == PLUS_EXPR)
	{
	  arg11 = negate_expr (arg1);
	  code = MINUS_EXPR;
	}
      else
	arg11 = arg1;
    }
  else
    {
      /* We cannot generate constant 1 for fract.  */
      if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
	return NULL_TREE;
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
        {
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0
	  /* The remainder should not be a constant, otherwise we
	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
	     increases the number of multiplications necessary.  */
	  && TREE_CODE (arg10) != INTEGER_CST)
        {
	  alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
			      build_int_cst (TREE_TYPE (arg00),
					     int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2_loc (loc, MULT_EXPR, type,
			fold_build2_loc (loc, code, type,
				     fold_convert_loc (loc, type, alt0),
				     fold_convert_loc (loc, type, alt1)),
			fold_convert_loc (loc, type, same));

  return NULL_TREE;
}
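
/* A few illustrative instances of the folds above:

     a * c + b * c   becomes  (a + b) * c
     a * 8 + b * 4   becomes  (a * 2 + b) * 4

   the second by extracting the common power-of-two factor 4.  A
   constant remainder is deliberately left alone: folding i * 4 + 2
   to (i * 2 + 1) * 2 would add a multiplication.  */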

/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
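
/* For illustration, on a hypothetical little-endian target with
   4-byte words, encoding the INTEGER_CST 0x01020304 yields

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   i.e. exactly the bytes the constant would occupy in target
   memory.  */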


/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words, bitpos;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;

  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
    }
  return total_bytes;
}

/* Subroutine of native_encode_expr.  Encode the COMPLEX_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_complex (const_tree expr, unsigned char *ptr, int len)
{
  int rsize, isize;
  tree part;

  part = TREE_REALPART (expr);
  rsize = native_encode_expr (part, ptr, len);
  if (rsize == 0)
    return 0;
  part = TREE_IMAGPART (expr);
  isize = native_encode_expr (part, ptr+rsize, len-rsize);
  if (isize != rsize)
    return 0;
  return rsize + isize;
}


/* Subroutine of native_encode_expr.  Encode the VECTOR_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_vector (const_tree expr, unsigned char *ptr, int len)
{
  int i, size, offset, count;
  tree itype, elem, elements;

  offset = 0;
  elements = TREE_VECTOR_CST_ELTS (expr);
  count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
  itype = TREE_TYPE (TREE_TYPE (expr));
  size = GET_MODE_SIZE (TYPE_MODE (itype));
  for (i = 0; i < count; i++)
    {
      if (elements)
	{
	  elem = TREE_VALUE (elements);
	  elements = TREE_CHAIN (elements);
	}
      else
	elem = NULL_TREE;

      if (elem)
	{
	  if (native_encode_expr (elem, ptr+offset, len-offset) != size)
	    return 0;
	}
      else
	{
	  if (offset + size > len)
	    return 0;
	  memset (ptr+offset, 0, size);
	}
      offset += size;
    }
  return offset;
}


/* Subroutine of native_encode_expr.  Encode the STRING_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_string (const_tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  HOST_WIDE_INT total_bytes;

  if (TREE_CODE (type) != ARRAY_TYPE
      || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
      || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
      || !host_integerp (TYPE_SIZE_UNIT (type), 0))
    return 0;
  total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
  if (total_bytes > len)
    return 0;
  if (TREE_STRING_LENGTH (expr) < total_bytes)
    {
      memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
      memset (ptr + TREE_STRING_LENGTH (expr), 0,
	      total_bytes - TREE_STRING_LENGTH (expr));
    }
  else
    memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
  return total_bytes;
}


/* Subroutine of fold_view_convert_expr.  Encode the INTEGER_CST,
   REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
   buffer PTR of length LEN bytes.  Return the number of bytes
   placed in the buffer, or zero upon failure.  */

int
native_encode_expr (const_tree expr, unsigned char *ptr, int len)
{
  switch (TREE_CODE (expr))
    {
    case INTEGER_CST:
      return native_encode_int (expr, ptr, len);

    case REAL_CST:
      return native_encode_real (expr, ptr, len);

    case COMPLEX_CST:
      return native_encode_complex (expr, ptr, len);

    case VECTOR_CST:
      return native_encode_vector (expr, ptr, len);

    case STRING_CST:
      return native_encode_string (expr, ptr, len);

    default:
      return 0;
    }
}


/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_int (tree type, const unsigned char *ptr, int len)
{
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;
  unsigned HOST_WIDE_INT lo = 0;
  HOST_WIDE_INT hi = 0;

  if (total_bytes > len)
    return NULL_TREE;
  if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
    return NULL_TREE;
  words = total_bytes / UNITS_PER_WORD;

  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      if (total_bytes > UNITS_PER_WORD)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      value = ptr[offset];

      if (bitpos < HOST_BITS_PER_WIDE_INT)
	lo |= (unsigned HOST_WIDE_INT) value << bitpos;
      else
	hi |= (unsigned HOST_WIDE_INT) value
	      << (bitpos - HOST_BITS_PER_WIDE_INT);
    }

  return build_int_cst_wide_type (type, lo, hi);
}


/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a REAL_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_real (tree type, const unsigned char *ptr, int len)
{
  enum machine_mode mode = TYPE_MODE (type);
  int total_bytes = GET_MODE_SIZE (mode);
  int byte, offset, word, words, bitpos;
  unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
  REAL_VALUE_TYPE r;
  long tmp[6];

  if (total_bytes > len || total_bytes > 24)
    return NULL_TREE;
8147    return NULL_TREE;
8148  words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
8149
  memset (tmp, 0, sizeof (tmp));
  for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
       bitpos += BITS_PER_UNIT)
    {
      byte = (bitpos / BITS_PER_UNIT) & 3;
      if (UNITS_PER_WORD < 4)
	{
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
      value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];

      tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
    }

  real_from_target (&r, tmp, mode);
  return build_real (type, r);
}


/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_complex (tree type, const unsigned char *ptr, int len)
{
  tree etype, rpart, ipart;
  int size;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  if (size * 2 > len)
    return NULL_TREE;
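  /* The real part occupies the first SIZE bytes of the buffer and the
     imaginary part the following SIZE bytes, mirroring the layout
     produced by native_encode_complex.  */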
  rpart = native_interpret_expr (etype, ptr, size);
  if (!rpart)
    return NULL_TREE;
  ipart = native_interpret_expr (etype, ptr+size, size);
  if (!ipart)
    return NULL_TREE;
  return build_complex (type, rpart, ipart);
}


/* Subroutine of native_interpret_expr.  Interpret the contents of
   the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
   If the buffer cannot be interpreted, return NULL_TREE.  */

static tree
native_interpret_vector (tree type, const unsigned char *ptr, int len)
{
  tree etype, elem, elements;
  int i, size, count;

  etype = TREE_TYPE (type);
  size = GET_MODE_SIZE (TYPE_MODE (etype));
  count = TYPE_VECTOR_SUBPARTS (type);
  if (size * count > len)
    return NULL_TREE;

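  /* Decode the elements back to front so that consing onto ELEMENTS
     leaves the final TREE_LIST in ascending element order.  */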
  elements = NULL_TREE;
  for (i = count - 1; i >= 0; i--)
    {
      elem = native_interpret_expr (etype, ptr+(i*size), size);
      if (!elem)
	return NULL_TREE;
      elements = tree_cons (NULL_TREE, elem, elements);
    }
  return build_vector (type, elements);
}


/* Subroutine of fold_view_convert_expr.  Interpret the contents of
   the buffer PTR of length LEN as a constant of type TYPE.  For
   INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
   we return a REAL_CST, etc...  If the buffer cannot be interpreted,
   return NULL_TREE.  */

tree
native_interpret_expr (tree type, const unsigned char *ptr, int len)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      return native_interpret_int (type, ptr, len);

    case REAL_TYPE:
      return native_interpret_real (type, ptr, len);

    case COMPLEX_TYPE:
      return native_interpret_complex (type, ptr, len);

    case VECTOR_TYPE:
      return native_interpret_vector (type, ptr, len);

    default:
      return NULL_TREE;
    }
}


/* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
   TYPE at compile-time.  If we're unable to perform the conversion
   return NULL_TREE.  */
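
/* As a sketch of the mechanism: on a target with IEEE single-precision
   floats, VIEW_CONVERT_EXPR<int>(1.0f) first encodes the REAL_CST into
   the four bytes of its target representation and then reinterprets
   them as the INTEGER_CST 0x3f800000.  */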

static tree
fold_view_convert_expr (tree type, tree expr)
{
  /* We support up to 512-bit values (for V8DFmode).  */
  unsigned char buffer[64];
  int len;

  /* Check that the host and target are sane.  */
  if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
    return NULL_TREE;

  len = native_encode_expr (expr, buffer, sizeof (buffer));
  if (len == 0)
    return NULL_TREE;

  return native_interpret_expr (type, buffer, len);
}

/* Build an expression for the address of T.  Folds away INDIRECT_REF
   to avoid confusing the gimplify process.  */

tree
build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
{
  /* The size of the object is not relevant when talking about its address.  */
  if (TREE_CODE (t) == WITH_SIZE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Note: doesn't apply to ALIGN_INDIRECT_REF.  */
  if (TREE_CODE (t) == INDIRECT_REF
      || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
    {
      t = TREE_OPERAND (t, 0);

      if (TREE_TYPE (t) != ptrtype)
	{
	  t = build1 (NOP_EXPR, ptrtype, t);
	  SET_EXPR_LOCATION (t, loc);
	}
    }
  else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
    {
      t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));

      if (TREE_TYPE (t) != ptrtype)
	t = fold_convert_loc (loc, ptrtype, t);
    }
  else
    {
      t = build1 (ADDR_EXPR, ptrtype, t);
      SET_EXPR_LOCATION (t, loc);
    }

  return t;
}

/* Build an expression for the address of T.  */

tree
build_fold_addr_expr_loc (location_t loc, tree t)
{
  tree ptrtype = build_pointer_type (TREE_TYPE (t));

  return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
}

/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  arg0 = op0;
  if (arg0)
    {
      if (CONVERT_EXPR_CODE_P (code)
	  || code == FLOAT_EXPR || code == ABS_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1_loc (loc, code, type,
				    fold_convert_loc (loc, TREE_TYPE (op0),
						      TREE_OPERAND (arg0, 1))));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1_loc (loc, code, type,
				 fold_convert_loc (loc,
						   TREE_TYPE (op0), arg01));
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1_loc (loc, code, type,
				 fold_convert_loc (loc,
						   TREE_TYPE (op0), arg02));
	  tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
			     arg01, arg02);

	  /* If this was a conversion, and all we did was to move it
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((CONVERT_EXPR_CODE_P (code)
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    {
	      tem = build1 (code, type,
			    build3 (COND_EXPR,
				    TREE_TYPE (TREE_OPERAND
					       (TREE_OPERAND (tem, 1), 0)),
				    TREE_OPERAND (tem, 0),
				    TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				    TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	      SET_EXPR_LOCATION (tem, loc);
	    }
	  return tem;
	}
      else if (COMPARISON_CLASS_P (arg0))
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold_build3_loc (loc, COND_EXPR, type, arg0,
				fold_build1_loc (loc, code, type,
					     integer_one_node),
				fold_build1_loc (loc, code, type,
					     integer_zero_node));
	}
    }

  switch (code)
    {
    case PAREN_EXPR:
      /* Re-association barriers around constants and other re-association
	 barriers can be removed.  */
      if (CONSTANT_CLASS_P (op0)
	  || TREE_CODE (op0) == PAREN_EXPR)
	return fold_convert_loc (loc, type, op0);
      return NULL_TREE;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
	 a new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
			    TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (CONVERT_EXPR_P (op0))
	{
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and initial types are either both
	     float or both integer, we don't need the middle conversion if the
	     former is wider than the latter and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer since
	     then we sometimes need the middle conversion.  Likewise if the
	     final type has a precision not equal to the size of its mode.  */
	  if (((inter_int && inside_int)
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
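	  /* For example, (int) (short) (unsigned char) X becomes
	     (int) (unsigned char) X; the inner zero-extension already
	     guarantees a non-negative intermediate value.  */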
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.  */
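	  /* For example, with 8-bit chars, 16-bit shorts and 32-bit ints,
	     (char) (short) (int) X hits none of the cases above and
	     simplifies to (char) (int) X.  */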
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type)))
	    return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
	}

      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type.  */
	  if (! offset && bitpos == 0
	      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	    return fold_convert_loc (loc, type,
				     build_fold_addr_expr_loc (loc, base));
	}

      if (TREE_CODE (op0) == MODIFY_EXPR
	  && TREE_CONSTANT (TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
	       (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  SET_EXPR_LOCATION (tem, loc);
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.
	 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
	 very likely don't have maximal range for their precision and this
	 transformation effectively doesn't preserve non-maximal ranges.  */
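      /* For example, with a 32-bit int X, (unsigned char) (X & 0x7f)
	 becomes (unsigned char) X & 0x7f.  */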
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and_expr = op0;
	  tree and0 = TREE_OPERAND (and_expr, 0);
	  tree and1 = TREE_OPERAND (and_expr, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and_expr))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = unsigned_type_for (TREE_TYPE (and0));
		  and0 = fold_convert_loc (loc, uns, and0);
		  and1 = fold_convert_loc (loc, uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
					   TREE_INT_CST_HIGH (and1), 0,
					   TREE_OVERFLOW (and1));
	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
				  fold_convert_loc (loc, type, and0), tem);
	    }
	}

      /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
	 when one of the new casts will fold away.  Conservatively we assume
	 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST.  */
      if (POINTER_TYPE_P (type)
	  && TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	      || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree arg01 = TREE_OPERAND (arg0, 1);

	  return fold_build2_loc (loc,
			      TREE_CODE (arg0), type,
			      fold_convert_loc (loc, type, arg00),
			      fold_convert_loc (loc, sizetype, arg01));
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X has an integer type not narrower
	 than T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1_loc (loc, BIT_NOT_EXPR, type,
				fold_convert_loc (loc, type, tem));
	}

      /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
	 type of X and Y (integer types only).  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == MULT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  /* Be careful not to introduce new overflows.  */
	  tree mult_type;
	  if (TYPE_OVERFLOW_WRAPS (type))
	    mult_type = type;
	  else
	    mult_type = unsigned_type_for (type);

	  if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
	    {
	      tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
				 fold_convert_loc (loc, mult_type,
						   TREE_OPERAND (op0, 0)),
				 fold_convert_loc (loc, mult_type,
						   TREE_OPERAND (op0, 1)));
	      return fold_convert_loc (loc, type, tem);
	    }
	}

      tem = fold_convert_const (code, type, op0);
      return tem ? tem : NULL_TREE;

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      return NULL_TREE;

    case FIXED_CONVERT_EXPR:
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      if (TREE_TYPE (op0) == type)
	return op0;
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
			    type, TREE_OPERAND (op0, 0));

      /* For integral conversions with the same precision, or for pointer
	 conversions, use a NOP_EXPR instead.  */
      if ((INTEGRAL_TYPE_P (type)
	   || POINTER_TYPE_P (type))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	return fold_convert_loc (loc, type, op0);

      /* Strip inner integral conversions that do not change the precision.  */
      if (CONVERT_EXPR_P (op0)
	  && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
	      || POINTER_TYPE_P (TREE_TYPE (op0)))
	  && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
	      || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
	  && (TYPE_PRECISION (TREE_TYPE (op0))
	      == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
	return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
			    type, TREE_OPERAND (op0, 0));

      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (loc, arg0);
      if (tem)
	return fold_convert_loc (loc, type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert_loc (loc, type,
				     fold_build1_loc (loc, ABS_EXPR,
						  TREE_TYPE (targ0),
						  targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (TREE_CODE (arg0) == ABS_EXPR)
	return arg0;
      else if (tree_expr_nonnegative_p (arg0))
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1_loc (loc, ABS_EXPR, type,
				fold_convert_loc (loc, type, tem));
	}
      return NULL_TREE;

    case CONJ_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
	  return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
			      negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2_loc (loc, MINUS_EXPR, type,
			    fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
			    build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1_loc (loc, NEGATE_EXPR, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 0)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
				     fold_convert_loc (loc, type,
						       TREE_OPERAND (arg0, 1)))))
	return fold_build2_loc (loc, BIT_XOR_EXPR, type,
			    fold_convert_loc (loc, type,
					      TREE_OPERAND (arg0, 0)), tem);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree elements = TREE_VECTOR_CST_ELTS (arg0), elem, list = NULL_TREE;
	  int count = TYPE_VECTOR_SUBPARTS (type), i;

	  for (i = 0; i < count; i++)
	    {
	      if (elements)
		{
		  elem = TREE_VALUE (elements);
		  elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
		  if (elem == NULL_TREE)
		    break;
		  elements = TREE_CHAIN (elements);
		}
	      else
		elem = build_int_cst (TREE_TYPE (type), -1);
	      list = tree_cons (NULL_TREE, elem, list);
	    }
	  if (i == count)
	    return build_vector (type, nreverse (list));
	}

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert_loc (loc, boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (loc, arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert_loc (loc, type, tem);

    case REALPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_REALPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
			     fold_build1_loc (loc, REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1_loc (loc, REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, REALPART_EXPR, itype,
			     TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_COS);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert_loc (loc, type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1_loc (loc, IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert_loc (loc, type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert_loc (loc, type, negate_expr (tem));
	}
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
		fn = mathfn_built_in (type, BUILT_IN_SIN);
		if (fn)
		  return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
		break;

	      default:
		break;
	      }
	}
      return NULL_TREE;

    case INDIRECT_REF:
      /* Fold *&X to X if X is an lvalue.  */
      if (TREE_CODE (op0) == ADDR_EXPR)
	{
	  tree op00 = TREE_OPERAND (op0, 0);
	  if ((TREE_CODE (op00) == VAR_DECL
	       || TREE_CODE (op00) == PARM_DECL
	       || TREE_CODE (op00) == RESULT_DECL)
	      && !TREE_READONLY (op00))
	    return op00;
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}


/* If the operation was a conversion, do _not_ mark a resulting constant
   with TREE_OVERFLOW if the original constant was not.  These conversions
   have implementation defined behavior and retaining the TREE_OVERFLOW
   flag here would confuse later passes such as VRP.  */
tree
fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
				tree type, tree op0)
{
  tree res = fold_unary_loc (loc, code, type, op0);
  if (res
      && TREE_CODE (res) == INTEGER_CST
      && TREE_CODE (op0) == INTEGER_CST
      && CONVERT_EXPR_CODE_P (code))
    TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);

  return res;
}

/* Fold a binary expression of code CODE and type TYPE with operands
   OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
   Return the folded expression if folding is successful.  Otherwise,
   return NULL_TREE.  */

static tree
fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
{
  enum tree_code compl_code;

  if (code == MIN_EXPR)
    compl_code = MAX_EXPR;
  else if (code == MAX_EXPR)
    compl_code = MIN_EXPR;
  else
    gcc_unreachable ();

  /* MIN (MAX (a, b), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));

  /* MIN (MAX (b, a), b) == b.  */
  if (TREE_CODE (op0) == compl_code
      && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
      && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
    return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));

  /* MIN (a, MAX (a, b)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));

  /* MIN (a, MAX (b, a)) == a.  */
  if (TREE_CODE (op1) == compl_code
      && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
      && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
    return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));

  return NULL_TREE;
}

/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.
   Set *STRICT_OVERFLOW_P to true if the canonicalization is only
   valid if signed overflow is undefined.  */

static tree
maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
				 tree arg0, tree arg1,
				 bool *strict_overflow_p)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  We can change the
     first form only if overflow is undefined.  */
  if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
	 /* In principle pointers also have undefined overflow behavior,
	    but that causes problems elsewhere.  */
	 && !POINTER_TYPE_P (TREE_TYPE (arg0))
	 && (code0 == MINUS_EXPR
	     || code0 == PLUS_EXPR)
	 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
	|| code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
	return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
      *strict_overflow_p = true;
    }

  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
      && ((sgn0 == 1
	   && TYPE_MIN_VALUE (TREE_TYPE (cst0))
	   && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
	  || (sgn0 == -1
	      && TYPE_MAX_VALUE (TREE_TYPE (cst0))
	      && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
    /* We cannot swap the comparison here as that would cause us to
       endlessly recurse.  */
    return NULL_TREE;

  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield a more canonical form, do so.  */
  if (swap)
    return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2_loc (loc, code, type, t, arg1);
}

/* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
   overflow further.  Try to decrease the magnitude of constants involved
   by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
   and put sole constants at the second argument position.
   Returns the canonicalized tree if changed, otherwise NULL_TREE.  */

static tree
maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
			       tree arg0, tree arg1)
{
  tree t;
  bool strict_overflow_p;
  const char * const warnmsg = G_("assuming signed overflow does not occur "
				  "when reducing constant in comparison");

  /* Try canonicalization by simplifying arg0.  */
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
				       &strict_overflow_p);
  if (t)
    {
      if (strict_overflow_p)
	fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
      return t;
    }

  /* Try canonicalization by simplifying arg1 using the swapped
     comparison.  */
  code = swap_tree_comparison (code);
  strict_overflow_p = false;
  t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
				       &strict_overflow_p);
  if (t && strict_overflow_p)
    fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
  return t;
}

/* Return whether BASE + OFFSET + BITPOS may wrap around the address
   space.  This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */

static bool
pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
{
  unsigned HOST_WIDE_INT offset_low, total_low;
  HOST_WIDE_INT size, offset_high, total_high;

  if (!POINTER_TYPE_P (TREE_TYPE (base)))
    return true;

  if (bitpos < 0)
    return true;

  if (offset == NULL_TREE)
    {
      offset_low = 0;
      offset_high = 0;
    }
  else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
    return true;
  else
    {
      offset_low = TREE_INT_CST_LOW (offset);
      offset_high = TREE_INT_CST_HIGH (offset);
    }

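  /* Add the byte offset implied by BITPOS to OFFSET; if even this
     unsigned double-word addition overflows, assume the address may
     wrap.  */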
  if (add_double_with_sign (offset_low, offset_high,
			    bitpos / BITS_PER_UNIT, 0,
			    &total_low, &total_high,
			    true))
    return true;

  if (total_high != 0)
    return true;

  size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
  if (size <= 0)
    return true;

  /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
     array.  */
  if (TREE_CODE (base) == ADDR_EXPR)
    {
      HOST_WIDE_INT base_size;

      base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
      if (base_size > 0 && size < base_size)
	size = base_size;
    }

  return total_low > (unsigned HOST_WIDE_INT) size;
}

/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary itself should call fold_binary rather than this
   routine directly.  Fold a comparison with tree code CODE and
   type TYPE with operands OP0 and OP1.  Return the folded
   comparison or NULL_TREE.  */

static tree
fold_comparison (location_t loc, enum tree_code code, tree type,
		 tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.  */
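  /* For example, X + 4 < 10 becomes X < 6 when signed overflow is
     undefined.  */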
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
			 TREE_TYPE (arg1), const2, const1);

      /* If the constant operation overflowed this can be
	 simplified as a comparison against INT_MAX/INT_MIN.  */
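      /* For example, X + 1 < INT_MIN folds to false: it could only
	 hold if X + 1 itself overflowed.  */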
      if (TREE_CODE (lhs) == INTEGER_CST
	  && TREE_OVERFLOW (lhs))
	{
	  int const1_sgn = tree_int_cst_sgn (const1);
	  enum tree_code code2 = code;

	  /* Get the sign of the constant on the lhs if the
	     operation were VARIABLE + CONST1.  */
	  if (TREE_CODE (arg0) == MINUS_EXPR)
	    const1_sgn = -const1_sgn;

	  /* The sign of the constant determines if we overflowed
	     INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
	     Canonicalize to the INT_MIN overflow by swapping the comparison
	     if necessary.  */
	  if (const1_sgn == -1)
	    code2 = swap_tree_comparison (code);

	  /* We now can look at the canonicalized case
	       VARIABLE + 1  CODE2  INT_MIN
	     and decide on the result.  */
	  if (code2 == LT_EXPR
	      || code2 == LE_EXPR
	      || code2 == EQ_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_false_node, variable);
	  else if (code2 == NE_EXPR
		   || code2 == GE_EXPR
		   || code2 == GT_EXPR)
	    return omit_one_operand_loc (loc, type, boolean_true_node, variable);
	}

      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	{
	  fold_overflow_warning (("assuming signed overflow does not occur "
				  "when changing X +- C1 cmp C2 to "
				  "X cmp C1 +- C2"),
				 WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type, variable, lhs);
	}
    }

  /* For comparisons of pointers we can decompose them to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR or a
     POINTER_PLUS_EXPR to do more than the operand_equal_p test below.  */
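  /* For example, &s.f1 != &s.f0 folds to true by comparing the bit
     positions of the two fields within S (a sketch for an ordinary
     struct with distinct fields f0 and f1).  */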
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR
	  || TREE_CODE (arg0) == POINTER_PLUS_EXPR
	  || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false, indirect_base1 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  indirect_baseN will be true
	 if baseN is not an address but refers to the object itself.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}
      else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
	{
	  base0 = TREE_OPERAND (arg0, 0);
	  offset0 = TREE_OPERAND (arg0, 1);
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base1) == INDIRECT_REF)
	    base1 = TREE_OPERAND (base1, 0);
	  else
	    indirect_base1 = true;
	}
      else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
	{
	  base1 = TREE_OPERAND (arg1, 0);
	  offset1 = TREE_OPERAND (arg1, 1);
	}

      /* If we have equivalent bases we might be able to simplify.  */
      if (indirect_base0 == indirect_base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  */
	  if ((offset0 == offset1
	       || (offset0 && offset1
		   && operand_equal_p (offset0, offset1, 0)))
	      && (code == EQ_EXPR
		  || code == NE_EXPR
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && bitpos0 != bitpos1
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_CONDITIONAL);

	      switch (code)
		{
		case EQ_EXPR:
		  return constant_boolean_node (bitpos0 == bitpos1, type);
		case NE_EXPR:
		  return constant_boolean_node (bitpos0 != bitpos1, type);
		case LT_EXPR:
		  return constant_boolean_node (bitpos0 < bitpos1, type);
		case LE_EXPR:
		  return constant_boolean_node (bitpos0 <= bitpos1, type);
		case GE_EXPR:
		  return constant_boolean_node (bitpos0 >= bitpos1, type);
		case GT_EXPR:
		  return constant_boolean_node (bitpos0 > bitpos1, type);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to remain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1
		   && ((code == EQ_EXPR || code == NE_EXPR)
		       || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to the signed size type we cover middle-end
		 pointer arithmetic, which operates on unsigned offsets of
		 size-type width, as well as ARRAY_REF offsets, which are
		 properly sign or zero extended from their type in case it
		 is narrower than the size type.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert_loc (loc, signed_size_type_node,
					    offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert_loc (loc, signed_size_type_node,
					    offset1);

	      if (code != EQ_EXPR
		  && code != NE_EXPR
		  && (pointer_may_wrap_p (base0, offset0, bitpos0)
		      || pointer_may_wrap_p (base1, offset1, bitpos1)))
		fold_overflow_warning (("assuming pointer wraparound does not "
					"occur when comparing P +- C1 with "
					"P +- C2"),
				       WARN_STRICT_OVERFLOW_COMPARISON);

	      return fold_build2_loc (loc, code, type, offset0, offset1);
	    }
	}
      /* For non-equal bases we can simplify if they are addresses
	 of local binding decls or constants.  */
      else if (indirect_base0 && indirect_base1
	       /* We know that !operand_equal_p (base0, base1, 0)
		  because the if condition was false.  But make
		  sure two decls are not the same.  */
	       && base0 != base1
	       && TREE_CODE (arg0) == ADDR_EXPR
	       && TREE_CODE (arg1) == ADDR_EXPR
	       && (((TREE_CODE (base0) == VAR_DECL
		     || TREE_CODE (base0) == PARM_DECL)
		    && (targetm.binds_local_p (base0)
			|| CONSTANT_CLASS_P (base1)))
		   || CONSTANT_CLASS_P (base0))
	       && (((TREE_CODE (base1) == VAR_DECL
		     || TREE_CODE (base1) == PARM_DECL)
		    && (targetm.binds_local_p (base1)
			|| CONSTANT_CLASS_P (base0)))
		   || CONSTANT_CLASS_P (base1)))
	{
	  if (code == EQ_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_false_node,
				      arg0, arg1);
	  else if (code == NE_EXPR)
	    return omit_two_operands_loc (loc, type, boolean_true_node,
				      arg0, arg1);
	}
      /* For equal offsets we can simplify to a comparison of the
	 base addresses.  */
      else if (bitpos0 == bitpos1
	       && (indirect_base0
		   ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
	       && (indirect_base1
		   ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
	       && ((offset0 == offset1)
		   || (offset0 && offset1
		       && operand_equal_p (offset0, offset1, 0))))
	{
	  if (indirect_base0)
	    base0 = build_fold_addr_expr_loc (loc, base0);
	  if (indirect_base1)
	    base1 = build_fold_addr_expr_loc (loc, base1);
	  return fold_build2_loc (loc, code, type, base0, base1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
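  /* For example, X + 4 < Y + 1 becomes X < Y + -3, replacing two
     constants with a single one of smaller magnitude.  */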
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;
      const char * const warnmsg = G_("assuming signed overflow does not "
				      "occur when combining constants around "
				      "a comparison");

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
			      variable1,
			      fold_build2_loc (loc,
					   TREE_CODE (arg1), TREE_TYPE (arg1),
					   variable2, cst));
	}

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	{
	  fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
	  return fold_build2_loc (loc, code, type,
			      fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
					   variable1, cst),
			      variable2);
	}
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
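  /* For example, X * 4 > 0 becomes X > 0, and X * -4 > 0 becomes
     X < 0.  */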
  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* Handle unfolded multiplication by zero.  */
      if (integer_zerop (const1))
	return fold_build2_loc (loc, cmp_code, type, const1, const2);

      fold_overflow_warning (("assuming signed overflow does not occur when "
			      "eliminating multiplication in comparison "
			      "with zero"),
			     WARN_STRICT_OVERFLOW_COMPARISON);

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2_loc (loc, cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (loc, code, type, op0, op1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2_loc (loc, code, type,
			    fold_convert_loc (loc, newtype, targ0),
			    fold_convert_loc (loc, newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2_loc (loc, swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2_loc (loc, code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand_loc (loc, type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst)
	      && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = fold_inf_compare (loc, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
         a MINUS_EXPR whose first operand is also a real constant, i.e.
         (c1 - x) < c2 becomes x > c1-c2.  Reordering is allowed on
         floating-point types only if -fassociative-math is set.  */
      if (flag_associative_math
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && !TREE_OVERFLOW (tem))
	return fold_build2_loc (loc, swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && CONVERT_EXPR_P (arg0))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9733    {
9734      tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9735      if (tem)
9736	return tem;
9737    }
9738
9739  /* Simplify comparison of something with itself.  (For IEEE
9740     floating-point, we can only do some of these simplifications.)  */
9741  if (operand_equal_p (arg0, arg1, 0))
9742    {
9743      switch (code)
9744	{
9745	case EQ_EXPR:
9746	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9747	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9748	    return constant_boolean_node (1, type);
9749	  break;
9750
9751	case GE_EXPR:
9752	case LE_EXPR:
9753	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9754	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9755	    return constant_boolean_node (1, type);
9756	  return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9757
9758	case NE_EXPR:
9759	  /* For NE, we can only do this simplification if integer
9760	     or we don't honor IEEE floating point NaNs.  */
9761	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9762	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9763	    break;
9764	  /* ... fall through ...  */
9765	case GT_EXPR:
9766	case LT_EXPR:
9767	  return constant_boolean_node (0, type);
9768	default:
9769	  gcc_unreachable ();
9770	}
9771    }
9772
9773  /* If we are comparing an expression that just has comparisons
9774     of two integer values, arithmetic expressions of those comparisons,
9775     and constants, we can simplify it.  There are only three cases
9776     to check: the two values can either be equal, the first can be
9777     greater, or the second can be greater.  Fold the expression for
9778     those three values.  Since each value must be 0 or 1, we have
9779     eight possibilities, each of which corresponds to the constant 0
9780     or 1 or one of the six possible comparisons.
9781
9782     This handles common cases like (a > b) == 0 but also handles
9783     expressions like  ((x > y) - (y > x)) > 0, which supposedly
9784     occur in macroized code.  */
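  /* Worked example (illustrative, not part of the original comment):
     for ((x > y) - (y > x)) > 0 the three substitutions below yield
     high_result == 1, equal_result == 0 and low_result == 0, i.e.
     mask 4, so the whole expression folds to x > y.  */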
9785
9786  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9787    {
9788      tree cval1 = 0, cval2 = 0;
9789      int save_p = 0;
9790
9791      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9792	  /* Don't handle degenerate cases here; they should already
9793	     have been handled anyway.  */
9794	  && cval1 != 0 && cval2 != 0
9795	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9796	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9797	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9798	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9799	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9800	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9801				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9802	{
9803	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9804	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9805
9806	  /* We can't just pass T to eval_subst in case cval1 or cval2
9807	     was the same as ARG1.  */
9808
9809	  tree high_result
9810		= fold_build2_loc (loc, code, type,
9811			       eval_subst (loc, arg0, cval1, maxval,
9812					   cval2, minval),
9813			       arg1);
9814	  tree equal_result
9815		= fold_build2_loc (loc, code, type,
9816			       eval_subst (loc, arg0, cval1, maxval,
9817					   cval2, maxval),
9818			       arg1);
9819	  tree low_result
9820		= fold_build2_loc (loc, code, type,
9821			       eval_subst (loc, arg0, cval1, minval,
9822					   cval2, maxval),
9823			       arg1);
9824
9825	  /* All three of these results should be 0 or 1.  Confirm they are.
9826	     Then use those values to select the proper code to use.  */
9827
9828	  if (TREE_CODE (high_result) == INTEGER_CST
9829	      && TREE_CODE (equal_result) == INTEGER_CST
9830	      && TREE_CODE (low_result) == INTEGER_CST)
9831	    {
9832	      /* Make a 3-bit mask with the high-order bit being the
9833		 value for `>', the next for '=', and the low for '<'.  */
9834	      switch ((integer_onep (high_result) * 4)
9835		      + (integer_onep (equal_result) * 2)
9836		      + integer_onep (low_result))
9837		{
9838		case 0:
9839		  /* Always false.  */
9840		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9841		case 1:
9842		  code = LT_EXPR;
9843		  break;
9844		case 2:
9845		  code = EQ_EXPR;
9846		  break;
9847		case 3:
9848		  code = LE_EXPR;
9849		  break;
9850		case 4:
9851		  code = GT_EXPR;
9852		  break;
9853		case 5:
9854		  code = NE_EXPR;
9855		  break;
9856		case 6:
9857		  code = GE_EXPR;
9858		  break;
9859		case 7:
9860		  /* Always true.  */
9861		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9862		}
9863
9864	      if (save_p)
9865		{
9866		  tem = save_expr (build2 (code, type, cval1, cval2));
9867		  SET_EXPR_LOCATION (tem, loc);
9868		  return tem;
9869		}
9870	      return fold_build2_loc (loc, code, type, cval1, cval2);
9871	    }
9872	}
9873    }
9874
9875  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9876     into a single range test.  */
9877  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9878       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9879      && TREE_CODE (arg1) == INTEGER_CST
9880      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9881      && !integer_zerop (TREE_OPERAND (arg0, 1))
9882      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9883      && !TREE_OVERFLOW (arg1))
9884    {
9885      tem = fold_div_compare (loc, code, type, arg0, arg1);
9886      if (tem != NULL_TREE)
9887	return tem;
9888    }
9889
9890  /* Fold ~X op ~Y as Y op X.  */
9891  if (TREE_CODE (arg0) == BIT_NOT_EXPR
9892      && TREE_CODE (arg1) == BIT_NOT_EXPR)
9893    {
9894      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9895      return fold_build2_loc (loc, code, type,
9896			  fold_convert_loc (loc, cmp_type,
9897					    TREE_OPERAND (arg1, 0)),
9898			  TREE_OPERAND (arg0, 0));
9899    }
9900
9901  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
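  /* E.g. "~x < 5" becomes "x > ~5", i.e. "x > -6" on a two's
     complement target (illustrative example, not part of the
     original comment).  */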
9902  if (TREE_CODE (arg0) == BIT_NOT_EXPR
9903      && TREE_CODE (arg1) == INTEGER_CST)
9904    {
9905      tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9906      return fold_build2_loc (loc, swap_tree_comparison (code), type,
9907			  TREE_OPERAND (arg0, 0),
9908			  fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9909				       fold_convert_loc (loc, cmp_type, arg1)));
9910    }
9911
9912  return NULL_TREE;
9913}
9914
9915
9916/* Subroutine of fold_binary.  Optimize complex multiplications of the
9917   form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2).  The
9918   argument EXPR represents the expression "z" of type TYPE.  */
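/* Illustrative example (not from the original comment): for
   z = a + b*i this computes (a*a + b*b) + 0*i, since
   (a + b*i) * (a - b*i) == a*a + b*b.  */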
9919
9920static tree
9921fold_mult_zconjz (location_t loc, tree type, tree expr)
9922{
9923  tree itype = TREE_TYPE (type);
9924  tree rpart, ipart, tem;
9925
9926  if (TREE_CODE (expr) == COMPLEX_EXPR)
9927    {
9928      rpart = TREE_OPERAND (expr, 0);
9929      ipart = TREE_OPERAND (expr, 1);
9930    }
9931  else if (TREE_CODE (expr) == COMPLEX_CST)
9932    {
9933      rpart = TREE_REALPART (expr);
9934      ipart = TREE_IMAGPART (expr);
9935    }
9936  else
9937    {
9938      expr = save_expr (expr);
9939      rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9940      ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9941    }
9942
9943  rpart = save_expr (rpart);
9944  ipart = save_expr (ipart);
9945  tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9946		     fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9947		     fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9948  return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9949		      fold_convert_loc (loc, itype, integer_zero_node));
9950}
9951
9952
9953/* Subroutine of fold_binary.  If P is the value of EXPR, computes
9954   power-of-two M and (arbitrary) N such that M divides (P-N).  This condition
9955   guarantees that P and N have the same least significant log2(M) bits.
9956   N is not otherwise constrained.  In particular, N is not normalized to
9957   0 <= N < M as is common.  In general, the precise value of P is unknown.
9958   M is chosen as large as possible such that constant N can be determined.
9959
9960   Returns M and sets *RESIDUE to N.
9961
9962   If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9963   account.  This is not always possible due to PR 35705.
9964 */
9965
9966static unsigned HOST_WIDE_INT
9967get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9968				 bool allow_func_align)
9969{
9970  enum tree_code code;
9971
9972  *residue = 0;
9973
9974  code = TREE_CODE (expr);
9975  if (code == ADDR_EXPR)
9976    {
9977      expr = TREE_OPERAND (expr, 0);
9978      if (handled_component_p (expr))
9979	{
9980	  HOST_WIDE_INT bitsize, bitpos;
9981	  tree offset;
9982	  enum machine_mode mode;
9983	  int unsignedp, volatilep;
9984
9985	  expr = get_inner_reference (expr, &bitsize, &bitpos, &offset,
9986				      &mode, &unsignedp, &volatilep, false);
9987	  *residue = bitpos / BITS_PER_UNIT;
9988	  if (offset)
9989	    {
9990	      if (TREE_CODE (offset) == INTEGER_CST)
9991		*residue += TREE_INT_CST_LOW (offset);
9992	      else
9993		/* We don't handle more complicated offset expressions.  */
9994		return 1;
9995	    }
9996	}
9997
9998      if (DECL_P (expr)
9999	  && (allow_func_align || TREE_CODE (expr) != FUNCTION_DECL))
10000	return DECL_ALIGN_UNIT (expr);
10001    }
10002  else if (code == POINTER_PLUS_EXPR)
10003    {
10004      tree op0, op1;
10005      unsigned HOST_WIDE_INT modulus;
10006      enum tree_code inner_code;
10007
10008      op0 = TREE_OPERAND (expr, 0);
10009      STRIP_NOPS (op0);
10010      modulus = get_pointer_modulus_and_residue (op0, residue,
10011						 allow_func_align);
10012
10013      op1 = TREE_OPERAND (expr, 1);
10014      STRIP_NOPS (op1);
10015      inner_code = TREE_CODE (op1);
10016      if (inner_code == INTEGER_CST)
10017	{
10018	  *residue += TREE_INT_CST_LOW (op1);
10019	  return modulus;
10020	}
10021      else if (inner_code == MULT_EXPR)
10022	{
10023	  op1 = TREE_OPERAND (op1, 1);
10024	  if (TREE_CODE (op1) == INTEGER_CST)
10025	    {
10026	      unsigned HOST_WIDE_INT align;
10027
              /* Compute the greatest power-of-2 divisor of op1 (its
                 lowest set bit; e.g. 24 yields 8).  */
10029	      align = TREE_INT_CST_LOW (op1);
10030	      align &= -align;
10031
              /* If align is non-zero and less than *modulus, replace
                 *modulus with align.  If align is 0, then either op1 is 0
                 or the greatest power-of-2 divisor of op1 doesn't fit in an
                 unsigned HOST_WIDE_INT.  In either case, no additional
                 constraint is imposed.  */
10037	      if (align)
10038		modulus = MIN (modulus, align);
10039
10040	      return modulus;
10041	    }
10042	}
10043    }
10044
  /* If we get here, we were unable to determine anything useful about the
     expression.  */
  return 1;
10048}
10049
10050
10051/* Fold a binary expression of code CODE and type TYPE with operands
10052   OP0 and OP1.  LOC is the location of the resulting expression.
10053   Return the folded expression if folding is successful.  Otherwise,
10054   return NULL_TREE.  */
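/* For illustration (call not taken from the original sources):

     tem = fold_binary_loc (loc, PLUS_EXPR, type, op0, op1);

   either returns a simplified tree for op0 + op1 or NULL_TREE when
   no folding applies; callers then build the expression as-is.  */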
10055
10056tree
10057fold_binary_loc (location_t loc,
10058	     enum tree_code code, tree type, tree op0, tree op1)
10059{
10060  enum tree_code_class kind = TREE_CODE_CLASS (code);
10061  tree arg0, arg1, tem;
10062  tree t1 = NULL_TREE;
10063  bool strict_overflow_p;
10064
10065  gcc_assert (IS_EXPR_CODE_CLASS (kind)
10066	      && TREE_CODE_LENGTH (code) == 2
10067	      && op0 != NULL_TREE
10068	      && op1 != NULL_TREE);
10069
10070  arg0 = op0;
10071  arg1 = op1;
10072
10073  /* Strip any conversions that don't change the mode.  This is
10074     safe for every expression, except for a comparison expression
10075     because its signedness is derived from its operands.  So, in
10076     the latter case, only strip conversions that don't change the
10077     signedness.  MIN_EXPR/MAX_EXPR also need signedness of arguments
10078     preserved.
10079
10080     Note that this is done as an internal manipulation within the
10081     constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
10084     the tree that will get out of the constant folder.  */
10085
10086  if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10087    {
10088      STRIP_SIGN_NOPS (arg0);
10089      STRIP_SIGN_NOPS (arg1);
10090    }
10091  else
10092    {
10093      STRIP_NOPS (arg0);
10094      STRIP_NOPS (arg1);
10095    }
10096
10097  /* Note that TREE_CONSTANT isn't enough: static var addresses are
10098     constant but we can't do arithmetic on them.  */
10099  if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10100      || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10101      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10102      || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10103      || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10104      || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
10105    {
10106      if (kind == tcc_binary)
10107	{
10108	  /* Make sure type and arg0 have the same saturating flag.  */
10109	  gcc_assert (TYPE_SATURATING (type)
10110		      == TYPE_SATURATING (TREE_TYPE (arg0)));
10111	  tem = const_binop (code, arg0, arg1, 0);
10112	}
10113      else if (kind == tcc_comparison)
10114	tem = fold_relational_const (code, type, arg0, arg1);
10115      else
10116	tem = NULL_TREE;
10117
10118      if (tem != NULL_TREE)
10119	{
10120	  if (TREE_TYPE (tem) != type)
10121	    tem = fold_convert_loc (loc, type, tem);
10122	  return tem;
10123	}
10124    }
10125
10126  /* If this is a commutative operation, and ARG0 is a constant, move it
10127     to ARG1 to reduce the number of tests below.  */
10128  if (commutative_tree_code (code)
10129      && tree_swap_operands_p (arg0, arg1, true))
10130    return fold_build2_loc (loc, code, type, op1, op0);
10131
10132  /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10133
10134     First check for cases where an arithmetic operation is applied to a
10135     compound, conditional, or comparison operation.  Push the arithmetic
10136     operation inside the compound or conditional to see if any folding
10137     can then be done.  Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
10139     expand_expr.
10140
     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one of the operands is a truth value and the other is a truth
     value or a BIT_AND_EXPR with the constant 1.  In that case, the
10144     code below would make the expression more complex.  Change it to a
10145     TRUTH_{AND,OR}_EXPR.  Likewise, convert a similar NE_EXPR to
10146     TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR.  */
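  /* For illustration (examples not in the original comment): with
     truth-valued operands, "(a > b) & (c > d)" becomes the
     TRUTH_AND_EXPR of the two comparisons, while "(a > b) == (c > d)"
     becomes the inversion of their TRUTH_XOR_EXPR.  */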
10147
10148  if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10149       || code == EQ_EXPR || code == NE_EXPR)
10150      && ((truth_value_p (TREE_CODE (arg0))
10151	   && (truth_value_p (TREE_CODE (arg1))
10152	       || (TREE_CODE (arg1) == BIT_AND_EXPR
10153		   && integer_onep (TREE_OPERAND (arg1, 1)))))
10154	  || (truth_value_p (TREE_CODE (arg1))
10155	      && (truth_value_p (TREE_CODE (arg0))
10156		  || (TREE_CODE (arg0) == BIT_AND_EXPR
10157		      && integer_onep (TREE_OPERAND (arg0, 1)))))))
10158    {
10159      tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10160			 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10161			 : TRUTH_XOR_EXPR,
10162			 boolean_type_node,
10163			 fold_convert_loc (loc, boolean_type_node, arg0),
10164			 fold_convert_loc (loc, boolean_type_node, arg1));
10165
10166      if (code == EQ_EXPR)
10167	tem = invert_truthvalue_loc (loc, tem);
10168
10169      return fold_convert_loc (loc, type, tem);
10170    }
10171
10172  if (TREE_CODE_CLASS (code) == tcc_binary
10173      || TREE_CODE_CLASS (code) == tcc_comparison)
10174    {
10175      if (TREE_CODE (arg0) == COMPOUND_EXPR)
10176	{
10177	  tem = fold_build2_loc (loc, code, type,
10178			     fold_convert_loc (loc, TREE_TYPE (op0),
10179					       TREE_OPERAND (arg0, 1)), op1);
10180	  tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0), tem);
10181	  goto fold_binary_exit;
10182	}
10183      if (TREE_CODE (arg1) == COMPOUND_EXPR
10184	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10185	{
10186	  tem = fold_build2_loc (loc, code, type, op0,
10187			     fold_convert_loc (loc, TREE_TYPE (op1),
10188					       TREE_OPERAND (arg1, 1)));
10189	  tem = build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0), tem);
10190	  goto fold_binary_exit;
10191	}
10192
10193      if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
10194	{
10195	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10196						     arg0, arg1,
10197						     /*cond_first_p=*/1);
10198	  if (tem != NULL_TREE)
10199	    return tem;
10200	}
10201
10202      if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
10203	{
10204	  tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10205						     arg1, arg0,
10206					             /*cond_first_p=*/0);
10207	  if (tem != NULL_TREE)
10208	    return tem;
10209	}
10210    }
10211
10212  switch (code)
10213    {
10214    case POINTER_PLUS_EXPR:
10215      /* 0 +p index -> (type)index */
10216      if (integer_zerop (arg0))
10217	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10218
10219      /* PTR +p 0 -> PTR */
10220      if (integer_zerop (arg1))
10221	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10222
10223      /* INT +p INT -> (PTR)(INT + INT).  Stripping types allows for this. */
10224      if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10225	   && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10226        return fold_convert_loc (loc, type,
10227				 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10228					      fold_convert_loc (loc, sizetype,
10229								arg1),
10230					      fold_convert_loc (loc, sizetype,
10231								arg0)));
10232
10233      /* index +p PTR -> PTR +p index */
10234      if (POINTER_TYPE_P (TREE_TYPE (arg1))
10235	  && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10236        return fold_build2_loc (loc, POINTER_PLUS_EXPR, type,
10237			    fold_convert_loc (loc, type, arg1),
10238			    fold_convert_loc (loc, sizetype, arg0));
10239
10240      /* (PTR +p B) +p A -> PTR +p (B + A) */
10241      if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10242	{
10243	  tree inner;
10244	  tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10245	  tree arg00 = TREE_OPERAND (arg0, 0);
10246	  inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10247			       arg01, fold_convert_loc (loc, sizetype, arg1));
10248	  return fold_convert_loc (loc, type,
10249				   fold_build2_loc (loc, POINTER_PLUS_EXPR,
10250						TREE_TYPE (arg00),
10251						arg00, inner));
10252	}
10253
10254      /* PTR_CST +p CST -> CST1 */
10255      if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10256	return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10257			    fold_convert_loc (loc, type, arg1));
10258
     /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
        of the array.  The loop optimizer sometimes produces this type of
        expression.  */
10262      if (TREE_CODE (arg0) == ADDR_EXPR)
10263	{
10264	  tem = try_move_mult_to_index (loc, arg0,
10265					fold_convert_loc (loc, sizetype, arg1));
10266	  if (tem)
10267	    return fold_convert_loc (loc, type, tem);
10268	}
10269
10270      return NULL_TREE;
10271
10272    case PLUS_EXPR:
10273      /* A + (-B) -> A - B */
10274      if (TREE_CODE (arg1) == NEGATE_EXPR)
10275	return fold_build2_loc (loc, MINUS_EXPR, type,
10276			    fold_convert_loc (loc, type, arg0),
10277			    fold_convert_loc (loc, type,
10278					      TREE_OPERAND (arg1, 0)));
10279      /* (-A) + B -> B - A */
10280      if (TREE_CODE (arg0) == NEGATE_EXPR
10281	  && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10282	return fold_build2_loc (loc, MINUS_EXPR, type,
10283			    fold_convert_loc (loc, type, arg1),
10284			    fold_convert_loc (loc, type,
10285					      TREE_OPERAND (arg0, 0)));
10286
10287      if (INTEGRAL_TYPE_P (type))
10288	{
10289	  /* Convert ~A + 1 to -A.  */
10290	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
10291	      && integer_onep (arg1))
10292	    return fold_build1_loc (loc, NEGATE_EXPR, type,
10293				fold_convert_loc (loc, type,
10294						  TREE_OPERAND (arg0, 0)));
10295
10296	  /* ~X + X is -1.  */
10297	  if (TREE_CODE (arg0) == BIT_NOT_EXPR
10298	      && !TYPE_OVERFLOW_TRAPS (type))
10299	    {
10300	      tree tem = TREE_OPERAND (arg0, 0);
10301
10302	      STRIP_NOPS (tem);
10303	      if (operand_equal_p (tem, arg1, 0))
10304		{
10305		  t1 = build_int_cst_type (type, -1);
10306		  return omit_one_operand_loc (loc, type, t1, arg1);
10307		}
10308	    }
10309
10310	  /* X + ~X is -1.  */
10311	  if (TREE_CODE (arg1) == BIT_NOT_EXPR
10312	      && !TYPE_OVERFLOW_TRAPS (type))
10313	    {
10314	      tree tem = TREE_OPERAND (arg1, 0);
10315
10316	      STRIP_NOPS (tem);
10317	      if (operand_equal_p (arg0, tem, 0))
10318		{
10319		  t1 = build_int_cst_type (type, -1);
10320		  return omit_one_operand_loc (loc, type, t1, arg0);
10321		}
10322	    }
10323
10324	  /* X + (X / CST) * -CST is X % CST.  */
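          /* E.g. "x + (x / 16) * -16" folds to "x % 16" (illustrative
             example, not part of the original comment).  */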
10325	  if (TREE_CODE (arg1) == MULT_EXPR
10326	      && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10327	      && operand_equal_p (arg0,
10328				  TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10329	    {
10330	      tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10331	      tree cst1 = TREE_OPERAND (arg1, 1);
10332	      tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10333				      cst1, cst0);
10334	      if (sum && integer_zerop (sum))
10335		return fold_convert_loc (loc, type,
10336					 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10337						      TREE_TYPE (arg0), arg0,
10338						      cst0));
10339	    }
10340	}
10341
10342      /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
10343	 same or one.  Make sure type is not saturating.
10344	 fold_plusminus_mult_expr will re-associate.  */
10345      if ((TREE_CODE (arg0) == MULT_EXPR
10346	   || TREE_CODE (arg1) == MULT_EXPR)
10347	  && !TYPE_SATURATING (type)
10348	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10349        {
10350	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10351	  if (tem)
10352	    return tem;
10353	}
10354
10355      if (! FLOAT_TYPE_P (type))
10356	{
10357	  if (integer_zerop (arg1))
10358	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10359
10360	  /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10361	     with a constant, and the two constants have no bits in common,
10362	     we should treat this as a BIT_IOR_EXPR since this may produce more
10363	     simplifications.  */
10364	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10365	      && TREE_CODE (arg1) == BIT_AND_EXPR
10366	      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10367	      && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10368	      && integer_zerop (const_binop (BIT_AND_EXPR,
10369					     TREE_OPERAND (arg0, 1),
10370					     TREE_OPERAND (arg1, 1), 0)))
10371	    {
10372	      code = BIT_IOR_EXPR;
10373	      goto bit_ior;
10374	    }
10375
10376	  /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10377	     (plus (plus (mult) (mult)) (foo)) so that we can
10378	     take advantage of the factoring cases below.  */
10379	  if (((TREE_CODE (arg0) == PLUS_EXPR
10380		|| TREE_CODE (arg0) == MINUS_EXPR)
10381	       && TREE_CODE (arg1) == MULT_EXPR)
10382	      || ((TREE_CODE (arg1) == PLUS_EXPR
10383		   || TREE_CODE (arg1) == MINUS_EXPR)
10384		  && TREE_CODE (arg0) == MULT_EXPR))
10385	    {
10386	      tree parg0, parg1, parg, marg;
10387	      enum tree_code pcode;
10388
10389	      if (TREE_CODE (arg1) == MULT_EXPR)
10390		parg = arg0, marg = arg1;
10391	      else
10392		parg = arg1, marg = arg0;
10393	      pcode = TREE_CODE (parg);
10394	      parg0 = TREE_OPERAND (parg, 0);
10395	      parg1 = TREE_OPERAND (parg, 1);
10396	      STRIP_NOPS (parg0);
10397	      STRIP_NOPS (parg1);
10398
10399	      if (TREE_CODE (parg0) == MULT_EXPR
10400		  && TREE_CODE (parg1) != MULT_EXPR)
10401		return fold_build2_loc (loc, pcode, type,
10402				    fold_build2_loc (loc, PLUS_EXPR, type,
10403						 fold_convert_loc (loc, type,
10404								   parg0),
10405						 fold_convert_loc (loc, type,
10406								   marg)),
10407				    fold_convert_loc (loc, type, parg1));
10408	      if (TREE_CODE (parg0) != MULT_EXPR
10409		  && TREE_CODE (parg1) == MULT_EXPR)
10410		return
10411		  fold_build2_loc (loc, PLUS_EXPR, type,
10412			       fold_convert_loc (loc, type, parg0),
10413			       fold_build2_loc (loc, pcode, type,
10414					    fold_convert_loc (loc, type, marg),
10415					    fold_convert_loc (loc, type,
10416							      parg1)));
10417	    }
10418	}
10419      else
10420	{
10421	  /* See if ARG1 is zero and X + ARG1 reduces to X.  */
10422	  if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10423	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10424
10425	  /* Likewise if the operands are reversed.  */
10426	  if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10427	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10428
10429	  /* Convert X + -C into X - C.  */
10430	  if (TREE_CODE (arg1) == REAL_CST
10431	      && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10432	    {
10433	      tem = fold_negate_const (arg1, type);
10434	      if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10435		return fold_build2_loc (loc, MINUS_EXPR, type,
10436				    fold_convert_loc (loc, type, arg0),
10437				    fold_convert_loc (loc, type, tem));
10438	    }
10439
10440	  /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10441	     to __complex__ ( x, y ).  This is not the same for SNaNs or
10442	     if signed zeros are involved.  */
10443	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10444              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10445	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10446	    {
10447	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10448	      tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10449	      tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10450	      bool arg0rz = false, arg0iz = false;
10451	      if ((arg0r && (arg0rz = real_zerop (arg0r)))
10452		  || (arg0i && (arg0iz = real_zerop (arg0i))))
10453		{
10454		  tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10455		  tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10456		  if (arg0rz && arg1i && real_zerop (arg1i))
10457		    {
10458		      tree rp = arg1r ? arg1r
10459				  : build1 (REALPART_EXPR, rtype, arg1);
10460		      tree ip = arg0i ? arg0i
10461				  : build1 (IMAGPART_EXPR, rtype, arg0);
10462		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10463		    }
10464		  else if (arg0iz && arg1r && real_zerop (arg1r))
10465		    {
10466		      tree rp = arg0r ? arg0r
10467				  : build1 (REALPART_EXPR, rtype, arg0);
10468		      tree ip = arg1i ? arg1i
10469				  : build1 (IMAGPART_EXPR, rtype, arg1);
10470		      return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10471		    }
10472		}
10473	    }
10474
10475	  if (flag_unsafe_math_optimizations
10476	      && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10477	      && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10478	      && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10479	    return tem;
10480
10481	  /* Convert x+x into x*2.0.  */
10482	  if (operand_equal_p (arg0, arg1, 0)
10483	      && SCALAR_FLOAT_TYPE_P (type))
10484	    return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10485				build_real (type, dconst2));
10486
10487          /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10488             We associate floats only if the user has specified
10489             -fassociative-math.  */
10490          if (flag_associative_math
10491              && TREE_CODE (arg1) == PLUS_EXPR
10492              && TREE_CODE (arg0) != MULT_EXPR)
10493            {
10494              tree tree10 = TREE_OPERAND (arg1, 0);
10495              tree tree11 = TREE_OPERAND (arg1, 1);
10496              if (TREE_CODE (tree11) == MULT_EXPR
10497		  && TREE_CODE (tree10) == MULT_EXPR)
10498                {
10499                  tree tree0;
10500                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10501                  return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10502                }
10503            }
          /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10505             We associate floats only if the user has specified
10506             -fassociative-math.  */
10507          if (flag_associative_math
10508              && TREE_CODE (arg0) == PLUS_EXPR
10509              && TREE_CODE (arg1) != MULT_EXPR)
10510            {
10511              tree tree00 = TREE_OPERAND (arg0, 0);
10512              tree tree01 = TREE_OPERAND (arg0, 1);
10513              if (TREE_CODE (tree01) == MULT_EXPR
10514		  && TREE_CODE (tree00) == MULT_EXPR)
10515                {
10516                  tree tree0;
10517                  tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10518                  return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10519                }
10520            }
10521	}
10522
10523     bit_rotate:
10524      /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10525	 is a rotate of A by C1 bits.  */
10526      /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10527	 is a rotate of A by B bits.  */
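      /* For example (illustrative, not from the original comment): with
         a 32-bit unsigned x, "(x << 3) + (x >> 29)" becomes a left
         rotate of x by 3, and "(x << b) + (x >> (32 - b))" a left
         rotate of x by b.  */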
10528      {
10529	enum tree_code code0, code1;
10530	tree rtype;
10531	code0 = TREE_CODE (arg0);
10532	code1 = TREE_CODE (arg1);
10533	if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10534	     || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10535	    && operand_equal_p (TREE_OPERAND (arg0, 0),
10536			        TREE_OPERAND (arg1, 0), 0)
10537	    && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10538	        TYPE_UNSIGNED (rtype))
10539	    /* Only create rotates in complete modes.  Other cases are not
10540	       expanded properly.  */
10541	    && TYPE_PRECISION (rtype) == GET_MODE_PRECISION (TYPE_MODE (rtype)))
10542	  {
10543	    tree tree01, tree11;
10544	    enum tree_code code01, code11;
10545
10546	    tree01 = TREE_OPERAND (arg0, 1);
10547	    tree11 = TREE_OPERAND (arg1, 1);
10548	    STRIP_NOPS (tree01);
10549	    STRIP_NOPS (tree11);
10550	    code01 = TREE_CODE (tree01);
10551	    code11 = TREE_CODE (tree11);
10552	    if (code01 == INTEGER_CST
10553		&& code11 == INTEGER_CST
10554		&& TREE_INT_CST_HIGH (tree01) == 0
10555		&& TREE_INT_CST_HIGH (tree11) == 0
10556		&& ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10557		    == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10558	      {
10559		tem = build2 (LROTATE_EXPR,
10560			      TREE_TYPE (TREE_OPERAND (arg0, 0)),
10561			      TREE_OPERAND (arg0, 0),
10562			      code0 == LSHIFT_EXPR
10563			      ? tree01 : tree11);
10564		SET_EXPR_LOCATION (tem, loc);
10565		return fold_convert_loc (loc, type, tem);
10566	      }
10567	    else if (code11 == MINUS_EXPR)
10568	      {
10569		tree tree110, tree111;
10570		tree110 = TREE_OPERAND (tree11, 0);
10571		tree111 = TREE_OPERAND (tree11, 1);
10572		STRIP_NOPS (tree110);
10573		STRIP_NOPS (tree111);
10574		if (TREE_CODE (tree110) == INTEGER_CST
10575		    && 0 == compare_tree_int (tree110,
10576					      TYPE_PRECISION
10577					      (TREE_TYPE (TREE_OPERAND
10578							  (arg0, 0))))
10579		    && operand_equal_p (tree01, tree111, 0))
10580		  return
10581		    fold_convert_loc (loc, type,
10582				      build2 ((code0 == LSHIFT_EXPR
10583					       ? LROTATE_EXPR
10584					       : RROTATE_EXPR),
10585					      TREE_TYPE (TREE_OPERAND (arg0, 0)),
10586					      TREE_OPERAND (arg0, 0), tree01));
10587	      }
10588	    else if (code01 == MINUS_EXPR)
10589	      {
10590		tree tree010, tree011;
10591		tree010 = TREE_OPERAND (tree01, 0);
10592		tree011 = TREE_OPERAND (tree01, 1);
10593		STRIP_NOPS (tree010);
10594		STRIP_NOPS (tree011);
10595		if (TREE_CODE (tree010) == INTEGER_CST
10596		    && 0 == compare_tree_int (tree010,
10597					      TYPE_PRECISION
10598					      (TREE_TYPE (TREE_OPERAND
10599							  (arg0, 0))))
10600		    && operand_equal_p (tree11, tree011, 0))
10601		    return fold_convert_loc
10602		      (loc, type,
10603		       build2 ((code0 != LSHIFT_EXPR
10604				? LROTATE_EXPR
10605				: RROTATE_EXPR),
10606			       TREE_TYPE (TREE_OPERAND (arg0, 0)),
10607			       TREE_OPERAND (arg0, 0), tree11));
10608	      }
10609	  }
10610      }
10611
10612    associate:
      /* In most languages, we can't associate operations on floats
         through parentheses.  Rather than remember where the parentheses
         were, we don't associate floats at all, unless the user has
         specified -fassociative-math.  We also need to make sure the
         type is not saturating.  */
10618
10619      if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10620	  && !TYPE_SATURATING (type))
10621	{
10622	  tree var0, con0, lit0, minus_lit0;
10623	  tree var1, con1, lit1, minus_lit1;
10624	  bool ok = true;
10625
10626	  /* Split both trees into variables, constants, and literals.  Then
10627	     associate each group together, the constants with literals,
10628	     then the result with variables.  This increases the chances of
10629	     literals being recombined later and of generating relocatable
10630	     expressions for the sum of a constant and literal.  */
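          /* Illustrative example (not part of the original comment):
             for (x + 1) + (y + 2) the splits yield var0 == x, lit0 == 1,
             var1 == y and lit1 == 2; re-association then produces
             (x + y) + 3.  */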
10631	  var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10632	  var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10633			     code == MINUS_EXPR);
10634
10635	  /* With undefined overflow we can only associate constants
10636	     with one variable.  */
10637	  if (((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10638	       || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10639	      && var0 && var1)
10640	    {
10641	      tree tmp0 = var0;
10642	      tree tmp1 = var1;
10643
10644	      if (TREE_CODE (tmp0) == NEGATE_EXPR)
10645	        tmp0 = TREE_OPERAND (tmp0, 0);
10646	      if (TREE_CODE (tmp1) == NEGATE_EXPR)
10647	        tmp1 = TREE_OPERAND (tmp1, 0);
10648	      /* The only case we can still associate with two variables
10649		 is if they are the same, modulo negation.  */
10650	      if (!operand_equal_p (tmp0, tmp1, 0))
10651	        ok = false;
10652	    }
10653
10654	  /* Only do something if we found more than two objects.  Otherwise,
10655	     nothing has changed and we risk infinite recursion.  */
10656	  if (ok
10657	      && (2 < ((var0 != 0) + (var1 != 0)
10658		       + (con0 != 0) + (con1 != 0)
10659		       + (lit0 != 0) + (lit1 != 0)
10660		       + (minus_lit0 != 0) + (minus_lit1 != 0))))
10661	    {
10662	      /* Recombine MINUS_EXPR operands by using PLUS_EXPR.  */
10663	      if (code == MINUS_EXPR)
10664		code = PLUS_EXPR;
10665
10666	      var0 = associate_trees (loc, var0, var1, code, type);
10667	      con0 = associate_trees (loc, con0, con1, code, type);
10668	      lit0 = associate_trees (loc, lit0, lit1, code, type);
10669	      minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1, code, type);
10670
10671	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10672		 greater than the positive part.  Otherwise, the multiplicative
                 folding code (i.e. extract_muldiv) may be fooled in case
10674		 unsigned constants are subtracted, like in the following
10675		 example: ((X*2 + 4) - 8U)/2.  */
10676	      if (minus_lit0 && lit0)
10677		{
10678		  if (TREE_CODE (lit0) == INTEGER_CST
10679		      && TREE_CODE (minus_lit0) == INTEGER_CST
10680		      && tree_int_cst_lt (lit0, minus_lit0))
10681		    {
10682		      minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10683						    MINUS_EXPR, type);
10684		      lit0 = 0;
10685		    }
10686		  else
10687		    {
10688		      lit0 = associate_trees (loc, lit0, minus_lit0,
10689					      MINUS_EXPR, type);
10690		      minus_lit0 = 0;
10691		    }
10692		}
10693	      if (minus_lit0)
10694		{
10695		  if (con0 == 0)
10696		    return
10697		      fold_convert_loc (loc, type,
10698					associate_trees (loc, var0, minus_lit0,
10699							 MINUS_EXPR, type));
10700		  else
10701		    {
10702		      con0 = associate_trees (loc, con0, minus_lit0,
10703					      MINUS_EXPR, type);
10704		      return
10705			fold_convert_loc (loc, type,
10706					  associate_trees (loc, var0, con0,
10707							   PLUS_EXPR, type));
10708		    }
10709		}
10710
10711	      con0 = associate_trees (loc, con0, lit0, code, type);
10712	      return
10713		fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10714							      code, type));
10715	    }
10716	}
10717
10718      return NULL_TREE;
10719
10720    case MINUS_EXPR:
10721      /* Pointer simplifications for subtraction, simple reassociations. */
10722      if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10723	{
10724	  /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10725	  if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10726	      && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10727	    {
10728	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10729	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10730	      tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10731	      tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10732	      return fold_build2_loc (loc, PLUS_EXPR, type,
10733				  fold_build2_loc (loc, MINUS_EXPR, type,
10734					       arg00, arg10),
10735				  fold_build2_loc (loc, MINUS_EXPR, type,
10736					       arg01, arg11));
10737	    }
10738	  /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10739	  else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10740	    {
10741	      tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10742	      tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10743	      tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10744				      fold_convert_loc (loc, type, arg1));
10745	      if (tmp)
10746	        return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10747	    }
10748	}
10749      /* A - (-B) -> A + B */
10750      if (TREE_CODE (arg1) == NEGATE_EXPR)
10751	return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10752			    fold_convert_loc (loc, type,
10753					      TREE_OPERAND (arg1, 0)));
10754      /* (-A) - B -> (-B) - A  where B is easily negated and we can swap.  */
10755      if (TREE_CODE (arg0) == NEGATE_EXPR
10756	  && (FLOAT_TYPE_P (type)
10757	      || INTEGRAL_TYPE_P (type))
10758	  && negate_expr_p (arg1)
10759	  && reorder_operands_p (arg0, arg1))
10760	return fold_build2_loc (loc, MINUS_EXPR, type,
10761			    fold_convert_loc (loc, type,
10762					      negate_expr (arg1)),
10763			    fold_convert_loc (loc, type,
10764					      TREE_OPERAND (arg0, 0)));
10765      /* Convert -A - 1 to ~A.  */
10766      if (INTEGRAL_TYPE_P (type)
10767	  && TREE_CODE (arg0) == NEGATE_EXPR
10768	  && integer_onep (arg1)
10769	  && !TYPE_OVERFLOW_TRAPS (type))
10770	return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10771			    fold_convert_loc (loc, type,
10772					      TREE_OPERAND (arg0, 0)));
10773
10774      /* Convert -1 - A to ~A.  */
10775      if (INTEGRAL_TYPE_P (type)
10776	  && integer_all_onesp (arg0))
10777	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10778
10779
10780      /* X - (X / CST) * CST is X % CST.  */
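      /* E.g. "x - (x / 8) * 8" folds to "x % 8" (illustrative example,
         not part of the original comment).  */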
10781      if (INTEGRAL_TYPE_P (type)
10782	  && TREE_CODE (arg1) == MULT_EXPR
10783	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10784	  && operand_equal_p (arg0,
10785			      TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10786	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10787			      TREE_OPERAND (arg1, 1), 0))
10788	return
10789	  fold_convert_loc (loc, type,
10790			    fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10791					 arg0, TREE_OPERAND (arg1, 1)));
10792
10793      if (! FLOAT_TYPE_P (type))
10794	{
10795	  if (integer_zerop (arg0))
10796	    return negate_expr (fold_convert_loc (loc, type, arg1));
10797	  if (integer_zerop (arg1))
10798	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10799
10800	  /* Fold A - (A & B) into ~B & A.  */
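          /* E.g. "x - (x & 7)" folds to "~7 & x", clearing the low three
             bits of x (illustrative example, not part of the original
             comment).  */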
10801	  if (!TREE_SIDE_EFFECTS (arg0)
10802	      && TREE_CODE (arg1) == BIT_AND_EXPR)
10803	    {
10804	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10805		{
10806		  tree arg10 = fold_convert_loc (loc, type,
10807						 TREE_OPERAND (arg1, 0));
10808		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10809				      fold_build1_loc (loc, BIT_NOT_EXPR,
10810						   type, arg10),
10811				      fold_convert_loc (loc, type, arg0));
10812		}
10813	      if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10814		{
10815		  tree arg11 = fold_convert_loc (loc,
10816						 type, TREE_OPERAND (arg1, 1));
10817		  return fold_build2_loc (loc, BIT_AND_EXPR, type,
10818				      fold_build1_loc (loc, BIT_NOT_EXPR,
10819						   type, arg11),
10820				      fold_convert_loc (loc, type, arg0));
10821		}
10822	    }
10823
10824	  /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10825	     any power of 2 minus 1.  */
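          /* E.g. with B == 7, "(x & ~7) - (x & 7)" folds to "(x ^ 7) - 7"
             (illustrative example, not part of the original comment).  */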
10826	  if (TREE_CODE (arg0) == BIT_AND_EXPR
10827	      && TREE_CODE (arg1) == BIT_AND_EXPR
10828	      && operand_equal_p (TREE_OPERAND (arg0, 0),
10829				  TREE_OPERAND (arg1, 0), 0))
10830	    {
10831	      tree mask0 = TREE_OPERAND (arg0, 1);
10832	      tree mask1 = TREE_OPERAND (arg1, 1);
10833	      tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10834
10835	      if (operand_equal_p (tem, mask1, 0))
10836		{
10837		  tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10838				     TREE_OPERAND (arg0, 0), mask1);
10839		  return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10840		}
10841	    }
10842	}
10843
10844      /* See if ARG1 is zero and X - ARG1 reduces to X.  */
10845      else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10846	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10847
10848      /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0).  So check whether
10849	 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10850	 (-ARG1 + ARG0) reduces to -ARG1.  */
10851      else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10852	return negate_expr (fold_convert_loc (loc, type, arg1));
10853
10854      /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10855	 __complex__ ( x, -y ).  This is not the same for SNaNs or if
10856	 signed zeros are involved.  */
10857      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10858	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10859	  && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10860        {
10861	  tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10862	  tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10863	  tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10864	  bool arg0rz = false, arg0iz = false;
10865	  if ((arg0r && (arg0rz = real_zerop (arg0r)))
10866	      || (arg0i && (arg0iz = real_zerop (arg0i))))
10867	    {
10868	      tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10869	      tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10870	      if (arg0rz && arg1i && real_zerop (arg1i))
10871	        {
10872		  tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10873					 arg1r ? arg1r
10874					 : build1 (REALPART_EXPR, rtype, arg1));
10875		  tree ip = arg0i ? arg0i
10876		    : build1 (IMAGPART_EXPR, rtype, arg0);
10877		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10878		}
10879	      else if (arg0iz && arg1r && real_zerop (arg1r))
10880	        {
10881		  tree rp = arg0r ? arg0r
10882		    : build1 (REALPART_EXPR, rtype, arg0);
10883		  tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10884					 arg1i ? arg1i
10885					 : build1 (IMAGPART_EXPR, rtype, arg1));
10886		  return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10887		}
10888	    }
10889	}
10890
10891      /* Fold &x - &x.  This can happen from &x.foo - &x.
10892	 This is unsafe for certain floats even in non-IEEE formats.
10893	 In IEEE, it is unsafe because it does wrong for NaNs.
10894	 Also note that operand_equal_p is always false if an operand
10895	 is volatile.  */
10896
10897      if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10898	  && operand_equal_p (arg0, arg1, 0))
10899	return fold_convert_loc (loc, type, integer_zero_node);
10900
10901      /* A - B -> A + (-B) if B is easily negatable.  */
10902      if (negate_expr_p (arg1)
10903	  && ((FLOAT_TYPE_P (type)
10904               /* Avoid this transformation if B is a positive REAL_CST.  */
10905	       && (TREE_CODE (arg1) != REAL_CST
10906		   ||  REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10907	      || INTEGRAL_TYPE_P (type)))
10908	return fold_build2_loc (loc, PLUS_EXPR, type,
10909			    fold_convert_loc (loc, type, arg0),
10910			    fold_convert_loc (loc, type,
10911					      negate_expr (arg1)));
10912
10913      /* Try folding difference of addresses.  */
10914      {
10915	HOST_WIDE_INT diff;
10916
10917	if ((TREE_CODE (arg0) == ADDR_EXPR
10918	     || TREE_CODE (arg1) == ADDR_EXPR)
10919	    && ptr_difference_const (arg0, arg1, &diff))
10920	  return build_int_cst_type (type, diff);
10921      }
10922
      /* Fold &a[i] - &a[j] to (i-j) * sizeof (a[0]).  */
10924      if (TREE_CODE (arg0) == ADDR_EXPR
10925	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10926	  && TREE_CODE (arg1) == ADDR_EXPR
10927	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10928        {
10929	  tree aref0 = TREE_OPERAND (arg0, 0);
10930	  tree aref1 = TREE_OPERAND (arg1, 0);
10931	  if (operand_equal_p (TREE_OPERAND (aref0, 0),
10932			       TREE_OPERAND (aref1, 0), 0))
10933	    {
10934	      tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
10935	      tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
10936	      tree esz = array_ref_element_size (aref0);
10937	      tree diff = build2 (MINUS_EXPR, type, op0, op1);
10938	      return fold_build2_loc (loc, MULT_EXPR, type, diff,
10939			          fold_convert_loc (loc, type, esz));
10940
10941	    }
10942	}
10943
10944      if (FLOAT_TYPE_P (type)
10945	  && flag_unsafe_math_optimizations
10946	  && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10947	  && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10948	  && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10949	return tem;
10950
10951      /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
10952	 same or one.  Make sure type is not saturating.
10953	 fold_plusminus_mult_expr will re-associate.  */
10954      if ((TREE_CODE (arg0) == MULT_EXPR
10955	   || TREE_CODE (arg1) == MULT_EXPR)
10956	  && !TYPE_SATURATING (type)
10957	  && (!FLOAT_TYPE_P (type) || flag_associative_math))
10958        {
10959	  tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10960	  if (tem)
10961	    return tem;
10962	}
10963
10964      goto associate;
10965
10966    case MULT_EXPR:
10967      /* (-A) * (-B) -> A * B  */
10968      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10969	return fold_build2_loc (loc, MULT_EXPR, type,
10970			    fold_convert_loc (loc, type,
10971					      TREE_OPERAND (arg0, 0)),
10972			    fold_convert_loc (loc, type,
10973					      negate_expr (arg1)));
10974      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10975	return fold_build2_loc (loc, MULT_EXPR, type,
10976			    fold_convert_loc (loc, type,
10977					      negate_expr (arg0)),
10978			    fold_convert_loc (loc, type,
10979					      TREE_OPERAND (arg1, 0)));
10980
10981      if (! FLOAT_TYPE_P (type))
10982	{
10983	  if (integer_zerop (arg1))
10984	    return omit_one_operand_loc (loc, type, arg1, arg0);
10985	  if (integer_onep (arg1))
10986	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10987	  /* Transform x * -1 into -x.  Make sure to do the negation
10988	     on the original operand with conversions not stripped
10989	     because we can only strip non-sign-changing conversions.  */
10990	  if (integer_all_onesp (arg1))
10991	    return fold_convert_loc (loc, type, negate_expr (op0));
10992	  /* Transform x * -C into -x * C if x is easily negatable.  */
10993	  if (TREE_CODE (arg1) == INTEGER_CST
10994	      && tree_int_cst_sgn (arg1) == -1
10995	      && negate_expr_p (arg0)
10996	      && (tem = negate_expr (arg1)) != arg1
10997	      && !TREE_OVERFLOW (tem))
10998	    return fold_build2_loc (loc, MULT_EXPR, type,
10999	    			fold_convert_loc (loc, type,
11000						  negate_expr (arg0)),
11001				tem);
11002
11003	  /* (a * (1 << b)) is (a << b)  */
11004	  if (TREE_CODE (arg1) == LSHIFT_EXPR
11005	      && integer_onep (TREE_OPERAND (arg1, 0)))
11006	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11007				TREE_OPERAND (arg1, 1));
11008	  if (TREE_CODE (arg0) == LSHIFT_EXPR
11009	      && integer_onep (TREE_OPERAND (arg0, 0)))
11010	    return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11011				TREE_OPERAND (arg0, 1));
11012
11013	  /* (A + A) * C -> A * 2 * C  */
11014	  if (TREE_CODE (arg0) == PLUS_EXPR
11015	      && TREE_CODE (arg1) == INTEGER_CST
11016	      && operand_equal_p (TREE_OPERAND (arg0, 0),
11017			          TREE_OPERAND (arg0, 1), 0))
11018	    return fold_build2_loc (loc, MULT_EXPR, type,
11019				omit_one_operand_loc (loc, type,
11020						  TREE_OPERAND (arg0, 0),
11021						  TREE_OPERAND (arg0, 1)),
11022				fold_build2_loc (loc, MULT_EXPR, type,
                                             build_int_cst (type, 2), arg1));
11024
11025	  strict_overflow_p = false;
11026	  if (TREE_CODE (arg1) == INTEGER_CST
11027	      && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11028					     &strict_overflow_p)))
11029	    {
11030	      if (strict_overflow_p)
11031		fold_overflow_warning (("assuming signed overflow does not "
11032					"occur when simplifying "
11033					"multiplication"),
11034				       WARN_STRICT_OVERFLOW_MISC);
11035	      return fold_convert_loc (loc, type, tem);
11036	    }
11037
11038	  /* Optimize z * conj(z) for integer complex numbers.  */
11039	  if (TREE_CODE (arg0) == CONJ_EXPR
11040	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11041	    return fold_mult_zconjz (loc, type, arg1);
11042	  if (TREE_CODE (arg1) == CONJ_EXPR
11043	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11044	    return fold_mult_zconjz (loc, type, arg0);
11045	}
11046      else
11047	{
11048	  /* Maybe fold x * 0 to 0.  The expressions aren't the same
11049	     when x is NaN, since x * 0 is also NaN.  Nor are they the
11050	     same in modes with signed zeros, since multiplying a
11051	     negative value by 0 gives -0, not +0.  */
11052	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11053	      && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11054	      && real_zerop (arg1))
11055	    return omit_one_operand_loc (loc, type, arg1, arg0);
          /* In IEEE floating point, x*1 is not equivalent to x for SNaNs.
11057	     Likewise for complex arithmetic with signed zeros.  */
11058	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11059	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11060		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11061	      && real_onep (arg1))
11062	    return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11063
11064	  /* Transform x * -1.0 into -x.  */
11065	  if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11066	      && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11067		  || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11068	      && real_minus_onep (arg1))
11069	    return fold_convert_loc (loc, type, negate_expr (arg0));
11070
          /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
             the result for floating-point types due to rounding, so it is
             applied only if -fassociative-math is specified.  */
11074	  if (flag_associative_math
11075	      && TREE_CODE (arg0) == RDIV_EXPR
11076	      && TREE_CODE (arg1) == REAL_CST
11077	      && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11078	    {
11079	      tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11080				      arg1, 0);
11081	      if (tem)
11082		return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11083				    TREE_OPERAND (arg0, 1));
11084	    }
11085
11086          /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y.  */
11087	  if (operand_equal_p (arg0, arg1, 0))
11088	    {
11089	      tree tem = fold_strip_sign_ops (arg0);
11090	      if (tem != NULL_TREE)
11091		{
11092		  tem = fold_convert_loc (loc, type, tem);
11093		  return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11094		}
11095	    }
11096
11097	  /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11098	     This is not the same for NaNs or if signed zeros are
11099	     involved.  */
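	  /* E.g., (a + bi) * i = -b + ai and (a + bi) * -i = b - ai,
	     so only a swap of the parts and one negation are needed.  */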
11100	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11101              && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11102	      && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11103	      && TREE_CODE (arg1) == COMPLEX_CST
11104	      && real_zerop (TREE_REALPART (arg1)))
11105	    {
11106	      tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11107	      if (real_onep (TREE_IMAGPART (arg1)))
11108		return
11109		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11110			       negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11111							     rtype, arg0)),
11112			       fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11113	      else if (real_minus_onep (TREE_IMAGPART (arg1)))
11114		return
11115		  fold_build2_loc (loc, COMPLEX_EXPR, type,
11116			       fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11117			       negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11118							     rtype, arg0)));
11119	    }
11120
11121	  /* Optimize z * conj(z) for floating point complex numbers.
11122	     Guarded by flag_unsafe_math_optimizations as non-finite
11123	     imaginary components don't produce scalar results.  */
11124	  if (flag_unsafe_math_optimizations
11125	      && TREE_CODE (arg0) == CONJ_EXPR
11126	      && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11127	    return fold_mult_zconjz (loc, type, arg1);
11128	  if (flag_unsafe_math_optimizations
11129	      && TREE_CODE (arg1) == CONJ_EXPR
11130	      && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11131	    return fold_mult_zconjz (loc, type, arg0);
11132
11133	  if (flag_unsafe_math_optimizations)
11134	    {
11135	      enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11136	      enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11137
11138	      /* Optimizations of root(...)*root(...).  */
11139	      if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11140		{
11141		  tree rootfn, arg;
11142		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11143		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11144
11145		  /* Optimize sqrt(x)*sqrt(x) as x.  */
11146		  if (BUILTIN_SQRT_P (fcode0)
11147		      && operand_equal_p (arg00, arg10, 0)
11148		      && ! HONOR_SNANS (TYPE_MODE (type)))
11149		    return arg00;
11150
11151	          /* Optimize root(x)*root(y) as root(x*y).  */
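		  /* E.g., sqrt (x) * sqrt (y) becomes sqrt (x * y), which
		     is only safe under -funsafe-math-optimizations
		     (consider x and y both negative).  */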
11152		  rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11153		  arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11154		  return build_call_expr_loc (loc, rootfn, 1, arg);
11155		}
11156
11157	      /* Optimize expN(x)*expN(y) as expN(x+y).  */
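	      /* E.g., exp (x) * exp (y) becomes exp (x + y); this is exact
		 mathematically but may round differently, hence the
		 flag_unsafe_math_optimizations guard.  */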
11158	      if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11159		{
11160		  tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11161		  tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11162					  CALL_EXPR_ARG (arg0, 0),
11163					  CALL_EXPR_ARG (arg1, 0));
11164		  return build_call_expr_loc (loc, expfn, 1, arg);
11165		}
11166
11167	      /* Optimizations of pow(...)*pow(...).  */
11168	      if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11169		  || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11170		  || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11171		{
11172		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11173		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
11174		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11175		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
11176
11177		  /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y).  */
11178		  if (operand_equal_p (arg01, arg11, 0))
11179		    {
11180		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11181		      tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11182					      arg00, arg10);
11183		      return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11184		    }
11185
11186		  /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z).  */
11187		  if (operand_equal_p (arg00, arg10, 0))
11188		    {
11189		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11190		      tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11191					      arg01, arg11);
11192		      return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11193		    }
11194		}
11195
11196	      /* Optimize tan(x)*cos(x) as sin(x).  */
11197	      if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11198		   || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11199		   || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11200		   || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11201		   || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11202		   || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11203		  && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11204				      CALL_EXPR_ARG (arg1, 0), 0))
11205		{
11206		  tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11207
11208		  if (sinfn != NULL_TREE)
11209		    return build_call_expr_loc (loc, sinfn, 1,
11210					    CALL_EXPR_ARG (arg0, 0));
11211		}
11212
11213	      /* Optimize x*pow(x,c) as pow(x,c+1).  */
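	      /* E.g., x * pow (x, 2.0) becomes pow (x, 3.0); the new
		 exponent c + 1 is computed with real_arithmetic below.  */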
11214	      if (fcode1 == BUILT_IN_POW
11215		  || fcode1 == BUILT_IN_POWF
11216		  || fcode1 == BUILT_IN_POWL)
11217		{
11218		  tree arg10 = CALL_EXPR_ARG (arg1, 0);
11219		  tree arg11 = CALL_EXPR_ARG (arg1, 1);
11220		  if (TREE_CODE (arg11) == REAL_CST
11221		      && !TREE_OVERFLOW (arg11)
11222		      && operand_equal_p (arg0, arg10, 0))
11223		    {
11224		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11225		      REAL_VALUE_TYPE c;
11226		      tree arg;
11227
11228		      c = TREE_REAL_CST (arg11);
11229		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11230		      arg = build_real (type, c);
11231		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11232		    }
11233		}
11234
11235	      /* Optimize pow(x,c)*x as pow(x,c+1).  */
11236	      if (fcode0 == BUILT_IN_POW
11237		  || fcode0 == BUILT_IN_POWF
11238		  || fcode0 == BUILT_IN_POWL)
11239		{
11240		  tree arg00 = CALL_EXPR_ARG (arg0, 0);
11241		  tree arg01 = CALL_EXPR_ARG (arg0, 1);
11242		  if (TREE_CODE (arg01) == REAL_CST
11243		      && !TREE_OVERFLOW (arg01)
11244		      && operand_equal_p (arg1, arg00, 0))
11245		    {
11246		      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11247		      REAL_VALUE_TYPE c;
11248		      tree arg;
11249
11250		      c = TREE_REAL_CST (arg01);
11251		      real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11252		      arg = build_real (type, c);
11253		      return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11254		    }
11255		}
11256
11257	      /* Optimize x*x as pow(x,2.0), which is expanded as x*x.  */
11258	      if (optimize_function_for_speed_p (cfun)
11259		  && operand_equal_p (arg0, arg1, 0))
11260		{
11261		  tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11262
11263		  if (powfn)
11264		    {
11265		      tree arg = build_real (type, dconst2);
11266		      return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11267		    }
11268		}
11269	    }
11270	}
11271      goto associate;
11272
11273    case BIT_IOR_EXPR:
11274    bit_ior:
11275      if (integer_all_onesp (arg1))
11276	return omit_one_operand_loc (loc, type, arg1, arg0);
11277      if (integer_zerop (arg1))
11278	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11279      if (operand_equal_p (arg0, arg1, 0))
11280	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11281
11282      /* ~X | X is -1.  */
11283      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11284	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11285	{
11286	  t1 = fold_convert_loc (loc, type, integer_zero_node);
11287	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11288	  return omit_one_operand_loc (loc, type, t1, arg1);
11289	}
11290
11291      /* X | ~X is -1.  */
11292      if (TREE_CODE (arg1) == BIT_NOT_EXPR
11293	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11294	{
11295	  t1 = fold_convert_loc (loc, type, integer_zero_node);
11296	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11297	  return omit_one_operand_loc (loc, type, t1, arg0);
11298	}
11299
11300      /* Canonicalize (X & C1) | C2.  */
11301      if (TREE_CODE (arg0) == BIT_AND_EXPR
11302	  && TREE_CODE (arg1) == INTEGER_CST
11303	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11304	{
11305	  unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, hi3, lo3, mlo, mhi;
11306	  int width = TYPE_PRECISION (type), w;
11307	  hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
11308	  lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
11309	  hi2 = TREE_INT_CST_HIGH (arg1);
11310	  lo2 = TREE_INT_CST_LOW (arg1);
11311
11312	  /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2).  */
11313	  if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
11314	    return omit_one_operand_loc (loc, type, arg1,
11315				     TREE_OPERAND (arg0, 0));
11316
11317	  if (width > HOST_BITS_PER_WIDE_INT)
11318	    {
11319	      mhi = (unsigned HOST_WIDE_INT) -1
11320		    >> (2 * HOST_BITS_PER_WIDE_INT - width);
11321	      mlo = -1;
11322	    }
11323	  else
11324	    {
11325	      mhi = 0;
11326	      mlo = (unsigned HOST_WIDE_INT) -1
11327		    >> (HOST_BITS_PER_WIDE_INT - width);
11328	    }
11329
11330	  /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2.  */
11331	  if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
11332	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11333				TREE_OPERAND (arg0, 0), arg1);
11334
11335	  /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11336	     unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11337	     mode which allows further optimizations.  */
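	  /* E.g., (X & 0x0f0f) | 0x000f becomes (X & 0x0f00) | 0x000f,
	     while (X & 0xff0f) | 0x00f0 has C1 widened by the loop below
	     to the 16-bit mode mask, giving (X & 0xffff) | 0x00f0.  */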
11338	  hi1 &= mhi;
11339	  lo1 &= mlo;
11340	  hi2 &= mhi;
11341	  lo2 &= mlo;
11342	  hi3 = hi1 & ~hi2;
11343	  lo3 = lo1 & ~lo2;
11344	  for (w = BITS_PER_UNIT;
11345	       w <= width && w <= HOST_BITS_PER_WIDE_INT;
11346	       w <<= 1)
11347	    {
11348	      unsigned HOST_WIDE_INT mask
11349		= (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11350	      if (((lo1 | lo2) & mask) == mask
11351		  && (lo1 & ~mask) == 0 && hi1 == 0)
11352		{
11353		  hi3 = 0;
11354		  lo3 = mask;
11355		  break;
11356		}
11357	    }
11358	  if (hi3 != hi1 || lo3 != lo1)
11359	    return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11360				fold_build2_loc (loc, BIT_AND_EXPR, type,
11361					     TREE_OPERAND (arg0, 0),
11362					     build_int_cst_wide (type,
11363								 lo3, hi3)),
11364				arg1);
11365	}
11366
11367      /* (X & Y) | Y is (X, Y).  */
11368      if (TREE_CODE (arg0) == BIT_AND_EXPR
11369	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11370	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11371      /* (X & Y) | X is (Y, X).  */
11372      if (TREE_CODE (arg0) == BIT_AND_EXPR
11373	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11374	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11375	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11376      /* X | (X & Y) is (Y, X).  */
11377      if (TREE_CODE (arg1) == BIT_AND_EXPR
11378	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11379	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11380	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11381      /* X | (Y & X) is (Y, X).  */
11382      if (TREE_CODE (arg1) == BIT_AND_EXPR
11383	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11384	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11385	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11386
11387      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11388      if (t1 != NULL_TREE)
11389	return t1;
11390
11391      /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11392
11393	 This results in more efficient code for machines without a NAND
11394	 instruction.  Combine will canonicalize to the first form
11395	 which will allow use of NAND instructions provided by the
11396	 backend if they exist.  */
11397      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11398	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11399	{
11400	  return
11401	    fold_build1_loc (loc, BIT_NOT_EXPR, type,
11402			 build2 (BIT_AND_EXPR, type,
11403				 fold_convert_loc (loc, type,
11404						   TREE_OPERAND (arg0, 0)),
11405				 fold_convert_loc (loc, type,
11406						   TREE_OPERAND (arg1, 0))));
11407	}
11408
11409      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful, continue in the association code.  */
11411      goto bit_rotate;
11412
11413    case BIT_XOR_EXPR:
11414      if (integer_zerop (arg1))
11415	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11416      if (integer_all_onesp (arg1))
11417	return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11418      if (operand_equal_p (arg0, arg1, 0))
11419	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11420
11421      /* ~X ^ X is -1.  */
11422      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11423	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11424	{
11425	  t1 = fold_convert_loc (loc, type, integer_zero_node);
11426	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11427	  return omit_one_operand_loc (loc, type, t1, arg1);
11428	}
11429
11430      /* X ^ ~X is -1.  */
11431      if (TREE_CODE (arg1) == BIT_NOT_EXPR
11432	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11433	{
11434	  t1 = fold_convert_loc (loc, type, integer_zero_node);
11435	  t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11436	  return omit_one_operand_loc (loc, type, t1, arg0);
11437	}
11438
11439      /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11440         with a constant, and the two constants have no bits in common,
11441	 we should treat this as a BIT_IOR_EXPR since this may produce more
11442	 simplifications.  */
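      /* E.g., (X & 0x0f) ^ (Y & 0xf0) sets disjoint bits and is
	 therefore the same as (X & 0x0f) | (Y & 0xf0).  */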
11443      if (TREE_CODE (arg0) == BIT_AND_EXPR
11444	  && TREE_CODE (arg1) == BIT_AND_EXPR
11445	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11446	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11447	  && integer_zerop (const_binop (BIT_AND_EXPR,
11448					 TREE_OPERAND (arg0, 1),
11449					 TREE_OPERAND (arg1, 1), 0)))
11450	{
11451	  code = BIT_IOR_EXPR;
11452	  goto bit_ior;
11453	}
11454
      /* (X | Y) ^ X -> Y & ~X.  */
11456      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11457          && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11458        {
11459	  tree t2 = TREE_OPERAND (arg0, 1);
11460	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11461			    arg1);
11462	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11463			    fold_convert_loc (loc, type, t2),
11464			    fold_convert_loc (loc, type, t1));
11465	  return t1;
11466	}
11467
      /* (Y | X) ^ X -> Y & ~X.  */
11469      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11470          && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11471        {
11472	  tree t2 = TREE_OPERAND (arg0, 0);
11473	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11474			    arg1);
11475	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11476			    fold_convert_loc (loc, type, t2),
11477			    fold_convert_loc (loc, type, t1));
11478	  return t1;
11479	}
11480
      /* X ^ (X | Y) -> Y & ~X.  */
11482      if (TREE_CODE (arg1) == BIT_IOR_EXPR
11483          && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11484        {
11485	  tree t2 = TREE_OPERAND (arg1, 1);
11486	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11487			    arg0);
11488	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11489			    fold_convert_loc (loc, type, t2),
11490			    fold_convert_loc (loc, type, t1));
11491	  return t1;
11492	}
11493
      /* X ^ (Y | X) -> Y & ~X.  */
11495      if (TREE_CODE (arg1) == BIT_IOR_EXPR
11496          && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11497        {
11498	  tree t2 = TREE_OPERAND (arg1, 0);
11499	  t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11500			    arg0);
11501	  t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11502			    fold_convert_loc (loc, type, t2),
11503			    fold_convert_loc (loc, type, t1));
11504	  return t1;
11505	}
11506
11507      /* Convert ~X ^ ~Y to X ^ Y.  */
11508      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11509	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11510	return fold_build2_loc (loc, code, type,
11511			    fold_convert_loc (loc, type,
11512					      TREE_OPERAND (arg0, 0)),
11513			    fold_convert_loc (loc, type,
11514					      TREE_OPERAND (arg1, 0)));
11515
11516      /* Convert ~X ^ C to X ^ ~C.  */
11517      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11518	  && TREE_CODE (arg1) == INTEGER_CST)
11519	return fold_build2_loc (loc, code, type,
11520			    fold_convert_loc (loc, type,
11521					      TREE_OPERAND (arg0, 0)),
11522			    fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11523
11524      /* Fold (X & 1) ^ 1 as (X & 1) == 0.  */
11525      if (TREE_CODE (arg0) == BIT_AND_EXPR
11526	  && integer_onep (TREE_OPERAND (arg0, 1))
11527	  && integer_onep (arg1))
11528	return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11529			    build_int_cst (TREE_TYPE (arg0), 0));
11530
11531      /* Fold (X & Y) ^ Y as ~X & Y.  */
11532      if (TREE_CODE (arg0) == BIT_AND_EXPR
11533	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11534	{
11535	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11536	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11537			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11538			      fold_convert_loc (loc, type, arg1));
11539	}
11540      /* Fold (X & Y) ^ X as ~Y & X.  */
11541      if (TREE_CODE (arg0) == BIT_AND_EXPR
11542	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11543	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11544	{
11545	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11546	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11547			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11548			      fold_convert_loc (loc, type, arg1));
11549	}
11550      /* Fold X ^ (X & Y) as X & ~Y.  */
11551      if (TREE_CODE (arg1) == BIT_AND_EXPR
11552	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11553	{
11554	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11555	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11556			      fold_convert_loc (loc, type, arg0),
11557			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11558	}
11559      /* Fold X ^ (Y & X) as ~Y & X.  */
11560      if (TREE_CODE (arg1) == BIT_AND_EXPR
11561	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11562	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11563	{
11564	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11565	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11566			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11567			      fold_convert_loc (loc, type, arg0));
11568	}
11569
11570      /* See if this can be simplified into a rotate first.  If that
	 is unsuccessful, continue in the association code.  */
11572      goto bit_rotate;
11573
11574    case BIT_AND_EXPR:
11575      if (integer_all_onesp (arg1))
11576	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11577      if (integer_zerop (arg1))
11578	return omit_one_operand_loc (loc, type, arg1, arg0);
11579      if (operand_equal_p (arg0, arg1, 0))
11580	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11581
11582      /* ~X & X is always zero.  */
11583      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11584	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11585	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11586
11587      /* X & ~X is always zero.  */
11588      if (TREE_CODE (arg1) == BIT_NOT_EXPR
11589	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11590	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11591
11592      /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2).  */
11593      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11594	  && TREE_CODE (arg1) == INTEGER_CST
11595	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11596	{
11597	  tree tmp1 = fold_convert_loc (loc, type, arg1);
11598	  tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11599	  tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11600	  tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11601	  tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11602	  return
11603	    fold_convert_loc (loc, type,
11604			      fold_build2_loc (loc, BIT_IOR_EXPR,
11605					   type, tmp2, tmp3));
11606	}
11607
11608      /* (X | Y) & Y is (X, Y).  */
11609      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11610	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11611	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11612      /* (X | Y) & X is (Y, X).  */
11613      if (TREE_CODE (arg0) == BIT_IOR_EXPR
11614	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11615	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11616	return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11617      /* X & (X | Y) is (Y, X).  */
11618      if (TREE_CODE (arg1) == BIT_IOR_EXPR
11619	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11620	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11621	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11622      /* X & (Y | X) is (Y, X).  */
11623      if (TREE_CODE (arg1) == BIT_IOR_EXPR
11624	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11625	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11626	return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11627
11628      /* Fold (X ^ 1) & 1 as (X & 1) == 0.  */
11629      if (TREE_CODE (arg0) == BIT_XOR_EXPR
11630	  && integer_onep (TREE_OPERAND (arg0, 1))
11631	  && integer_onep (arg1))
11632	{
11633	  tem = TREE_OPERAND (arg0, 0);
11634	  return fold_build2_loc (loc, EQ_EXPR, type,
11635			      fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11636					   build_int_cst (TREE_TYPE (tem), 1)),
11637			      build_int_cst (TREE_TYPE (tem), 0));
11638	}
11639      /* Fold ~X & 1 as (X & 1) == 0.  */
11640      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11641	  && integer_onep (arg1))
11642	{
11643	  tem = TREE_OPERAND (arg0, 0);
11644	  return fold_build2_loc (loc, EQ_EXPR, type,
11645			      fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem), tem,
11646					   build_int_cst (TREE_TYPE (tem), 1)),
11647			      build_int_cst (TREE_TYPE (tem), 0));
11648	}
11649
11650      /* Fold (X ^ Y) & Y as ~X & Y.  */
11651      if (TREE_CODE (arg0) == BIT_XOR_EXPR
11652	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11653	{
11654	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11655	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11656			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11657			      fold_convert_loc (loc, type, arg1));
11658	}
11659      /* Fold (X ^ Y) & X as ~Y & X.  */
11660      if (TREE_CODE (arg0) == BIT_XOR_EXPR
11661	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11662	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11663	{
11664	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11665	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11666			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11667			      fold_convert_loc (loc, type, arg1));
11668	}
11669      /* Fold X & (X ^ Y) as X & ~Y.  */
11670      if (TREE_CODE (arg1) == BIT_XOR_EXPR
11671	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11672	{
11673	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11674	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11675			      fold_convert_loc (loc, type, arg0),
11676			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11677	}
11678      /* Fold X & (Y ^ X) as ~Y & X.  */
11679      if (TREE_CODE (arg1) == BIT_XOR_EXPR
11680	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11681	  && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11682	{
11683	  tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11684	  return fold_build2_loc (loc, BIT_AND_EXPR, type,
11685			      fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11686			      fold_convert_loc (loc, type, arg0));
11687	}
11688
11689      t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11690      if (t1 != NULL_TREE)
11691	return t1;
11692      /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
11693      if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11694	  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11695	{
11696	  unsigned int prec
11697	    = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11698
11699	  if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11700	      && (~TREE_INT_CST_LOW (arg1)
11701		  & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11702	    return
11703	      fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11704	}
11705
11706      /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11707
11708	 This results in more efficient code for machines without a NOR
11709	 instruction.  Combine will canonicalize to the first form
11710	 which will allow use of NOR instructions provided by the
11711	 backend if they exist.  */
11712      if (TREE_CODE (arg0) == BIT_NOT_EXPR
11713	  && TREE_CODE (arg1) == BIT_NOT_EXPR)
11714	{
11715	  return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11716			      build2 (BIT_IOR_EXPR, type,
11717				      fold_convert_loc (loc, type,
11718							TREE_OPERAND (arg0, 0)),
11719				      fold_convert_loc (loc, type,
11720							TREE_OPERAND (arg1, 0))));
11721	}
11722
11723      /* If arg0 is derived from the address of an object or function, we may
11724	 be able to fold this expression using the object or function's
11725	 alignment.  */
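      /* E.g., if arg0 is derived from the address of an object with
	 known 8-byte alignment, the call below reports modulus 8 and
	 residue 0, so ADDR & 7 folds to the constant 0.  */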
11726      if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11727	{
11728	  unsigned HOST_WIDE_INT modulus, residue;
11729	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11730
11731	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
11732						     integer_onep (arg1));
11733
11734	  /* This works because modulus is a power of 2.  If this weren't the
11735	     case, we'd have to replace it by its greatest power-of-2
11736	     divisor: modulus & -modulus.  */
11737	  if (low < modulus)
11738	    return build_int_cst (type, residue & low);
11739	}
11740
11741      /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11742	      (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11743	 if the new mask might be further optimized.  */
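      /* E.g., for a 32-bit unsigned X, ((X >> 8) & 0x00ffffff) has its
	 mask widened by the eight known-zero high bits to 0xffffffff,
	 after which the now redundant BIT_AND is dropped, leaving
	 X >> 8.  */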
11744      if ((TREE_CODE (arg0) == LSHIFT_EXPR
11745	   || TREE_CODE (arg0) == RSHIFT_EXPR)
11746	  && host_integerp (TREE_OPERAND (arg0, 1), 1)
11747	  && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11748	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11749	     < TYPE_PRECISION (TREE_TYPE (arg0))
11750	  && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11751	  && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11752	{
11753	  unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11754	  unsigned HOST_WIDE_INT mask
11755	    = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11756	  unsigned HOST_WIDE_INT newmask, zerobits = 0;
11757	  tree shift_type = TREE_TYPE (arg0);
11758
11759	  if (TREE_CODE (arg0) == LSHIFT_EXPR)
11760	    zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11761	  else if (TREE_CODE (arg0) == RSHIFT_EXPR
11762		   && TYPE_PRECISION (TREE_TYPE (arg0))
11763		      == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11764	    {
11765	      unsigned int prec = TYPE_PRECISION (TREE_TYPE (arg0));
11766	      tree arg00 = TREE_OPERAND (arg0, 0);
11767	      /* See if more bits can be proven as zero because of
11768		 zero extension.  */
11769	      if (TREE_CODE (arg00) == NOP_EXPR
11770		  && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11771		{
11772		  tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11773		  if (TYPE_PRECISION (inner_type)
11774		      == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11775		      && TYPE_PRECISION (inner_type) < prec)
11776		    {
11777		      prec = TYPE_PRECISION (inner_type);
11778		      /* See if we can shorten the right shift.  */
11779		      if (shiftc < prec)
11780			shift_type = inner_type;
11781		    }
11782		}
11783	      zerobits = ~(unsigned HOST_WIDE_INT) 0;
11784	      zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11785	      zerobits <<= prec - shiftc;
	      /* For an arithmetic shift, if the sign bit could be set,
		 zerobits may actually contain sign bits, so no
		 transformation is possible unless MASK masks them all
		 away.  In that case the shift needs to be converted into
		 a logical shift.  */
11790	      if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11791		  && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11792		{
11793		  if ((mask & zerobits) == 0)
11794		    shift_type = unsigned_type_for (TREE_TYPE (arg0));
11795		  else
11796		    zerobits = 0;
11797		}
11798	    }
11799
11800	  /* ((X << 16) & 0xff00) is (X, 0).  */
11801	  if ((mask & zerobits) == mask)
11802	    return omit_one_operand_loc (loc, type,
11803				     build_int_cst (type, 0), arg0);
11804
11805	  newmask = mask | zerobits;
11806	  if (newmask != mask && (newmask & (newmask + 1)) == 0)
11807	    {
11808	      unsigned int prec;
11809
11810	      /* Only do the transformation if NEWMASK is some integer
11811		 mode's mask.  */
11812	      for (prec = BITS_PER_UNIT;
11813		   prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11814		if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11815		  break;
11816	      if (prec < HOST_BITS_PER_WIDE_INT
11817		  || newmask == ~(unsigned HOST_WIDE_INT) 0)
11818		{
11819		  tree newmaskt;
11820
11821		  if (shift_type != TREE_TYPE (arg0))
11822		    {
11823		      tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11824					 fold_convert_loc (loc, shift_type,
11825							   TREE_OPERAND (arg0, 0)),
11826					 TREE_OPERAND (arg0, 1));
11827		      tem = fold_convert_loc (loc, type, tem);
11828		    }
11829		  else
11830		    tem = op0;
11831		  newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11832		  if (!tree_int_cst_equal (newmaskt, arg1))
11833		    return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11834		}
11835	    }
11836	}
11837
11838      goto associate;
11839
11840    case RDIV_EXPR:
11841      /* Don't touch a floating-point divide by zero unless the mode
11842	 of the constant can represent infinity.  */
11843      if (TREE_CODE (arg1) == REAL_CST
11844	  && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11845	  && real_zerop (arg1))
11846	return NULL_TREE;
11847
11848      /* Optimize A / A to 1.0 if we don't care about
11849	 NaNs or Infinities.  Skip the transformation
11850	 for non-real operands.  */
11851      if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11852	  && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11853	  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11854	  && operand_equal_p (arg0, arg1, 0))
11855	{
11856	  tree r = build_real (TREE_TYPE (arg0), dconst1);
11857
11858	  return omit_two_operands_loc (loc, type, r, arg0, arg1);
11859	}
11860
11861      /* The complex version of the above A / A optimization.  */
11862      if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11863	  && operand_equal_p (arg0, arg1, 0))
11864	{
11865	  tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11866	  if (! HONOR_NANS (TYPE_MODE (elem_type))
11867	      && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11868	    {
11869	      tree r = build_real (elem_type, dconst1);
11870	      /* omit_two_operands will call fold_convert for us.  */
11871	      return omit_two_operands_loc (loc, type, r, arg0, arg1);
11872	    }
11873	}
11874
11875      /* (-A) / (-B) -> A / B  */
11876      if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11877	return fold_build2_loc (loc, RDIV_EXPR, type,
11878			    TREE_OPERAND (arg0, 0),
11879			    negate_expr (arg1));
11880      if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11881	return fold_build2_loc (loc, RDIV_EXPR, type,
11882			    negate_expr (arg0),
11883			    TREE_OPERAND (arg1, 0));
11884
11885      /* In IEEE floating point, x/1 is not equivalent to x for snans.  */
11886      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11887	  && real_onep (arg1))
11888	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11889
11890      /* In IEEE floating point, x/-1 is not equivalent to -x for snans.  */
11891      if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11892	  && real_minus_onep (arg1))
11893	return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11894						  negate_expr (arg0)));
11895
11896      /* If ARG1 is a constant, we can convert this to a multiply by the
11897	 reciprocal.  This does not have the same rounding properties,
11898	 so only do this if -freciprocal-math.  We can actually
11899	 always safely do it if ARG1 is a power of two, but it's hard to
11900	 tell if it is or not in a portable manner.  */
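      /* E.g., under -freciprocal-math x / 5.0 becomes x * 0.2, which
	 may round differently; when optimizing, x / 2.0 becomes x * 0.5
	 via the exact_real_inverse path below, since that reciprocal
	 is exact.  */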
11901      if (TREE_CODE (arg1) == REAL_CST)
11902	{
11903	  if (flag_reciprocal_math
11904	      && 0 != (tem = const_binop (code, build_real (type, dconst1),
11905					  arg1, 0)))
11906	    return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11907	  /* Find the reciprocal if optimizing and the result is exact.  */
11908	  if (optimize)
11909	    {
11910	      REAL_VALUE_TYPE r;
11911	      r = TREE_REAL_CST (arg1);
11912	      if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
11913		{
11914		  tem = build_real (type, r);
11915		  return fold_build2_loc (loc, MULT_EXPR, type,
11916				      fold_convert_loc (loc, type, arg0), tem);
11917		}
11918	    }
11919	}
11920      /* Convert A/B/C to A/(B*C).  */
11921      if (flag_reciprocal_math
11922	  && TREE_CODE (arg0) == RDIV_EXPR)
11923	return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11924			    fold_build2_loc (loc, MULT_EXPR, type,
11925					 TREE_OPERAND (arg0, 1), arg1));
11926
11927      /* Convert A/(B/C) to (A/B)*C.  */
11928      if (flag_reciprocal_math
11929	  && TREE_CODE (arg1) == RDIV_EXPR)
11930	return fold_build2_loc (loc, MULT_EXPR, type,
11931			    fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11932					 TREE_OPERAND (arg1, 0)),
11933			    TREE_OPERAND (arg1, 1));
11934
11935      /* Convert C1/(X*C2) into (C1/C2)/X.  */
11936      if (flag_reciprocal_math
11937	  && TREE_CODE (arg1) == MULT_EXPR
11938	  && TREE_CODE (arg0) == REAL_CST
11939	  && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11940	{
11941	  tree tem = const_binop (RDIV_EXPR, arg0,
11942				  TREE_OPERAND (arg1, 1), 0);
11943	  if (tem)
11944	    return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11945				TREE_OPERAND (arg1, 0));
11946	}
11947
11948      if (flag_unsafe_math_optimizations)
11949	{
11950	  enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11951	  enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11952
11953	  /* Optimize sin(x)/cos(x) as tan(x).  */
11954	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11955	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11956	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11957	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11958				  CALL_EXPR_ARG (arg1, 0), 0))
11959	    {
11960	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11961
11962	      if (tanfn != NULL_TREE)
11963		return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11964	    }
11965
11966	  /* Optimize cos(x)/sin(x) as 1.0/tan(x).  */
11967	  if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11968	       || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11969	       || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11970	      && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11971				  CALL_EXPR_ARG (arg1, 0), 0))
11972	    {
11973	      tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11974
11975	      if (tanfn != NULL_TREE)
11976		{
11977		  tree tmp = build_call_expr_loc (loc, tanfn, 1,
11978					      CALL_EXPR_ARG (arg0, 0));
11979		  return fold_build2_loc (loc, RDIV_EXPR, type,
11980				      build_real (type, dconst1), tmp);
11981		}
11982	    }
11983
11984 	  /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11985	     NaNs or Infinities.  */
11986 	  if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11987 	       || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11988 	       || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11989	    {
11990	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
11991	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
11992
11993	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
11994		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
11995		  && operand_equal_p (arg00, arg01, 0))
11996		{
11997		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11998
11999		  if (cosfn != NULL_TREE)
12000		    return build_call_expr_loc (loc, cosfn, 1, arg00);
12001		}
12002	    }
12003
12004 	  /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12005	     NaNs or Infinities.  */
12006 	  if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12007 	       || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12008 	       || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12009	    {
12010	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
12011	      tree arg01 = CALL_EXPR_ARG (arg1, 0);
12012
12013	      if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12014		  && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12015		  && operand_equal_p (arg00, arg01, 0))
12016		{
12017		  tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12018
12019		  if (cosfn != NULL_TREE)
12020		    {
12021		      tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12022		      return fold_build2_loc (loc, RDIV_EXPR, type,
12023					  build_real (type, dconst1),
12024					  tmp);
12025		    }
12026		}
12027	    }
12028
12029	  /* Optimize pow(x,c)/x as pow(x,c-1).  */
12030	  if (fcode0 == BUILT_IN_POW
12031	      || fcode0 == BUILT_IN_POWF
12032	      || fcode0 == BUILT_IN_POWL)
12033	    {
12034	      tree arg00 = CALL_EXPR_ARG (arg0, 0);
12035	      tree arg01 = CALL_EXPR_ARG (arg0, 1);
12036	      if (TREE_CODE (arg01) == REAL_CST
12037		  && !TREE_OVERFLOW (arg01)
12038		  && operand_equal_p (arg1, arg00, 0))
12039		{
12040		  tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12041		  REAL_VALUE_TYPE c;
12042		  tree arg;
12043
12044		  c = TREE_REAL_CST (arg01);
12045		  real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12046		  arg = build_real (type, c);
12047		  return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12048		}
12049	    }
12050
12051	  /* Optimize a/root(b/c) into a*root(c/b).  */
12052	  if (BUILTIN_ROOT_P (fcode1))
12053	    {
12054	      tree rootarg = CALL_EXPR_ARG (arg1, 0);
12055
12056	      if (TREE_CODE (rootarg) == RDIV_EXPR)
12057		{
12058		  tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12059		  tree b = TREE_OPERAND (rootarg, 0);
12060		  tree c = TREE_OPERAND (rootarg, 1);
12061
12062		  tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12063
12064		  tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12065		  return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12066		}
12067	    }
12068
12069	  /* Optimize x/expN(y) into x*expN(-y).  */
12070	  if (BUILTIN_EXPONENT_P (fcode1))
12071	    {
12072	      tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12073	      tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12074	      arg1 = build_call_expr_loc (loc,
12075				      expfn, 1,
12076				      fold_convert_loc (loc, type, arg));
12077	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12078	    }
12079
12080	  /* Optimize x/pow(y,z) into x*pow(y,-z).  */
12081	  if (fcode1 == BUILT_IN_POW
12082	      || fcode1 == BUILT_IN_POWF
12083	      || fcode1 == BUILT_IN_POWL)
12084	    {
12085	      tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12086	      tree arg10 = CALL_EXPR_ARG (arg1, 0);
12087	      tree arg11 = CALL_EXPR_ARG (arg1, 1);
12088	      tree neg11 = fold_convert_loc (loc, type,
12089					     negate_expr (arg11));
12090	      arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12091	      return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12092	    }
12093	}
12094      return NULL_TREE;
12095
12096    case TRUNC_DIV_EXPR:
12097    case FLOOR_DIV_EXPR:
12098      /* Simplify A / (B << N) where A and B are positive and B is
12099	 a power of 2, to A >> (N + log2(B)).  */
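      /* E.g., with B == 4 this turns A / (4 << N) into A >> (N + 2),
	 provided the type is unsigned or A is known non-negative.  */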
12100      strict_overflow_p = false;
12101      if (TREE_CODE (arg1) == LSHIFT_EXPR
12102	  && (TYPE_UNSIGNED (type)
12103	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12104	{
12105	  tree sval = TREE_OPERAND (arg1, 0);
12106	  if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12107	    {
12108	      tree sh_cnt = TREE_OPERAND (arg1, 1);
12109	      unsigned long pow2;
12110
12111	      if (TREE_INT_CST_LOW (sval))
12112		pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12113	      else
12114		pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12115		       + HOST_BITS_PER_WIDE_INT;
12116
12117	      if (strict_overflow_p)
12118		fold_overflow_warning (("assuming signed overflow does not "
12119					"occur when simplifying A / (B << N)"),
12120				       WARN_STRICT_OVERFLOW_MISC);
12121
12122	      sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12123				    sh_cnt, build_int_cst (NULL_TREE, pow2));
12124	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
12125				  fold_convert_loc (loc, type, arg0), sh_cnt);
12126	    }
12127	}
12128
12129      /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12130	 TRUNC_DIV_EXPR.  Rewrite into the latter in this case.  */
12131      if (INTEGRAL_TYPE_P (type)
12132	  && TYPE_UNSIGNED (type)
12133	  && code == FLOOR_DIV_EXPR)
12134	return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12135
      /* Fall through.  */
12137
12138    case ROUND_DIV_EXPR:
12139    case CEIL_DIV_EXPR:
12140    case EXACT_DIV_EXPR:
12141      if (integer_onep (arg1))
12142	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12143      if (integer_zerop (arg1))
12144	return NULL_TREE;
12145      /* X / -1 is -X.  */
12146      if (!TYPE_UNSIGNED (type)
12147	  && TREE_CODE (arg1) == INTEGER_CST
12148	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12149	  && TREE_INT_CST_HIGH (arg1) == -1)
12150	return fold_convert_loc (loc, type, negate_expr (arg0));
12151
12152      /* Convert -A / -B to A / B when the type is signed and overflow is
12153	 undefined.  */
12154      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12155	  && TREE_CODE (arg0) == NEGATE_EXPR
12156	  && negate_expr_p (arg1))
12157	{
12158	  if (INTEGRAL_TYPE_P (type))
12159	    fold_overflow_warning (("assuming signed overflow does not occur "
12160				    "when distributing negation across "
12161				    "division"),
12162				   WARN_STRICT_OVERFLOW_MISC);
12163	  return fold_build2_loc (loc, code, type,
12164			      fold_convert_loc (loc, type,
12165						TREE_OPERAND (arg0, 0)),
12166			      fold_convert_loc (loc, type,
12167						negate_expr (arg1)));
12168	}
12169      if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12170	  && TREE_CODE (arg1) == NEGATE_EXPR
12171	  && negate_expr_p (arg0))
12172	{
12173	  if (INTEGRAL_TYPE_P (type))
12174	    fold_overflow_warning (("assuming signed overflow does not occur "
12175				    "when distributing negation across "
12176				    "division"),
12177				   WARN_STRICT_OVERFLOW_MISC);
12178	  return fold_build2_loc (loc, code, type,
12179			      fold_convert_loc (loc, type,
12180						negate_expr (arg0)),
12181			      fold_convert_loc (loc, type,
12182						TREE_OPERAND (arg1, 0)));
12183	}
12184
12185      /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12186	 operation, EXACT_DIV_EXPR.
12187
12188	 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
	 At one time others generated faster code, but it's not clear if they
	 do after the last round of changes to the DIV code in expmed.c.  */
12191      if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12192	  && multiple_of_p (type, arg0, arg1))
12193	return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12194
12195      strict_overflow_p = false;
12196      if (TREE_CODE (arg1) == INTEGER_CST
12197	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12198					 &strict_overflow_p)))
12199	{
12200	  if (strict_overflow_p)
12201	    fold_overflow_warning (("assuming signed overflow does not occur "
12202				    "when simplifying division"),
12203				   WARN_STRICT_OVERFLOW_MISC);
12204	  return fold_convert_loc (loc, type, tem);
12205	}
12206
12207      return NULL_TREE;
12208
12209    case CEIL_MOD_EXPR:
12210    case FLOOR_MOD_EXPR:
12211    case ROUND_MOD_EXPR:
12212    case TRUNC_MOD_EXPR:
12213      /* X % 1 is always zero, but be sure to preserve any side
12214	 effects in X.  */
12215      if (integer_onep (arg1))
12216	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12217
      /* For X % 0, return X % 0 unchanged so that we can emit the
	 proper warnings and errors.  */
12220      if (integer_zerop (arg1))
12221	return NULL_TREE;
12222
12223      /* 0 % X is always zero, but be sure to preserve any side
12224	 effects in X.  Place this after checking for X == 0.  */
12225      if (integer_zerop (arg0))
12226	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12227
12228      /* X % -1 is zero.  */
12229      if (!TYPE_UNSIGNED (type)
12230	  && TREE_CODE (arg1) == INTEGER_CST
12231	  && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12232	  && TREE_INT_CST_HIGH (arg1) == -1)
12233	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12234
12235      /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12236         i.e. "X % C" into "X & (C - 1)", if X and C are positive.  */
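      /* E.g., unsigned X % 8 becomes X & 7, and X % (4 << N) becomes
	 X & ((4 << N) - 1).  */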
12237      strict_overflow_p = false;
12238      if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12239	  && (TYPE_UNSIGNED (type)
12240	      || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12241	{
12242	  tree c = arg1;
12243	  /* Also optimize A % (C << N)  where C is a power of 2,
12244	     to A & ((C << N) - 1).  */
12245	  if (TREE_CODE (arg1) == LSHIFT_EXPR)
12246	    c = TREE_OPERAND (arg1, 0);
12247
12248	  if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12249	    {
12250	      tree mask = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12251				       build_int_cst (TREE_TYPE (arg1), 1));
12252	      if (strict_overflow_p)
12253		fold_overflow_warning (("assuming signed overflow does not "
12254					"occur when simplifying "
12255					"X % (power of two)"),
12256				       WARN_STRICT_OVERFLOW_MISC);
12257	      return fold_build2_loc (loc, BIT_AND_EXPR, type,
12258				  fold_convert_loc (loc, type, arg0),
12259				  fold_convert_loc (loc, type, mask));
12260	    }
12261	}
12262
12263      /* X % -C is the same as X % C.  */
12264      if (code == TRUNC_MOD_EXPR
12265	  && !TYPE_UNSIGNED (type)
12266	  && TREE_CODE (arg1) == INTEGER_CST
12267	  && !TREE_OVERFLOW (arg1)
12268	  && TREE_INT_CST_HIGH (arg1) < 0
12269	  && !TYPE_OVERFLOW_TRAPS (type)
12270	  /* Avoid this transformation if C is INT_MIN, i.e. C == -C.  */
12271	  && !sign_bit_p (arg1, arg1))
12272	return fold_build2_loc (loc, code, type,
12273			    fold_convert_loc (loc, type, arg0),
12274			    fold_convert_loc (loc, type,
12275					      negate_expr (arg1)));
12276
12277      /* X % -Y is the same as X % Y.  */
12278      if (code == TRUNC_MOD_EXPR
12279	  && !TYPE_UNSIGNED (type)
12280	  && TREE_CODE (arg1) == NEGATE_EXPR
12281	  && !TYPE_OVERFLOW_TRAPS (type))
12282	return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12283			    fold_convert_loc (loc, type,
12284					      TREE_OPERAND (arg1, 0)));
12285
12286      if (TREE_CODE (arg1) == INTEGER_CST
12287	  && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12288					 &strict_overflow_p)))
12289	{
12290	  if (strict_overflow_p)
12291	    fold_overflow_warning (("assuming signed overflow does not occur "
12292				    "when simplifying modulus"),
12293				   WARN_STRICT_OVERFLOW_MISC);
12294	  return fold_convert_loc (loc, type, tem);
12295	}
12296
12297      return NULL_TREE;
12298
12299    case LROTATE_EXPR:
12300    case RROTATE_EXPR:
12301      if (integer_all_onesp (arg0))
12302	return omit_one_operand_loc (loc, type, arg0, arg1);
12303      goto shift;
12304
12305    case RSHIFT_EXPR:
12306      /* Optimize -1 >> x for arithmetic right shifts.  */
12307      if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12308	  && tree_expr_nonnegative_p (arg1))
12309	return omit_one_operand_loc (loc, type, arg0, arg1);
12310      /* ... fall through ...  */
12311
12312    case LSHIFT_EXPR:
12313    shift:
12314      if (integer_zerop (arg1))
12315	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12316      if (integer_zerop (arg0))
12317	return omit_one_operand_loc (loc, type, arg0, arg1);
12318
      /* Since a negative shift count is not well-defined,
	 don't try to compute it in the compiler.  */
12321      if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12322	return NULL_TREE;
12323
12324      /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
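      /* E.g., (x << 3) << 5 becomes x << 8.  When the combined count
	 reaches the type precision, rotates are reduced modulo the
	 precision, left and unsigned right shifts fold to 0, and signed
	 right shifts are clamped to precision - 1.  */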
12325      if (TREE_CODE (op0) == code && host_integerp (arg1, false)
12326	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12327	  && host_integerp (TREE_OPERAND (arg0, 1), false)
12328	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12329	{
12330	  HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12331			       + TREE_INT_CST_LOW (arg1));
12332
12333	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12334	     being well defined.  */
12335	  if (low >= TYPE_PRECISION (type))
12336	    {
12337	      if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12338	        low = low % TYPE_PRECISION (type);
12339	      else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12340		return omit_one_operand_loc (loc, type, build_int_cst (type, 0),
12341					 TREE_OPERAND (arg0, 0));
12342	      else
12343		low = TYPE_PRECISION (type) - 1;
12344	    }
12345
12346	  return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12347			      build_int_cst (type, low));
12348	}
12349
      /* Transform (x >> c) << c into x & (-1 << c), or transform (x << c) >> c
	 into x & ((unsigned) -1 >> c) for unsigned types.  */
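      /* E.g., for a 32-bit unsigned x, (x >> 4) << 4 becomes
	 x & 0xfffffff0 and (x << 4) >> 4 becomes x & 0x0fffffff.  */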
12352      if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12353           || (TYPE_UNSIGNED (type)
12354	       && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12355	  && host_integerp (arg1, false)
12356	  && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
12357	  && host_integerp (TREE_OPERAND (arg0, 1), false)
12358	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
12359	{
12360	  HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12361	  HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12362	  tree lshift;
12363	  tree arg00;
12364
12365	  if (low0 == low1)
12366	    {
12367	      arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12368
12369	      lshift = build_int_cst (type, -1);
12370	      lshift = int_const_binop (code, lshift, arg1, 0);
12371
12372	      return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12373	    }
12374	}
12375
12376      /* Rewrite an LROTATE_EXPR by a constant into an
12377	 RROTATE_EXPR by a new constant.  */
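      /* E.g., on a 32-bit type a rotate left by 5 becomes a rotate
	 right by 27.  */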
12378      if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12379	{
12380	  tree tem = build_int_cst (TREE_TYPE (arg1),
12381				    TYPE_PRECISION (type));
12382	  tem = const_binop (MINUS_EXPR, tem, arg1, 0);
12383	  return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12384	}
12385
12386      /* If we have a rotate of a bit operation with the rotate count and
12387	 the second operand of the bit operation both constant,
12388	 permute the two operations.  */
12389      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12390	  && (TREE_CODE (arg0) == BIT_AND_EXPR
12391	      || TREE_CODE (arg0) == BIT_IOR_EXPR
12392	      || TREE_CODE (arg0) == BIT_XOR_EXPR)
12393	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12394	return fold_build2_loc (loc, TREE_CODE (arg0), type,
12395			    fold_build2_loc (loc, code, type,
12396					 TREE_OPERAND (arg0, 0), arg1),
12397			    fold_build2_loc (loc, code, type,
12398					 TREE_OPERAND (arg0, 1), arg1));
12399
12400      /* Two consecutive rotates adding up to the precision of the
12401	 type can be ignored.  */
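      /* E.g., on a 32-bit type, rotating right by 22 and then right
	 by 10 restores the original value.  */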
12402      if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12403	  && TREE_CODE (arg0) == RROTATE_EXPR
12404	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12405	  && TREE_INT_CST_HIGH (arg1) == 0
12406	  && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12407	  && ((TREE_INT_CST_LOW (arg1)
12408	       + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12409	      == (unsigned int) TYPE_PRECISION (type)))
12410	return TREE_OPERAND (arg0, 0);
12411
12412      /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12413	      (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12414	 if the latter can be further optimized.  */
12415      if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12416	  && TREE_CODE (arg0) == BIT_AND_EXPR
12417	  && TREE_CODE (arg1) == INTEGER_CST
12418	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12419	{
12420	  tree mask = fold_build2_loc (loc, code, type,
12421				   fold_convert_loc (loc, type,
12422						     TREE_OPERAND (arg0, 1)),
12423				   arg1);
12424	  tree shift = fold_build2_loc (loc, code, type,
12425				    fold_convert_loc (loc, type,
12426						      TREE_OPERAND (arg0, 0)),
12427				    arg1);
12428	  tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12429	  if (tem)
12430	    return tem;
12431	}
12432
12433      return NULL_TREE;
12434
12435    case MIN_EXPR:
12436      if (operand_equal_p (arg0, arg1, 0))
12437	return omit_one_operand_loc (loc, type, arg0, arg1);
12438      if (INTEGRAL_TYPE_P (type)
12439	  && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12440	return omit_one_operand_loc (loc, type, arg1, arg0);
12441      tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12442      if (tem)
12443	return tem;
12444      goto associate;
12445
12446    case MAX_EXPR:
12447      if (operand_equal_p (arg0, arg1, 0))
12448	return omit_one_operand_loc (loc, type, arg0, arg1);
12449      if (INTEGRAL_TYPE_P (type)
12450	  && TYPE_MAX_VALUE (type)
12451	  && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12452	return omit_one_operand_loc (loc, type, arg1, arg0);
12453      tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12454      if (tem)
12455	return tem;
12456      goto associate;
12457
12458    case TRUTH_ANDIF_EXPR:
12459      /* Note that the operands of this must be ints
12460	 and their values must be 0 or 1.
	 ("true" is a fixed value, perhaps depending on the language.)  */
12462      /* If first arg is constant zero, return it.  */
12463      if (integer_zerop (arg0))
12464	return fold_convert_loc (loc, type, arg0);
12465    case TRUTH_AND_EXPR:
12466      /* If either arg is constant true, drop it.  */
12467      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12468	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12469      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12470	  /* Preserve sequence points.  */
12471	  && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12472	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12473      /* If second arg is constant zero, result is zero, but first arg
12474	 must be evaluated.  */
12475      if (integer_zerop (arg1))
12476	return omit_one_operand_loc (loc, type, arg1, arg0);
12477      /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12478	 case will be handled here.  */
12479      if (integer_zerop (arg0))
12480	return omit_one_operand_loc (loc, type, arg0, arg1);
12481
12482      /* !X && X is always false.  */
12483      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12484	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12485	return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12486      /* X && !X is always false.  */
12487      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12488	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12489	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12490
12491      /* A < X && A + 1 > Y ==> A < X && A >= Y.  Normally A + 1 > Y
12492	 means A >= Y && A != MAX, but in this case we know that
12493	 A < X <= MAX.  */
12494
12495      if (!TREE_SIDE_EFFECTS (arg0)
12496	  && !TREE_SIDE_EFFECTS (arg1))
12497	{
12498	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12499	  if (tem && !operand_equal_p (tem, arg0, 0))
12500	    return fold_build2_loc (loc, code, type, tem, arg1);
12501
12502	  tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12503	  if (tem && !operand_equal_p (tem, arg1, 0))
12504	    return fold_build2_loc (loc, code, type, arg0, tem);
12505	}
12506
12507    truth_andor:
12508      /* We only do these simplifications if we are optimizing.  */
12509      if (!optimize)
12510	return NULL_TREE;
12511
12512      /* Check for things like (A || B) && (A || C).  We can convert this
12513	 to A || (B && C).  Note that either operator can be any of the four
12514	 truth and/or operations and the transformation will still be
12515	 valid.   Also note that we only care about order for the
12516	 ANDIF and ORIF operators.  If B contains side effects, this
12517	 might change the truth-value of A.  */
12518      if (TREE_CODE (arg0) == TREE_CODE (arg1)
12519	  && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
12520	      || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
12521	      || TREE_CODE (arg0) == TRUTH_AND_EXPR
12522	      || TREE_CODE (arg0) == TRUTH_OR_EXPR)
12523	  && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
12524	{
12525	  tree a00 = TREE_OPERAND (arg0, 0);
12526	  tree a01 = TREE_OPERAND (arg0, 1);
12527	  tree a10 = TREE_OPERAND (arg1, 0);
12528	  tree a11 = TREE_OPERAND (arg1, 1);
12529	  int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
12530			      || TREE_CODE (arg0) == TRUTH_AND_EXPR)
12531			     && (code == TRUTH_AND_EXPR
12532				 || code == TRUTH_OR_EXPR));
12533
12534	  if (operand_equal_p (a00, a10, 0))
12535	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12536				fold_build2_loc (loc, code, type, a01, a11));
12537	  else if (commutative && operand_equal_p (a00, a11, 0))
12538	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
12539				fold_build2_loc (loc, code, type, a01, a10));
12540	  else if (commutative && operand_equal_p (a01, a10, 0))
12541	    return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
12542				fold_build2_loc (loc, code, type, a00, a11));
12543
12544	  /* This case is tricky because we must either have commutative
12545	     operators or else A10 must not have side-effects.  */
12546
12547	  else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
12548		   && operand_equal_p (a01, a11, 0))
12549	    return fold_build2_loc (loc, TREE_CODE (arg0), type,
12550				fold_build2_loc (loc, code, type, a00, a10),
12551				a01);
12552	}
12553
12554      /* See if we can build a range comparison.  */
12555      if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
12556	return tem;
12557
12558      /* Check for the possibility of merging component references.  If our
12559	 lhs is another similar operation, try to merge its rhs with our
12560	 rhs.  Then try to merge our lhs and rhs.  */
12561      if (TREE_CODE (arg0) == code
12562	  && 0 != (tem = fold_truthop (loc, code, type,
12563				       TREE_OPERAND (arg0, 1), arg1)))
12564	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12565
12566      if ((tem = fold_truthop (loc, code, type, arg0, arg1)) != 0)
12567	return tem;
12568
12569      return NULL_TREE;
12570
12571    case TRUTH_ORIF_EXPR:
12572      /* Note that the operands of this must be ints
12573	 and their values must be 0 or true.
12574	 ("true" is a fixed value perhaps depending on the language.)  */
12575      /* If first arg is constant true, return it.  */
12576      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12577	return fold_convert_loc (loc, type, arg0);
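      /* ... fall through ...  */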
12578    case TRUTH_OR_EXPR:
12579      /* If either arg is constant zero, drop it.  */
12580      if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12581	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12582      if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12583	  /* Preserve sequence points.  */
12584	  && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12585	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12586      /* If second arg is constant true, result is true, but we must
12587	 evaluate first arg.  */
12588      if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12589	return omit_one_operand_loc (loc, type, arg1, arg0);
12590      /* Likewise for first arg, but note this only occurs here for
12591	 TRUTH_OR_EXPR.  */
12592      if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12593	return omit_one_operand_loc (loc, type, arg0, arg1);
12594
12595      /* !X || X is always true.  */
12596      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12597	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12598	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12599      /* X || !X is always true.  */
12600      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12601	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12602	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12603
12604      goto truth_andor;
12605
12606    case TRUTH_XOR_EXPR:
12607      /* If the second arg is constant zero, drop it.  */
12608      if (integer_zerop (arg1))
12609	return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12610      /* If the second arg is constant true, this is a logical inversion.  */
12611      if (integer_onep (arg1))
12612	{
12613	  /* Only call invert_truthvalue if operand is a truth value.  */
12614	  if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
12615	    tem = fold_build1_loc (loc, TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
12616	  else
12617	    tem = invert_truthvalue_loc (loc, arg0);
12618	  return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12619	}
12620      /* Identical arguments cancel to zero.  */
12621      if (operand_equal_p (arg0, arg1, 0))
12622	return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12623
12624      /* !X ^ X is always true.  */
12625      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12626	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12627	return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12628
12629      /* X ^ !X is always true.  */
12630      if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12631	  && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12632	return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12633
12634      return NULL_TREE;
12635
12636    case EQ_EXPR:
12637    case NE_EXPR:
12638      tem = fold_comparison (loc, code, type, op0, op1);
12639      if (tem != NULL_TREE)
12640	return tem;
12641
12642      /* bool_var != 0 becomes bool_var. */
12643      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12644          && code == NE_EXPR)
12645        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12646
12647      /* bool_var == 1 becomes bool_var. */
12648      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12649          && code == EQ_EXPR)
12650        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12651
12652      /* bool_var != 1 becomes !bool_var. */
12653      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12654          && code == NE_EXPR)
12655        return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12656			    fold_convert_loc (loc, type, arg0));
12657
12658      /* bool_var == 0 becomes !bool_var. */
12659      if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12660          && code == EQ_EXPR)
12661        return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
12662			    fold_convert_loc (loc, type, arg0));
12663
12664      /* !exp != 0 becomes !exp */
12665      if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12666	  && code == NE_EXPR)
12667        return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12668
12669      /* If this is an equality comparison of the address of two non-weak,
12670	 unaliased symbols neither of which are extern (since we do not
12671	 have access to attributes for externs), then we know the result.  */
12672      if (TREE_CODE (arg0) == ADDR_EXPR
12673	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12674	  && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12675	  && ! lookup_attribute ("alias",
12676				 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12677	  && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12678	  && TREE_CODE (arg1) == ADDR_EXPR
12679	  && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12680	  && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12681	  && ! lookup_attribute ("alias",
12682				 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12683	  && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12684	{
12685	  /* We know that we're looking at the address of two
12686	     non-weak, unaliased, static _DECL nodes.
12687
12688	     It is both wasteful and incorrect to call operand_equal_p
12689	     to compare the two ADDR_EXPR nodes.  It is wasteful in that
12690	     all we need to do is test pointer equality for the arguments
12691	     to the two ADDR_EXPR nodes.  It is incorrect to use
12692	     operand_equal_p as that function is NOT equivalent to a
12693	     C equality test.  It can in fact return false for two
12694	     objects which would test as equal using the C equality
12695	     operator.  */
12696	  bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12697	  return constant_boolean_node (equal
12698				        ? code == EQ_EXPR : code != EQ_EXPR,
12699				        type);
12700	}
12701
12702      /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12703	 a MINUS_EXPR of a constant, we can convert it into a comparison with
12704	 a revised constant as long as no overflow occurs.  */
12705      if (TREE_CODE (arg1) == INTEGER_CST
12706	  && (TREE_CODE (arg0) == PLUS_EXPR
12707	      || TREE_CODE (arg0) == MINUS_EXPR)
12708	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12709	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12710				      ? MINUS_EXPR : PLUS_EXPR,
12711				      fold_convert_loc (loc, TREE_TYPE (arg0),
12712							arg1),
12713				      TREE_OPERAND (arg0, 1), 0))
12714	  && !TREE_OVERFLOW (tem))
12715	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12716
12717      /* Similarly for a NEGATE_EXPR.  */
12718      if (TREE_CODE (arg0) == NEGATE_EXPR
12719	  && TREE_CODE (arg1) == INTEGER_CST
12720	  && 0 != (tem = negate_expr (arg1))
12721	  && TREE_CODE (tem) == INTEGER_CST
12722	  && !TREE_OVERFLOW (tem))
12723	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12724
12725      /* Similarly for a BIT_XOR_EXPR;  X ^ C1 == C2 is X == (C1 ^ C2).  */
12726      if (TREE_CODE (arg0) == BIT_XOR_EXPR
12727	  && TREE_CODE (arg1) == INTEGER_CST
12728	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12729	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12730			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12731					 fold_convert_loc (loc,
12732							   TREE_TYPE (arg0),
12733							   arg1),
12734					 TREE_OPERAND (arg0, 1)));
12735
12736      /* Transform comparisons of the form X +- Y CMP X to Y CMP 0.  */
12737      if ((TREE_CODE (arg0) == PLUS_EXPR
12738	   || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12739	   || TREE_CODE (arg0) == MINUS_EXPR)
12740	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12741	  && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12742	      || POINTER_TYPE_P (TREE_TYPE (arg0))))
12743	{
12744	  tree val = TREE_OPERAND (arg0, 1);
12745	  return omit_two_operands_loc (loc, type,
12746				    fold_build2_loc (loc, code, type,
12747						 val,
12748						 build_int_cst (TREE_TYPE (val),
12749								0)),
12750				    TREE_OPERAND (arg0, 0), arg1);
12751	}
12752
12753      /* Transform comparisons of the form C - X CMP X if C % 2 == 1.  */
12754      if (TREE_CODE (arg0) == MINUS_EXPR
12755	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12756	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)
12757	  && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12758	{
12759	  return omit_two_operands_loc (loc, type,
12760				    code == NE_EXPR
12761				    ? boolean_true_node : boolean_false_node,
12762				    TREE_OPERAND (arg0, 1), arg1);
12763	}
12764
12765      /* If we have X - Y == 0, we can convert that to X == Y and similarly
12766	 for !=.  Don't do this for ordered comparisons due to overflow.  */
12767      if (TREE_CODE (arg0) == MINUS_EXPR
12768	  && integer_zerop (arg1))
12769	return fold_build2_loc (loc, code, type,
12770			    TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12771
12772      /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0.  */
12773      if (TREE_CODE (arg0) == ABS_EXPR
12774	  && (integer_zerop (arg1) || real_zerop (arg1)))
12775	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12776
12777      /* If this is an EQ or NE comparison with zero and ARG0 is
12778	 (1 << foo) & bar, convert it to (bar >> foo) & 1.  Both require
12779	 two operations, but the latter can be done in one less insn
12780	 on machines that have only two-operand insns or on which a
12781	 constant cannot be the first operand.  */
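      /* In effect, ((1 << FOO) & BAR) != 0 tests bit FOO of BAR, which
	 (BAR >> FOO) & 1 computes directly.  */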
12782      if (TREE_CODE (arg0) == BIT_AND_EXPR
12783	  && integer_zerop (arg1))
12784	{
12785	  tree arg00 = TREE_OPERAND (arg0, 0);
12786	  tree arg01 = TREE_OPERAND (arg0, 1);
12787	  if (TREE_CODE (arg00) == LSHIFT_EXPR
12788	      && integer_onep (TREE_OPERAND (arg00, 0)))
12789	    {
12790	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12791				      arg01, TREE_OPERAND (arg00, 1));
12792	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12793				 build_int_cst (TREE_TYPE (arg0), 1));
12794	      return fold_build2_loc (loc, code, type,
12795				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12796				  arg1);
12797	    }
12798	  else if (TREE_CODE (arg01) == LSHIFT_EXPR
12799		   && integer_onep (TREE_OPERAND (arg01, 0)))
12800	    {
12801	      tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12802				      arg00, TREE_OPERAND (arg01, 1));
12803	      tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12804				 build_int_cst (TREE_TYPE (arg0), 1));
12805	      return fold_build2_loc (loc, code, type,
12806				  fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12807				  arg1);
12808	    }
12809	}
12810
12811      /* If this is an NE or EQ comparison of zero against the result of a
12812	 signed MOD operation whose second operand is a power of 2, make
12813	 the MOD operation unsigned since it is simpler and equivalent.  */
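      /* For example, for signed X, X % 4 == 0 becomes
	 (unsigned) X % 4 == 0, which tests the same low-order bits.  */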
12814      if (integer_zerop (arg1)
12815	  && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12816	  && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12817	      || TREE_CODE (arg0) == CEIL_MOD_EXPR
12818	      || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12819	      || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12820	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
12821	{
12822	  tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12823	  tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12824				     fold_convert_loc (loc, newtype,
12825						       TREE_OPERAND (arg0, 0)),
12826				     fold_convert_loc (loc, newtype,
12827						       TREE_OPERAND (arg0, 1)));
12828
12829	  return fold_build2_loc (loc, code, type, newmod,
12830			      fold_convert_loc (loc, newtype, arg1));
12831	}
12832
12833      /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12834	 C1 is a valid shift constant, and C2 is a power of two, i.e.
12835	 a single bit.  */
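      /* For example, ((X >> 3) & 4) != 0 becomes (X & (4 << 3)) != 0,
	 i.e. (X & 32) != 0, when the shifted constant does not
	 overflow.  */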
12836      if (TREE_CODE (arg0) == BIT_AND_EXPR
12837	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12838	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12839	     == INTEGER_CST
12840	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12841	  && integer_zerop (arg1))
12842	{
12843	  tree itype = TREE_TYPE (arg0);
12844	  unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
12845	  tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12846
12847	  /* Check for a valid shift count.  */
12848	  if (TREE_INT_CST_HIGH (arg001) == 0
12849	      && TREE_INT_CST_LOW (arg001) < prec)
12850	    {
12851	      tree arg01 = TREE_OPERAND (arg0, 1);
12852	      tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12853	      unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12854	      /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12855		 can be rewritten as (X & (C2 << C1)) != 0.  */
12856	      if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12857		{
12858		  tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12859		  tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12860		  return fold_build2_loc (loc, code, type, tem, arg1);
12861		}
12862	      /* Otherwise, for signed (arithmetic) shifts,
12863		 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12864		 ((X >> C1) & C2) == 0 is rewritten as X >= 0.  */
12865	      else if (!TYPE_UNSIGNED (itype))
12866		return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12867				    arg000, build_int_cst (itype, 0));
12868	      /* Otherwise, for unsigned (logical) shifts,
12869		 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12870		 ((X >> C1) & C2) == 0 is rewritten as (X,true).  */
12871	      else
12872		return omit_one_operand_loc (loc, type,
12873					 code == EQ_EXPR ? integer_one_node
12874							 : integer_zero_node,
12875					 arg000);
12876	    }
12877	}
12878
12879      /* If this is an NE comparison of zero with an AND of one, remove the
12880	 comparison since the AND will give the correct value.  */
12881      if (code == NE_EXPR
12882	  && integer_zerop (arg1)
12883	  && TREE_CODE (arg0) == BIT_AND_EXPR
12884	  && integer_onep (TREE_OPERAND (arg0, 1)))
12885	return fold_convert_loc (loc, type, arg0);
12886
12887      /* If we have (A & C) == C where C is a power of 2, convert this into
12888	 (A & C) != 0.  Similarly for NE_EXPR.  */
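      /* This holds because A & C can only be zero or C when C has a
	 single bit set.  */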
12889      if (TREE_CODE (arg0) == BIT_AND_EXPR
12890	  && integer_pow2p (TREE_OPERAND (arg0, 1))
12891	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12892	return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12893			    arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12894						    integer_zero_node));
12895
12896      /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12897	 bit, then fold the expression into A < 0 or A >= 0.  */
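      /* For example, for a 32-bit signed A, (A & 0x80000000) != 0
	 becomes A < 0.  */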
12898      tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12899      if (tem)
12900	return tem;
12901
12902      /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12903	 Similarly for NE_EXPR.  */
12904      if (TREE_CODE (arg0) == BIT_AND_EXPR
12905	  && TREE_CODE (arg1) == INTEGER_CST
12906	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12907	{
12908	  tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12909				   TREE_TYPE (TREE_OPERAND (arg0, 1)),
12910				   TREE_OPERAND (arg0, 1));
12911	  tree dandnotc = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12912				       arg1, notc);
12913	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12914	  if (integer_nonzerop (dandnotc))
12915	    return omit_one_operand_loc (loc, type, rslt, arg0);
12916	}
12917
12918      /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12919	 Similarly for NE_EXPR.  */
12920      if (TREE_CODE (arg0) == BIT_IOR_EXPR
12921	  && TREE_CODE (arg1) == INTEGER_CST
12922	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12923	{
12924	  tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12925	  tree candnotd = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12926				       TREE_OPERAND (arg0, 1), notd);
12927	  tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12928	  if (integer_nonzerop (candnotd))
12929	    return omit_one_operand_loc (loc, type, rslt, arg0);
12930	}
12931
12932      /* If this is a comparison of a field, we may be able to simplify it.  */
12933      if ((TREE_CODE (arg0) == COMPONENT_REF
12934	   || TREE_CODE (arg0) == BIT_FIELD_REF)
12935	  /* Handle the constant case even without -O
12936	     to make sure the warnings are given.  */
12937	  && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12938	{
12939	  t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12940	  if (t1)
12941	    return t1;
12942	}
12943
12944      /* Optimize comparisons of strlen vs zero to a compare of the
12945	 first character of the string vs zero.  To wit,
12946		strlen(ptr) == 0   =>  *ptr == 0
12947		strlen(ptr) != 0   =>  *ptr != 0
12948	 Other cases should reduce to one of these two (or a constant)
12949	 due to the return value of strlen being unsigned.  */
12950      if (TREE_CODE (arg0) == CALL_EXPR
12951	  && integer_zerop (arg1))
12952	{
12953	  tree fndecl = get_callee_fndecl (arg0);
12954
12955	  if (fndecl
12956	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12957	      && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12958	      && call_expr_nargs (arg0) == 1
12959	      && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12960	    {
12961	      tree iref = build_fold_indirect_ref_loc (loc,
12962						   CALL_EXPR_ARG (arg0, 0));
12963	      return fold_build2_loc (loc, code, type, iref,
12964				  build_int_cst (TREE_TYPE (iref), 0));
12965	    }
12966	}
12967
12968      /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12969	 of X.  Similarly fold (X >> C) == 0 into X >= 0.  */
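      /* For example, for a 32-bit int X, (X >> 31) != 0 becomes X < 0
	 and (X >> 31) == 0 becomes X >= 0, since the shift isolates
	 the sign bit.  */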
12970      if (TREE_CODE (arg0) == RSHIFT_EXPR
12971	  && integer_zerop (arg1)
12972	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12973	{
12974	  tree arg00 = TREE_OPERAND (arg0, 0);
12975	  tree arg01 = TREE_OPERAND (arg0, 1);
12976	  tree itype = TREE_TYPE (arg00);
12977	  if (TREE_INT_CST_HIGH (arg01) == 0
12978	      && TREE_INT_CST_LOW (arg01)
12979		 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
12980	    {
12981	      if (TYPE_UNSIGNED (itype))
12982		{
12983		  itype = signed_type_for (itype);
12984		  arg00 = fold_convert_loc (loc, itype, arg00);
12985		}
12986	      return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12987				  type, arg00, build_int_cst (itype, 0));
12988	    }
12989	}
12990
12991      /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y.  */
12992      if (integer_zerop (arg1)
12993	  && TREE_CODE (arg0) == BIT_XOR_EXPR)
12994	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12995			    TREE_OPERAND (arg0, 1));
12996
12997      /* (X ^ Y) == Y becomes X == 0.  We know that Y has no side-effects.  */
12998      if (TREE_CODE (arg0) == BIT_XOR_EXPR
12999	  && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13000	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13001			    build_int_cst (TREE_TYPE (arg1), 0));
13002      /* Likewise (X ^ Y) == X becomes Y == 0.  X has no side-effects.  */
13003      if (TREE_CODE (arg0) == BIT_XOR_EXPR
13004	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13005	  && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13006	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13007			    build_int_cst (TREE_TYPE (arg1), 0));
13008
13009      /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2).  */
13010      if (TREE_CODE (arg0) == BIT_XOR_EXPR
13011	  && TREE_CODE (arg1) == INTEGER_CST
13012	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13013	return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13014			    fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13015					 TREE_OPERAND (arg0, 1), arg1));
13016
13017      /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13018	 (X & C) == 0 when C is a single bit.  */
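      /* This holds because ~X has bit C clear exactly when X has it
	 set.  */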
13019      if (TREE_CODE (arg0) == BIT_AND_EXPR
13020	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13021	  && integer_zerop (arg1)
13022	  && integer_pow2p (TREE_OPERAND (arg0, 1)))
13023	{
13024	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13025			     TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13026			     TREE_OPERAND (arg0, 1));
13027	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13028			      type, tem, arg1);
13029	}
13030
13031      /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13032	 constant C is a power of two, i.e. a single bit.  */
13033      if (TREE_CODE (arg0) == BIT_XOR_EXPR
13034	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13035	  && integer_zerop (arg1)
13036	  && integer_pow2p (TREE_OPERAND (arg0, 1))
13037	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13038			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13039	{
13040	  tree arg00 = TREE_OPERAND (arg0, 0);
13041	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13042			      arg00, build_int_cst (TREE_TYPE (arg00), 0));
13043	}
13044
13045      /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13046	 when C is a power of two, i.e. a single bit.  */
13047      if (TREE_CODE (arg0) == BIT_AND_EXPR
13048	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13049	  && integer_zerop (arg1)
13050	  && integer_pow2p (TREE_OPERAND (arg0, 1))
13051	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13052			      TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13053	{
13054	  tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13055	  tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13056			     arg000, TREE_OPERAND (arg0, 1));
13057	  return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13058			      tem, build_int_cst (TREE_TYPE (tem), 0));
13059	}
13060
13061      if (integer_zerop (arg1)
13062	  && tree_expr_nonzero_p (arg0))
13063        {
13064	  tree res = constant_boolean_node (code == NE_EXPR, type);
13065	  return omit_one_operand_loc (loc, type, res, arg0);
13066	}
13067
13068      /* Fold -X op -Y as X op Y, where op is eq/ne.  */
13069      if (TREE_CODE (arg0) == NEGATE_EXPR
13070          && TREE_CODE (arg1) == NEGATE_EXPR)
13071	return fold_build2_loc (loc, code, type,
13072			    TREE_OPERAND (arg0, 0),
13073			    TREE_OPERAND (arg1, 0));
13074
13075      /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
13076      if (TREE_CODE (arg0) == BIT_AND_EXPR
13077	  && TREE_CODE (arg1) == BIT_AND_EXPR)
13078	{
13079	  tree arg00 = TREE_OPERAND (arg0, 0);
13080	  tree arg01 = TREE_OPERAND (arg0, 1);
13081	  tree arg10 = TREE_OPERAND (arg1, 0);
13082	  tree arg11 = TREE_OPERAND (arg1, 1);
13083	  tree itype = TREE_TYPE (arg0);
13084
13085	  if (operand_equal_p (arg01, arg11, 0))
13086	    return fold_build2_loc (loc, code, type,
13087				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13088					     fold_build2_loc (loc,
13089							  BIT_XOR_EXPR, itype,
13090							  arg00, arg10),
13091					     arg01),
13092				build_int_cst (itype, 0));
13093
13094	  if (operand_equal_p (arg01, arg10, 0))
13095	    return fold_build2_loc (loc, code, type,
13096				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13097					     fold_build2_loc (loc,
13098							  BIT_XOR_EXPR, itype,
13099							  arg00, arg11),
13100					     arg01),
13101				build_int_cst (itype, 0));
13102
13103	  if (operand_equal_p (arg00, arg11, 0))
13104	    return fold_build2_loc (loc, code, type,
13105				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13106					     fold_build2_loc (loc,
13107							  BIT_XOR_EXPR, itype,
13108							  arg01, arg10),
13109					     arg00),
13110				build_int_cst (itype, 0));
13111
13112	  if (operand_equal_p (arg00, arg10, 0))
13113	    return fold_build2_loc (loc, code, type,
13114				fold_build2_loc (loc, BIT_AND_EXPR, itype,
13115					     fold_build2_loc (loc,
13116							  BIT_XOR_EXPR, itype,
13117							  arg01, arg11),
13118					     arg00),
13119				build_int_cst (itype, 0));
13120	}
13121
13122      if (TREE_CODE (arg0) == BIT_XOR_EXPR
13123	  && TREE_CODE (arg1) == BIT_XOR_EXPR)
13124	{
13125	  tree arg00 = TREE_OPERAND (arg0, 0);
13126	  tree arg01 = TREE_OPERAND (arg0, 1);
13127	  tree arg10 = TREE_OPERAND (arg1, 0);
13128	  tree arg11 = TREE_OPERAND (arg1, 1);
13129	  tree itype = TREE_TYPE (arg0);
13130
13131	  /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13132	     operand_equal_p guarantees no side-effects so we don't need
13133	     to use omit_one_operand on Z.  */
13134	  if (operand_equal_p (arg01, arg11, 0))
13135	    return fold_build2_loc (loc, code, type, arg00, arg10);
13136	  if (operand_equal_p (arg01, arg10, 0))
13137	    return fold_build2_loc (loc, code, type, arg00, arg11);
13138	  if (operand_equal_p (arg00, arg11, 0))
13139	    return fold_build2_loc (loc, code, type, arg01, arg10);
13140	  if (operand_equal_p (arg00, arg10, 0))
13141	    return fold_build2_loc (loc, code, type, arg01, arg11);
13142
13143	  /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y.  */
13144	  if (TREE_CODE (arg01) == INTEGER_CST
13145	      && TREE_CODE (arg11) == INTEGER_CST)
13146	    return fold_build2_loc (loc, code, type,
13147				fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00,
13148					     fold_build2_loc (loc,
13149							  BIT_XOR_EXPR, itype,
13150							  arg01, arg11)),
13151				arg10);
13152	}
13153
13154      /* Attempt to simplify equality/inequality comparisons of complex
13155	 values.  Only lower the comparison if the result is known or
13156	 can be simplified to a single scalar comparison.  */
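      /* For example, if the real parts fold to unequal constants, an
	 EQ_EXPR folds to false and only the imaginary operands need to
	 be preserved for their side effects.  */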
13157      if ((TREE_CODE (arg0) == COMPLEX_EXPR
13158	   || TREE_CODE (arg0) == COMPLEX_CST)
13159	  && (TREE_CODE (arg1) == COMPLEX_EXPR
13160	      || TREE_CODE (arg1) == COMPLEX_CST))
13161	{
13162	  tree real0, imag0, real1, imag1;
13163	  tree rcond, icond;
13164
13165	  if (TREE_CODE (arg0) == COMPLEX_EXPR)
13166	    {
13167	      real0 = TREE_OPERAND (arg0, 0);
13168	      imag0 = TREE_OPERAND (arg0, 1);
13169	    }
13170	  else
13171	    {
13172	      real0 = TREE_REALPART (arg0);
13173	      imag0 = TREE_IMAGPART (arg0);
13174	    }
13175
13176	  if (TREE_CODE (arg1) == COMPLEX_EXPR)
13177	    {
13178	      real1 = TREE_OPERAND (arg1, 0);
13179	      imag1 = TREE_OPERAND (arg1, 1);
13180	    }
13181	  else
13182	    {
13183	      real1 = TREE_REALPART (arg1);
13184	      imag1 = TREE_IMAGPART (arg1);
13185	    }
13186
13187	  rcond = fold_binary_loc (loc, code, type, real0, real1);
13188	  if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13189	    {
13190	      if (integer_zerop (rcond))
13191		{
13192		  if (code == EQ_EXPR)
13193		    return omit_two_operands_loc (loc, type, boolean_false_node,
13194					      imag0, imag1);
13195		  return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13196		}
13197	      else
13198		{
13199		  if (code == NE_EXPR)
13200		    return omit_two_operands_loc (loc, type, boolean_true_node,
13201					      imag0, imag1);
13202		  return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13203		}
13204	    }
13205
13206	  icond = fold_binary_loc (loc, code, type, imag0, imag1);
13207	  if (icond && TREE_CODE (icond) == INTEGER_CST)
13208	    {
13209	      if (integer_zerop (icond))
13210		{
13211		  if (code == EQ_EXPR)
13212		    return omit_two_operands_loc (loc, type, boolean_false_node,
13213					      real0, real1);
13214		  return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13215		}
13216	      else
13217		{
13218		  if (code == NE_EXPR)
13219		    return omit_two_operands_loc (loc, type, boolean_true_node,
13220					      real0, real1);
13221		  return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13222		}
13223	    }
13224	}
13225
13226      return NULL_TREE;
13227
13228    case LT_EXPR:
13229    case GT_EXPR:
13230    case LE_EXPR:
13231    case GE_EXPR:
13232      tem = fold_comparison (loc, code, type, op0, op1);
13233      if (tem != NULL_TREE)
13234	return tem;
13235
13236      /* Transform comparisons of the form X +- C CMP X.  */
13237      if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13238	  && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13239	  && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13240	       && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13241	      || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13242		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13243	{
13244	  tree arg01 = TREE_OPERAND (arg0, 1);
13245	  enum tree_code code0 = TREE_CODE (arg0);
13246	  int is_positive;
13247
13248	  if (TREE_CODE (arg01) == REAL_CST)
13249	    is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13250	  else
13251	    is_positive = tree_int_cst_sgn (arg01);
13252
13253	  /* (X - c) > X becomes false.  */
13254	  if (code == GT_EXPR
13255	      && ((code0 == MINUS_EXPR && is_positive >= 0)
13256		  || (code0 == PLUS_EXPR && is_positive <= 0)))
13257	    {
13258	      if (TREE_CODE (arg01) == INTEGER_CST
13259		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13260		fold_overflow_warning (("assuming signed overflow does not "
13261					"occur when assuming that (X - c) > X "
13262					"is always false"),
13263				       WARN_STRICT_OVERFLOW_ALL);
13264	      return constant_boolean_node (0, type);
13265	    }
13266
13267	  /* Likewise (X + c) < X becomes false.  */
13268	  if (code == LT_EXPR
13269	      && ((code0 == PLUS_EXPR && is_positive >= 0)
13270		  || (code0 == MINUS_EXPR && is_positive <= 0)))
13271	    {
13272	      if (TREE_CODE (arg01) == INTEGER_CST
13273		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13274		fold_overflow_warning (("assuming signed overflow does not "
13275					"occur when assuming that "
13276					"(X + c) < X is always false"),
13277				       WARN_STRICT_OVERFLOW_ALL);
13278	      return constant_boolean_node (0, type);
13279	    }
13280
13281	  /* Convert (X - c) <= X to true.  */
13282	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13283	      && code == LE_EXPR
13284	      && ((code0 == MINUS_EXPR && is_positive >= 0)
13285		  || (code0 == PLUS_EXPR && is_positive <= 0)))
13286	    {
13287	      if (TREE_CODE (arg01) == INTEGER_CST
13288		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13289		fold_overflow_warning (("assuming signed overflow does not "
13290					"occur when assuming that "
13291					"(X - c) <= X is always true"),
13292				       WARN_STRICT_OVERFLOW_ALL);
13293	      return constant_boolean_node (1, type);
13294	    }
13295
13296	  /* Convert (X + c) >= X to true.  */
13297	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13298	      && code == GE_EXPR
13299	      && ((code0 == PLUS_EXPR && is_positive >= 0)
13300		  || (code0 == MINUS_EXPR && is_positive <= 0)))
13301	    {
13302	      if (TREE_CODE (arg01) == INTEGER_CST
13303		  && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13304		fold_overflow_warning (("assuming signed overflow does not "
13305					"occur when assuming that "
13306					"(X + c) >= X is always true"),
13307				       WARN_STRICT_OVERFLOW_ALL);
13308	      return constant_boolean_node (1, type);
13309	    }
13310
13311	  if (TREE_CODE (arg01) == INTEGER_CST)
13312	    {
13313	      /* Convert X + c > X and X - c < X to true for integers.  */
13314	      if (code == GT_EXPR
13315	          && ((code0 == PLUS_EXPR && is_positive > 0)
13316		      || (code0 == MINUS_EXPR && is_positive < 0)))
13317		{
13318		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13319		    fold_overflow_warning (("assuming signed overflow does "
13320					    "not occur when assuming that "
13321					    "(X + c) > X is always true"),
13322					   WARN_STRICT_OVERFLOW_ALL);
13323		  return constant_boolean_node (1, type);
13324		}
13325
13326	      if (code == LT_EXPR
13327	          && ((code0 == MINUS_EXPR && is_positive > 0)
13328		      || (code0 == PLUS_EXPR && is_positive < 0)))
13329		{
13330		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13331		    fold_overflow_warning (("assuming signed overflow does "
13332					    "not occur when assuming that "
13333					    "(X - c) < X is always true"),
13334					   WARN_STRICT_OVERFLOW_ALL);
13335		  return constant_boolean_node (1, type);
13336		}
13337
13338	      /* Convert X + c <= X and X - c >= X to false for integers.  */
13339	      if (code == LE_EXPR
13340	          && ((code0 == PLUS_EXPR && is_positive > 0)
13341		      || (code0 == MINUS_EXPR && is_positive < 0)))
13342		{
13343		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13344		    fold_overflow_warning (("assuming signed overflow does "
13345					    "not occur when assuming that "
13346					    "(X + c) <= X is always false"),
13347					   WARN_STRICT_OVERFLOW_ALL);
13348		  return constant_boolean_node (0, type);
13349		}
13350
13351	      if (code == GE_EXPR
13352	          && ((code0 == MINUS_EXPR && is_positive > 0)
13353		      || (code0 == PLUS_EXPR && is_positive < 0)))
13354		{
13355		  if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13356		    fold_overflow_warning (("assuming signed overflow does "
13357					    "not occur when assuming that "
13358					    "(X - c) >= X is always false"),
13359					   WARN_STRICT_OVERFLOW_ALL);
13360		  return constant_boolean_node (0, type);
13361		}
13362	    }
13363	}
13364
13365      /* Comparisons with the highest or lowest possible integer of
13366	 the specified precision will have known values.  */
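      /* For example, for unsigned char X, X > 255 folds to false and
	 X >= 255 folds to X == 255.  */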
13367      {
13368	tree arg1_type = TREE_TYPE (arg1);
13369	unsigned int width = TYPE_PRECISION (arg1_type);
13370
13371	if (TREE_CODE (arg1) == INTEGER_CST
13372	    && width <= 2 * HOST_BITS_PER_WIDE_INT
13373	    && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13374	  {
13375	    HOST_WIDE_INT signed_max_hi;
13376	    unsigned HOST_WIDE_INT signed_max_lo;
13377	    unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13378
13379	    if (width <= HOST_BITS_PER_WIDE_INT)
13380	      {
13381		signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13382				- 1;
13383		signed_max_hi = 0;
13384		max_hi = 0;
13385
13386		if (TYPE_UNSIGNED (arg1_type))
13387		  {
13388		    max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13389		    min_lo = 0;
13390		    min_hi = 0;
13391		  }
13392		else
13393		  {
13394		    max_lo = signed_max_lo;
13395		    min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13396		    min_hi = -1;
13397		  }
13398	      }
13399	    else
13400	      {
13401		width -= HOST_BITS_PER_WIDE_INT;
13402		signed_max_lo = -1;
13403		signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13404				- 1;
13405		max_lo = -1;
13406		min_lo = 0;
13407
13408		if (TYPE_UNSIGNED (arg1_type))
13409		  {
13410		    max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13411		    min_hi = 0;
13412		  }
13413		else
13414		  {
13415		    max_hi = signed_max_hi;
13416		    min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13417		  }
13418	      }
13419
13420	    if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13421		&& TREE_INT_CST_LOW (arg1) == max_lo)
13422	      switch (code)
13423		{
13424		case GT_EXPR:
13425		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13426
13427		case GE_EXPR:
13428		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13429
13430		case LE_EXPR:
13431		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13432
13433		case LT_EXPR:
13434		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13435
13436		/* The GE_EXPR and LT_EXPR cases above are not normally
13437		   reached because of previous transformations.  */
13438
13439		default:
13440		  break;
13441		}
13442	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13443		     == max_hi
13444		     && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13445	      switch (code)
13446		{
13447		case GT_EXPR:
13448		  arg1 = const_binop (PLUS_EXPR, arg1,
13449				      build_int_cst (TREE_TYPE (arg1), 1), 0);
13450		  return fold_build2_loc (loc, EQ_EXPR, type,
13451				      fold_convert_loc (loc,
13452							TREE_TYPE (arg1), arg0),
13453				      arg1);
13454		case LE_EXPR:
13455		  arg1 = const_binop (PLUS_EXPR, arg1,
13456				      build_int_cst (TREE_TYPE (arg1), 1), 0);
13457		  return fold_build2_loc (loc, NE_EXPR, type,
13458				      fold_convert_loc (loc, TREE_TYPE (arg1),
13459							arg0),
13460				      arg1);
13461		default:
13462		  break;
13463		}
13464	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13465		     == min_hi
13466		     && TREE_INT_CST_LOW (arg1) == min_lo)
13467	      switch (code)
13468		{
13469		case LT_EXPR:
13470		  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13471
13472		case LE_EXPR:
13473		  return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13474
13475		case GE_EXPR:
13476		  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13477
13478		case GT_EXPR:
13479		  return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13480
13481		default:
13482		  break;
13483		}
13484	    else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13485		     == min_hi
13486		     && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13487	      switch (code)
13488		{
13489		case GE_EXPR:
13490		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13491		  return fold_build2_loc (loc, NE_EXPR, type,
13492				      fold_convert_loc (loc,
13493							TREE_TYPE (arg1), arg0),
13494				      arg1);
13495		case LT_EXPR:
13496		  arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
13497		  return fold_build2_loc (loc, EQ_EXPR, type,
13498				      fold_convert_loc (loc, TREE_TYPE (arg1),
13499							arg0),
13500				      arg1);
13501		default:
13502		  break;
13503		}
13504
13505	    else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13506		     && TREE_INT_CST_LOW (arg1) == signed_max_lo
13507		     && TYPE_UNSIGNED (arg1_type)
13508		     /* We will flip the signedness of the comparison operator
13509			associated with the mode of arg1, so the sign bit is
13510			specified by this mode.  Check that arg1 is the signed
13511			max associated with this sign bit.  */
13512		     && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13513		     /* signed_type does not work on pointer types.  */
13514		     && INTEGRAL_TYPE_P (arg1_type))
13515	      {
13516		/* The following case also applies to X < signed_max+1
13517		   and X >= signed_max+1 because of previous transformations.  */
13518		if (code == LE_EXPR || code == GT_EXPR)
13519		  {
13520		    tree st;
13521		    st = signed_type_for (TREE_TYPE (arg1));
13522		    return fold_build2_loc (loc,
13523					code == LE_EXPR ? GE_EXPR : LT_EXPR,
13524					type, fold_convert_loc (loc, st, arg0),
13525					build_int_cst (st, 0));
13526		  }
13527	      }
13528	  }
13529      }
13530
13531      /* If we are comparing an ABS_EXPR with a constant, we can
13532	 convert all the cases into explicit comparisons, but they may
13533	 well not be faster than doing the ABS and one comparison.
13534	 But ABS (X) <= C is a range comparison, which becomes a subtraction
13535	 and a comparison, and is probably faster.  */
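      /* For example, ABS (X) <= 5 becomes X >= -5 && X <= 5.  */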
13536      if (code == LE_EXPR
13537	  && TREE_CODE (arg1) == INTEGER_CST
13538	  && TREE_CODE (arg0) == ABS_EXPR
13539	  && ! TREE_SIDE_EFFECTS (arg0)
13540	  && (0 != (tem = negate_expr (arg1)))
13541	  && TREE_CODE (tem) == INTEGER_CST
13542	  && !TREE_OVERFLOW (tem))
13543	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13544			    build2 (GE_EXPR, type,
13545				    TREE_OPERAND (arg0, 0), tem),
13546			    build2 (LE_EXPR, type,
13547				    TREE_OPERAND (arg0, 0), arg1));
13548
13549      /* Convert ABS_EXPR<x> >= 0 to true.  */
13550      strict_overflow_p = false;
13551      if (code == GE_EXPR
13552	  && (integer_zerop (arg1)
13553	      || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13554		  && real_zerop (arg1)))
13555	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13556	{
13557	  if (strict_overflow_p)
13558	    fold_overflow_warning (("assuming signed overflow does not occur "
13559				    "when simplifying comparison of "
13560				    "absolute value and zero"),
13561				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13562	  return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13563	}
13564
13565      /* Convert ABS_EXPR<x> < 0 to false.  */
13566      strict_overflow_p = false;
13567      if (code == LT_EXPR
13568	  && (integer_zerop (arg1) || real_zerop (arg1))
13569	  && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13570	{
13571	  if (strict_overflow_p)
13572	    fold_overflow_warning (("assuming signed overflow does not occur "
13573				    "when simplifying comparison of "
13574				    "absolute value and zero"),
13575				   WARN_STRICT_OVERFLOW_CONDITIONAL);
13576	  return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13577	}
13578
13579      /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13580	 and similarly for >= into !=.  */
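      /* For example, for unsigned X and a valid shift count Y,
	 X < (1 << Y) becomes (X >> Y) == 0 and X >= (1 << Y) becomes
	 (X >> Y) != 0.  */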
13581      if ((code == LT_EXPR || code == GE_EXPR)
13582	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13583	  && TREE_CODE (arg1) == LSHIFT_EXPR
13584	  && integer_onep (TREE_OPERAND (arg1, 0)))
13585	{
13586	  tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13587			build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13588				TREE_OPERAND (arg1, 1)),
13589			build_int_cst (TREE_TYPE (arg0), 0));
13590	  goto fold_binary_exit;
13591	}
13592
13593      if ((code == LT_EXPR || code == GE_EXPR)
13594	  && TYPE_UNSIGNED (TREE_TYPE (arg0))
13595	  && CONVERT_EXPR_P (arg1)
13596	  && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13597	  && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13598	{
13599	  tem = build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13600			fold_convert_loc (loc, TREE_TYPE (arg0),
13601					  build2 (RSHIFT_EXPR,
13602						  TREE_TYPE (arg0), arg0,
13603						  TREE_OPERAND (TREE_OPERAND (arg1, 0),
13604								1))),
13605			build_int_cst (TREE_TYPE (arg0), 0));
13606	  goto fold_binary_exit;
13607	}
13608
13609      return NULL_TREE;
13610
13611    case UNORDERED_EXPR:
13612    case ORDERED_EXPR:
13613    case UNLT_EXPR:
13614    case UNLE_EXPR:
13615    case UNGT_EXPR:
13616    case UNGE_EXPR:
13617    case UNEQ_EXPR:
13618    case LTGT_EXPR:
13619      if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13620	{
13621	  t1 = fold_relational_const (code, type, arg0, arg1);
13622	  if (t1 != NULL_TREE)
13623	    return t1;
13624	}
13625
13626      /* If the first operand is NaN, the result is constant.  */
13627      if (TREE_CODE (arg0) == REAL_CST
13628	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13629	  && (code != LTGT_EXPR || ! flag_trapping_math))
13630	{
13631	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13632	       ? integer_zero_node
13633	       : integer_one_node;
13634	  return omit_one_operand_loc (loc, type, t1, arg1);
13635	}
13636
13637      /* If the second operand is NaN, the result is constant.  */
13638      if (TREE_CODE (arg1) == REAL_CST
13639	  && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13640	  && (code != LTGT_EXPR || ! flag_trapping_math))
13641	{
13642	  t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13643	       ? integer_zero_node
13644	       : integer_one_node;
13645	  return omit_one_operand_loc (loc, type, t1, arg0);
13646	}
13647
13648      /* Simplify unordered comparison of something with itself.  */
13649      if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13650	  && operand_equal_p (arg0, arg1, 0))
13651	return constant_boolean_node (1, type);
13652
13653      if (code == LTGT_EXPR
13654	  && !flag_trapping_math
13655	  && operand_equal_p (arg0, arg1, 0))
13656	return constant_boolean_node (0, type);
13657
13658      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
13659      {
13660	tree targ0 = strip_float_extensions (arg0);
13661	tree targ1 = strip_float_extensions (arg1);
13662	tree newtype = TREE_TYPE (targ0);
13663
13664	if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13665	  newtype = TREE_TYPE (targ1);
13666
13667	if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13668	  return fold_build2_loc (loc, code, type,
13669			      fold_convert_loc (loc, newtype, targ0),
13670			      fold_convert_loc (loc, newtype, targ1));
13671      }
13672
13673      return NULL_TREE;
13674
13675    case COMPOUND_EXPR:
13676      /* When pedantic, a compound expression can be neither an lvalue
13677	 nor an integer constant expression.  */
13678      if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13679	return NULL_TREE;
13680      /* Don't let (0, 0) be a null pointer constant.  */
13681      tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13682				 : fold_convert_loc (loc, type, arg1);
13683      return pedantic_non_lvalue_loc (loc, tem);
13684
13685    case COMPLEX_EXPR:
13686      if ((TREE_CODE (arg0) == REAL_CST
13687	   && TREE_CODE (arg1) == REAL_CST)
13688	  || (TREE_CODE (arg0) == INTEGER_CST
13689	      && TREE_CODE (arg1) == INTEGER_CST))
13690	return build_complex (type, arg0, arg1);
13691      return NULL_TREE;
13692
13693    case ASSERT_EXPR:
13694      /* An ASSERT_EXPR should never be passed to fold_binary.  */
13695      gcc_unreachable ();
13696
13697    default:
13698      return NULL_TREE;
13699    } /* switch (code) */
13700 fold_binary_exit:
13701  protected_set_expr_location (tem, loc);
13702  return tem;
13703}
13704
13705/* Callback for walk_tree, looking for LABEL_EXPR.  Return *TP if it is
13706   a LABEL_EXPR; otherwise return NULL_TREE.  Do not check the subtrees
13707   of GOTO_EXPR.  */
13708
13709static tree
13710contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13711{
13712  switch (TREE_CODE (*tp))
13713    {
13714    case LABEL_EXPR:
13715      return *tp;
13716
13717    case GOTO_EXPR:
13718      *walk_subtrees = 0;
13719
13720      /* ... fall through ...  */
13721
13722    default:
13723      return NULL_TREE;
13724    }
13725}
13726
13727/* Return whether the sub-tree ST contains a label which is accessible from
13728   outside the sub-tree.  */
13729
13730static bool
13731contains_label_p (tree st)
13732{
13733  return
13734   (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
13735}
13736
13737/* Fold a ternary expression of code CODE and type TYPE with operands
13738   OP0, OP1, and OP2.  Return the folded expression if folding is
13739   successful.  Otherwise, return NULL_TREE.  */
13740
13741tree
13742fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13743	      tree op0, tree op1, tree op2)
13744{
13745  tree tem;
13746  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
13747  enum tree_code_class kind = TREE_CODE_CLASS (code);
13748
13749  gcc_assert (IS_EXPR_CODE_CLASS (kind)
13750	      && TREE_CODE_LENGTH (code) == 3);
13751
13752  /* Strip any conversions that don't change the mode.  This is safe
13753     for every expression, except for a comparison expression because
13754     its signedness is derived from its operands.  So, in the latter
13755     case, only strip conversions that don't change the signedness.
13756
13757     Note that this is done as an internal manipulation within the
13758     constant folder, in order to find the simplest representation of
13759     the arguments so that their form can be studied.  In any case,
13760     the appropriate type conversions should be put back in the tree
13761     that will get out of the constant folder.  */
13762  if (op0)
13763    {
13764      arg0 = op0;
13765      STRIP_NOPS (arg0);
13766    }
13767
13768  if (op1)
13769    {
13770      arg1 = op1;
13771      STRIP_NOPS (arg1);
13772    }
13773
13774  switch (code)
13775    {
13776    case COMPONENT_REF:
13777      if (TREE_CODE (arg0) == CONSTRUCTOR
13778	  && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13779	{
13780	  unsigned HOST_WIDE_INT idx;
13781	  tree field, value;
13782	  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13783	    if (field == arg1)
13784	      return value;
13785	}
13786      return NULL_TREE;
13787
13788    case COND_EXPR:
13789      /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13790	 so all simple results must be passed through pedantic_non_lvalue.  */
13791      if (TREE_CODE (arg0) == INTEGER_CST)
13792	{
13793	  tree unused_op = integer_zerop (arg0) ? op1 : op2;
13794	  tem = integer_zerop (arg0) ? op2 : op1;
13795	  /* Only optimize constant conditions when the selected branch
13796	     has the same type as the COND_EXPR.  This avoids optimizing
13797             away "c ? x : throw", where the throw has a void type.
13798             Avoid throwing away an operand that contains a label.  */
13799          if ((!TREE_SIDE_EFFECTS (unused_op)
13800               || !contains_label_p (unused_op))
13801              && (! VOID_TYPE_P (TREE_TYPE (tem))
13802                  || VOID_TYPE_P (type)))
13803	    return pedantic_non_lvalue_loc (loc, tem);
13804	  return NULL_TREE;
13805	}
13806      if (operand_equal_p (arg1, op2, 0))
13807	return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
13808
13809      /* If we have A op B ? A : C, we may be able to convert this to a
13810	 simpler expression, depending on the operation and the values
13811	 of B and C.  Signed zeros prevent all of these transformations,
13812	 for reasons given above each one.
13813
13814         Also try swapping the arguments and inverting the conditional.  */
13815      if (COMPARISON_CLASS_P (arg0)
13816	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13817					     arg1, TREE_OPERAND (arg0, 1))
13818	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
13819	{
13820	  tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13821	  if (tem)
13822	    return tem;
13823	}
13824
13825      if (COMPARISON_CLASS_P (arg0)
13826	  && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13827					     op2,
13828					     TREE_OPERAND (arg0, 1))
13829	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
13830	{
13831	  tem = fold_truth_not_expr (loc, arg0);
13832	  if (tem && COMPARISON_CLASS_P (tem))
13833	    {
13834	      tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13835	      if (tem)
13836		return tem;
13837	    }
13838	}
13839
13840      /* If the second operand is simpler than the third, swap them
13841	 since that produces better jump optimization results.  */
13842      if (truth_value_p (TREE_CODE (arg0))
13843	  && tree_swap_operands_p (op1, op2, false))
13844	{
13845	  /* See if this can be inverted.  If it can't, possibly because
13846	     it was a floating-point inequality comparison, don't do
13847	     anything.  */
13848	  tem = fold_truth_not_expr (loc, arg0);
13849	  if (tem)
13850	    return fold_build3_loc (loc, code, type, tem, op2, op1);
13851	}
13852
13853      /* Convert A ? 1 : 0 to simply A.  */
13854      if (integer_onep (op1)
13855	  && integer_zerop (op2)
13856	  /* If we try to convert OP0 to our type, the
13857	     call to fold will try to move the conversion inside
13858	     a COND, which will recurse.  In that case, the COND_EXPR
13859	     is probably the best choice, so leave it alone.  */
13860	  && type == TREE_TYPE (arg0))
13861	return pedantic_non_lvalue_loc (loc, arg0);
13862
13863      /* Convert A ? 0 : 1 to !A.  This prefers the use of NOT_EXPR
13864	 over COND_EXPR in cases such as floating point comparisons.  */
13865      if (integer_zerop (op1)
13866	  && integer_onep (op2)
13867	  && truth_value_p (TREE_CODE (arg0)))
13868	return pedantic_non_lvalue_loc (loc,
13869				    fold_convert_loc (loc, type,
13870					      invert_truthvalue_loc (loc,
13871								     arg0)));
13872
13873      /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>).  */
13874      if (TREE_CODE (arg0) == LT_EXPR
13875	  && integer_zerop (TREE_OPERAND (arg0, 1))
13876	  && integer_zerop (op2)
13877	  && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13878	{
13879	  /* sign_bit_p only checks ARG1 bits within A's precision.
13880	     If <sign bit of A> has wider type than A, bits outside
13881	     of A's precision in <sign bit of A> need to be checked.
13882	     If they are all 0, this optimization needs to be done
	     in unsigned A's type; if they are all 1, in signed A's
	     type; otherwise this can't be done.  */
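	  /* Illustrative example (not tied to the code below): if A has
	     type signed char and <sign bit of A> is the int constant
	     0x80, the bits above bit 7 are all 0, so the AND must be
	     done on (unsigned char) A; if the constant is instead
	     0xffffff80, the sign-extended sign bit, those bits are all
	     1 and signed char A is usable directly.  */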
13885	  if (TYPE_PRECISION (TREE_TYPE (tem))
13886	      < TYPE_PRECISION (TREE_TYPE (arg1))
13887	      && TYPE_PRECISION (TREE_TYPE (tem))
13888		 < TYPE_PRECISION (type))
13889	    {
13890	      unsigned HOST_WIDE_INT mask_lo;
13891	      HOST_WIDE_INT mask_hi;
13892	      int inner_width, outer_width;
13893	      tree tem_type;
13894
13895	      inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13896	      outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13897	      if (outer_width > TYPE_PRECISION (type))
13898		outer_width = TYPE_PRECISION (type);
13899
13900	      if (outer_width > HOST_BITS_PER_WIDE_INT)
13901		{
13902		  mask_hi = ((unsigned HOST_WIDE_INT) -1
13903			     >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
13904		  mask_lo = -1;
13905		}
13906	      else
13907		{
13908		  mask_hi = 0;
13909		  mask_lo = ((unsigned HOST_WIDE_INT) -1
13910			     >> (HOST_BITS_PER_WIDE_INT - outer_width));
13911		}
13912	      if (inner_width > HOST_BITS_PER_WIDE_INT)
13913		{
13914		  mask_hi &= ~((unsigned HOST_WIDE_INT) -1
13915			       >> (HOST_BITS_PER_WIDE_INT - inner_width));
13916		  mask_lo = 0;
13917		}
13918	      else
13919		mask_lo &= ~((unsigned HOST_WIDE_INT) -1
13920			     >> (HOST_BITS_PER_WIDE_INT - inner_width));
13921
13922	      if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
13923		  && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
13924		{
13925		  tem_type = signed_type_for (TREE_TYPE (tem));
13926		  tem = fold_convert_loc (loc, tem_type, tem);
13927		}
13928	      else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
13929		       && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
13930		{
13931		  tem_type = unsigned_type_for (TREE_TYPE (tem));
13932		  tem = fold_convert_loc (loc, tem_type, tem);
13933		}
13934	      else
13935		tem = NULL;
13936	    }
13937
13938	  if (tem)
13939	    return
13940	      fold_convert_loc (loc, type,
13941				fold_build2_loc (loc, BIT_AND_EXPR,
13942					     TREE_TYPE (tem), tem,
13943					     fold_convert_loc (loc,
13944							       TREE_TYPE (tem),
13945							       arg1)));
13946	}
13947
13948      /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N).  A & 1 was
13949	 already handled above.  */
13950      if (TREE_CODE (arg0) == BIT_AND_EXPR
13951	  && integer_onep (TREE_OPERAND (arg0, 1))
13952	  && integer_zerop (op2)
13953	  && integer_pow2p (arg1))
13954	{
13955	  tree tem = TREE_OPERAND (arg0, 0);
13956	  STRIP_NOPS (tem);
13957	  if (TREE_CODE (tem) == RSHIFT_EXPR
13958              && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
13959              && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13960	         TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
13961	    return fold_build2_loc (loc, BIT_AND_EXPR, type,
13962				TREE_OPERAND (tem, 0), arg1);
13963	}
13964
13965      /* A & N ? N : 0 is simply A & N if N is a power of two.  This
13966	 is probably obsolete because the first operand should be a
13967	 truth value (that's why we have the two cases above), but let's
13968	 leave it in until we can confirm this for all front-ends.  */
13969      if (integer_zerop (op2)
13970	  && TREE_CODE (arg0) == NE_EXPR
13971	  && integer_zerop (TREE_OPERAND (arg0, 1))
13972	  && integer_pow2p (arg1)
13973	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13974	  && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13975			      arg1, OEP_ONLY_CONST))
13976	return pedantic_non_lvalue_loc (loc,
13977				    fold_convert_loc (loc, type,
13978						      TREE_OPERAND (arg0, 0)));
13979
13980      /* Convert A ? B : 0 into A && B if A and B are truth values.  */
13981      if (integer_zerop (op2)
13982	  && truth_value_p (TREE_CODE (arg0))
13983	  && truth_value_p (TREE_CODE (arg1)))
13984	return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13985			    fold_convert_loc (loc, type, arg0),
13986			    arg1);
13987
13988      /* Convert A ? B : 1 into !A || B if A and B are truth values.  */
13989      if (integer_onep (op2)
13990	  && truth_value_p (TREE_CODE (arg0))
13991	  && truth_value_p (TREE_CODE (arg1)))
13992	{
13993	  /* Only perform transformation if ARG0 is easily inverted.  */
13994	  tem = fold_truth_not_expr (loc, arg0);
13995	  if (tem)
13996	    return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
13997				fold_convert_loc (loc, type, tem),
13998				arg1);
13999	}
14000
14001      /* Convert A ? 0 : B into !A && B if A and B are truth values.  */
14002      if (integer_zerop (arg1)
14003	  && truth_value_p (TREE_CODE (arg0))
14004	  && truth_value_p (TREE_CODE (op2)))
14005	{
14006	  /* Only perform transformation if ARG0 is easily inverted.  */
14007	  tem = fold_truth_not_expr (loc, arg0);
14008	  if (tem)
14009	    return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
14010				fold_convert_loc (loc, type, tem),
14011				op2);
14012	}
14013
14014      /* Convert A ? 1 : B into A || B if A and B are truth values.  */
14015      if (integer_onep (arg1)
14016	  && truth_value_p (TREE_CODE (arg0))
14017	  && truth_value_p (TREE_CODE (op2)))
14018	return fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
14019			    fold_convert_loc (loc, type, arg0),
14020			    op2);
14021
14022      return NULL_TREE;
14023
14024    case CALL_EXPR:
14025      /* CALL_EXPRs used to be ternary exprs.  Catch any mistaken uses
14026	 of fold_ternary on them.  */
14027      gcc_unreachable ();
14028
14029    case BIT_FIELD_REF:
14030      if ((TREE_CODE (arg0) == VECTOR_CST
14031	   || (TREE_CODE (arg0) == CONSTRUCTOR && TREE_CONSTANT (arg0)))
14032	  && type == TREE_TYPE (TREE_TYPE (arg0)))
14033	{
14034	  unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
14035	  unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14036
14037	  if (width != 0
14038	      && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
14039	      && (idx % width) == 0
14040	      && (idx = idx / width)
14041		 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14042	    {
14043	      tree elements = NULL_TREE;
14044
14045	      if (TREE_CODE (arg0) == VECTOR_CST)
14046		elements = TREE_VECTOR_CST_ELTS (arg0);
14047	      else
14048		{
14049		  unsigned HOST_WIDE_INT idx;
14050		  tree value;
14051
14052		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (arg0), idx, value)
14053		    elements = tree_cons (NULL_TREE, value, elements);
14054		}
14055	      while (idx-- > 0 && elements)
14056		elements = TREE_CHAIN (elements);
14057	      if (elements)
14058		return TREE_VALUE (elements);
14059	      else
14060		return fold_convert_loc (loc, type, integer_zero_node);
14061	    }
14062	}
14063
14064      /* A bit-field-ref that referenced the full argument can be stripped.  */
14065      if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14066	  && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14067	  && integer_zerop (op2))
14068	return fold_convert_loc (loc, type, arg0);
14069
14070      return NULL_TREE;
14071
14072    default:
14073      return NULL_TREE;
14074    } /* switch (code) */
14075}
14076
14077/* Perform constant folding and related simplification of EXPR.
14078   The related simplifications include x*1 => x, x*0 => 0, etc.,
14079   and application of the associative law.
14080   NOP_EXPR conversions may be removed freely (as long as we
14081   are careful not to change the type of the overall expression).
14082   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14083   but we can constant-fold them if they have constant operands.  */
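
/* For example (an illustrative sketch, not a call site in this file):

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node, op0, op1));

   returns an INTEGER_CST node (5 when OP0 and OP1 are the constants 2
   and 3), and simplifies cases such as "op0 + 0" to just "op0".  */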
14084
14085#ifdef ENABLE_FOLD_CHECKING
14086# define fold(x) fold_1 (x)
14087static tree fold_1 (tree);
14088static
14089#endif
14090tree
14091fold (tree expr)
14092{
14093  const tree t = expr;
14094  enum tree_code code = TREE_CODE (t);
14095  enum tree_code_class kind = TREE_CODE_CLASS (code);
14096  tree tem;
14097  location_t loc = EXPR_LOCATION (expr);
14098
14099  /* Return right away if a constant.  */
14100  if (kind == tcc_constant)
14101    return t;
14102
14103  /* CALL_EXPR-like objects with variable numbers of operands are
14104     treated specially.  */
14105  if (kind == tcc_vl_exp)
14106    {
14107      if (code == CALL_EXPR)
14108	{
14109	  tem = fold_call_expr (loc, expr, false);
14110	  return tem ? tem : expr;
14111	}
14112      return expr;
14113    }
14114
14115  if (IS_EXPR_CODE_CLASS (kind))
14116    {
14117      tree type = TREE_TYPE (t);
14118      tree op0, op1, op2;
14119
14120      switch (TREE_CODE_LENGTH (code))
14121	{
14122	case 1:
14123	  op0 = TREE_OPERAND (t, 0);
14124	  tem = fold_unary_loc (loc, code, type, op0);
14125	  return tem ? tem : expr;
14126	case 2:
14127	  op0 = TREE_OPERAND (t, 0);
14128	  op1 = TREE_OPERAND (t, 1);
14129	  tem = fold_binary_loc (loc, code, type, op0, op1);
14130	  return tem ? tem : expr;
14131	case 3:
14132	  op0 = TREE_OPERAND (t, 0);
14133	  op1 = TREE_OPERAND (t, 1);
14134	  op2 = TREE_OPERAND (t, 2);
14135	  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14136	  return tem ? tem : expr;
14137	default:
14138	  break;
14139	}
14140    }
14141
14142  switch (code)
14143    {
14144    case ARRAY_REF:
14145      {
14146	tree op0 = TREE_OPERAND (t, 0);
14147	tree op1 = TREE_OPERAND (t, 1);
14148
14149	if (TREE_CODE (op1) == INTEGER_CST
14150	    && TREE_CODE (op0) == CONSTRUCTOR
14151	    && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14152	  {
14153	    VEC(constructor_elt,gc) *elts = CONSTRUCTOR_ELTS (op0);
14154	    unsigned HOST_WIDE_INT end = VEC_length (constructor_elt, elts);
14155	    unsigned HOST_WIDE_INT begin = 0;
14156
14157	    /* Find a matching index by means of a binary search.  */
14158	    while (begin != end)
14159	      {
14160		unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14161		tree index = VEC_index (constructor_elt, elts, middle)->index;
14162
14163		if (TREE_CODE (index) == INTEGER_CST
14164		    && tree_int_cst_lt (index, op1))
14165		  begin = middle + 1;
14166		else if (TREE_CODE (index) == INTEGER_CST
14167			 && tree_int_cst_lt (op1, index))
14168		  end = middle;
14169		else if (TREE_CODE (index) == RANGE_EXPR
14170			 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14171		  begin = middle + 1;
14172		else if (TREE_CODE (index) == RANGE_EXPR
14173			 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14174		  end = middle;
14175		else
14176		  return VEC_index (constructor_elt, elts, middle)->value;
14177	      }
14178	  }
14179
14180	return t;
14181      }
14182
14183    case CONST_DECL:
14184      return fold (DECL_INITIAL (t));
14185
14186    default:
14187      return t;
14188    } /* switch (code) */
14189}
14190
14191#ifdef ENABLE_FOLD_CHECKING
14192#undef fold
14193
14194static void fold_checksum_tree (const_tree, struct md5_ctx *, htab_t);
14195static void fold_check_failed (const_tree, const_tree);
14196void print_fold_checksum (const_tree);
14197
/* When --enable-checking=fold is in effect, compute a digest of EXPR
   before and after the actual fold call, to verify that fold did not
   accidentally change the original EXPR.  */
14201
14202tree
14203fold (tree expr)
14204{
14205  tree ret;
14206  struct md5_ctx ctx;
14207  unsigned char checksum_before[16], checksum_after[16];
14208  htab_t ht;
14209
14210  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14211  md5_init_ctx (&ctx);
14212  fold_checksum_tree (expr, &ctx, ht);
14213  md5_finish_ctx (&ctx, checksum_before);
14214  htab_empty (ht);
14215
14216  ret = fold_1 (expr);
14217
14218  md5_init_ctx (&ctx);
14219  fold_checksum_tree (expr, &ctx, ht);
14220  md5_finish_ctx (&ctx, checksum_after);
14221  htab_delete (ht);
14222
14223  if (memcmp (checksum_before, checksum_after, 16))
14224    fold_check_failed (expr, ret);
14225
14226  return ret;
14227}
14228
14229void
14230print_fold_checksum (const_tree expr)
14231{
14232  struct md5_ctx ctx;
14233  unsigned char checksum[16], cnt;
14234  htab_t ht;
14235
14236  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14237  md5_init_ctx (&ctx);
14238  fold_checksum_tree (expr, &ctx, ht);
14239  md5_finish_ctx (&ctx, checksum);
14240  htab_delete (ht);
14241  for (cnt = 0; cnt < 16; ++cnt)
14242    fprintf (stderr, "%02x", checksum[cnt]);
14243  putc ('\n', stderr);
14244}
14245
14246static void
14247fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14248{
14249  internal_error ("fold check: original tree changed by fold");
14250}
14251
14252static void
14253fold_checksum_tree (const_tree expr, struct md5_ctx *ctx, htab_t ht)
14254{
14255  const void **slot;
14256  enum tree_code code;
14257  union tree_node buf;
14258  int i, len;
14259
14260recursive_label:
14261
14262  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
14263	       <= sizeof (struct tree_function_decl))
14264	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
14265  if (expr == NULL)
14266    return;
14267  slot = (const void **) htab_find_slot (ht, expr, INSERT);
14268  if (*slot != NULL)
14269    return;
14270  *slot = expr;
14271  code = TREE_CODE (expr);
14272  if (TREE_CODE_CLASS (code) == tcc_declaration
14273      && DECL_ASSEMBLER_NAME_SET_P (expr))
14274    {
14275      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
14276      memcpy ((char *) &buf, expr, tree_size (expr));
14277      SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14278      expr = (tree) &buf;
14279    }
14280  else if (TREE_CODE_CLASS (code) == tcc_type
14281	   && (TYPE_POINTER_TO (expr)
14282	       || TYPE_REFERENCE_TO (expr)
14283	       || TYPE_CACHED_VALUES_P (expr)
14284	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14285	       || TYPE_NEXT_VARIANT (expr)))
14286    {
14287      /* Allow these fields to be modified.  */
14288      tree tmp;
14289      memcpy ((char *) &buf, expr, tree_size (expr));
14290      expr = tmp = (tree) &buf;
14291      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14292      TYPE_POINTER_TO (tmp) = NULL;
14293      TYPE_REFERENCE_TO (tmp) = NULL;
14294      TYPE_NEXT_VARIANT (tmp) = NULL;
14295      if (TYPE_CACHED_VALUES_P (tmp))
14296	{
14297	  TYPE_CACHED_VALUES_P (tmp) = 0;
14298	  TYPE_CACHED_VALUES (tmp) = NULL;
14299	}
14300    }
14301  md5_process_bytes (expr, tree_size (expr), ctx);
14302  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14303  if (TREE_CODE_CLASS (code) != tcc_type
14304      && TREE_CODE_CLASS (code) != tcc_declaration
14305      && code != TREE_LIST
14306      && code != SSA_NAME)
14307    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14308  switch (TREE_CODE_CLASS (code))
14309    {
14310    case tcc_constant:
14311      switch (code)
14312	{
14313	case STRING_CST:
14314	  md5_process_bytes (TREE_STRING_POINTER (expr),
14315			     TREE_STRING_LENGTH (expr), ctx);
14316	  break;
14317	case COMPLEX_CST:
14318	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14319	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14320	  break;
14321	case VECTOR_CST:
14322	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
14323	  break;
14324	default:
14325	  break;
14326	}
14327      break;
14328    case tcc_exceptional:
14329      switch (code)
14330	{
14331	case TREE_LIST:
14332	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14333	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14334	  expr = TREE_CHAIN (expr);
14335	  goto recursive_label;
14336	  break;
14337	case TREE_VEC:
14338	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14339	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14340	  break;
14341	default:
14342	  break;
14343	}
14344      break;
14345    case tcc_expression:
14346    case tcc_reference:
14347    case tcc_comparison:
14348    case tcc_unary:
14349    case tcc_binary:
14350    case tcc_statement:
14351    case tcc_vl_exp:
14352      len = TREE_OPERAND_LENGTH (expr);
14353      for (i = 0; i < len; ++i)
14354	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14355      break;
14356    case tcc_declaration:
14357      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14358      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14359      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14360	{
14361	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14362	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14363	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14364	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14365	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14366	}
14367      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14368	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14369
14370      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14371	{
14372	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14373	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14374	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14375	}
14376      break;
14377    case tcc_type:
14378      if (TREE_CODE (expr) == ENUMERAL_TYPE)
14379        fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14380      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14381      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14382      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14383      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14384      if (INTEGRAL_TYPE_P (expr)
14385          || SCALAR_FLOAT_TYPE_P (expr))
14386	{
14387	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14388	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14389	}
14390      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14391      if (TREE_CODE (expr) == RECORD_TYPE
14392	  || TREE_CODE (expr) == UNION_TYPE
14393	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
14394	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14395      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14396      break;
14397    default:
14398      break;
14399    }
14400}
14401
14402/* Helper function for outputting the checksum of a tree T.  When
14403   debugging with gdb, you can "define mynext" to be "next" followed
14404   by "call debug_fold_checksum (op0)", then just trace down till the
14405   outputs differ.  */
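
/* A sketch of that gdb recipe (illustrative):

     (gdb) define mynext
     > next
     > call debug_fold_checksum (op0)
     > end
     (gdb) mynext  */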
14406
14407void
14408debug_fold_checksum (const_tree t)
14409{
14410  int i;
14411  unsigned char checksum[16];
14412  struct md5_ctx ctx;
14413  htab_t ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14414
14415  md5_init_ctx (&ctx);
14416  fold_checksum_tree (t, &ctx, ht);
14417  md5_finish_ctx (&ctx, checksum);
14418  htab_empty (ht);
14419
14420  for (i = 0; i < 16; i++)
14421    fprintf (stderr, "%d ", checksum[i]);
14422
14423  fprintf (stderr, "\n");
14424}
14425
14426#endif
14427
14428/* Fold a unary tree expression with code CODE of type TYPE with an
14429   operand OP0.  LOC is the location of the resulting expression.
14430   Return a folded expression if successful.  Otherwise, return a tree
14431   expression with code CODE of type TYPE with an operand OP0.  */
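
/* For example (illustrative):

     tem = fold_build1_loc (loc, NEGATE_EXPR, type, op0);

   yields the INTEGER_CST -5 when OP0 is the constant 5, and otherwise
   a freshly built NEGATE_EXPR carrying location LOC.  */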
14432
14433tree
14434fold_build1_stat_loc (location_t loc,
14435		      enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14436{
14437  tree tem;
14438#ifdef ENABLE_FOLD_CHECKING
14439  unsigned char checksum_before[16], checksum_after[16];
14440  struct md5_ctx ctx;
14441  htab_t ht;
14442
14443  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14444  md5_init_ctx (&ctx);
14445  fold_checksum_tree (op0, &ctx, ht);
14446  md5_finish_ctx (&ctx, checksum_before);
14447  htab_empty (ht);
14448#endif
14449
14450  tem = fold_unary_loc (loc, code, type, op0);
14451  if (!tem)
14452    {
14453      tem = build1_stat (code, type, op0 PASS_MEM_STAT);
14454      SET_EXPR_LOCATION (tem, loc);
14455    }
14456
14457#ifdef ENABLE_FOLD_CHECKING
14458  md5_init_ctx (&ctx);
14459  fold_checksum_tree (op0, &ctx, ht);
14460  md5_finish_ctx (&ctx, checksum_after);
14461  htab_delete (ht);
14462
14463  if (memcmp (checksum_before, checksum_after, 16))
14464    fold_check_failed (op0, tem);
14465#endif
14466  return tem;
14467}
14468
14469/* Fold a binary tree expression with code CODE of type TYPE with
14470   operands OP0 and OP1.  LOC is the location of the resulting
14471   expression.  Return a folded expression if successful.  Otherwise,
14472   return a tree expression with code CODE of type TYPE with operands
14473   OP0 and OP1.  */
14474
14475tree
14476fold_build2_stat_loc (location_t loc,
14477		      enum tree_code code, tree type, tree op0, tree op1
14478		      MEM_STAT_DECL)
14479{
14480  tree tem;
14481#ifdef ENABLE_FOLD_CHECKING
14482  unsigned char checksum_before_op0[16],
14483                checksum_before_op1[16],
14484		checksum_after_op0[16],
14485		checksum_after_op1[16];
14486  struct md5_ctx ctx;
14487  htab_t ht;
14488
14489  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14490  md5_init_ctx (&ctx);
14491  fold_checksum_tree (op0, &ctx, ht);
14492  md5_finish_ctx (&ctx, checksum_before_op0);
14493  htab_empty (ht);
14494
14495  md5_init_ctx (&ctx);
14496  fold_checksum_tree (op1, &ctx, ht);
14497  md5_finish_ctx (&ctx, checksum_before_op1);
14498  htab_empty (ht);
14499#endif
14500
14501  tem = fold_binary_loc (loc, code, type, op0, op1);
14502  if (!tem)
14503    {
14504      tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
14505      SET_EXPR_LOCATION (tem, loc);
14506    }
14507
14508#ifdef ENABLE_FOLD_CHECKING
14509  md5_init_ctx (&ctx);
14510  fold_checksum_tree (op0, &ctx, ht);
14511  md5_finish_ctx (&ctx, checksum_after_op0);
14512  htab_empty (ht);
14513
14514  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14515    fold_check_failed (op0, tem);
14516
14517  md5_init_ctx (&ctx);
14518  fold_checksum_tree (op1, &ctx, ht);
14519  md5_finish_ctx (&ctx, checksum_after_op1);
14520  htab_delete (ht);
14521
14522  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14523    fold_check_failed (op1, tem);
14524#endif
14525  return tem;
14526}
14527
14528/* Fold a ternary tree expression with code CODE of type TYPE with
14529   operands OP0, OP1, and OP2.  Return a folded expression if
14530   successful.  Otherwise, return a tree expression with code CODE of
14531   type TYPE with operands OP0, OP1, and OP2.  */
14532
14533tree
14534fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14535		      tree op0, tree op1, tree op2 MEM_STAT_DECL)
14536{
14537  tree tem;
14538#ifdef ENABLE_FOLD_CHECKING
14539  unsigned char checksum_before_op0[16],
14540                checksum_before_op1[16],
14541                checksum_before_op2[16],
14542		checksum_after_op0[16],
14543		checksum_after_op1[16],
14544		checksum_after_op2[16];
14545  struct md5_ctx ctx;
14546  htab_t ht;
14547
14548  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14549  md5_init_ctx (&ctx);
14550  fold_checksum_tree (op0, &ctx, ht);
14551  md5_finish_ctx (&ctx, checksum_before_op0);
14552  htab_empty (ht);
14553
14554  md5_init_ctx (&ctx);
14555  fold_checksum_tree (op1, &ctx, ht);
14556  md5_finish_ctx (&ctx, checksum_before_op1);
14557  htab_empty (ht);
14558
14559  md5_init_ctx (&ctx);
14560  fold_checksum_tree (op2, &ctx, ht);
14561  md5_finish_ctx (&ctx, checksum_before_op2);
14562  htab_empty (ht);
14563#endif
14564
14565  gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14566  tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14567  if (!tem)
14568    {
      tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
14570      SET_EXPR_LOCATION (tem, loc);
14571    }
14572
14573#ifdef ENABLE_FOLD_CHECKING
14574  md5_init_ctx (&ctx);
14575  fold_checksum_tree (op0, &ctx, ht);
14576  md5_finish_ctx (&ctx, checksum_after_op0);
14577  htab_empty (ht);
14578
14579  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14580    fold_check_failed (op0, tem);
14581
14582  md5_init_ctx (&ctx);
14583  fold_checksum_tree (op1, &ctx, ht);
14584  md5_finish_ctx (&ctx, checksum_after_op1);
14585  htab_empty (ht);
14586
14587  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14588    fold_check_failed (op1, tem);
14589
14590  md5_init_ctx (&ctx);
14591  fold_checksum_tree (op2, &ctx, ht);
14592  md5_finish_ctx (&ctx, checksum_after_op2);
14593  htab_delete (ht);
14594
14595  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14596    fold_check_failed (op2, tem);
14597#endif
14598  return tem;
14599}
14600
/* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
   arguments in ARGARRAY, and a null static chain.
14603   Return a folded expression if successful.  Otherwise, return a CALL_EXPR
14604   of type TYPE from the given operands as constructed by build_call_array.  */
14605
14606tree
14607fold_build_call_array_loc (location_t loc, tree type, tree fn,
14608			   int nargs, tree *argarray)
14609{
14610  tree tem;
14611#ifdef ENABLE_FOLD_CHECKING
14612  unsigned char checksum_before_fn[16],
14613                checksum_before_arglist[16],
14614		checksum_after_fn[16],
14615		checksum_after_arglist[16];
14616  struct md5_ctx ctx;
14617  htab_t ht;
14618  int i;
14619
14620  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
14621  md5_init_ctx (&ctx);
14622  fold_checksum_tree (fn, &ctx, ht);
14623  md5_finish_ctx (&ctx, checksum_before_fn);
14624  htab_empty (ht);
14625
14626  md5_init_ctx (&ctx);
14627  for (i = 0; i < nargs; i++)
14628    fold_checksum_tree (argarray[i], &ctx, ht);
14629  md5_finish_ctx (&ctx, checksum_before_arglist);
14630  htab_empty (ht);
14631#endif
14632
14633  tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14634
14635#ifdef ENABLE_FOLD_CHECKING
14636  md5_init_ctx (&ctx);
14637  fold_checksum_tree (fn, &ctx, ht);
14638  md5_finish_ctx (&ctx, checksum_after_fn);
14639  htab_empty (ht);
14640
14641  if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14642    fold_check_failed (fn, tem);
14643
14644  md5_init_ctx (&ctx);
14645  for (i = 0; i < nargs; i++)
14646    fold_checksum_tree (argarray[i], &ctx, ht);
14647  md5_finish_ctx (&ctx, checksum_after_arglist);
14648  htab_delete (ht);
14649
14650  if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14651    fold_check_failed (NULL_TREE, tem);
14652#endif
14653  return tem;
14654}
14655
14656/* Perform constant folding and related simplification of initializer
14657   expression EXPR.  These behave identically to "fold_buildN" but ignore
14658   potential run-time traps and exceptions that fold must preserve.  */
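
/* For example (an illustrative sketch): when -ftrapping-math is in
   effect, fold must leave the possibly trapping division "1.0 / 0.0"
   alone, but a static initializer cannot trap at run time, so
   fold_build2_initializer_loc may fold it to +Inf.  */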
14659
14660#define START_FOLD_INIT \
14661  int saved_signaling_nans = flag_signaling_nans;\
14662  int saved_trapping_math = flag_trapping_math;\
14663  int saved_rounding_math = flag_rounding_math;\
14664  int saved_trapv = flag_trapv;\
14665  int saved_folding_initializer = folding_initializer;\
14666  flag_signaling_nans = 0;\
14667  flag_trapping_math = 0;\
14668  flag_rounding_math = 0;\
14669  flag_trapv = 0;\
14670  folding_initializer = 1;
14671
14672#define END_FOLD_INIT \
14673  flag_signaling_nans = saved_signaling_nans;\
14674  flag_trapping_math = saved_trapping_math;\
14675  flag_rounding_math = saved_rounding_math;\
14676  flag_trapv = saved_trapv;\
14677  folding_initializer = saved_folding_initializer;
14678
14679tree
14680fold_build1_initializer_loc (location_t loc, enum tree_code code,
14681			     tree type, tree op)
14682{
14683  tree result;
14684  START_FOLD_INIT;
14685
14686  result = fold_build1_loc (loc, code, type, op);
14687
14688  END_FOLD_INIT;
14689  return result;
14690}
14691
14692tree
14693fold_build2_initializer_loc (location_t loc, enum tree_code code,
14694			     tree type, tree op0, tree op1)
14695{
14696  tree result;
14697  START_FOLD_INIT;
14698
14699  result = fold_build2_loc (loc, code, type, op0, op1);
14700
14701  END_FOLD_INIT;
14702  return result;
14703}
14704
14705tree
14706fold_build3_initializer_loc (location_t loc, enum tree_code code,
14707			     tree type, tree op0, tree op1, tree op2)
14708{
14709  tree result;
14710  START_FOLD_INIT;
14711
14712  result = fold_build3_loc (loc, code, type, op0, op1, op2);
14713
14714  END_FOLD_INIT;
14715  return result;
14716}
14717
14718tree
14719fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14720				       int nargs, tree *argarray)
14721{
14722  tree result;
14723  START_FOLD_INIT;
14724
14725  result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14726
14727  END_FOLD_INIT;
14728  return result;
14729}
14730
14731#undef START_FOLD_INIT
14732#undef END_FOLD_INIT
14733
14734/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or if we cannot easily determine it to be.
14736
14737   An example of the sort of thing we care about (at this point; this routine
14738   could surely be made more general, and expanded to do what the *_DIV_EXPR's
14739   fold cases do now) is discovering that
14740
14741     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14742
14743   is a multiple of
14744
14745     SAVE_EXPR (J * 8)
14746
14747   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14748
14749   This code also handles discovering that
14750
14751     SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14752
14753   is a multiple of 8 so we don't have to worry about dealing with a
14754   possible remainder.
14755
14756   Note that we *look* inside a SAVE_EXPR only to determine how it was
14757   calculated; it is not safe for fold to do much of anything else with the
14758   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14759   at run time.  For example, the latter example above *cannot* be implemented
14760   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14761   evaluation time of the original SAVE_EXPR is not necessarily the same at
14762   the time the new expression is evaluated.  The only optimization of this
14763   sort that would be valid is changing
14764
14765     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14766
14767   divided by 8 to
14768
14769     SAVE_EXPR (I) * SAVE_EXPR (J)
14770
14771   (where the same SAVE_EXPR (J) is used in the original and the
14772   transformed version).  */
14773
14774int
14775multiple_of_p (tree type, const_tree top, const_tree bottom)
14776{
14777  if (operand_equal_p (top, bottom, 0))
14778    return 1;
14779
14780  if (TREE_CODE (type) != INTEGER_TYPE)
14781    return 0;
14782
14783  switch (TREE_CODE (top))
14784    {
14785    case BIT_AND_EXPR:
14786      /* Bitwise and provides a power of two multiple.  If the mask is
14787	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
14788      if (!integer_pow2p (bottom))
14789	return 0;
14790      /* FALLTHRU */
14791
14792    case MULT_EXPR:
14793      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14794	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14795
14796    case PLUS_EXPR:
14797    case MINUS_EXPR:
14798      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14799	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14800
14801    case LSHIFT_EXPR:
14802      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14803	{
14804	  tree op1, t1;
14805
14806	  op1 = TREE_OPERAND (top, 1);
14807	  /* const_binop may not detect overflow correctly,
14808	     so check for it explicitly here.  */
14809	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
14810	      > TREE_INT_CST_LOW (op1)
14811	      && TREE_INT_CST_HIGH (op1) == 0
14812	      && 0 != (t1 = fold_convert (type,
14813					  const_binop (LSHIFT_EXPR,
14814						       size_one_node,
14815						       op1, 0)))
14816	      && !TREE_OVERFLOW (t1))
14817	    return multiple_of_p (type, t1, bottom);
14818	}
14819      return 0;
14820
14821    case NOP_EXPR:
14822      /* Can't handle conversions from non-integral or wider integral type.  */
14823      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14824	  || (TYPE_PRECISION (type)
14825	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14826	return 0;
14827
      /* ... fall through ...  */
14829
14830    case SAVE_EXPR:
14831      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14832
14833    case INTEGER_CST:
14834      if (TREE_CODE (bottom) != INTEGER_CST
14835	  || integer_zerop (bottom)
14836	  || (TYPE_UNSIGNED (type)
14837	      && (tree_int_cst_sgn (top) < 0
14838		  || tree_int_cst_sgn (bottom) < 0)))
14839	return 0;
14840      return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
14841					     top, bottom, 0));
14842
14843    default:
14844      return 0;
14845    }
14846}
14847
/* Return true if an expression with code CODE and type TYPE is known
   to be non-negative.  */
14849
14850static bool
14851tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14852{
14853  if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14854      && truth_value_p (code))
14855    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the values are -1 and 0).  */
14857    return true;
14858  return false;
14859}
14860
14861/* Return true if (CODE OP0) is known to be non-negative.  If the return
14862   value is based on the assumption that signed overflow is undefined,
14863   set *STRICT_OVERFLOW_P to true; otherwise, don't change
14864   *STRICT_OVERFLOW_P.  */
14865
14866bool
14867tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14868				bool *strict_overflow_p)
14869{
14870  if (TYPE_UNSIGNED (type))
14871    return true;
14872
14873  switch (code)
14874    {
14875    case ABS_EXPR:
14876      /* We can't return 1 if flag_wrapv is set because
14877	 ABS_EXPR<INT_MIN> = INT_MIN.  */
14878      if (!INTEGRAL_TYPE_P (type))
14879	return true;
14880      if (TYPE_OVERFLOW_UNDEFINED (type))
14881	{
14882	  *strict_overflow_p = true;
14883	  return true;
14884	}
14885      break;
14886
14887    case NON_LVALUE_EXPR:
14888    case FLOAT_EXPR:
14889    case FIX_TRUNC_EXPR:
14890      return tree_expr_nonnegative_warnv_p (op0,
14891					    strict_overflow_p);
14892
14893    case NOP_EXPR:
14894      {
14895	tree inner_type = TREE_TYPE (op0);
14896	tree outer_type = type;
14897
14898	if (TREE_CODE (outer_type) == REAL_TYPE)
14899	  {
14900	    if (TREE_CODE (inner_type) == REAL_TYPE)
14901	      return tree_expr_nonnegative_warnv_p (op0,
14902						    strict_overflow_p);
14903	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
14904	      {
14905		if (TYPE_UNSIGNED (inner_type))
14906		  return true;
14907		return tree_expr_nonnegative_warnv_p (op0,
14908						      strict_overflow_p);
14909	      }
14910	  }
14911	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
14912	  {
14913	    if (TREE_CODE (inner_type) == REAL_TYPE)
14914	      return tree_expr_nonnegative_warnv_p (op0,
14915						    strict_overflow_p);
14916	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
14917	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14918		      && TYPE_UNSIGNED (inner_type);
14919	  }
14920      }
14921      break;
14922
14923    default:
14924      return tree_simple_nonnegative_warnv_p (code, type);
14925    }
14926
14927  /* We don't know sign of `t', so be conservative and return false.  */
14928  return false;
14929}
14930
14931/* Return true if (CODE OP0 OP1) is known to be non-negative.  If the return
14932   value is based on the assumption that signed overflow is undefined,
14933   set *STRICT_OVERFLOW_P to true; otherwise, don't change
14934   *STRICT_OVERFLOW_P.  */
14935
14936bool
14937tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14938				      tree op1, bool *strict_overflow_p)
14939{
14940  if (TYPE_UNSIGNED (type))
14941    return true;
14942
14943  switch (code)
14944    {
14945    case POINTER_PLUS_EXPR:
14946    case PLUS_EXPR:
14947      if (FLOAT_TYPE_P (type))
14948	return (tree_expr_nonnegative_warnv_p (op0,
14949					       strict_overflow_p)
14950		&& tree_expr_nonnegative_warnv_p (op1,
14951						  strict_overflow_p));
14952
14953      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14954	 both unsigned and at least 2 bits shorter than the result.  */
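      /* Example (illustrative): with a 32-bit int result, the sum of
	 two zero-extended 16-bit values is at most 2 * (2^16 - 1),
	 which fits in 17 bits; the test below checks
	 MAX (16, 16) + 1 = 17 < 32.  */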
14955      if (TREE_CODE (type) == INTEGER_TYPE
14956	  && TREE_CODE (op0) == NOP_EXPR
14957	  && TREE_CODE (op1) == NOP_EXPR)
14958	{
14959	  tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14960	  tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14961	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14962	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14963	    {
14964	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
14965				       TYPE_PRECISION (inner2)) + 1;
14966	      return prec < TYPE_PRECISION (type);
14967	    }
14968	}
14969      break;
14970
14971    case MULT_EXPR:
14972      if (FLOAT_TYPE_P (type))
14973	{
14974	  /* x * x for floating point x is always non-negative.  */
14975	  if (operand_equal_p (op0, op1, 0))
14976	    return true;
14977	  return (tree_expr_nonnegative_warnv_p (op0,
14978						 strict_overflow_p)
14979		  && tree_expr_nonnegative_warnv_p (op1,
14980						    strict_overflow_p));
14981	}
14982
      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and the sum of their precisions is less than the
	 precision of the result.  */
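      /* Example (illustrative): with a 32-bit int result, the product
	 of two zero-extended 8-bit values is at most 255 * 255 = 65025,
	 which fits in 16 bits; the test below checks 8 + 8 < 32.  */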
14985      if (TREE_CODE (type) == INTEGER_TYPE
14986	  && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14987	  && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14988	{
14989	  tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14990	    ? TREE_TYPE (TREE_OPERAND (op0, 0))
14991	    : TREE_TYPE (op0);
14992	  tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14993	    ? TREE_TYPE (TREE_OPERAND (op1, 0))
14994	    : TREE_TYPE (op1);
14995
14996	  bool unsigned0 = TYPE_UNSIGNED (inner0);
14997	  bool unsigned1 = TYPE_UNSIGNED (inner1);
14998
14999	  if (TREE_CODE (op0) == INTEGER_CST)
15000	    unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15001
15002	  if (TREE_CODE (op1) == INTEGER_CST)
15003	    unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15004
15005	  if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15006	      && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15007	    {
15008	      unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15009		? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15010		: TYPE_PRECISION (inner0);
15011
15012	      unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15013		? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15014		: TYPE_PRECISION (inner1);
15015
15016	      return precision0 + precision1 < TYPE_PRECISION (type);
15017	    }
15018	}
15019      return false;
15020
15021    case BIT_AND_EXPR:
15022    case MAX_EXPR:
15023      return (tree_expr_nonnegative_warnv_p (op0,
15024					     strict_overflow_p)
15025	      || tree_expr_nonnegative_warnv_p (op1,
15026						strict_overflow_p));
15027
15028    case BIT_IOR_EXPR:
15029    case BIT_XOR_EXPR:
15030    case MIN_EXPR:
15031    case RDIV_EXPR:
15032    case TRUNC_DIV_EXPR:
15033    case CEIL_DIV_EXPR:
15034    case FLOOR_DIV_EXPR:
15035    case ROUND_DIV_EXPR:
15036      return (tree_expr_nonnegative_warnv_p (op0,
15037					     strict_overflow_p)
15038	      && tree_expr_nonnegative_warnv_p (op1,
15039						strict_overflow_p));
15040
15041    case TRUNC_MOD_EXPR:
15042    case CEIL_MOD_EXPR:
15043    case FLOOR_MOD_EXPR:
15044    case ROUND_MOD_EXPR:
15045      return tree_expr_nonnegative_warnv_p (op0,
15046					    strict_overflow_p);
15047    default:
15048      return tree_simple_nonnegative_warnv_p (code, type);
15049    }
15050
15051  /* We don't know sign of `t', so be conservative and return false.  */
15052  return false;
15053}
15054
15055/* Return true if T is known to be non-negative.  If the return
15056   value is based on the assumption that signed overflow is undefined,
15057   set *STRICT_OVERFLOW_P to true; otherwise, don't change
15058   *STRICT_OVERFLOW_P.  */
15059
15060bool
15061tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15062{
15063  if (TYPE_UNSIGNED (TREE_TYPE (t)))
15064    return true;
15065
15066  switch (TREE_CODE (t))
15067    {
15068    case INTEGER_CST:
15069      return tree_int_cst_sgn (t) >= 0;
15070
15071    case REAL_CST:
15072      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15073
15074    case FIXED_CST:
15075      return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15076
15077    case COND_EXPR:
15078      return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15079					     strict_overflow_p)
15080	      && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15081						strict_overflow_p));
15082    default:
15083      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15084						   TREE_TYPE (t));
15085    }
15086  /* We don't know sign of `t', so be conservative and return false.  */
15087  return false;
15088}
15089
15090/* Return true if T is known to be non-negative.  If the return
15091   value is based on the assumption that signed overflow is undefined,
15092   set *STRICT_OVERFLOW_P to true; otherwise, don't change
15093   *STRICT_OVERFLOW_P.  */
15094
15095bool
15096tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15097			       tree arg0, tree arg1, bool *strict_overflow_p)
15098{
15099  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15100    switch (DECL_FUNCTION_CODE (fndecl))
15101      {
15102	CASE_FLT_FN (BUILT_IN_ACOS):
15103	CASE_FLT_FN (BUILT_IN_ACOSH):
15104	CASE_FLT_FN (BUILT_IN_CABS):
15105	CASE_FLT_FN (BUILT_IN_COSH):
15106	CASE_FLT_FN (BUILT_IN_ERFC):
15107	CASE_FLT_FN (BUILT_IN_EXP):
15108	CASE_FLT_FN (BUILT_IN_EXP10):
15109	CASE_FLT_FN (BUILT_IN_EXP2):
15110	CASE_FLT_FN (BUILT_IN_FABS):
15111	CASE_FLT_FN (BUILT_IN_FDIM):
15112	CASE_FLT_FN (BUILT_IN_HYPOT):
15113	CASE_FLT_FN (BUILT_IN_POW10):
15114	CASE_INT_FN (BUILT_IN_FFS):
15115	CASE_INT_FN (BUILT_IN_PARITY):
15116	CASE_INT_FN (BUILT_IN_POPCOUNT):
15117      case BUILT_IN_BSWAP32:
15118      case BUILT_IN_BSWAP64:
15119	/* Always true.  */
15120	return true;
15121
15122	CASE_FLT_FN (BUILT_IN_SQRT):
15123	/* sqrt(-0.0) is -0.0.  */
15124	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15125	  return true;
15126	return tree_expr_nonnegative_warnv_p (arg0,
15127					      strict_overflow_p);
15128
15129	CASE_FLT_FN (BUILT_IN_ASINH):
15130	CASE_FLT_FN (BUILT_IN_ATAN):
15131	CASE_FLT_FN (BUILT_IN_ATANH):
15132	CASE_FLT_FN (BUILT_IN_CBRT):
15133	CASE_FLT_FN (BUILT_IN_CEIL):
15134	CASE_FLT_FN (BUILT_IN_ERF):
15135	CASE_FLT_FN (BUILT_IN_EXPM1):
15136	CASE_FLT_FN (BUILT_IN_FLOOR):
15137	CASE_FLT_FN (BUILT_IN_FMOD):
15138	CASE_FLT_FN (BUILT_IN_FREXP):
15139	CASE_FLT_FN (BUILT_IN_LCEIL):
15140	CASE_FLT_FN (BUILT_IN_LDEXP):
15141	CASE_FLT_FN (BUILT_IN_LFLOOR):
15142	CASE_FLT_FN (BUILT_IN_LLCEIL):
15143	CASE_FLT_FN (BUILT_IN_LLFLOOR):
15144	CASE_FLT_FN (BUILT_IN_LLRINT):
15145	CASE_FLT_FN (BUILT_IN_LLROUND):
15146	CASE_FLT_FN (BUILT_IN_LRINT):
15147	CASE_FLT_FN (BUILT_IN_LROUND):
15148	CASE_FLT_FN (BUILT_IN_MODF):
15149	CASE_FLT_FN (BUILT_IN_NEARBYINT):
15150	CASE_FLT_FN (BUILT_IN_RINT):
15151	CASE_FLT_FN (BUILT_IN_ROUND):
15152	CASE_FLT_FN (BUILT_IN_SCALB):
15153	CASE_FLT_FN (BUILT_IN_SCALBLN):
15154	CASE_FLT_FN (BUILT_IN_SCALBN):
15155	CASE_FLT_FN (BUILT_IN_SIGNBIT):
15156	CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15157	CASE_FLT_FN (BUILT_IN_SINH):
15158	CASE_FLT_FN (BUILT_IN_TANH):
15159	CASE_FLT_FN (BUILT_IN_TRUNC):
15160	/* True if the 1st argument is nonnegative.  */
15161	return tree_expr_nonnegative_warnv_p (arg0,
15162					      strict_overflow_p);
15163
15164	CASE_FLT_FN (BUILT_IN_FMAX):
15165	/* True if the 1st OR 2nd arguments are nonnegative.  */
15166	return (tree_expr_nonnegative_warnv_p (arg0,
15167					       strict_overflow_p)
15168		|| (tree_expr_nonnegative_warnv_p (arg1,
15169						   strict_overflow_p)));
15170
15171	CASE_FLT_FN (BUILT_IN_FMIN):
15172	/* True if the 1st AND 2nd arguments are nonnegative.  */
15173	return (tree_expr_nonnegative_warnv_p (arg0,
15174					       strict_overflow_p)
15175		&& (tree_expr_nonnegative_warnv_p (arg1,
15176						   strict_overflow_p)));
15177
15178	CASE_FLT_FN (BUILT_IN_COPYSIGN):
15179	/* True if the 2nd argument is nonnegative.  */
15180	return tree_expr_nonnegative_warnv_p (arg1,
15181					      strict_overflow_p);
15182
15183	CASE_FLT_FN (BUILT_IN_POWI):
15184	/* True if the 1st argument is nonnegative or the second
15185	   argument is an even integer.  */
15186	if (TREE_CODE (arg1) == INTEGER_CST
15187	    && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15188	  return true;
15189	return tree_expr_nonnegative_warnv_p (arg0,
15190					      strict_overflow_p);
15191
15192	CASE_FLT_FN (BUILT_IN_POW):
15193	/* True if the 1st argument is nonnegative or the second
15194	   argument is an even integer valued real.  */
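	/* E.g. (illustrative) pow (x, 2.0) behaves like x * x and is
	   treated as non-negative for any x, whereas pow (x, 2.5) is
	   NaN for negative x, so there the first argument must itself
	   be known non-negative.  */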
15195	if (TREE_CODE (arg1) == REAL_CST)
15196	  {
15197	    REAL_VALUE_TYPE c;
15198	    HOST_WIDE_INT n;
15199
15200	    c = TREE_REAL_CST (arg1);
15201	    n = real_to_integer (&c);
15202	    if ((n & 1) == 0)
15203	      {
15204		REAL_VALUE_TYPE cint;
15205		real_from_integer (&cint, VOIDmode, n,
15206				   n < 0 ? -1 : 0, 0);
15207		if (real_identical (&c, &cint))
15208		  return true;
15209	      }
15210	  }
15211	return tree_expr_nonnegative_warnv_p (arg0,
15212					      strict_overflow_p);
15213
15214      default:
15215	break;
15216      }
15217  return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15218					  type);
15219}
15220
15221/* Return true if T is known to be non-negative.  If the return
15222   value is based on the assumption that signed overflow is undefined,
15223   set *STRICT_OVERFLOW_P to true; otherwise, don't change
15224   *STRICT_OVERFLOW_P.  */
15225
15226bool
15227tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15228{
15229  enum tree_code code = TREE_CODE (t);
15230  if (TYPE_UNSIGNED (TREE_TYPE (t)))
15231    return true;
15232
15233  switch (code)
15234    {
15235    case TARGET_EXPR:
15236      {
15237	tree temp = TARGET_EXPR_SLOT (t);
15238	t = TARGET_EXPR_INITIAL (t);
15239
15240	/* If the initializer is non-void, then it's a normal expression
15241	   that will be assigned to the slot.  */
15242	if (!VOID_TYPE_P (t))
15243	  return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15244
15245	/* Otherwise, the initializer sets the slot in some way.  One common
15246	   way is an assignment statement at the end of the initializer.  */
15247	while (1)
15248	  {
15249	    if (TREE_CODE (t) == BIND_EXPR)
15250	      t = expr_last (BIND_EXPR_BODY (t));
15251	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15252		     || TREE_CODE (t) == TRY_CATCH_EXPR)
15253	      t = expr_last (TREE_OPERAND (t, 0));
15254	    else if (TREE_CODE (t) == STATEMENT_LIST)
15255	      t = expr_last (t);
15256	    else
15257	      break;
15258	  }
15259	if (TREE_CODE (t) == MODIFY_EXPR
15260	    && TREE_OPERAND (t, 0) == temp)
15261	  return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15262						strict_overflow_p);
15263
15264	return false;
15265      }
15266
15267    case CALL_EXPR:
15268      {
15269	tree arg0 = call_expr_nargs (t) > 0 ?  CALL_EXPR_ARG (t, 0) : NULL_TREE;
15270	tree arg1 = call_expr_nargs (t) > 1 ?  CALL_EXPR_ARG (t, 1) : NULL_TREE;
15271
15272	return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15273					      get_callee_fndecl (t),
15274					      arg0,
15275					      arg1,
15276					      strict_overflow_p);
15277      }
15278    case COMPOUND_EXPR:
15279    case MODIFY_EXPR:
15280      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15281					    strict_overflow_p);
15282    case BIND_EXPR:
15283      return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15284					    strict_overflow_p);
15285    case SAVE_EXPR:
15286      return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15287					    strict_overflow_p);
15288
15289    default:
15290      return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15291						   TREE_TYPE (t));
15292    }
15293
15294  /* We don't know sign of `t', so be conservative and return false.  */
15295  return false;
15296}
15297
15298/* Return true if T is known to be non-negative.  If the return
15299   value is based on the assumption that signed overflow is undefined,
15300   set *STRICT_OVERFLOW_P to true; otherwise, don't change
15301   *STRICT_OVERFLOW_P.  */
15302
15303bool
15304tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15305{
15306  enum tree_code code;
15307  if (t == error_mark_node)
15308    return false;
15309
15310  code = TREE_CODE (t);
15311  switch (TREE_CODE_CLASS (code))
15312    {
15313    case tcc_binary:
15314    case tcc_comparison:
15315      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15316					      TREE_TYPE (t),
15317					      TREE_OPERAND (t, 0),
15318					      TREE_OPERAND (t, 1),
15319					      strict_overflow_p);
15320
15321    case tcc_unary:
15322      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15323					     TREE_TYPE (t),
15324					     TREE_OPERAND (t, 0),
15325					     strict_overflow_p);
15326
15327    case tcc_constant:
15328    case tcc_declaration:
15329    case tcc_reference:
15330      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15331
15332    default:
15333      break;
15334    }
15335
15336  switch (code)
15337    {
15338    case TRUTH_AND_EXPR:
15339    case TRUTH_OR_EXPR:
15340    case TRUTH_XOR_EXPR:
15341      return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15342					      TREE_TYPE (t),
15343					      TREE_OPERAND (t, 0),
15344					      TREE_OPERAND (t, 1),
15345					      strict_overflow_p);
15346    case TRUTH_NOT_EXPR:
15347      return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15348					     TREE_TYPE (t),
15349					     TREE_OPERAND (t, 0),
15350					     strict_overflow_p);
15351
15352    case COND_EXPR:
15353    case CONSTRUCTOR:
15354    case OBJ_TYPE_REF:
15355    case ASSERT_EXPR:
15356    case ADDR_EXPR:
15357    case WITH_SIZE_EXPR:
15358    case SSA_NAME:
15359      return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15360
15361    default:
15362      return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15363    }
15364}
15365
15366/* Return true if `t' is known to be non-negative.  Handle warnings
15367   about undefined signed overflow.  */
15368
15369bool
15370tree_expr_nonnegative_p (tree t)
15371{
15372  bool ret, strict_overflow_p;
15373
15374  strict_overflow_p = false;
15375  ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15376  if (strict_overflow_p)
15377    fold_overflow_warning (("assuming signed overflow does not occur when "
15378			    "determining that expression is always "
15379			    "non-negative"),
15380			   WARN_STRICT_OVERFLOW_MISC);
15381  return ret;
15382}
15383
15384
/* Return true when (CODE OP0) is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.
15388
15389   If the return value is based on the assumption that signed overflow
15390   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15391   change *STRICT_OVERFLOW_P.  */
15392
15393bool
15394tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15395				 bool *strict_overflow_p)
15396{
15397  switch (code)
15398    {
15399    case ABS_EXPR:
15400      return tree_expr_nonzero_warnv_p (op0,
15401					strict_overflow_p);
15402
15403    case NOP_EXPR:
15404      {
15405	tree inner_type = TREE_TYPE (op0);
15406	tree outer_type = type;
15407
15408	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15409		&& tree_expr_nonzero_warnv_p (op0,
15410					      strict_overflow_p));
15411      }
15412      break;
15413
15414    case NON_LVALUE_EXPR:
15415      return tree_expr_nonzero_warnv_p (op0,
15416					strict_overflow_p);
15417
15418    default:
15419      break;
15420  }
15421
15422  return false;
15423}
15424
/* Return true when (CODE OP0 OP1) is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.
15428
15429   If the return value is based on the assumption that signed overflow
15430   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15431   change *STRICT_OVERFLOW_P.  */
15432
15433bool
15434tree_binary_nonzero_warnv_p (enum tree_code code,
15435			     tree type,
15436			     tree op0,
15437			     tree op1, bool *strict_overflow_p)
15438{
15439  bool sub_strict_overflow_p;
15440  switch (code)
15441    {
15442    case POINTER_PLUS_EXPR:
15443    case PLUS_EXPR:
15444      if (TYPE_OVERFLOW_UNDEFINED (type))
15445	{
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
15448	  sub_strict_overflow_p = false;
15449	  if (!tree_expr_nonnegative_warnv_p (op0,
15450					      &sub_strict_overflow_p)
15451	      || !tree_expr_nonnegative_warnv_p (op1,
15452						 &sub_strict_overflow_p))
15453	    return false;
	  /* One of the operands must be positive and the other non-negative.  */
15455	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15456	     overflows, on a twos-complement machine the sum of two
15457	     nonnegative numbers can never be zero.  */
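	  /* (With 32-bit operands, two nonnegative values sum to at
	     most 2^32 - 2 < 2^32, so the wrapped sum is zero only if
	     both operands are zero, contradicting nonzeroness of one
	     of them.)  */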
15458	  return (tree_expr_nonzero_warnv_p (op0,
15459					     strict_overflow_p)
15460		  || tree_expr_nonzero_warnv_p (op1,
15461						strict_overflow_p));
15462	}
15463      break;
15464
15465    case MULT_EXPR:
15466      if (TYPE_OVERFLOW_UNDEFINED (type))
15467	{
15468	  if (tree_expr_nonzero_warnv_p (op0,
15469					 strict_overflow_p)
15470	      && tree_expr_nonzero_warnv_p (op1,
15471					    strict_overflow_p))
15472	    {
15473	      *strict_overflow_p = true;
15474	      return true;
15475	    }
15476	}
15477      break;
15478
15479    case MIN_EXPR:
15480      sub_strict_overflow_p = false;
15481      if (tree_expr_nonzero_warnv_p (op0,
15482				     &sub_strict_overflow_p)
15483	  && tree_expr_nonzero_warnv_p (op1,
15484					&sub_strict_overflow_p))
15485	{
15486	  if (sub_strict_overflow_p)
15487	    *strict_overflow_p = true;
15488	}
15489      break;
15490
15491    case MAX_EXPR:
15492      sub_strict_overflow_p = false;
15493      if (tree_expr_nonzero_warnv_p (op0,
15494				     &sub_strict_overflow_p))
15495	{
15496	  if (sub_strict_overflow_p)
15497	    *strict_overflow_p = true;
15498
15499	  /* When both operands are nonzero, then MAX must be too.  */
15500	  if (tree_expr_nonzero_warnv_p (op1,
15501					 strict_overflow_p))
15502	    return true;
15503
15504	  /* MAX where operand 0 is positive is positive.  */
15505	  return tree_expr_nonnegative_warnv_p (op0,
15506					       strict_overflow_p);
15507	}
15508      /* MAX where operand 1 is positive is positive.  */
15509      else if (tree_expr_nonzero_warnv_p (op1,
15510					  &sub_strict_overflow_p)
15511	       && tree_expr_nonnegative_warnv_p (op1,
15512						 &sub_strict_overflow_p))
15513	{
15514	  if (sub_strict_overflow_p)
15515	    *strict_overflow_p = true;
15516	  return true;
15517	}
15518      break;
15519
15520    case BIT_IOR_EXPR:
15521      return (tree_expr_nonzero_warnv_p (op1,
15522					 strict_overflow_p)
15523	      || tree_expr_nonzero_warnv_p (op0,
15524					    strict_overflow_p));
15525
15526    default:
15527      break;
15528  }
15529
15530  return false;
15531}
15532
/* Return true when T is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.c.
15536
15537   If the return value is based on the assumption that signed overflow
15538   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15539   change *STRICT_OVERFLOW_P.  */
15540
15541bool
15542tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15543{
15544  bool sub_strict_overflow_p;
15545  switch (TREE_CODE (t))
15546    {
15547    case INTEGER_CST:
15548      return !integer_zerop (t);
15549
15550    case ADDR_EXPR:
15551      {
15552	tree base = get_base_address (TREE_OPERAND (t, 0));
15553
15554	if (!base)
15555	  return false;
15556
	/* Weak declarations may link to NULL.  Other things may also be
	   NULL, so protect with -fdelete-null-pointer-checks; but
	   variables allocated on the stack can never be NULL.  */
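	/* For example (illustrative):

	     extern int v __attribute__ ((weak));

	   here &v compares equal to NULL when no definition of "v" is
	   linked in, so it cannot be assumed nonzero.  */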
15560	if (DECL_P (base)
15561	    && (flag_delete_null_pointer_checks
15562		|| (TREE_CODE (base) == VAR_DECL && !TREE_STATIC (base))))
15563	  return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
15564
15565	/* Constants are never weak.  */
15566	if (CONSTANT_CLASS_P (base))
15567	  return true;
15568
15569	return false;
15570      }
15571
15572    case COND_EXPR:
15573      sub_strict_overflow_p = false;
15574      if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15575				     &sub_strict_overflow_p)
15576	  && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15577					&sub_strict_overflow_p))
15578	{
15579	  if (sub_strict_overflow_p)
15580	    *strict_overflow_p = true;
15581	  return true;
15582	}
15583      break;
15584
15585    default:
15586      break;
15587    }
15588  return false;
15589}
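
/* As an illustrative sketch (not code from this file): given

     extern int x __attribute__ ((weak));
     int y;

   the ADDR_EXPR &y is known nonzero when -fdelete-null-pointer-checks
   is in effect, so this function returns true for it, while &x may
   legitimately resolve to NULL because the symbol is weak, so the
   function returns false.  */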
15590
/* Return true when the expression T is known to be nonzero.  Only
   integral and pointer types are handled; doing something useful for
   floating point would need more work (e.g. ensuring that T is not
   denormal).  Similar logic is present in nonzero_address_p in
   rtlanal.c.
15594
15595   If the return value is based on the assumption that signed overflow
15596   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15597   change *STRICT_OVERFLOW_P.  */
15598
15599bool
15600tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15601{
15602  tree type = TREE_TYPE (t);
15603  enum tree_code code;
15604
15605  /* Doing something useful for floating point would need more work.  */
15606  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
15607    return false;
15608
15609  code = TREE_CODE (t);
15610  switch (TREE_CODE_CLASS (code))
15611    {
15612    case tcc_unary:
15613      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15614					      strict_overflow_p);
15615    case tcc_binary:
15616    case tcc_comparison:
15617      return tree_binary_nonzero_warnv_p (code, type,
15618					       TREE_OPERAND (t, 0),
15619					       TREE_OPERAND (t, 1),
15620					       strict_overflow_p);
15621    case tcc_constant:
15622    case tcc_declaration:
15623    case tcc_reference:
15624      return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15625
15626    default:
15627      break;
15628    }
15629
15630  switch (code)
15631    {
15632    case TRUTH_NOT_EXPR:
15633      return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
15634					      strict_overflow_p);
15635
15636    case TRUTH_AND_EXPR:
15637    case TRUTH_OR_EXPR:
15638    case TRUTH_XOR_EXPR:
15639      return tree_binary_nonzero_warnv_p (code, type,
15640					       TREE_OPERAND (t, 0),
15641					       TREE_OPERAND (t, 1),
15642					       strict_overflow_p);
15643
15644    case COND_EXPR:
15645    case CONSTRUCTOR:
15646    case OBJ_TYPE_REF:
15647    case ASSERT_EXPR:
15648    case ADDR_EXPR:
15649    case WITH_SIZE_EXPR:
15650    case SSA_NAME:
15651      return tree_single_nonzero_warnv_p (t, strict_overflow_p);
15652
15653    case COMPOUND_EXPR:
15654    case MODIFY_EXPR:
15655    case BIND_EXPR:
15656      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15657					strict_overflow_p);
15658
15659    case SAVE_EXPR:
15660      return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
15661					strict_overflow_p);
15662
15663    case CALL_EXPR:
15664      return alloca_call_p (t);
15665
15666    default:
15667      break;
15668    }
15669  return false;
15670}
15671
/* Return true when T is known to be nonzero, warning when that
   conclusion relies on signed overflow being undefined.  */
15674
15675bool
15676tree_expr_nonzero_p (tree t)
15677{
15678  bool ret, strict_overflow_p;
15679
15680  strict_overflow_p = false;
15681  ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
15682  if (strict_overflow_p)
15683    fold_overflow_warning (("assuming signed overflow does not occur when "
15684			    "determining that expression is always "
15685			    "non-zero"),
15686			   WARN_STRICT_OVERFLOW_MISC);
15687  return ret;
15688}
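
/* A hypothetical usage sketch (assumed, not from this file): a caller
   that wants to fold X % Y to X - (X / Y) * Y only when Y cannot be
   zero might guard the transformation with

     if (tree_expr_nonzero_p (op1))
       ... perform the transformation ...

   and rely on this function to emit the -Wstrict-overflow warning when
   the nonzero result depends on signed overflow being undefined.  */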
15689
15690/* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15691   attempt to fold the expression to a constant without modifying TYPE,
15692   OP0 or OP1.
15693
   If the expression can be simplified to a constant, return the
   constant.  Otherwise return NULL_TREE.  */
15697
15698tree
15699fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15700{
15701  tree tem = fold_binary (code, type, op0, op1);
15702  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15703}
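
/* For example (an illustrative sketch, not code from this file):

     tree c = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				       build_int_cst (integer_type_node, 2),
				       build_int_cst (integer_type_node, 3));

   yields an INTEGER_CST of value 5, whereas passing a non-constant
   operand such as a VAR_DECL makes the call return NULL_TREE.  */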
15704
15705/* Given the components of a unary expression CODE, TYPE and OP0,
15706   attempt to fold the expression to a constant without modifying
15707   TYPE or OP0.
15708
   If the expression can be simplified to a constant, return the
   constant.  Otherwise return NULL_TREE.  */
15712
15713tree
15714fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15715{
15716  tree tem = fold_unary (code, type, op0);
15717  return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15718}
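
/* Likewise, as a sketch: fold_unary_to_constant (NEGATE_EXPR,
   integer_type_node, build_int_cst (integer_type_node, 7)) yields the
   INTEGER_CST -7, and NULL_TREE for a non-constant operand.  */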
15719
15720/* If EXP represents referencing an element in a constant string
15721   (either via pointer arithmetic or array indexing), return the
15722   tree representing the value accessed, otherwise return NULL.  */
15723
15724tree
15725fold_read_from_constant_string (tree exp)
15726{
15727  if ((TREE_CODE (exp) == INDIRECT_REF
15728       || TREE_CODE (exp) == ARRAY_REF)
15729      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15730    {
15731      tree exp1 = TREE_OPERAND (exp, 0);
15732      tree index;
15733      tree string;
15734      location_t loc = EXPR_LOCATION (exp);
15735
15736      if (TREE_CODE (exp) == INDIRECT_REF)
15737	string = string_constant (exp1, &index);
15738      else
15739	{
15740	  tree low_bound = array_ref_low_bound (exp);
15741	  index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15742
	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid problems with
	     constant folding.  (E.g. suppose the lower bound is 1 and
	     its mode is QI.  Without the conversion, (ARRAY
	     + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (- (unsigned char) 1))
	     + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */
15750	  if (! integer_zerop (low_bound))
15751	    index = size_diffop_loc (loc, index,
15752				 fold_convert_loc (loc, sizetype, low_bound));
15753
15754	  string = exp1;
15755	}
15756
15757      if (string
15758	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15759	  && TREE_CODE (string) == STRING_CST
15760	  && TREE_CODE (index) == INTEGER_CST
15761	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15762	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15763	      == MODE_INT)
15764	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15765	return build_int_cst_type (TREE_TYPE (exp),
15766				   (TREE_STRING_POINTER (string)
15767				    [TREE_INT_CST_LOW (index)]));
15768    }
15769  return NULL;
15770}
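
/* As a behavioural sketch (not code from this file): the C expression
   "abc"[1] reaches here as an ARRAY_REF of a STRING_CST with index 1
   and folds to the INTEGER_CST 'b' of the element type; a non-constant
   or out-of-range index leaves the access unfolded.  */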
15771
15772/* Return the tree for neg (ARG0) when ARG0 is known to be either
15773   an integer constant, real, or fixed-point constant.
15774
15775   TYPE is the type of the result.  */
15776
15777static tree
15778fold_negate_const (tree arg0, tree type)
15779{
15780  tree t = NULL_TREE;
15781
15782  switch (TREE_CODE (arg0))
15783    {
15784    case INTEGER_CST:
15785      {
15786	unsigned HOST_WIDE_INT low;
15787	HOST_WIDE_INT high;
15788	int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15789				   TREE_INT_CST_HIGH (arg0),
15790				   &low, &high);
15791	t = force_fit_type_double (type, low, high, 1,
15792				   (overflow | TREE_OVERFLOW (arg0))
15793				   && !TYPE_UNSIGNED (type));
15794	break;
15795      }
15796
15797    case REAL_CST:
15798      t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15799      break;
15800
15801    case FIXED_CST:
15802      {
15803        FIXED_VALUE_TYPE f;
15804        bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15805					    &(TREE_FIXED_CST (arg0)), NULL,
15806					    TYPE_SATURATING (type));
15807	t = build_fixed (type, f);
15808	/* Propagate overflow flags.  */
15809	if (overflow_p | TREE_OVERFLOW (arg0))
15810	  TREE_OVERFLOW (t) = 1;
15811	break;
15812      }
15813
15814    default:
15815      gcc_unreachable ();
15816    }
15817
15818  return t;
15819}
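
/* For instance, negating the most negative INTEGER_CST of a signed
   32-bit type wraps back to the same value; neg_double reports the
   overflow and force_fit_type_double sets TREE_OVERFLOW on the result
   (a behavioural sketch, not code from this file).  */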
15820
15821/* Return the tree for abs (ARG0) when ARG0 is known to be either
15822   an integer constant or real constant.
15823
15824   TYPE is the type of the result.  */
15825
15826tree
15827fold_abs_const (tree arg0, tree type)
15828{
15829  tree t = NULL_TREE;
15830
15831  switch (TREE_CODE (arg0))
15832    {
15833    case INTEGER_CST:
15834      /* If the value is unsigned, then the absolute value is
15835	 the same as the ordinary value.  */
15836      if (TYPE_UNSIGNED (type))
15837	t = arg0;
15838      /* Similarly, if the value is non-negative.  */
15839      else if (INT_CST_LT (integer_minus_one_node, arg0))
15840	t = arg0;
15841      /* If the value is negative, then the absolute value is
15842	 its negation.  */
15843      else
15844	{
15845	  unsigned HOST_WIDE_INT low;
15846	  HOST_WIDE_INT high;
15847	  int overflow = neg_double (TREE_INT_CST_LOW (arg0),
15848				     TREE_INT_CST_HIGH (arg0),
15849				     &low, &high);
15850	  t = force_fit_type_double (type, low, high, -1,
15851				     overflow | TREE_OVERFLOW (arg0));
15852	}
15853      break;
15854
15855    case REAL_CST:
15856      if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15857	t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
15858      else
	t = arg0;
15860      break;
15861
15862    default:
15863      gcc_unreachable ();
15864    }
15865
15866  return t;
15867}
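
/* The same caveat applies here: the absolute value of the most
   negative value of a signed type overflows, so the INTEGER_CST
   returned for it carries TREE_OVERFLOW (again a behavioural
   sketch).  */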
15868
15869/* Return the tree for not (ARG0) when ARG0 is known to be an integer
15870   constant.  TYPE is the type of the result.  */
15871
15872static tree
15873fold_not_const (tree arg0, tree type)
15874{
15875  tree t = NULL_TREE;
15876
15877  gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15878
15879  t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
15880			     ~TREE_INT_CST_HIGH (arg0), 0,
15881			     TREE_OVERFLOW (arg0));
15882
15883  return t;
15884}
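
/* For example, fold_not_const on the INTEGER_CST 5 in a signed type
   yields -6, since ~5 == -6 in two's complement; both halves of the
   double-word constant are complemented (an illustrative sketch).  */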
15885
15886/* Given CODE, a relational operator, the target type, TYPE and two
15887   constant operands OP0 and OP1, return the result of the
15888   relational operation.  If the result is not a compile time
15889   constant, then return NULL_TREE.  */
15890
15891static tree
15892fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15893{
15894  int result, invert;
15895
15896  /* From here on, the only cases we handle are when the result is
15897     known to be a constant.  */
15898
15899  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15900    {
15901      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15902      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15903
15904      /* Handle the cases where either operand is a NaN.  */
15905      if (real_isnan (c0) || real_isnan (c1))
15906	{
15907	  switch (code)
15908	    {
15909	    case EQ_EXPR:
15910	    case ORDERED_EXPR:
15911	      result = 0;
15912	      break;
15913
15914	    case NE_EXPR:
15915	    case UNORDERED_EXPR:
15916	    case UNLT_EXPR:
15917	    case UNLE_EXPR:
15918	    case UNGT_EXPR:
15919	    case UNGE_EXPR:
15920	    case UNEQ_EXPR:
	      result = 1;
15922	      break;
15923
15924	    case LT_EXPR:
15925	    case LE_EXPR:
15926	    case GT_EXPR:
15927	    case GE_EXPR:
15928	    case LTGT_EXPR:
15929	      if (flag_trapping_math)
15930		return NULL_TREE;
15931	      result = 0;
15932	      break;
15933
15934	    default:
15935	      gcc_unreachable ();
15936	    }
15937
15938	  return constant_boolean_node (result, type);
15939	}
15940
15941      return constant_boolean_node (real_compare (code, c0, c1), type);
15942    }
15943
15944  if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15945    {
15946      const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15947      const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15948      return constant_boolean_node (fixed_compare (code, c0, c1), type);
15949    }
15950
15951  /* Handle equality/inequality of complex constants.  */
15952  if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15953    {
15954      tree rcond = fold_relational_const (code, type,
15955					  TREE_REALPART (op0),
15956					  TREE_REALPART (op1));
15957      tree icond = fold_relational_const (code, type,
15958					  TREE_IMAGPART (op0),
15959					  TREE_IMAGPART (op1));
15960      if (code == EQ_EXPR)
15961	return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15962      else if (code == NE_EXPR)
15963	return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15964      else
15965	return NULL_TREE;
15966    }
15967
15968  /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15969
15970     To compute GT, swap the arguments and do LT.
15971     To compute GE, do LT and invert the result.
15972     To compute LE, swap the arguments, do LT and invert the result.
15973     To compute NE, do EQ and invert the result.
15974
15975     Therefore, the code below must handle only EQ and LT.  */
15976
15977  if (code == LE_EXPR || code == GT_EXPR)
15978    {
15979      tree tem = op0;
15980      op0 = op1;
15981      op1 = tem;
15982      code = swap_tree_comparison (code);
15983    }
15984
  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */
15987
15988  invert = 0;
15989  if (code == NE_EXPR || code == GE_EXPR)
15990    {
15991      invert = 1;
15992      code = invert_tree_comparison (code, false);
15993    }
15994
  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
15997  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15998    {
15999      if (code == EQ_EXPR)
16000	result = tree_int_cst_equal (op0, op1);
16001      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16002	result = INT_CST_LT_UNSIGNED (op0, op1);
16003      else
16004	result = INT_CST_LT (op0, op1);
16005    }
16006  else
16007    return NULL_TREE;
16008
16009  if (invert)
16010    result ^= 1;
16011  return constant_boolean_node (result, type);
16012}
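
/* Behavioural sketches (not code from this file): comparing a NaN
   REAL_CST against itself folds NE_EXPR to true and EQ_EXPR to false;
   with -ftrapping-math, LT_EXPR on a NaN is left unfolded because the
   comparison may trap.  For INTEGER_CSTs, 2 < 3 folds to the boolean
   constant 1 through the LT path after the rewrites above.  */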
16013
16014/* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16015   indicated TYPE.  If no CLEANUP_POINT_EXPR is necessary, return EXPR
16016   itself.  */
16017
16018tree
16019fold_build_cleanup_point_expr (tree type, tree expr)
16020{
16021  /* If the expression does not have side effects then we don't have to wrap
16022     it with a cleanup point expression.  */
16023  if (!TREE_SIDE_EFFECTS (expr))
16024    return expr;
16025
  /* If the expression is a RETURN_EXPR, check whether the expression
     inside the return has side effects; if it doesn't, or if it is a
     MODIFY_EXPR whose right-hand side has none, we don't need to wrap
     the expression in a cleanup point expression.  Note we don't check
     the left-hand side of the MODIFY_EXPR because it should always be
     the return decl.  */
16031  if (TREE_CODE (expr) == RETURN_EXPR)
16032    {
16033      tree op = TREE_OPERAND (expr, 0);
16034      if (!op || !TREE_SIDE_EFFECTS (op))
16035        return expr;
16036      op = TREE_OPERAND (op, 1);
16037      if (!TREE_SIDE_EFFECTS (op))
16038        return expr;
16039    }
16040
16041  return build1 (CLEANUP_POINT_EXPR, type, expr);
16042}
16043
16044/* Given a pointer value OP0 and a type TYPE, return a simplified version
16045   of an indirection through OP0, or NULL_TREE if no simplification is
16046   possible.  */
16047
16048tree
16049fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16050{
16051  tree sub = op0;
16052  tree subtype;
16053
16054  STRIP_NOPS (sub);
16055  subtype = TREE_TYPE (sub);
16056  if (!POINTER_TYPE_P (subtype))
16057    return NULL_TREE;
16058
16059  if (TREE_CODE (sub) == ADDR_EXPR)
16060    {
16061      tree op = TREE_OPERAND (sub, 0);
16062      tree optype = TREE_TYPE (op);
16063      /* *&CONST_DECL -> to the value of the const decl.  */
16064      if (TREE_CODE (op) == CONST_DECL)
16065	return DECL_INITIAL (op);
16066      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
16067      if (type == optype)
16068	{
16069	  tree fop = fold_read_from_constant_string (op);
16070	  if (fop)
16071	    return fop;
16072	  else
16073	    return op;
16074	}
16075      /* *(foo *)&fooarray => fooarray[0] */
16076      else if (TREE_CODE (optype) == ARRAY_TYPE
16077	       && type == TREE_TYPE (optype)
16078	       && (!in_gimple_form
16079		   || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16080	{
16081	  tree type_domain = TYPE_DOMAIN (optype);
16082	  tree min_val = size_zero_node;
16083	  if (type_domain && TYPE_MIN_VALUE (type_domain))
16084	    min_val = TYPE_MIN_VALUE (type_domain);
16085	  if (in_gimple_form
16086	      && TREE_CODE (min_val) != INTEGER_CST)
16087	    return NULL_TREE;
16088	  op0 = build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
16089	  SET_EXPR_LOCATION (op0, loc);
16090	  return op0;
16091	}
16092      /* *(foo *)&complexfoo => __real__ complexfoo */
16093      else if (TREE_CODE (optype) == COMPLEX_TYPE
16094	       && type == TREE_TYPE (optype))
16095	return fold_build1_loc (loc, REALPART_EXPR, type, op);
16096      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16097      else if (TREE_CODE (optype) == VECTOR_TYPE
16098	       && type == TREE_TYPE (optype))
16099	{
16100	  tree part_width = TYPE_SIZE (type);
16101	  tree index = bitsize_int (0);
16102	  return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16103	}
16104    }
16105
16106  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16107  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16108      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16109    {
16110      tree op00 = TREE_OPERAND (sub, 0);
16111      tree op01 = TREE_OPERAND (sub, 1);
16112      tree op00type;
16113
16114      STRIP_NOPS (op00);
16115      op00type = TREE_TYPE (op00);
16116      if (TREE_CODE (op00) == ADDR_EXPR
16117          && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
16118          && type == TREE_TYPE (TREE_TYPE (op00type)))
16119	{
16120	  HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16121	  tree part_width = TYPE_SIZE (type);
	  unsigned HOST_WIDE_INT part_widthi
	    = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
16123	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16124	  tree index = bitsize_int (indexi);
16125
	  if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
16127	    return fold_build3_loc (loc,
16128				BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
				part_width, index);
	}
    }

16135  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16136  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16137      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16138    {
16139      tree op00 = TREE_OPERAND (sub, 0);
16140      tree op01 = TREE_OPERAND (sub, 1);
16141      tree op00type;
16142
16143      STRIP_NOPS (op00);
16144      op00type = TREE_TYPE (op00);
16145      if (TREE_CODE (op00) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
16147	  && type == TREE_TYPE (TREE_TYPE (op00type)))
16148	{
16149	  tree size = TYPE_SIZE_UNIT (type);
16150	  if (tree_int_cst_equal (size, op01))
16151	    return fold_build1_loc (loc, IMAGPART_EXPR, type,
16152				TREE_OPERAND (op00, 0));
16153	}
16154    }
16155
16156  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16157  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16158      && type == TREE_TYPE (TREE_TYPE (subtype))
16159      && (!in_gimple_form
16160	  || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16161    {
16162      tree type_domain;
16163      tree min_val = size_zero_node;
16164      sub = build_fold_indirect_ref_loc (loc, sub);
16165      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16166      if (type_domain && TYPE_MIN_VALUE (type_domain))
16167	min_val = TYPE_MIN_VALUE (type_domain);
16168      if (in_gimple_form
16169	  && TREE_CODE (min_val) != INTEGER_CST)
16170	return NULL_TREE;
16171      op0 = build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
16172      SET_EXPR_LOCATION (op0, loc);
16173      return op0;
16174    }
16175
16176  return NULL_TREE;
16177}
16178
16179/* Builds an expression for an indirection through T, simplifying some
16180   cases.  */
16181
16182tree
16183build_fold_indirect_ref_loc (location_t loc, tree t)
16184{
16185  tree type = TREE_TYPE (TREE_TYPE (t));
16186  tree sub = fold_indirect_ref_1 (loc, type, t);
16187
16188  if (sub)
16189    return sub;
16190
16191  t = build1 (INDIRECT_REF, type, t);
16192  SET_EXPR_LOCATION (t, loc);
16193  return t;
16194}
16195
16196/* Given an INDIRECT_REF T, return either T or a simplified version.  */
16197
16198tree
16199fold_indirect_ref_loc (location_t loc, tree t)
16200{
16201  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16202
16203  if (sub)
16204    return sub;
16205  else
16206    return t;
16207}
16208
/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as that of the original expression.  */
16212
16213tree
16214fold_ignored_result (tree t)
16215{
16216  if (!TREE_SIDE_EFFECTS (t))
16217    return integer_zero_node;
16218
16219  for (;;)
16220    switch (TREE_CODE_CLASS (TREE_CODE (t)))
16221      {
16222      case tcc_unary:
16223	t = TREE_OPERAND (t, 0);
16224	break;
16225
16226      case tcc_binary:
16227      case tcc_comparison:
16228	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16229	  t = TREE_OPERAND (t, 0);
16230	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16231	  t = TREE_OPERAND (t, 1);
16232	else
16233	  return t;
16234	break;
16235
16236      case tcc_expression:
16237	switch (TREE_CODE (t))
16238	  {
16239	  case COMPOUND_EXPR:
16240	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16241	      return t;
16242	    t = TREE_OPERAND (t, 0);
16243	    break;
16244
16245	  case COND_EXPR:
16246	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16247		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16248	      return t;
16249	    t = TREE_OPERAND (t, 0);
16250	    break;
16251
16252	  default:
16253	    return t;
16254	  }
16255	break;
16256
16257      default:
16258	return t;
16259      }
16260}
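
/* As a sketch of the behaviour: a side-effect-free tree such as a + b
   folds to integer_zero_node outright; x * f () is stripped down to
   the call f () alone; and f () + g () is returned unchanged because
   both operands have side effects.  */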
16261
16262/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16263   This can only be applied to objects of a sizetype.  */
16264
16265tree
16266round_up_loc (location_t loc, tree value, int divisor)
16267{
16268  tree div = NULL_TREE;
16269
16270  gcc_assert (divisor > 0);
16271  if (divisor == 1)
16272    return value;
16273
  /* See if VALUE is already a multiple of DIVISOR.  If so, there is
     nothing to do.  Only perform this test when VALUE is not a
     constant, because for a constant the test is more expensive than
     simply doing the rounding.  */
16278  if (TREE_CODE (value) != INTEGER_CST)
16279    {
16280      div = build_int_cst (TREE_TYPE (value), divisor);
16281
16282      if (multiple_of_p (TREE_TYPE (value), value, div))
16283	return value;
16284    }
16285
16286  /* If divisor is a power of two, simplify this to bit manipulation.  */
16287  if (divisor == (divisor & -divisor))
16288    {
16289      if (TREE_CODE (value) == INTEGER_CST)
16290	{
16291	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (value);
16292	  unsigned HOST_WIDE_INT high;
16293	  bool overflow_p;
16294
16295	  if ((low & (divisor - 1)) == 0)
16296	    return value;
16297
16298	  overflow_p = TREE_OVERFLOW (value);
16299	  high = TREE_INT_CST_HIGH (value);
16300	  low &= ~(divisor - 1);
16301	  low += divisor;
16302	  if (low == 0)
16303	    {
16304	      high++;
16305	      if (high == 0)
16306		overflow_p = true;
16307	    }
16308
16309	  return force_fit_type_double (TREE_TYPE (value), low, high,
16310					-1, overflow_p);
16311	}
16312      else
16313	{
16314	  tree t;
16315
16316	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
16317	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
16318	  t = build_int_cst (TREE_TYPE (value), -divisor);
16319	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16320	}
16321    }
16322  else
16323    {
16324      if (!div)
16325	div = build_int_cst (TREE_TYPE (value), divisor);
16326      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16327      value = size_binop_loc (loc, MULT_EXPR, value, div);
16328    }
16329
16330  return value;
16331}
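
/* For a power-of-two divisor this is the usual bit trick: rounding
   VALUE up to a multiple of 8 computes (VALUE + 7) & -8, so e.g. 13
   becomes (13 + 7) & ~7 == 16 (a worked sketch, not code from this
   file).  */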
16332
16333/* Likewise, but round down.  */
16334
16335tree
16336round_down_loc (location_t loc, tree value, int divisor)
16337{
16338  tree div = NULL_TREE;
16339
16340  gcc_assert (divisor > 0);
16341  if (divisor == 1)
16342    return value;
16343
  /* See if VALUE is already a multiple of DIVISOR.  If so, there is
     nothing to do.  Only perform this test when VALUE is not a
     constant, because for a constant the test is more expensive than
     simply doing the rounding.  */
16348  if (TREE_CODE (value) != INTEGER_CST)
16349    {
16350      div = build_int_cst (TREE_TYPE (value), divisor);
16351
16352      if (multiple_of_p (TREE_TYPE (value), value, div))
16353	return value;
16354    }
16355
16356  /* If divisor is a power of two, simplify this to bit manipulation.  */
16357  if (divisor == (divisor & -divisor))
16358    {
16359      tree t;
16360
16361      t = build_int_cst (TREE_TYPE (value), -divisor);
16362      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16363    }
16364  else
16365    {
16366      if (!div)
16367	div = build_int_cst (TREE_TYPE (value), divisor);
16368      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16369      value = size_binop_loc (loc, MULT_EXPR, value, div);
16370    }
16371
16372  return value;
16373}
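
/* Likewise for rounding down: with divisor 8 the expression becomes
   VALUE & -8, so 13 rounds down to 8 (a worked sketch).  */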
16374
/* Returns a pointer to the base of the object addressed by EXP, and
   extracts the offset of the access, storing the constant bit offset
   in *PBITPOS and any variable offset in *POFFSET.  */
16378
16379static tree
16380split_address_to_core_and_offset (tree exp,
16381				  HOST_WIDE_INT *pbitpos, tree *poffset)
16382{
16383  tree core;
16384  enum machine_mode mode;
16385  int unsignedp, volatilep;
16386  HOST_WIDE_INT bitsize;
16387  location_t loc = EXPR_LOCATION (exp);
16388
16389  if (TREE_CODE (exp) == ADDR_EXPR)
16390    {
16391      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16392				  poffset, &mode, &unsignedp, &volatilep,
16393				  false);
16394      core = build_fold_addr_expr_loc (loc, core);
16395    }
16396  else
16397    {
16398      core = exp;
16399      *pbitpos = 0;
16400      *poffset = NULL_TREE;
16401    }
16402
16403  return core;
16404}
16405
/* Returns true if the addresses E1 and E2 differ by a constant amount,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
16408
16409bool
16410ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16411{
16412  tree core1, core2;
16413  HOST_WIDE_INT bitpos1, bitpos2;
16414  tree toffset1, toffset2, tdiff, type;
16415
16416  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16417  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16418
16419  if (bitpos1 % BITS_PER_UNIT != 0
16420      || bitpos2 % BITS_PER_UNIT != 0
16421      || !operand_equal_p (core1, core2, 0))
16422    return false;
16423
16424  if (toffset1 && toffset2)
16425    {
16426      type = TREE_TYPE (toffset1);
16427      if (type != TREE_TYPE (toffset2))
16428	toffset2 = fold_convert (type, toffset2);
16429
16430      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16431      if (!cst_and_fits_in_hwi (tdiff))
16432	return false;
16433
16434      *diff = int_cst_value (tdiff);
16435    }
16436  else if (toffset1 || toffset2)
16437    {
16438      /* If only one of the offsets is non-constant, the difference cannot
16439	 be a constant.  */
16440      return false;
16441    }
16442  else
16443    *diff = 0;
16444
16445  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16446  return true;
16447}
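
/* An illustrative sketch (not code from this file): for a char array
   A, calling ptr_difference_const on &A[5] and &A[2] finds the common
   core &A with constant bit positions 40 and 16, stores
   (40 - 16) / 8 == 3 in *DIFF and returns true; if only one address
   has a variable offset part, it returns false.  */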
16448
16449/* Simplify the floating point expression EXP when the sign of the
16450   result is not significant.  Return NULL_TREE if no simplification
16451   is possible.  */
16452
16453tree
16454fold_strip_sign_ops (tree exp)
16455{
16456  tree arg0, arg1;
16457  location_t loc = EXPR_LOCATION (exp);
16458
16459  switch (TREE_CODE (exp))
16460    {
16461    case ABS_EXPR:
16462    case NEGATE_EXPR:
16463      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16464      return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16465
16466    case MULT_EXPR:
16467    case RDIV_EXPR:
16468      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
16469	return NULL_TREE;
16470      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16471      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16472      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16473	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16474			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
16475			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
16476      break;
16477
16478    case COMPOUND_EXPR:
16479      arg0 = TREE_OPERAND (exp, 0);
16480      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16481      if (arg1)
16482	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16483      break;
16484
16485    case COND_EXPR:
16486      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16487      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16488      if (arg0 || arg1)
16489	return fold_build3_loc (loc,
16490			    COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16491			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
16492			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
16493      break;
16494
16495    case CALL_EXPR:
16496      {
16497	const enum built_in_function fcode = builtin_mathfn_code (exp);
16498	switch (fcode)
16499	{
16500	CASE_FLT_FN (BUILT_IN_COPYSIGN):
	  /* Strip the copysign function call and return its first
	     argument, preserving any side effects of the second.  */
16502	  arg0 = CALL_EXPR_ARG (exp, 0);
16503	  arg1 = CALL_EXPR_ARG (exp, 1);
16504	  return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16505
16506	default:
16507	  /* Strip sign ops from the argument of "odd" math functions.  */
16508	  if (negate_mathfn_p (fcode))
16509            {
16510	      arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16511	      if (arg0)
16512		return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16513	    }
16514	  break;
16515	}
16516      }
16517      break;
16518
16519    default:
16520      break;
16521    }
16522  return NULL_TREE;
16523}
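
/* For example (a sketch, not code from this file): when only the
   magnitude of the result matters, as for the argument of cos, the
   tree -X * Y is rewritten to X * Y, and a call to copysign (X, Y) is
   replaced by X while keeping Y for its side effects via
   omit_one_operand_loc.  */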
16524