1/****************************************************************************
2 *                                                                          *
3 *                         GNAT COMPILER COMPONENTS                         *
4 *                                                                          *
5 *                               U T I L S 2                                *
6 *                                                                          *
7 *                          C Implementation File                           *
8 *                                                                          *
9 *          Copyright (C) 1992-2015, Free Software Foundation, Inc.         *
10 *                                                                          *
11 * GNAT is free software;  you can  redistribute it  and/or modify it under *
12 * terms of the  GNU General Public License as published  by the Free Soft- *
13 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
14 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
17 * for  more details.  You should have received a copy of the GNU General   *
18 * Public License along with GCC; see the file COPYING3.  If not see        *
19 * <http://www.gnu.org/licenses/>.                                          *
20 *                                                                          *
21 * GNAT was originally developed  by the GNAT team at  New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc.      *
23 *                                                                          *
24 ****************************************************************************/
25
26#include "config.h"
27#include "system.h"
28#include "coretypes.h"
29#include "tm.h"
30#include "hash-set.h"
31#include "machmode.h"
32#include "vec.h"
33#include "double-int.h"
34#include "input.h"
35#include "alias.h"
36#include "symtab.h"
37#include "wide-int.h"
38#include "inchash.h"
39#include "tree.h"
40#include "fold-const.h"
41#include "stor-layout.h"
42#include "stringpool.h"
43#include "varasm.h"
44#include "flags.h"
45#include "toplev.h"
46#include "ggc.h"
47#include "tree-inline.h"
48
49#include "ada.h"
50#include "types.h"
51#include "atree.h"
52#include "elists.h"
53#include "namet.h"
54#include "nlists.h"
55#include "snames.h"
56#include "stringt.h"
57#include "uintp.h"
58#include "fe.h"
59#include "sinfo.h"
60#include "einfo.h"
61#include "ada-tree.h"
62#include "gigi.h"
63
64/* Return the base type of TYPE.  */
65
66tree
67get_base_type (tree type)
68{
69  if (TREE_CODE (type) == RECORD_TYPE
70      && TYPE_JUSTIFIED_MODULAR_P (type))
71    type = TREE_TYPE (TYPE_FIELDS (type));
72
73  while (TREE_TYPE (type)
74	 && (TREE_CODE (type) == INTEGER_TYPE
75	     || TREE_CODE (type) == REAL_TYPE))
76    type = TREE_TYPE (type);
77
78  return type;
79}
80
81/* EXP is a GCC tree representing an address.  See if we can find how
82   strictly the object at that address is aligned.   Return that alignment
83   in bits.  If we don't know anything about the alignment, return 0.  */
84
85unsigned int
86known_alignment (tree exp)
87{
88  unsigned int this_alignment;
89  unsigned int lhs, rhs;
90
91  switch (TREE_CODE (exp))
92    {
93    CASE_CONVERT:
94    case VIEW_CONVERT_EXPR:
95    case NON_LVALUE_EXPR:
96      /* Conversions between pointers and integers don't change the alignment
97	 of the underlying object.  */
98      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
99      break;
100
101    case COMPOUND_EXPR:
102      /* The value of a COMPOUND_EXPR is that of it's second operand.  */
103      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
104      break;
105
106    case PLUS_EXPR:
107    case MINUS_EXPR:
108      /* If two address are added, the alignment of the result is the
109	 minimum of the two alignments.  */
110      lhs = known_alignment (TREE_OPERAND (exp, 0));
111      rhs = known_alignment (TREE_OPERAND (exp, 1));
112      this_alignment = MIN (lhs, rhs);
113      break;
114
115    case POINTER_PLUS_EXPR:
116      lhs = known_alignment (TREE_OPERAND (exp, 0));
117      rhs = known_alignment (TREE_OPERAND (exp, 1));
118      /* If we don't know the alignment of the offset, we assume that
119	 of the base.  */
120      if (rhs == 0)
121	this_alignment = lhs;
122      else
123	this_alignment = MIN (lhs, rhs);
124      break;
125
126    case COND_EXPR:
127      /* If there is a choice between two values, use the smallest one.  */
128      lhs = known_alignment (TREE_OPERAND (exp, 1));
129      rhs = known_alignment (TREE_OPERAND (exp, 2));
130      this_alignment = MIN (lhs, rhs);
131      break;
132
133    case INTEGER_CST:
134      {
135	unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
136	/* The first part of this represents the lowest bit in the constant,
137	   but it is originally in bytes, not bits.  */
138	this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
139      }
140      break;
141
142    case MULT_EXPR:
143      /* If we know the alignment of just one side, use it.  Otherwise,
144	 use the product of the alignments.  */
145      lhs = known_alignment (TREE_OPERAND (exp, 0));
146      rhs = known_alignment (TREE_OPERAND (exp, 1));
147
148      if (lhs == 0)
149	this_alignment = rhs;
150      else if (rhs == 0)
151	this_alignment = lhs;
152      else
153	this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
154      break;
155
156    case BIT_AND_EXPR:
157      /* A bit-and expression is as aligned as the maximum alignment of the
158	 operands.  We typically get here for a complex lhs and a constant
159	 negative power of two on the rhs to force an explicit alignment, so
160	 don't bother looking at the lhs.  */
161      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
162      break;
163
164    case ADDR_EXPR:
165      this_alignment = expr_align (TREE_OPERAND (exp, 0));
166      break;
167
168    case CALL_EXPR:
169      {
170	tree t = maybe_inline_call_in_expr (exp);
171	if (t)
172	  return known_alignment (t);
173      }
174
175      /* Fall through... */
176
177    default:
178      /* For other pointer expressions, we assume that the pointed-to object
179	 is at least as aligned as the pointed-to type.  Beware that we can
180	 have a dummy type here (e.g. a Taft Amendment type), for which the
181	 alignment is meaningless and should be ignored.  */
182      if (POINTER_TYPE_P (TREE_TYPE (exp))
183	  && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
184	this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
185      else
186	this_alignment = 0;
187      break;
188    }
189
190  return this_alignment;
191}
192
193/* We have a comparison or assignment operation on two types, T1 and T2, which
194   are either both array types or both record types.  T1 is assumed to be for
195   the left hand side operand, and T2 for the right hand side.  Return the
196   type that both operands should be converted to for the operation, if any.
197   Otherwise return zero.  */
198
199static tree
200find_common_type (tree t1, tree t2)
201{
202  /* ??? As of today, various constructs lead to here with types of different
203     sizes even when both constants (e.g. tagged types, packable vs regular
204     component types, padded vs unpadded types, ...).  While some of these
205     would better be handled upstream (types should be made consistent before
206     calling into build_binary_op), some others are really expected and we
207     have to be careful.  */
208
209  /* We must avoid writing more than what the target can hold if this is for
210     an assignment and the case of tagged types is handled in build_binary_op
211     so we use the lhs type if it is known to be smaller or of constant size
212     and the rhs type is not, whatever the modes.  We also force t1 in case of
213     constant size equality to minimize occurrences of view conversions on the
214     lhs of an assignment, except for the case of record types with a variant
215     part on the lhs but not on the rhs to make the conversion simpler.  */
216  if (TREE_CONSTANT (TYPE_SIZE (t1))
217      && (!TREE_CONSTANT (TYPE_SIZE (t2))
218	  || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
219	  || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
220	      && !(TREE_CODE (t1) == RECORD_TYPE
221		   && TREE_CODE (t2) == RECORD_TYPE
222		   && get_variant_part (t1) != NULL_TREE
223		   && get_variant_part (t2) == NULL_TREE))))
224    return t1;
225
226  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
227     that we will not have any alignment problems since, if we did, the
228     non-BLKmode type could not have been used.  */
229  if (TYPE_MODE (t1) != BLKmode)
230    return t1;
231
232  /* If the rhs type is of constant size, use it whatever the modes.  At
233     this point it is known to be smaller, or of constant size and the
234     lhs type is not.  */
235  if (TREE_CONSTANT (TYPE_SIZE (t2)))
236    return t2;
237
238  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
239  if (TYPE_MODE (t2) != BLKmode)
240    return t2;
241
242  /* In this case, both types have variable size and BLKmode.  It's
243     probably best to leave the "type mismatch" because changing it
244     could cause a bad self-referential reference.  */
245  return NULL_TREE;
246}
247
248/* Return an expression tree representing an equality comparison of A1 and A2,
249   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.
250
251   Two arrays are equal in one of two ways: (1) if both have zero length in
252   some dimension (not necessarily the same dimension) or (2) if the lengths
253   in each dimension are equal and the data is equal.  We perform the length
254   tests in as efficient a manner as possible.  */
255
256static tree
257compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
258{
259  tree result = convert (result_type, boolean_true_node);
260  tree a1_is_null = convert (result_type, boolean_false_node);
261  tree a2_is_null = convert (result_type, boolean_false_node);
262  tree t1 = TREE_TYPE (a1);
263  tree t2 = TREE_TYPE (a2);
264  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
265  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
266  bool length_zero_p = false;
267
268  /* If the operands have side-effects, they need to be evaluated only once
269     in spite of the multiple references in the comparison.  */
270  if (a1_side_effects_p)
271    a1 = gnat_protect_expr (a1);
272
273  if (a2_side_effects_p)
274    a2 = gnat_protect_expr (a2);
275
276  /* Process each dimension separately and compare the lengths.  If any
277     dimension has a length known to be zero, set LENGTH_ZERO_P to true
278     in order to suppress the comparison of the data at the end.  */
279  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
280    {
281      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
282      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
283      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
284      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
285      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
286				 size_one_node);
287      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
288				 size_one_node);
289      tree comparison, this_a1_is_null, this_a2_is_null;
290
291      /* If the length of the first array is a constant, swap our operands
292	 unless the length of the second array is the constant zero.  */
293      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
294	{
295	  tree tem;
296	  bool btem;
297
298	  tem = a1, a1 = a2, a2 = tem;
299	  tem = t1, t1 = t2, t2 = tem;
300	  tem = lb1, lb1 = lb2, lb2 = tem;
301	  tem = ub1, ub1 = ub2, ub2 = tem;
302	  tem = length1, length1 = length2, length2 = tem;
303	  tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
304	  btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
305	  a2_side_effects_p = btem;
306	}
307
308      /* If the length of the second array is the constant zero, we can just
309	 use the original stored bounds for the first array and see whether
310	 last < first holds.  */
311      if (integer_zerop (length2))
312	{
313	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
314
315	  length_zero_p = true;
316
317	  ub1
318	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
319	  lb1
320	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
321
322	  comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
323	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
324	  if (EXPR_P (comparison))
325	    SET_EXPR_LOCATION (comparison, loc);
326
327	  this_a1_is_null = comparison;
328	  this_a2_is_null = convert (result_type, boolean_true_node);
329	}
330
331      /* Otherwise, if the length is some other constant value, we know that
332	 this dimension in the second array cannot be superflat, so we can
333	 just use its length computed from the actual stored bounds.  */
334      else if (TREE_CODE (length2) == INTEGER_CST)
335	{
336	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
337
338	  ub1
339	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
340	  lb1
341	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
342	  /* Note that we know that UB2 and LB2 are constant and hence
343	     cannot contain a PLACEHOLDER_EXPR.  */
344	  ub2
345	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
346	  lb2
347	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
348
349	  comparison
350	    = fold_build2_loc (loc, EQ_EXPR, result_type,
351			       build_binary_op (MINUS_EXPR, b, ub1, lb1),
352			       build_binary_op (MINUS_EXPR, b, ub2, lb2));
353	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
354	  if (EXPR_P (comparison))
355	    SET_EXPR_LOCATION (comparison, loc);
356
357	  this_a1_is_null
358	    = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
359
360	  this_a2_is_null = convert (result_type, boolean_false_node);
361	}
362
363      /* Otherwise, compare the computed lengths.  */
364      else
365	{
366	  length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
367	  length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
368
369	  comparison
370	    = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
371
372	  /* If the length expression is of the form (cond ? val : 0), assume
373	     that cond is equivalent to (length != 0).  That's guaranteed by
374	     construction of the array types in gnat_to_gnu_entity.  */
375	  if (TREE_CODE (length1) == COND_EXPR
376	      && integer_zerop (TREE_OPERAND (length1, 2)))
377	    this_a1_is_null
378	      = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
379	  else
380	    this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
381					       length1, size_zero_node);
382
383	  /* Likewise for the second array.  */
384	  if (TREE_CODE (length2) == COND_EXPR
385	      && integer_zerop (TREE_OPERAND (length2, 2)))
386	    this_a2_is_null
387	      = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
388	  else
389	    this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
390					       length2, size_zero_node);
391	}
392
393      /* Append expressions for this dimension to the final expressions.  */
394      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
395				result, comparison);
396
397      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
398				    this_a1_is_null, a1_is_null);
399
400      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
401				    this_a2_is_null, a2_is_null);
402
403      t1 = TREE_TYPE (t1);
404      t2 = TREE_TYPE (t2);
405    }
406
407  /* Unless the length of some dimension is known to be zero, compare the
408     data in the array.  */
409  if (!length_zero_p)
410    {
411      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
412      tree comparison;
413
414      if (type)
415	{
416	  a1 = convert (type, a1),
417	  a2 = convert (type, a2);
418	}
419
420      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
421
422      result
423	= build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
424    }
425
426  /* The result is also true if both sizes are zero.  */
427  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
428			    build_binary_op (TRUTH_ANDIF_EXPR, result_type,
429					     a1_is_null, a2_is_null),
430			    result);
431
432  /* If the operands have side-effects, they need to be evaluated before
433     doing the tests above since the place they otherwise would end up
434     being evaluated at run time could be wrong.  */
435  if (a1_side_effects_p)
436    result = build2 (COMPOUND_EXPR, result_type, a1, result);
437
438  if (a2_side_effects_p)
439    result = build2 (COMPOUND_EXPR, result_type, a2, result);
440
441  return result;
442}
443
444/* Return an expression tree representing an equality comparison of P1 and P2,
445   two objects of fat pointer type.  The result should be of type RESULT_TYPE.
446
447   Two fat pointers are equal in one of two ways: (1) if both have a null
448   pointer to the array or (2) if they contain the same couple of pointers.
449   We perform the comparison in as efficient a manner as possible.  */
450
451static tree
452compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
453{
454  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
455  tree p1_array_is_null, p2_array_is_null;
456
457  /* If either operand has side-effects, they have to be evaluated only once
458     in spite of the multiple references to the operand in the comparison.  */
459  p1 = gnat_protect_expr (p1);
460  p2 = gnat_protect_expr (p2);
461
462  /* The constant folder doesn't fold fat pointer types so we do it here.  */
463  if (TREE_CODE (p1) == CONSTRUCTOR)
464    p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
465  else
466    p1_array = build_component_ref (p1, NULL_TREE,
467				    TYPE_FIELDS (TREE_TYPE (p1)), true);
468
469  p1_array_is_null
470    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
471		       fold_convert_loc (loc, TREE_TYPE (p1_array),
472					 null_pointer_node));
473
474  if (TREE_CODE (p2) == CONSTRUCTOR)
475    p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
476  else
477    p2_array = build_component_ref (p2, NULL_TREE,
478				    TYPE_FIELDS (TREE_TYPE (p2)), true);
479
480  p2_array_is_null
481    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
482		       fold_convert_loc (loc, TREE_TYPE (p2_array),
483					 null_pointer_node));
484
485  /* If one of the pointers to the array is null, just compare the other.  */
486  if (integer_zerop (p1_array))
487    return p2_array_is_null;
488  else if (integer_zerop (p2_array))
489    return p1_array_is_null;
490
491  /* Otherwise, do the fully-fledged comparison.  */
492  same_array
493    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
494
495  if (TREE_CODE (p1) == CONSTRUCTOR)
496    p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
497  else
498    p1_bounds
499      = build_component_ref (p1, NULL_TREE,
500			     DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);
501
502  if (TREE_CODE (p2) == CONSTRUCTOR)
503    p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
504  else
505    p2_bounds
506      = build_component_ref (p2, NULL_TREE,
507			     DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);
508
509  same_bounds
510    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
511
512  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
513  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
514			  build_binary_op (TRUTH_ORIF_EXPR, result_type,
515					   p1_array_is_null, same_bounds));
516}
517
518/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
519   type TYPE.  We know that TYPE is a modular type with a nonbinary
520   modulus.  */
521
522static tree
523nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
524                             tree rhs)
525{
526  tree modulus = TYPE_MODULUS (type);
527  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
528  unsigned int precision;
529  bool unsignedp = true;
530  tree op_type = type;
531  tree result;
532
533  /* If this is an addition of a constant, convert it to a subtraction
534     of a constant since we can do that faster.  */
535  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
536    {
537      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
538      op_code = MINUS_EXPR;
539    }
540
541  /* For the logical operations, we only need PRECISION bits.  For
542     addition and subtraction, we need one more and for multiplication we
543     need twice as many.  But we never want to make a size smaller than
544     our size. */
545  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
546    needed_precision += 1;
547  else if (op_code == MULT_EXPR)
548    needed_precision *= 2;
549
550  precision = MAX (needed_precision, TYPE_PRECISION (op_type));
551
552  /* Unsigned will do for everything but subtraction.  */
553  if (op_code == MINUS_EXPR)
554    unsignedp = false;
555
556  /* If our type is the wrong signedness or isn't wide enough, make a new
557     type and convert both our operands to it.  */
558  if (TYPE_PRECISION (op_type) < precision
559      || TYPE_UNSIGNED (op_type) != unsignedp)
560    {
561      /* Copy the node so we ensure it can be modified to make it modular.  */
562      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
563      modulus = convert (op_type, modulus);
564      SET_TYPE_MODULUS (op_type, modulus);
565      TYPE_MODULAR_P (op_type) = 1;
566      lhs = convert (op_type, lhs);
567      rhs = convert (op_type, rhs);
568    }
569
570  /* Do the operation, then we'll fix it up.  */
571  result = fold_build2 (op_code, op_type, lhs, rhs);
572
573  /* For multiplication, we have no choice but to do a full modulus
574     operation.  However, we want to do this in the narrowest
575     possible size.  */
576  if (op_code == MULT_EXPR)
577    {
578      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
579      modulus = convert (div_type, modulus);
580      SET_TYPE_MODULUS (div_type, modulus);
581      TYPE_MODULAR_P (div_type) = 1;
582      result = convert (op_type,
583			fold_build2 (TRUNC_MOD_EXPR, div_type,
584				     convert (div_type, result), modulus));
585    }
586
587  /* For subtraction, add the modulus back if we are negative.  */
588  else if (op_code == MINUS_EXPR)
589    {
590      result = gnat_protect_expr (result);
591      result = fold_build3 (COND_EXPR, op_type,
592			    fold_build2 (LT_EXPR, boolean_type_node, result,
593					 convert (op_type, integer_zero_node)),
594			    fold_build2 (PLUS_EXPR, op_type, result, modulus),
595			    result);
596    }
597
598  /* For the other operations, subtract the modulus if we are >= it.  */
599  else
600    {
601      result = gnat_protect_expr (result);
602      result = fold_build3 (COND_EXPR, op_type,
603			    fold_build2 (GE_EXPR, boolean_type_node,
604					 result, modulus),
605			    fold_build2 (MINUS_EXPR, op_type,
606					 result, modulus),
607			    result);
608    }
609
610  return convert (type, result);
611}
612
613/* This page contains routines that implement the Ada semantics with regard
614   to atomic objects.  They are fully piggybacked on the middle-end support
615   for atomic loads and stores.
616
617   *** Memory barriers and volatile objects ***
618
619   We implement the weakened form of the C.6(16) clause that was introduced
620   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
621   implementable without significant performance hits on modern platforms.
622
623   We also take advantage of the requirements imposed on shared variables by
624   9.10 (conditions for sequential actions) to have non-erroneous execution
625   and consider that C.6(16) and C.6(17) only prescribe an uniform order of
626   volatile updates with regard to sequential actions, i.e. with regard to
627   reads or updates of atomic objects.
628
629   As such, an update of an atomic object by a task requires that all earlier
630   accesses to volatile objects have completed.  Similarly, later accesses to
631   volatile objects cannot be reordered before the update of the atomic object.
632   So, memory barriers both before and after the atomic update are needed.
633
634   For a read of an atomic object, to avoid seeing writes of volatile objects
635   by a task earlier than by the other tasks, a memory barrier is needed before
636   the atomic read.  Finally, to avoid reordering later reads or updates of
637   volatile objects to before the atomic read, a barrier is needed after the
638   atomic read.
639
640   So, memory barriers are needed before and after atomic reads and updates.
641   And, in order to simplify the implementation, we use full memory barriers
642   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
643
644/* Return the size of TYPE, which must be a positive power of 2.  */
645
646static unsigned int
647resolve_atomic_size (tree type)
648{
649  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
650
651  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
652    return size;
653
654  /* We shouldn't reach here without having already detected that the size
655     isn't compatible with an atomic access.  */
656  gcc_assert (Serious_Errors_Detected);
657
658  return 0;
659}
660
661/* Build an atomic load for the underlying atomic object in SRC.  */
662
663tree
664build_atomic_load (tree src)
665{
666  tree ptr_type
667    = build_pointer_type
668      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
669  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
670  tree orig_src = src;
671  tree t, addr, val;
672  unsigned int size;
673  int fncode;
674
675  /* Remove conversions to get the address of the underlying object.  */
676  src = remove_conversions (src, false);
677  size = resolve_atomic_size (TREE_TYPE (src));
678  if (size == 0)
679    return orig_src;
680
681  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
682  t = builtin_decl_implicit ((enum built_in_function) fncode);
683
684  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
685  val = build_call_expr (t, 2, addr, mem_model);
686
687  /* First reinterpret the loaded bits in the original type of the load,
688     then convert to the expected result type.  */
689  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
690  return convert (TREE_TYPE (orig_src), t);
691}
692
693/* Build an atomic store from SRC to the underlying atomic object in DEST.  */
694
695tree
696build_atomic_store (tree dest, tree src)
697{
698  tree ptr_type
699    = build_pointer_type
700      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
701  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
702  tree orig_dest = dest;
703  tree t, int_type, addr;
704  unsigned int size;
705  int fncode;
706
707  /* Remove conversions to get the address of the underlying object.  */
708  dest = remove_conversions (dest, false);
709  size = resolve_atomic_size (TREE_TYPE (dest));
710  if (size == 0)
711    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
712
713  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
714  t = builtin_decl_implicit ((enum built_in_function) fncode);
715  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
716
717  /* First convert the bits to be stored to the original type of the store,
718     then reinterpret them in the effective type.  But if the original type
719     is a padded type with the same size, convert to the inner type instead,
720     as we don't want to artificially introduce a CONSTRUCTOR here.  */
721  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
722      && TYPE_SIZE (TREE_TYPE (dest))
723	 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
724    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
725  else
726    src = convert (TREE_TYPE (dest), src);
727  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
728  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
729
730  return build_call_expr (t, 3, addr, src, mem_model);
731}
732
733/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
734   desired for the result.  Usually the operation is to be performed
735   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
736   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
737   case the type to be used will be derived from the operands.
738
739   This function is very much unlike the ones for C and C++ since we
740   have already done any type conversion and matching required.  All we
741   have to do here is validate the work done by SEM and handle subtypes.  */
742
743tree
744build_binary_op (enum tree_code op_code, tree result_type,
745                 tree left_operand, tree right_operand)
746{
747  tree left_type  = TREE_TYPE (left_operand);
748  tree right_type = TREE_TYPE (right_operand);
749  tree left_base_type = get_base_type (left_type);
750  tree right_base_type = get_base_type (right_type);
751  tree operation_type = result_type;
752  tree best_type = NULL_TREE;
753  tree modulus, result;
754  bool has_side_effects = false;
755
756  if (operation_type
757      && TREE_CODE (operation_type) == RECORD_TYPE
758      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
759    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
760
761  if (operation_type
762      && TREE_CODE (operation_type) == INTEGER_TYPE
763      && TYPE_EXTRA_SUBTYPE_P (operation_type))
764    operation_type = get_base_type (operation_type);
765
766  modulus = (operation_type
767	     && TREE_CODE (operation_type) == INTEGER_TYPE
768	     && TYPE_MODULAR_P (operation_type)
769	     ? TYPE_MODULUS (operation_type) : NULL_TREE);
770
771  switch (op_code)
772    {
773    case INIT_EXPR:
774    case MODIFY_EXPR:
775#ifdef ENABLE_CHECKING
776      gcc_assert (result_type == NULL_TREE);
777#endif
778      /* If there were integral or pointer conversions on the LHS, remove
779	 them; we'll be putting them back below if needed.  Likewise for
780	 conversions between array and record types, except for justified
781	 modular types.  But don't do this if the right operand is not
782	 BLKmode (for packed arrays) unless we are not changing the mode.  */
783      while ((CONVERT_EXPR_P (left_operand)
784	      || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
785	     && (((INTEGRAL_TYPE_P (left_type)
786		   || POINTER_TYPE_P (left_type))
787		  && (INTEGRAL_TYPE_P (TREE_TYPE
788				       (TREE_OPERAND (left_operand, 0)))
789		      || POINTER_TYPE_P (TREE_TYPE
790					 (TREE_OPERAND (left_operand, 0)))))
791		 || (((TREE_CODE (left_type) == RECORD_TYPE
792		       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
793		      || TREE_CODE (left_type) == ARRAY_TYPE)
794		     && ((TREE_CODE (TREE_TYPE
795				     (TREE_OPERAND (left_operand, 0)))
796			  == RECORD_TYPE)
797			 || (TREE_CODE (TREE_TYPE
798					(TREE_OPERAND (left_operand, 0)))
799			     == ARRAY_TYPE))
800		     && (TYPE_MODE (right_type) == BLKmode
801			 || (TYPE_MODE (left_type)
802			     == TYPE_MODE (TREE_TYPE
803					   (TREE_OPERAND
804					    (left_operand, 0))))))))
805	{
806	  left_operand = TREE_OPERAND (left_operand, 0);
807	  left_type = TREE_TYPE (left_operand);
808	}
809
810      /* If a class-wide type may be involved, force use of the RHS type.  */
811      if ((TREE_CODE (right_type) == RECORD_TYPE
812	   || TREE_CODE (right_type) == UNION_TYPE)
813	  && TYPE_ALIGN_OK (right_type))
814	operation_type = right_type;
815
816      /* If we are copying between padded objects with compatible types, use
817	 the padded view of the objects, this is very likely more efficient.
818	 Likewise for a padded object that is assigned a constructor, if we
819	 can convert the constructor to the inner type, to avoid putting a
820	 VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
821	 actually copied anything.  */
822      else if (TYPE_IS_PADDING_P (left_type)
823	       && TREE_CONSTANT (TYPE_SIZE (left_type))
824	       && ((TREE_CODE (right_operand) == COMPONENT_REF
825		    && TYPE_MAIN_VARIANT (left_type)
826		       == TYPE_MAIN_VARIANT
827			  (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
828		   || (TREE_CODE (right_operand) == CONSTRUCTOR
829		       && !CONTAINS_PLACEHOLDER_P
830			   (DECL_SIZE (TYPE_FIELDS (left_type)))))
831	       && !integer_zerop (TYPE_SIZE (right_type)))
832	{
833	  /* We make an exception for a BLKmode type padding a non-BLKmode
834	     inner type and do the conversion of the LHS right away, since
835	     unchecked_convert wouldn't do it properly.  */
836	  if (TYPE_MODE (left_type) == BLKmode
837	      && TYPE_MODE (right_type) != BLKmode
838	      && TREE_CODE (right_operand) != CONSTRUCTOR)
839	    {
840	      operation_type = right_type;
841	      left_operand = convert (operation_type, left_operand);
842	      left_type = operation_type;
843	    }
844	  else
845	    operation_type = left_type;
846	}
847
848      /* If we have a call to a function that returns an unconstrained type
849	 with default discriminant on the RHS, use the RHS type (which is
850	 padded) as we cannot compute the size of the actual assignment.  */
851      else if (TREE_CODE (right_operand) == CALL_EXPR
852	       && TYPE_IS_PADDING_P (right_type)
853	       && CONTAINS_PLACEHOLDER_P
854		  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
855	operation_type = right_type;
856
857      /* Find the best type to use for copying between aggregate types.  */
858      else if (((TREE_CODE (left_type) == ARRAY_TYPE
859		 && TREE_CODE (right_type) == ARRAY_TYPE)
860		|| (TREE_CODE (left_type) == RECORD_TYPE
861		    && TREE_CODE (right_type) == RECORD_TYPE))
862	       && (best_type = find_common_type (left_type, right_type)))
863	operation_type = best_type;
864
865      /* Otherwise use the LHS type.  */
866      else
867	operation_type = left_type;
868
869      /* Ensure everything on the LHS is valid.  If we have a field reference,
870	 strip anything that get_inner_reference can handle.  Then remove any
871	 conversions between types having the same code and mode.  And mark
872	 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
873	 either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
874      result = left_operand;
875      while (true)
876	{
877	  tree restype = TREE_TYPE (result);
878
879	  if (TREE_CODE (result) == COMPONENT_REF
880	      || TREE_CODE (result) == ARRAY_REF
881	      || TREE_CODE (result) == ARRAY_RANGE_REF)
882	    while (handled_component_p (result))
883	      result = TREE_OPERAND (result, 0);
884	  else if (TREE_CODE (result) == REALPART_EXPR
885		   || TREE_CODE (result) == IMAGPART_EXPR
886		   || (CONVERT_EXPR_P (result)
887		       && (((TREE_CODE (restype)
888			     == TREE_CODE (TREE_TYPE
889					   (TREE_OPERAND (result, 0))))
890			     && (TYPE_MODE (TREE_TYPE
891					    (TREE_OPERAND (result, 0)))
892				 == TYPE_MODE (restype)))
893			   || TYPE_ALIGN_OK (restype))))
894	    result = TREE_OPERAND (result, 0);
895	  else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
896	    {
897	      TREE_ADDRESSABLE (result) = 1;
898	      result = TREE_OPERAND (result, 0);
899	    }
900	  else
901	    break;
902	}
903
904      gcc_assert (TREE_CODE (result) == INDIRECT_REF
905		  || TREE_CODE (result) == NULL_EXPR
906		  || DECL_P (result));
907
908      /* Convert the right operand to the operation type unless it is
909	 either already of the correct type or if the type involves a
910	 placeholder, since the RHS may not have the same record type.  */
911      if (operation_type != right_type
912	  && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
913	{
914	  right_operand = convert (operation_type, right_operand);
915	  right_type = operation_type;
916	}
917
918      /* If the left operand is not of the same type as the operation
919	 type, wrap it up in a VIEW_CONVERT_EXPR.  */
920      if (left_type != operation_type)
921	left_operand = unchecked_convert (operation_type, left_operand, false);
922
923      has_side_effects = true;
924      modulus = NULL_TREE;
925      break;
926
927    case ARRAY_REF:
928      if (!operation_type)
929	operation_type = TREE_TYPE (left_type);
930
931      /* ... fall through ... */
932
933    case ARRAY_RANGE_REF:
934      /* First look through conversion between type variants.  Note that
935	 this changes neither the operation type nor the type domain.  */
936      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
937	  && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
938	     == TYPE_MAIN_VARIANT (left_type))
939	{
940	  left_operand = TREE_OPERAND (left_operand, 0);
941	  left_type = TREE_TYPE (left_operand);
942	}
943
944      /* For a range, make sure the element type is consistent.  */
945      if (op_code == ARRAY_RANGE_REF
946	  && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
947	operation_type = build_array_type (TREE_TYPE (left_type),
948					   TYPE_DOMAIN (operation_type));
949
950      /* Then convert the right operand to its base type.  This will prevent
951	 unneeded sign conversions when sizetype is wider than integer.  */
952      right_operand = convert (right_base_type, right_operand);
953      right_operand = convert_to_index_type (right_operand);
954      modulus = NULL_TREE;
955      break;
956
957    case TRUTH_ANDIF_EXPR:
958    case TRUTH_ORIF_EXPR:
959    case TRUTH_AND_EXPR:
960    case TRUTH_OR_EXPR:
961    case TRUTH_XOR_EXPR:
962#ifdef ENABLE_CHECKING
963      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
964#endif
965      operation_type = left_base_type;
966      left_operand = convert (operation_type, left_operand);
967      right_operand = convert (operation_type, right_operand);
968      break;
969
970    case GE_EXPR:
971    case LE_EXPR:
972    case GT_EXPR:
973    case LT_EXPR:
974    case EQ_EXPR:
975    case NE_EXPR:
976#ifdef ENABLE_CHECKING
977      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
978#endif
979      /* If either operand is a NULL_EXPR, just return a new one.  */
980      if (TREE_CODE (left_operand) == NULL_EXPR)
981	return build2 (op_code, result_type,
982		       build1 (NULL_EXPR, integer_type_node,
983			       TREE_OPERAND (left_operand, 0)),
984		       integer_zero_node);
985
986      else if (TREE_CODE (right_operand) == NULL_EXPR)
987	return build2 (op_code, result_type,
988		       build1 (NULL_EXPR, integer_type_node,
989			       TREE_OPERAND (right_operand, 0)),
990		       integer_zero_node);
991
992      /* If either object is a justified modular types, get the
993	 fields from within.  */
994      if (TREE_CODE (left_type) == RECORD_TYPE
995	  && TYPE_JUSTIFIED_MODULAR_P (left_type))
996	{
997	  left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
998				  left_operand);
999	  left_type = TREE_TYPE (left_operand);
1000	  left_base_type = get_base_type (left_type);
1001	}
1002
1003      if (TREE_CODE (right_type) == RECORD_TYPE
1004	  && TYPE_JUSTIFIED_MODULAR_P (right_type))
1005	{
1006	  right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1007				  right_operand);
1008	  right_type = TREE_TYPE (right_operand);
1009	  right_base_type = get_base_type (right_type);
1010	}
1011
1012      /* If both objects are arrays, compare them specially.  */
1013      if ((TREE_CODE (left_type) == ARRAY_TYPE
1014	   || (TREE_CODE (left_type) == INTEGER_TYPE
1015	       && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1016	  && (TREE_CODE (right_type) == ARRAY_TYPE
1017	      || (TREE_CODE (right_type) == INTEGER_TYPE
1018		  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1019	{
1020	  result = compare_arrays (input_location,
1021				   result_type, left_operand, right_operand);
1022	  if (op_code == NE_EXPR)
1023	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1024	  else
1025	    gcc_assert (op_code == EQ_EXPR);
1026
1027	  return result;
1028	}
1029
1030      /* Otherwise, the base types must be the same, unless they are both fat
1031	 pointer types or record types.  In the latter case, use the best type
1032	 and convert both operands to that type.  */
1033      if (left_base_type != right_base_type)
1034	{
1035	  if (TYPE_IS_FAT_POINTER_P (left_base_type)
1036	      && TYPE_IS_FAT_POINTER_P (right_base_type))
1037	    {
1038	      gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1039			  == TYPE_MAIN_VARIANT (right_base_type));
1040	      best_type = left_base_type;
1041	    }
1042
1043	  else if (TREE_CODE (left_base_type) == RECORD_TYPE
1044		   && TREE_CODE (right_base_type) == RECORD_TYPE)
1045	    {
1046	      /* The only way this is permitted is if both types have the same
1047		 name.  In that case, one of them must not be self-referential.
1048		 Use it as the best type.  Even better with a fixed size.  */
1049	      gcc_assert (TYPE_NAME (left_base_type)
1050			  && TYPE_NAME (left_base_type)
1051			     == TYPE_NAME (right_base_type));
1052
1053	      if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1054		best_type = left_base_type;
1055	      else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1056		best_type = right_base_type;
1057	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1058		best_type = left_base_type;
1059	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1060		best_type = right_base_type;
1061	      else
1062		gcc_unreachable ();
1063	    }
1064
1065	  else if (POINTER_TYPE_P (left_base_type)
1066		   && POINTER_TYPE_P (right_base_type))
1067	    {
1068	      gcc_assert (TREE_TYPE (left_base_type)
1069			  == TREE_TYPE (right_base_type));
1070	      best_type = left_base_type;
1071	    }
1072	  else
1073	    gcc_unreachable ();
1074
1075	  left_operand = convert (best_type, left_operand);
1076	  right_operand = convert (best_type, right_operand);
1077	}
1078      else
1079	{
1080	  left_operand = convert (left_base_type, left_operand);
1081	  right_operand = convert (right_base_type, right_operand);
1082	}
1083
1084      /* If both objects are fat pointers, compare them specially.  */
1085      if (TYPE_IS_FAT_POINTER_P (left_base_type))
1086	{
1087	  result
1088	    = compare_fat_pointers (input_location,
1089				    result_type, left_operand, right_operand);
1090	  if (op_code == NE_EXPR)
1091	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1092	  else
1093	    gcc_assert (op_code == EQ_EXPR);
1094
1095	  return result;
1096	}
1097
1098      modulus = NULL_TREE;
1099      break;
1100
1101    case LSHIFT_EXPR:
1102    case RSHIFT_EXPR:
1103    case LROTATE_EXPR:
1104    case RROTATE_EXPR:
1105       /* The RHS of a shift can be any type.  Also, ignore any modulus
1106	 (we used to abort, but this is needed for unchecked conversion
1107	 to modular types).  Otherwise, processing is the same as normal.  */
1108      gcc_assert (operation_type == left_base_type);
1109      modulus = NULL_TREE;
1110      left_operand = convert (operation_type, left_operand);
1111      break;
1112
1113    case BIT_AND_EXPR:
1114    case BIT_IOR_EXPR:
1115    case BIT_XOR_EXPR:
1116      /* For binary modulus, if the inputs are in range, so are the
1117	 outputs.  */
1118      if (modulus && integer_pow2p (modulus))
1119	modulus = NULL_TREE;
1120      goto common;
1121
1122    case COMPLEX_EXPR:
1123      gcc_assert (TREE_TYPE (result_type) == left_base_type
1124		  && TREE_TYPE (result_type) == right_base_type);
1125      left_operand = convert (left_base_type, left_operand);
1126      right_operand = convert (right_base_type, right_operand);
1127      break;
1128
1129    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
1130    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
1131    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
1132    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
1133      /* These always produce results lower than either operand.  */
1134      modulus = NULL_TREE;
1135      goto common;
1136
1137    case POINTER_PLUS_EXPR:
1138      gcc_assert (operation_type == left_base_type
1139		  && sizetype == right_base_type);
1140      left_operand = convert (operation_type, left_operand);
1141      right_operand = convert (sizetype, right_operand);
1142      break;
1143
1144    case PLUS_NOMOD_EXPR:
1145    case MINUS_NOMOD_EXPR:
1146      if (op_code == PLUS_NOMOD_EXPR)
1147	op_code = PLUS_EXPR;
1148      else
1149	op_code = MINUS_EXPR;
1150      modulus = NULL_TREE;
1151
1152      /* ... fall through ... */
1153
1154    case PLUS_EXPR:
1155    case MINUS_EXPR:
1156      /* Avoid doing arithmetics in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1157	 other compilers.  Contrary to C, Ada doesn't allow arithmetics in
1158	 these types but can generate addition/subtraction for Succ/Pred.  */
1159      if (operation_type
1160	  && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1161	      || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1162	operation_type = left_base_type = right_base_type
1163	  = gnat_type_for_mode (TYPE_MODE (operation_type),
1164				TYPE_UNSIGNED (operation_type));
1165
1166      /* ... fall through ... */
1167
1168    default:
1169    common:
1170      /* The result type should be the same as the base types of the
1171	 both operands (and they should be the same).  Convert
1172	 everything to the result type.  */
1173
1174      gcc_assert (operation_type == left_base_type
1175		  && left_base_type == right_base_type);
1176      left_operand = convert (operation_type, left_operand);
1177      right_operand = convert (operation_type, right_operand);
1178    }
1179
1180  if (modulus && !integer_pow2p (modulus))
1181    {
1182      result = nonbinary_modular_operation (op_code, operation_type,
1183					    left_operand, right_operand);
1184      modulus = NULL_TREE;
1185    }
1186  /* If either operand is a NULL_EXPR, just return a new one.  */
1187  else if (TREE_CODE (left_operand) == NULL_EXPR)
1188    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1189  else if (TREE_CODE (right_operand) == NULL_EXPR)
1190    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1191  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1192    result = fold (build4 (op_code, operation_type, left_operand,
1193			   right_operand, NULL_TREE, NULL_TREE));
1194  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1195    result = build2 (op_code, void_type_node, left_operand, right_operand);
1196  else
1197    result
1198      = fold_build2 (op_code, operation_type, left_operand, right_operand);
1199
1200  if (TREE_CONSTANT (result))
1201    ;
1202  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1203    {
1204      if (TYPE_VOLATILE (operation_type))
1205	TREE_THIS_VOLATILE (result) = 1;
1206    }
1207  else
1208    TREE_CONSTANT (result)
1209      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));
1210
1211  TREE_SIDE_EFFECTS (result) |= has_side_effects;
1212
1213  /* If we are working with modular types, perform the MOD operation
1214     if something above hasn't eliminated the need for it.  */
1215  if (modulus)
1216    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
1217			  convert (operation_type, modulus));
1218
1219  if (result_type && result_type != operation_type)
1220    result = convert (result_type, result);
1221
1222  return result;
1223}
1224
1225/* Similar, but for unary operations.  */
1226
1227tree
1228build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1229{
1230  tree type = TREE_TYPE (operand);
1231  tree base_type = get_base_type (type);
1232  tree operation_type = result_type;
1233  tree result;
1234
1235  if (operation_type
1236      && TREE_CODE (operation_type) == RECORD_TYPE
1237      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1238    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1239
1240  if (operation_type
1241      && TREE_CODE (operation_type) == INTEGER_TYPE
1242      && TYPE_EXTRA_SUBTYPE_P (operation_type))
1243    operation_type = get_base_type (operation_type);
1244
1245  switch (op_code)
1246    {
1247    case REALPART_EXPR:
1248    case IMAGPART_EXPR:
1249      if (!operation_type)
1250	result_type = operation_type = TREE_TYPE (type);
1251      else
1252	gcc_assert (result_type == TREE_TYPE (type));
1253
1254      result = fold_build1 (op_code, operation_type, operand);
1255      break;
1256
1257    case TRUTH_NOT_EXPR:
1258#ifdef ENABLE_CHECKING
1259      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1260#endif
1261      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1262      /* When not optimizing, fold the result as invert_truthvalue_loc
1263	 doesn't fold the result of comparisons.  This is intended to undo
1264	 the trick used for boolean rvalues in gnat_to_gnu.  */
1265      if (!optimize)
1266	result = fold (result);
1267      break;
1268
1269    case ATTR_ADDR_EXPR:
1270    case ADDR_EXPR:
1271      switch (TREE_CODE (operand))
1272	{
1273	case INDIRECT_REF:
1274	case UNCONSTRAINED_ARRAY_REF:
1275	  result = TREE_OPERAND (operand, 0);
1276
1277	  /* Make sure the type here is a pointer, not a reference.
1278	     GCC wants pointer types for function addresses.  */
1279	  if (!result_type)
1280	    result_type = build_pointer_type (type);
1281
1282	  /* If the underlying object can alias everything, propagate the
1283	     property since we are effectively retrieving the object.  */
1284	  if (POINTER_TYPE_P (TREE_TYPE (result))
1285	      && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1286	    {
1287	      if (TREE_CODE (result_type) == POINTER_TYPE
1288		  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1289		result_type
1290		  = build_pointer_type_for_mode (TREE_TYPE (result_type),
1291						 TYPE_MODE (result_type),
1292						 true);
1293	      else if (TREE_CODE (result_type) == REFERENCE_TYPE
1294		       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1295	        result_type
1296		  = build_reference_type_for_mode (TREE_TYPE (result_type),
1297						   TYPE_MODE (result_type),
1298						   true);
1299	    }
1300	  break;
1301
1302	case NULL_EXPR:
1303	  result = operand;
1304	  TREE_TYPE (result) = type = build_pointer_type (type);
1305	  break;
1306
1307	case COMPOUND_EXPR:
1308	  /* Fold a compound expression if it has unconstrained array type
1309	     since the middle-end cannot handle it.  But we don't it in the
1310	     general case because it may introduce aliasing issues if the
1311	     first operand is an indirect assignment and the second operand
1312	     the corresponding address, e.g. for an allocator.  */
1313	  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
1314	    {
1315	      result = build_unary_op (ADDR_EXPR, result_type,
1316				       TREE_OPERAND (operand, 1));
1317	      result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1318			       TREE_OPERAND (operand, 0), result);
1319	      break;
1320	    }
1321	  goto common;
1322
1323	case ARRAY_REF:
1324	case ARRAY_RANGE_REF:
1325	case COMPONENT_REF:
1326	case BIT_FIELD_REF:
1327	    /* If this is for 'Address, find the address of the prefix and add
1328	       the offset to the field.  Otherwise, do this the normal way.  */
1329	  if (op_code == ATTR_ADDR_EXPR)
1330	    {
1331	      HOST_WIDE_INT bitsize;
1332	      HOST_WIDE_INT bitpos;
1333	      tree offset, inner;
1334	      machine_mode mode;
1335	      int unsignedp, volatilep;
1336
1337	      inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1338					   &mode, &unsignedp, &volatilep,
1339					   false);
1340
1341	      /* If INNER is a padding type whose field has a self-referential
1342		 size, convert to that inner type.  We know the offset is zero
1343		 and we need to have that type visible.  */
1344	      if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
1345		  && CONTAINS_PLACEHOLDER_P
1346		     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1347					    (TREE_TYPE (inner))))))
1348		inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1349				 inner);
1350
1351	      /* Compute the offset as a byte offset from INNER.  */
1352	      if (!offset)
1353		offset = size_zero_node;
1354
1355	      offset = size_binop (PLUS_EXPR, offset,
1356				   size_int (bitpos / BITS_PER_UNIT));
1357
1358	      /* Take the address of INNER, convert the offset to void *, and
1359		 add then.  It will later be converted to the desired result
1360		 type, if any.  */
1361	      inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
1362	      inner = convert (ptr_void_type_node, inner);
1363	      result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
1364					inner, offset);
1365	      result = convert (build_pointer_type (TREE_TYPE (operand)),
1366				result);
1367	      break;
1368	    }
1369	  goto common;
1370
1371	case CONSTRUCTOR:
1372	  /* If this is just a constructor for a padded record, we can
1373	     just take the address of the single field and convert it to
1374	     a pointer to our type.  */
1375	  if (TYPE_IS_PADDING_P (type))
1376	    {
1377	      result = (*CONSTRUCTOR_ELTS (operand))[0].value;
1378	      result = convert (build_pointer_type (TREE_TYPE (operand)),
1379				build_unary_op (ADDR_EXPR, NULL_TREE, result));
1380	      break;
1381	    }
1382
1383	  goto common;
1384
1385	case NOP_EXPR:
1386	  if (AGGREGATE_TYPE_P (type)
1387	      && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1388	    return build_unary_op (ADDR_EXPR, result_type,
1389				   TREE_OPERAND (operand, 0));
1390
1391	  /* ... fallthru ... */
1392
1393	case VIEW_CONVERT_EXPR:
1394	  /* If this just a variant conversion or if the conversion doesn't
1395	     change the mode, get the result type from this type and go down.
1396	     This is needed for conversions of CONST_DECLs, to eventually get
1397	     to the address of their CORRESPONDING_VARs.  */
1398	  if ((TYPE_MAIN_VARIANT (type)
1399	       == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1400	      || (TYPE_MODE (type) != BLKmode
1401		  && (TYPE_MODE (type)
1402		      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1403	    return build_unary_op (ADDR_EXPR,
1404				   (result_type ? result_type
1405				    : build_pointer_type (type)),
1406				   TREE_OPERAND (operand, 0));
1407	  goto common;
1408
1409	case CONST_DECL:
1410	  operand = DECL_CONST_CORRESPONDING_VAR (operand);
1411
1412	  /* ... fall through ... */
1413
1414	default:
1415	common:
1416
1417	  /* If we are taking the address of a padded record whose field
1418	     contains a template, take the address of the field.  */
1419	  if (TYPE_IS_PADDING_P (type)
1420	      && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1421	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1422	    {
1423	      type = TREE_TYPE (TYPE_FIELDS (type));
1424	      operand = convert (type, operand);
1425	    }
1426
1427	  gnat_mark_addressable (operand);
1428	  result = build_fold_addr_expr (operand);
1429	}
1430
1431      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
1432      break;
1433
1434    case INDIRECT_REF:
1435      {
1436	tree t = remove_conversions (operand, false);
1437	bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1438
1439	/* If TYPE is a thin pointer, either first retrieve the base if this
1440	   is an expression with an offset built for the initialization of an
1441	   object with an unconstrained nominal subtype, or else convert to
1442	   the fat pointer.  */
1443	if (TYPE_IS_THIN_POINTER_P (type))
1444	  {
1445	    tree rec_type = TREE_TYPE (type);
1446
1447	    if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1448		&& TREE_OPERAND (operand, 1)
1449		   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1450		&& TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1451	      {
1452		operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1453		type = TREE_TYPE (operand);
1454	      }
1455	    else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1456	      {
1457		operand
1458		  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1459			     operand);
1460		type = TREE_TYPE (operand);
1461	      }
1462	  }
1463
1464	/* If we want to refer to an unconstrained array, use the appropriate
1465	   expression.  But this will never survive down to the back-end.  */
1466	if (TYPE_IS_FAT_POINTER_P (type))
1467	  {
1468	    result = build1 (UNCONSTRAINED_ARRAY_REF,
1469			     TYPE_UNCONSTRAINED_ARRAY (type), operand);
1470	    TREE_READONLY (result)
1471	      = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1472	  }
1473
1474	/* If we are dereferencing an ADDR_EXPR, return its operand.  */
1475	else if (TREE_CODE (operand) == ADDR_EXPR)
1476	  result = TREE_OPERAND (operand, 0);
1477
1478	/* Otherwise, build and fold the indirect reference.  */
1479	else
1480	  {
1481	    result = build_fold_indirect_ref (operand);
1482	    TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1483	  }
1484
1485	if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1486	  {
1487	    TREE_SIDE_EFFECTS (result) = 1;
1488	    if (TREE_CODE (result) == INDIRECT_REF)
1489	      TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1490	  }
1491
1492	if ((TREE_CODE (result) == INDIRECT_REF
1493	     || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1494	    && can_never_be_null)
1495	  TREE_THIS_NOTRAP (result) = 1;
1496
1497	break;
1498      }
1499
1500    case NEGATE_EXPR:
1501    case BIT_NOT_EXPR:
1502      {
1503	tree modulus = ((operation_type
1504			 && TREE_CODE (operation_type) == INTEGER_TYPE
1505			 && TYPE_MODULAR_P (operation_type))
1506			? TYPE_MODULUS (operation_type) : NULL_TREE);
1507	int mod_pow2 = modulus && integer_pow2p (modulus);
1508
1509	/* If this is a modular type, there are various possibilities
1510	   depending on the operation and whether the modulus is a
1511	   power of two or not.  */
1512
1513	if (modulus)
1514	  {
1515	    gcc_assert (operation_type == base_type);
1516	    operand = convert (operation_type, operand);
1517
1518	    /* The fastest in the negate case for binary modulus is
1519	       the straightforward code; the TRUNC_MOD_EXPR below
1520	       is an AND operation.  */
1521	    if (op_code == NEGATE_EXPR && mod_pow2)
1522	      result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1523				    fold_build1 (NEGATE_EXPR, operation_type,
1524						 operand),
1525				    modulus);
1526
1527	    /* For nonbinary negate case, return zero for zero operand,
1528	       else return the modulus minus the operand.  If the modulus
1529	       is a power of two minus one, we can do the subtraction
1530	       as an XOR since it is equivalent and faster on most machines. */
1531	    else if (op_code == NEGATE_EXPR && !mod_pow2)
1532	      {
1533		if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1534						modulus,
1535						convert (operation_type,
1536							 integer_one_node))))
1537		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
1538					operand, modulus);
1539		else
1540		  result = fold_build2 (MINUS_EXPR, operation_type,
1541					modulus, operand);
1542
1543		result = fold_build3 (COND_EXPR, operation_type,
1544				      fold_build2 (NE_EXPR,
1545						   boolean_type_node,
1546						   operand,
1547						   convert
1548						     (operation_type,
1549						      integer_zero_node)),
1550				      result, operand);
1551	      }
1552	    else
1553	      {
1554		/* For the NOT cases, we need a constant equal to
1555		   the modulus minus one.  For a binary modulus, we
1556		   XOR against the constant and subtract the operand from
1557		   that constant for nonbinary modulus.  */
1558
1559		tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1560					 convert (operation_type,
1561						  integer_one_node));
1562
1563		if (mod_pow2)
1564		  result = fold_build2 (BIT_XOR_EXPR, operation_type,
1565					operand, cnst);
1566		else
1567		  result = fold_build2 (MINUS_EXPR, operation_type,
1568					cnst, operand);
1569	      }
1570
1571	    break;
1572	  }
1573      }
1574
1575      /* ... fall through ... */
1576
1577    default:
1578      gcc_assert (operation_type == base_type);
1579      result = fold_build1 (op_code, operation_type,
1580			    convert (operation_type, operand));
1581    }
1582
1583  if (result_type && TREE_TYPE (result) != result_type)
1584    result = convert (result_type, result);
1585
1586  return result;
1587}
1588
1589/* Similar, but for COND_EXPR.  */
1590
1591tree
1592build_cond_expr (tree result_type, tree condition_operand,
1593                 tree true_operand, tree false_operand)
1594{
1595  bool addr_p = false;
1596  tree result;
1597
1598  /* The front-end verified that result, true and false operands have
1599     same base type.  Convert everything to the result type.  */
1600  true_operand = convert (result_type, true_operand);
1601  false_operand = convert (result_type, false_operand);
1602
1603  /* If the result type is unconstrained, take the address of the operands and
1604     then dereference the result.  Likewise if the result type is passed by
1605     reference, because creating a temporary of this type is not allowed.  */
1606  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1607      || TYPE_IS_BY_REFERENCE_P (result_type)
1608      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1609    {
1610      result_type = build_pointer_type (result_type);
1611      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1612      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1613      addr_p = true;
1614    }
1615
1616  result = fold_build3 (COND_EXPR, result_type, condition_operand,
1617			true_operand, false_operand);
1618
1619  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1620     in both arms, make sure it gets evaluated by moving it ahead of the
1621     conditional expression.  This is necessary because it is evaluated
1622     in only one place at run time and would otherwise be uninitialized
1623     in one of the arms.  */
1624  true_operand = skip_simple_arithmetic (true_operand);
1625  false_operand = skip_simple_arithmetic (false_operand);
1626
1627  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1628    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1629
1630  if (addr_p)
1631    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1632
1633  return result;
1634}
1635
1636/* Similar, but for COMPOUND_EXPR.  */
1637
1638tree
1639build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1640{
1641  bool addr_p = false;
1642  tree result;
1643
1644  /* If the result type is unconstrained, take the address of the operand and
1645     then dereference the result.  Likewise if the result type is passed by
1646     reference, but this is natively handled in the gimplifier.  */
1647  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1648      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1649    {
1650      result_type = build_pointer_type (result_type);
1651      expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1652      addr_p = true;
1653    }
1654
1655  result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1656			expr_operand);
1657
1658  if (addr_p)
1659    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1660
1661  return result;
1662}
1663
1664/* Conveniently construct a function call expression.  FNDECL names the
1665   function to be called, N is the number of arguments, and the "..."
1666   parameters are the argument expressions.  Unlike build_call_expr
1667   this doesn't fold the call, hence it will always return a CALL_EXPR.  */
1668
1669tree
1670build_call_n_expr (tree fndecl, int n, ...)
1671{
1672  va_list ap;
1673  tree fntype = TREE_TYPE (fndecl);
1674  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1675
1676  va_start (ap, n);
1677  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1678  va_end (ap);
1679  return fn;
1680}
1681
1682/* Call a function that raises an exception and pass the line number and file
1683   name, if requested.  MSG says which exception function to call.
1684
1685   GNAT_NODE is the gnat node conveying the source location for which the
1686   error should be signaled, or Empty in which case the error is signaled on
1687   the current ref_file_name/input_line.
1688
1689   KIND says which kind of exception this is for
1690   (N_Raise_{Constraint,Storage,Program}_Error).  */
1691
1692tree
1693build_call_raise (int msg, Node_Id gnat_node, char kind)
1694{
1695  tree fndecl = gnat_raise_decls[msg];
1696  tree label = get_exception_label (kind);
1697  tree filename;
1698  int line_number;
1699  const char *str;
1700  int len;
1701
1702  /* If this is to be done as a goto, handle that case.  */
1703  if (label)
1704    {
1705      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1706      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1707
1708      /* If Local_Raise is present, generate
1709	 Local_Raise (exception'Identity);  */
1710      if (Present (local_raise))
1711	{
1712	  tree gnu_local_raise
1713	    = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1714	  tree gnu_exception_entity
1715	    = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1716	  tree gnu_call
1717	    = build_call_n_expr (gnu_local_raise, 1,
1718				 build_unary_op (ADDR_EXPR, NULL_TREE,
1719						 gnu_exception_entity));
1720
1721	  gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1722			       gnu_call, gnu_result);}
1723
1724      return gnu_result;
1725    }
1726
1727  str
1728    = (Debug_Flag_NN || Exception_Locations_Suppressed)
1729      ? ""
1730      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1731        ? IDENTIFIER_POINTER
1732          (get_identifier (Get_Name_String
1733			   (Debug_Source_Name
1734			    (Get_Source_File_Index (Sloc (gnat_node))))))
1735        : ref_filename;
1736
1737  len = strlen (str);
1738  filename = build_string (len, str);
1739  line_number
1740    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1741      ? Get_Logical_Line_Number (Sloc(gnat_node))
1742      : LOCATION_LINE (input_location);
1743
1744  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1745					   build_index_type (size_int (len)));
1746
1747  return
1748    build_call_n_expr (fndecl, 2,
1749		       build1 (ADDR_EXPR,
1750			       build_pointer_type (unsigned_char_type_node),
1751			       filename),
1752		       build_int_cst (NULL_TREE, line_number));
1753}
1754
1755/* Similar to build_call_raise, for an index or range check exception as
1756   determined by MSG, with extra information generated of the form
1757   "INDEX out of range FIRST..LAST".  */
1758
1759tree
1760build_call_raise_range (int msg, Node_Id gnat_node,
1761			tree index, tree first, tree last)
1762{
1763  tree fndecl = gnat_raise_decls_ext[msg];
1764  tree filename;
1765  int line_number, column_number;
1766  const char *str;
1767  int len;
1768
1769  str
1770    = (Debug_Flag_NN || Exception_Locations_Suppressed)
1771      ? ""
1772      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1773        ? IDENTIFIER_POINTER
1774          (get_identifier (Get_Name_String
1775			   (Debug_Source_Name
1776			    (Get_Source_File_Index (Sloc (gnat_node))))))
1777        : ref_filename;
1778
1779  len = strlen (str);
1780  filename = build_string (len, str);
1781  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1782    {
1783      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1784      column_number = Get_Column_Number (Sloc (gnat_node));
1785    }
1786  else
1787    {
1788      line_number = LOCATION_LINE (input_location);
1789      column_number = 0;
1790    }
1791
1792  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1793					   build_index_type (size_int (len)));
1794
1795  return
1796    build_call_n_expr (fndecl, 6,
1797		       build1 (ADDR_EXPR,
1798			       build_pointer_type (unsigned_char_type_node),
1799			       filename),
1800		       build_int_cst (NULL_TREE, line_number),
1801		       build_int_cst (NULL_TREE, column_number),
1802		       convert (integer_type_node, index),
1803		       convert (integer_type_node, first),
1804		       convert (integer_type_node, last));
1805}
1806
1807/* Similar to build_call_raise, with extra information about the column
1808   where the check failed.  */
1809
1810tree
1811build_call_raise_column (int msg, Node_Id gnat_node)
1812{
1813  tree fndecl = gnat_raise_decls_ext[msg];
1814  tree filename;
1815  int line_number, column_number;
1816  const char *str;
1817  int len;
1818
1819  str
1820    = (Debug_Flag_NN || Exception_Locations_Suppressed)
1821      ? ""
1822      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1823        ? IDENTIFIER_POINTER
1824          (get_identifier (Get_Name_String
1825			   (Debug_Source_Name
1826			    (Get_Source_File_Index (Sloc (gnat_node))))))
1827        : ref_filename;
1828
1829  len = strlen (str);
1830  filename = build_string (len, str);
1831  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1832    {
1833      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1834      column_number = Get_Column_Number (Sloc (gnat_node));
1835    }
1836  else
1837    {
1838      line_number = LOCATION_LINE (input_location);
1839      column_number = 0;
1840    }
1841
1842  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1843					   build_index_type (size_int (len)));
1844
1845  return
1846    build_call_n_expr (fndecl, 3,
1847		       build1 (ADDR_EXPR,
1848			       build_pointer_type (unsigned_char_type_node),
1849			       filename),
1850		       build_int_cst (NULL_TREE, line_number),
1851		       build_int_cst (NULL_TREE, column_number));
1852}
1853
1854/* qsort comparer for the bit positions of two constructor elements
1855   for record components.  */
1856
1857static int
1858compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1859{
1860  const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1861  const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1862  const_tree const field1 = elmt1->index;
1863  const_tree const field2 = elmt2->index;
1864  const int ret
1865    = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1866
1867  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1868}
1869
1870/* Return a CONSTRUCTOR of TYPE whose elements are V.  */
1871
1872tree
1873gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1874{
1875  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1876  bool read_only = true;
1877  bool side_effects = false;
1878  tree result, obj, val;
1879  unsigned int n_elmts;
1880
1881  /* Scan the elements to see if they are all constant or if any has side
1882     effects, to let us set global flags on the resulting constructor.  Count
1883     the elements along the way for possible sorting purposes below.  */
1884  FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1885    {
1886      /* The predicate must be in keeping with output_constructor.  */
1887      if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1888	  || (TREE_CODE (type) == RECORD_TYPE
1889	      && CONSTRUCTOR_BITFIELD_P (obj)
1890	      && !initializer_constant_valid_for_bitfield_p (val))
1891	  || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1892	allconstant = false;
1893
1894      if (!TREE_READONLY (val))
1895	read_only = false;
1896
1897      if (TREE_SIDE_EFFECTS (val))
1898	side_effects = true;
1899    }
1900
1901  /* For record types with constant components only, sort field list
1902     by increasing bit position.  This is necessary to ensure the
1903     constructor can be output as static data.  */
1904  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1905    v->qsort (compare_elmt_bitpos);
1906
1907  result = build_constructor (type, v);
1908  CONSTRUCTOR_NO_CLEARING (result) = 1;
1909  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1910  TREE_SIDE_EFFECTS (result) = side_effects;
1911  TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1912  return result;
1913}
1914
1915/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1916   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1917   for the field.  Don't fold the result if NO_FOLD_P is true.
1918
1919   We also handle the fact that we might have been passed a pointer to the
1920   actual record and know how to look for fields in variant parts.  */
1921
1922static tree
1923build_simple_component_ref (tree record_variable, tree component, tree field,
1924			    bool no_fold_p)
1925{
1926  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1927  tree base, ref;
1928
1929  gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1930	      && COMPLETE_TYPE_P (record_type)
1931	      && (component == NULL_TREE) != (field == NULL_TREE));
1932
1933  /* If no field was specified, look for a field with the specified name in
1934     the current record only.  */
1935  if (!field)
1936    for (field = TYPE_FIELDS (record_type);
1937	 field;
1938	 field = DECL_CHAIN (field))
1939      if (DECL_NAME (field) == component)
1940	break;
1941
1942  if (!field)
1943    return NULL_TREE;
1944
1945  /* If this field is not in the specified record, see if we can find a field
1946     in the specified record whose original field is the same as this one.  */
1947  if (DECL_CONTEXT (field) != record_type)
1948    {
1949      tree new_field;
1950
1951      /* First loop through normal components.  */
1952      for (new_field = TYPE_FIELDS (record_type);
1953	   new_field;
1954	   new_field = DECL_CHAIN (new_field))
1955	if (SAME_FIELD_P (field, new_field))
1956	  break;
1957
1958      /* Next, see if we're looking for an inherited component in an extension.
1959	 If so, look through the extension directly, unless the type contains
1960	 a placeholder, as it might be needed for a later substitution.  */
1961      if (!new_field
1962	  && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1963	  && TYPE_ALIGN_OK (record_type)
1964	  && !type_contains_placeholder_p (record_type)
1965	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1966	     == RECORD_TYPE
1967	  && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1968	{
1969	  ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1970					    NULL_TREE, field, no_fold_p);
1971	  if (ref)
1972	    return ref;
1973	}
1974
1975      /* Next, loop through DECL_INTERNAL_P components if we haven't found the
1976	 component in the first search.  Doing this search in two steps is
1977	 required to avoid hidden homonymous fields in the _Parent field.  */
1978      if (!new_field)
1979	for (new_field = TYPE_FIELDS (record_type);
1980	     new_field;
1981	     new_field = DECL_CHAIN (new_field))
1982	  if (DECL_INTERNAL_P (new_field))
1983	    {
1984	      tree field_ref
1985		= build_simple_component_ref (record_variable,
1986					      NULL_TREE, new_field, no_fold_p);
1987	      ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1988						no_fold_p);
1989	      if (ref)
1990		return ref;
1991	    }
1992
1993      field = new_field;
1994    }
1995
1996  if (!field)
1997    return NULL_TREE;
1998
1999  /* If the field's offset has overflowed, do not try to access it, as doing
2000     so may trigger sanity checks deeper in the back-end.  Note that we don't
2001     need to warn since this will be done on trying to declare the object.  */
2002  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2003      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2004    return NULL_TREE;
2005
2006  /* We have found a suitable field.  Before building the COMPONENT_REF, get
2007     the base object of the record variable if possible.  */
2008  base = record_variable;
2009
2010  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR)
2011    {
2012      tree inner_variable = TREE_OPERAND (record_variable, 0);
2013      tree inner_type = TYPE_MAIN_VARIANT (TREE_TYPE (inner_variable));
2014
2015      /* Look through a conversion between type variants.  This is transparent
2016	 as far as the field is concerned.  */
2017      if (inner_type == record_type)
2018	base = inner_variable;
2019
2020      /* Look through a conversion between original and packable version, but
2021	 the field needs to be adjusted in this case.  */
2022      else if (RECORD_OR_UNION_TYPE_P (inner_type)
2023	       && TYPE_NAME (inner_type) == TYPE_NAME (record_type))
2024	{
2025	  tree new_field;
2026
2027	  for (new_field = TYPE_FIELDS (inner_type);
2028	       new_field;
2029	       new_field = DECL_CHAIN (new_field))
2030	    if (SAME_FIELD_P (field, new_field))
2031	      break;
2032	  if (new_field)
2033	    {
2034	      field = new_field;
2035	      base = inner_variable;
2036	    }
2037	}
2038    }
2039
2040  ref = build3 (COMPONENT_REF, TREE_TYPE (field), base, field, NULL_TREE);
2041
2042  if (TREE_READONLY (record_variable)
2043      || TREE_READONLY (field)
2044      || TYPE_READONLY (record_type))
2045    TREE_READONLY (ref) = 1;
2046
2047  if (TREE_THIS_VOLATILE (record_variable)
2048      || TREE_THIS_VOLATILE (field)
2049      || TYPE_VOLATILE (record_type))
2050    TREE_THIS_VOLATILE (ref) = 1;
2051
2052  if (no_fold_p)
2053    return ref;
2054
2055  /* The generic folder may punt in this case because the inner array type
2056     can be self-referential, but folding is in fact not problematic.  */
2057  if (TREE_CODE (base) == CONSTRUCTOR
2058      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (base)))
2059    {
2060      vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (base);
2061      unsigned HOST_WIDE_INT idx;
2062      tree index, value;
2063      FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2064	if (index == field)
2065	  return value;
2066      return ref;
2067    }
2068
2069  return fold (ref);
2070}
2071
2072/* Likewise, but generate a Constraint_Error if the reference could not be
2073   found.  */
2074
2075tree
2076build_component_ref (tree record_variable, tree component, tree field,
2077		     bool no_fold_p)
2078{
2079  tree ref = build_simple_component_ref (record_variable, component, field,
2080					 no_fold_p);
2081  if (ref)
2082    return ref;
2083
2084  /* If FIELD was specified, assume this is an invalid user field so raise
2085     Constraint_Error.  Otherwise, we have no type to return so abort.  */
2086  gcc_assert (field);
2087  return build1 (NULL_EXPR, TREE_TYPE (field),
2088		 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2089				   N_Raise_Constraint_Error));
2090}
2091
2092/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2093   identically.  Process the case where a GNAT_PROC to call is provided.  */
2094
2095static inline tree
2096build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2097			       Entity_Id gnat_proc, Entity_Id gnat_pool)
2098{
2099  tree gnu_proc = gnat_to_gnu (gnat_proc);
2100  tree gnu_call;
2101
2102  /* A storage pool's underlying type is a record type (for both predefined
2103     storage pools and GNAT simple storage pools). The secondary stack uses
2104     the same mechanism, but its pool object (SS_Pool) is an integer.  */
2105  if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2106    {
2107      /* The size is the third parameter; the alignment is the
2108	 same type.  */
2109      Entity_Id gnat_size_type
2110	= Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2111      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2112
2113      tree gnu_pool = gnat_to_gnu (gnat_pool);
2114      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2115      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2116
2117      gnu_size = convert (gnu_size_type, gnu_size);
2118      gnu_align = convert (gnu_size_type, gnu_align);
2119
2120      /* The first arg is always the address of the storage pool; next
2121	 comes the address of the object, for a deallocator, then the
2122	 size and alignment.  */
2123      if (gnu_obj)
2124	gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2125				      gnu_size, gnu_align);
2126      else
2127	gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2128				      gnu_size, gnu_align);
2129    }
2130
2131  /* Secondary stack case.  */
2132  else
2133    {
2134      /* The size is the second parameter.  */
2135      Entity_Id gnat_size_type
2136	= Etype (Next_Formal (First_Formal (gnat_proc)));
2137      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2138
2139      gnu_size = convert (gnu_size_type, gnu_size);
2140
2141      /* The first arg is the address of the object, for a deallocator,
2142	 then the size.  */
2143      if (gnu_obj)
2144	gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2145      else
2146	gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2147    }
2148
2149  return gnu_call;
2150}
2151
2152/* Helper for build_call_alloc_dealloc, to build and return an allocator for
2153   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2154   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
2155   latter offers.  */
2156
2157static inline tree
2158maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2159{
2160  /* When the DATA_TYPE alignment is stricter than what malloc offers
2161     (super-aligned case), we allocate an "aligning" wrapper type and return
2162     the address of its single data field with the malloc's return value
2163     stored just in front.  */
2164
2165  unsigned int data_align = TYPE_ALIGN (data_type);
2166  unsigned int system_allocator_alignment
2167      = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2168
2169  tree aligning_type
2170    = ((data_align > system_allocator_alignment)
2171       ? make_aligning_type (data_type, data_align, data_size,
2172			     system_allocator_alignment,
2173			     POINTER_SIZE / BITS_PER_UNIT,
2174			     gnat_node)
2175       : NULL_TREE);
2176
2177  tree size_to_malloc
2178    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2179
2180  tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2181
2182  if (aligning_type)
2183    {
2184      /* Latch malloc's return value and get a pointer to the aligning field
2185	 first.  */
2186      tree storage_ptr = gnat_protect_expr (malloc_ptr);
2187
2188      tree aligning_record_addr
2189	= convert (build_pointer_type (aligning_type), storage_ptr);
2190
2191      tree aligning_record
2192	= build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2193
2194      tree aligning_field
2195	= build_component_ref (aligning_record, NULL_TREE,
2196			       TYPE_FIELDS (aligning_type), false);
2197
2198      tree aligning_field_addr
2199        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2200
2201      /* Then arrange to store the allocator's return value ahead
2202	 and return.  */
2203      tree storage_ptr_slot_addr
2204	= build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2205			   convert (ptr_void_type_node, aligning_field_addr),
2206			   size_int (-(HOST_WIDE_INT) POINTER_SIZE
2207				     / BITS_PER_UNIT));
2208
2209      tree storage_ptr_slot
2210	= build_unary_op (INDIRECT_REF, NULL_TREE,
2211			  convert (build_pointer_type (ptr_void_type_node),
2212				   storage_ptr_slot_addr));
2213
2214      return
2215	build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2216		build_binary_op (INIT_EXPR, NULL_TREE,
2217				 storage_ptr_slot, storage_ptr),
2218		aligning_field_addr);
2219    }
2220  else
2221    return malloc_ptr;
2222}
2223
2224/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2225   designated by DATA_PTR using the __gnat_free entry point.  */
2226
2227static inline tree
2228maybe_wrap_free (tree data_ptr, tree data_type)
2229{
2230  /* In the regular alignment case, we pass the data pointer straight to free.
2231     In the superaligned case, we need to retrieve the initial allocator
2232     return value, stored in front of the data block at allocation time.  */
2233
2234  unsigned int data_align = TYPE_ALIGN (data_type);
2235  unsigned int system_allocator_alignment
2236      = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2237
2238  tree free_ptr;
2239
2240  if (data_align > system_allocator_alignment)
2241    {
2242      /* DATA_FRONT_PTR (void *)
2243	 = (void *)DATA_PTR - (void *)sizeof (void *))  */
2244      tree data_front_ptr
2245	= build_binary_op
2246	  (POINTER_PLUS_EXPR, ptr_void_type_node,
2247	   convert (ptr_void_type_node, data_ptr),
2248	   size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2249
2250      /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
2251      free_ptr
2252	= build_unary_op
2253	  (INDIRECT_REF, NULL_TREE,
2254	   convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2255    }
2256  else
2257    free_ptr = data_ptr;
2258
2259  return build_call_n_expr (free_decl, 1, free_ptr);
2260}
2261
2262/* Build a GCC tree to call an allocation or deallocation function.
2263   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
2264   generate an allocator.
2265
2266   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2267   object type, used to determine the to-be-honored address alignment.
2268   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2269   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
2270   to provide an error location for restriction violation messages.  */
2271
2272tree
2273build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2274                          Entity_Id gnat_proc, Entity_Id gnat_pool,
2275                          Node_Id gnat_node)
2276{
2277  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2278
2279  /* Explicit proc to call ?  This one is assumed to deal with the type
2280     alignment constraints.  */
2281  if (Present (gnat_proc))
2282    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2283					  gnat_proc, gnat_pool);
2284
2285  /* Otherwise, object to "free" or "malloc" with possible special processing
2286     for alignments stricter than what the default allocator honors.  */
2287  else if (gnu_obj)
2288    return maybe_wrap_free (gnu_obj, gnu_type);
2289  else
2290    {
2291      /* Assert that we no longer can be called with this special pool.  */
2292      gcc_assert (gnat_pool != -1);
2293
2294      /* Check that we aren't violating the associated restriction.  */
2295      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2296	Check_No_Implicit_Heap_Alloc (gnat_node);
2297
2298      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2299    }
2300}
2301
2302/* Build a GCC tree that corresponds to allocating an object of TYPE whose
2303   initial value is INIT, if INIT is nonzero.  Convert the expression to
2304   RESULT_TYPE, which must be some pointer type, and return the result.
2305
2306   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2307   the storage pool to use.  GNAT_NODE is used to provide an error
2308   location for restriction violation messages.  If IGNORE_INIT_TYPE is
2309   true, ignore the type of INIT for the purpose of determining the size;
2310   this will cause the maximum size to be allocated if TYPE is of
2311   self-referential size.  */
2312
2313tree
2314build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2315                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2316{
2317  tree size, storage, storage_deref, storage_init;
2318
2319  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
2320  if (init && TREE_CODE (init) == NULL_EXPR)
2321    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2322
2323  /* If the initializer, if present, is a COND_EXPR, deal with each branch.  */
2324  else if (init && TREE_CODE (init) == COND_EXPR)
2325    return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2326		   build_allocator (type, TREE_OPERAND (init, 1), result_type,
2327				    gnat_proc, gnat_pool, gnat_node,
2328				    ignore_init_type),
2329		   build_allocator (type, TREE_OPERAND (init, 2), result_type,
2330				    gnat_proc, gnat_pool, gnat_node,
2331				    ignore_init_type));
2332
2333  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2334     sizes of the object and its template.  Allocate the whole thing and
2335     fill in the parts that are known.  */
2336  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2337    {
2338      tree storage_type
2339	= build_unc_object_type_from_ptr (result_type, type,
2340					  get_identifier ("ALLOC"), false);
2341      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2342      tree storage_ptr_type = build_pointer_type (storage_type);
2343
2344      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2345					     init);
2346
2347      /* If the size overflows, pass -1 so Storage_Error will be raised.  */
2348      if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2349	size = size_int (-1);
2350
2351      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2352					  gnat_proc, gnat_pool, gnat_node);
2353      storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2354      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2355      TREE_THIS_NOTRAP (storage_deref) = 1;
2356
2357      /* If there is an initializing expression, then make a constructor for
2358	 the entire object including the bounds and copy it into the object.
2359	 If there is no initializing expression, just set the bounds.  */
2360      if (init)
2361	{
2362	  vec<constructor_elt, va_gc> *v;
2363	  vec_alloc (v, 2);
2364
2365	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2366				  build_template (template_type, type, init));
2367	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2368				  init);
2369	  storage_init
2370	    = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2371			       gnat_build_constructor (storage_type, v));
2372	}
2373      else
2374	storage_init
2375	  = build_binary_op (INIT_EXPR, NULL_TREE,
2376			     build_component_ref (storage_deref, NULL_TREE,
2377						  TYPE_FIELDS (storage_type),
2378						  false),
2379			     build_template (template_type, type, NULL_TREE));
2380
2381      return build2 (COMPOUND_EXPR, result_type,
2382		     storage_init, convert (result_type, storage));
2383    }
2384
2385  size = TYPE_SIZE_UNIT (type);
2386
2387  /* If we have an initializing expression, see if its size is simpler
2388     than the size from the type.  */
2389  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2390      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2391	  || CONTAINS_PLACEHOLDER_P (size)))
2392    size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2393
2394  /* If the size is still self-referential, reference the initializing
2395     expression, if it is present.  If not, this must have been a
2396     call to allocate a library-level object, in which case we use
2397     the maximum size.  */
2398  if (CONTAINS_PLACEHOLDER_P (size))
2399    {
2400      if (!ignore_init_type && init)
2401	size = substitute_placeholder_in_expr (size, init);
2402      else
2403	size = max_size (size, true);
2404    }
2405
2406  /* If the size overflows, pass -1 so Storage_Error will be raised.  */
2407  if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2408    size = size_int (-1);
2409
2410  storage = convert (result_type,
2411		     build_call_alloc_dealloc (NULL_TREE, size, type,
2412					       gnat_proc, gnat_pool,
2413					       gnat_node));
2414
2415  /* If we have an initial value, protect the new address, assign the value
2416     and return the address with a COMPOUND_EXPR.  */
2417  if (init)
2418    {
2419      storage = gnat_protect_expr (storage);
2420      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2421      TREE_THIS_NOTRAP (storage_deref) = 1;
2422      storage_init
2423	= build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2424      return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2425    }
2426
2427  return storage;
2428}
2429
2430/* Indicate that we need to take the address of T and that it therefore
2431   should not be allocated in a register.  Returns true if successful.  */
2432
2433bool
2434gnat_mark_addressable (tree t)
2435{
2436  while (true)
2437    switch (TREE_CODE (t))
2438      {
2439      case ADDR_EXPR:
2440      case COMPONENT_REF:
2441      case ARRAY_REF:
2442      case ARRAY_RANGE_REF:
2443      case REALPART_EXPR:
2444      case IMAGPART_EXPR:
2445      case VIEW_CONVERT_EXPR:
2446      case NON_LVALUE_EXPR:
2447      CASE_CONVERT:
2448	t = TREE_OPERAND (t, 0);
2449	break;
2450
2451      case COMPOUND_EXPR:
2452	t = TREE_OPERAND (t, 1);
2453	break;
2454
2455      case CONSTRUCTOR:
2456	TREE_ADDRESSABLE (t) = 1;
2457	return true;
2458
2459      case VAR_DECL:
2460      case PARM_DECL:
2461      case RESULT_DECL:
2462	TREE_ADDRESSABLE (t) = 1;
2463	return true;
2464
2465      case FUNCTION_DECL:
2466	TREE_ADDRESSABLE (t) = 1;
2467	return true;
2468
2469      case CONST_DECL:
2470	return DECL_CONST_CORRESPONDING_VAR (t)
2471	       && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2472
2473      default:
2474	return true;
2475    }
2476}
2477
2478/* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
2479   but we know how to handle our own nodes.  */
2480
2481tree
2482gnat_save_expr (tree exp)
2483{
2484  tree type = TREE_TYPE (exp);
2485  enum tree_code code = TREE_CODE (exp);
2486
2487  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2488    return exp;
2489
2490  if (code == UNCONSTRAINED_ARRAY_REF)
2491    {
2492      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2493      TREE_READONLY (t) = TYPE_READONLY (type);
2494      return t;
2495    }
2496
2497  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2498     This may be more efficient, but will also allow us to more easily find
2499     the match for the PLACEHOLDER_EXPR.  */
2500  if (code == COMPONENT_REF
2501      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2502    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2503		   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2504
2505  return save_expr (exp);
2506}
2507
2508/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
2509   is optimized under the assumption that EXP's value doesn't change before
2510   its subsequent reuse(s) except through its potential reevaluation.  */
2511
2512tree
2513gnat_protect_expr (tree exp)
2514{
2515  tree type = TREE_TYPE (exp);
2516  enum tree_code code = TREE_CODE (exp);
2517
2518  if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2519    return exp;
2520
2521  /* If EXP has no side effects, we theoretically don't need to do anything.
2522     However, we may be recursively passed more and more complex expressions
2523     involving checks which will be reused multiple times and eventually be
2524     unshared for gimplification; in order to avoid a complexity explosion
2525     at that point, we protect any expressions more complex than a simple
2526     arithmetic expression.  */
2527  if (!TREE_SIDE_EFFECTS (exp))
2528    {
2529      tree inner = skip_simple_arithmetic (exp);
2530      if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2531	return exp;
2532    }
2533
2534  /* If this is a conversion, protect what's inside the conversion.  */
2535  if (code == NON_LVALUE_EXPR
2536      || CONVERT_EXPR_CODE_P (code)
2537      || code == VIEW_CONVERT_EXPR)
2538  return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2539
2540  /* If we're indirectly referencing something, we only need to protect the
2541     address since the data itself can't change in these situations.  */
2542  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2543    {
2544      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2545      TREE_READONLY (t) = TYPE_READONLY (type);
2546      return t;
2547    }
2548
2549  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2550     This may be more efficient, but will also allow us to more easily find
2551     the match for the PLACEHOLDER_EXPR.  */
2552  if (code == COMPONENT_REF
2553      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2554    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2555		   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2556
2557  /* If this is a fat pointer or something that can be placed in a register,
2558     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
2559     returned via invisible reference in most ABIs so the temporary will
2560     directly be filled by the callee.  */
2561  if (TYPE_IS_FAT_POINTER_P (type)
2562      || TYPE_MODE (type) != BLKmode
2563      || code == CALL_EXPR)
2564    return save_expr (exp);
2565
2566  /* Otherwise reference, protect the address and dereference.  */
2567  return
2568    build_unary_op (INDIRECT_REF, type,
2569		    save_expr (build_unary_op (ADDR_EXPR,
2570					       build_reference_type (type),
2571					       exp)));
2572}
2573
2574/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2575   argument to force evaluation of everything.  */
2576
2577static tree
2578gnat_stabilize_reference_1 (tree e, bool force)
2579{
2580  enum tree_code code = TREE_CODE (e);
2581  tree type = TREE_TYPE (e);
2582  tree result;
2583
2584  /* We cannot ignore const expressions because it might be a reference
2585     to a const array but whose index contains side-effects.  But we can
2586     ignore things that are actual constant or that already have been
2587     handled by this function.  */
2588  if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2589    return e;
2590
2591  switch (TREE_CODE_CLASS (code))
2592    {
2593    case tcc_exceptional:
2594    case tcc_declaration:
2595    case tcc_comparison:
2596    case tcc_expression:
2597    case tcc_reference:
2598    case tcc_vl_exp:
2599      /* If this is a COMPONENT_REF of a fat pointer, save the entire
2600	 fat pointer.  This may be more efficient, but will also allow
2601	 us to more easily find the match for the PLACEHOLDER_EXPR.  */
2602      if (code == COMPONENT_REF
2603	  && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2604	result
2605	  = build3 (code, type,
2606		    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2607		    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2608      /* If the expression has side-effects, then encase it in a SAVE_EXPR
2609	 so that it will only be evaluated once.  */
2610      /* The tcc_reference and tcc_comparison classes could be handled as
2611	 below, but it is generally faster to only evaluate them once.  */
2612      else if (TREE_SIDE_EFFECTS (e) || force)
2613	return save_expr (e);
2614      else
2615	return e;
2616      break;
2617
2618    case tcc_binary:
2619      /* Recursively stabilize each operand.  */
2620      result
2621	= build2 (code, type,
2622		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2623		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2624      break;
2625
2626    case tcc_unary:
2627      /* Recursively stabilize each operand.  */
2628      result
2629	= build1 (code, type,
2630		  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2631      break;
2632
2633    default:
2634      gcc_unreachable ();
2635    }
2636
2637  /* See similar handling in gnat_stabilize_reference.  */
2638  TREE_READONLY (result) = TREE_READONLY (e);
2639  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2640  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2641
2642  if (code == INDIRECT_REF
2643      || code == UNCONSTRAINED_ARRAY_REF
2644      || code == ARRAY_REF
2645      || code == ARRAY_RANGE_REF)
2646    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2647
2648  return result;
2649}
2650
2651/* This is equivalent to stabilize_reference in tree.c but we know how to
2652   handle our own nodes and we take extra arguments.  FORCE says whether to
2653   force evaluation of everything.  We set SUCCESS to true unless we walk
2654   through something we don't know how to stabilize.  */
2655
2656tree
2657gnat_stabilize_reference (tree ref, bool force, bool *success)
2658{
2659  tree type = TREE_TYPE (ref);
2660  enum tree_code code = TREE_CODE (ref);
2661  tree result;
2662
2663  /* Assume we'll success unless proven otherwise.  */
2664  if (success)
2665    *success = true;
2666
2667  switch (code)
2668    {
2669    case CONST_DECL:
2670    case VAR_DECL:
2671    case PARM_DECL:
2672    case RESULT_DECL:
2673      /* No action is needed in this case.  */
2674      return ref;
2675
2676    case ADDR_EXPR:
2677    CASE_CONVERT:
2678    case FLOAT_EXPR:
2679    case FIX_TRUNC_EXPR:
2680    case VIEW_CONVERT_EXPR:
2681      result
2682	= build1 (code, type,
2683		  gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2684					    success));
2685      break;
2686
2687    case INDIRECT_REF:
2688    case UNCONSTRAINED_ARRAY_REF:
2689      result = build1 (code, type,
2690		       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2691						   force));
2692      break;
2693
2694    case COMPONENT_REF:
2695     result = build3 (COMPONENT_REF, type,
2696		      gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2697						success),
2698		      TREE_OPERAND (ref, 1), NULL_TREE);
2699      break;
2700
2701    case BIT_FIELD_REF:
2702      result = build3 (BIT_FIELD_REF, type,
2703		       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2704						 success),
2705		       TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2706      break;
2707
2708    case ARRAY_REF:
2709    case ARRAY_RANGE_REF:
2710      result = build4 (code, type,
2711		       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2712						 success),
2713		       gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2714						   force),
2715		       NULL_TREE, NULL_TREE);
2716      break;
2717
2718    case CALL_EXPR:
2719      result = gnat_stabilize_reference_1 (ref, force);
2720      break;
2721
2722    case COMPOUND_EXPR:
2723      result = build2 (COMPOUND_EXPR, type,
2724		       gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2725						 success),
2726		       gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2727						 success));
2728      break;
2729
2730    case CONSTRUCTOR:
2731      /* Constructors with 1 element are used extensively to formally
2732	 convert objects to special wrapping types.  */
2733      if (TREE_CODE (type) == RECORD_TYPE
2734	  && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
2735	{
2736	  tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
2737	  tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
2738	  result
2739	    = build_constructor_single (type, index,
2740					gnat_stabilize_reference_1 (value,
2741								    force));
2742	}
2743      else
2744	{
2745	  if (success)
2746	    *success = false;
2747	  return ref;
2748	}
2749      break;
2750
2751    case ERROR_MARK:
2752      ref = error_mark_node;
2753
2754      /* ...  fall through to failure ... */
2755
2756      /* If arg isn't a kind of lvalue we recognize, make no change.
2757	 Caller should recognize the error for an invalid lvalue.  */
2758    default:
2759      if (success)
2760	*success = false;
2761      return ref;
2762    }
2763
2764  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2765     may not be sustained across some paths, such as the way via build1 for
2766     INDIRECT_REF.  We reset those flags here in the general case, which is
2767     consistent with the GCC version of this routine.
2768
2769     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2770     paths introduce side-effects where there was none initially (e.g. if a
2771     SAVE_EXPR is built) and we also want to keep track of that.  */
2772  TREE_READONLY (result) = TREE_READONLY (ref);
2773  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2774  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2775
2776  if (code == INDIRECT_REF
2777      || code == UNCONSTRAINED_ARRAY_REF
2778      || code == ARRAY_REF
2779      || code == ARRAY_RANGE_REF)
2780    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2781
2782  return result;
2783}
2784
2785/* If EXPR is an expression that is invariant in the current function, in the
2786   sense that it can be evaluated anywhere in the function and any number of
2787   times, return EXPR or an equivalent expression.  Otherwise return NULL.  */
2788
2789tree
2790gnat_invariant_expr (tree expr)
2791{
2792  tree type = TREE_TYPE (expr), t;
2793
2794  expr = remove_conversions (expr, false);
2795
2796  while ((TREE_CODE (expr) == CONST_DECL
2797	  || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2798	 && decl_function_context (expr) == current_function_decl
2799	 && DECL_INITIAL (expr))
2800    {
2801      expr = DECL_INITIAL (expr);
2802      /* Look into CONSTRUCTORs built to initialize padded types.  */
2803      if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2804	expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2805      expr = remove_conversions (expr, false);
2806    }
2807
2808  /* We are only interested in scalar types at the moment and, even if we may
2809     have gone through padding types in the above loop, we must be back to a
2810     scalar value at this point.  */
2811  if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2812    return NULL_TREE;
2813
2814  if (TREE_CONSTANT (expr))
2815    return fold_convert (type, expr);
2816
2817  t = expr;
2818
2819  while (true)
2820    {
2821      switch (TREE_CODE (t))
2822	{
2823	case COMPONENT_REF:
2824	  if (TREE_OPERAND (t, 2) != NULL_TREE)
2825	    return NULL_TREE;
2826	  break;
2827
2828	case ARRAY_REF:
2829	case ARRAY_RANGE_REF:
2830	  if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2831	      || TREE_OPERAND (t, 2) != NULL_TREE
2832	      || TREE_OPERAND (t, 3) != NULL_TREE)
2833	    return NULL_TREE;
2834	  break;
2835
2836	case BIT_FIELD_REF:
2837	case VIEW_CONVERT_EXPR:
2838	case REALPART_EXPR:
2839	case IMAGPART_EXPR:
2840	  break;
2841
2842	case INDIRECT_REF:
2843	  if (!TREE_READONLY (t)
2844	      || TREE_SIDE_EFFECTS (t)
2845	      || !TREE_THIS_NOTRAP (t))
2846	    return NULL_TREE;
2847	  break;
2848
2849	default:
2850	  goto object;
2851	}
2852
2853      t = TREE_OPERAND (t, 0);
2854    }
2855
2856object:
2857  if (TREE_SIDE_EFFECTS (t))
2858    return NULL_TREE;
2859
2860  if (TREE_CODE (t) == CONST_DECL
2861      && (DECL_EXTERNAL (t)
2862	  || decl_function_context (t) != current_function_decl))
2863    return fold_convert (type, expr);
2864
2865  if (!TREE_READONLY (t))
2866    return NULL_TREE;
2867
2868  if (TREE_CODE (t) == PARM_DECL)
2869    return fold_convert (type, expr);
2870
2871  if (TREE_CODE (t) == VAR_DECL
2872      && (DECL_EXTERNAL (t)
2873	  || decl_function_context (t) != current_function_decl))
2874    return fold_convert (type, expr);
2875
2876  return NULL_TREE;
2877}
2878