/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998,
   1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

2355939Snsouch#include "config.h"
2455939Snsouch#include "system.h"
2555939Snsouch#include "tree.h"
2655939Snsouch#include "rtl.h"
2755939Snsouch#include "tm_p.h"
2855939Snsouch#include "flags.h"
2955939Snsouch#include "function.h"
3055939Snsouch#include "expr.h"
3155939Snsouch#include "toplev.h"
3255939Snsouch#include "ggc.h"
3355939Snsouch#include "target.h"
3455939Snsouch#include "langhooks.h"
3555939Snsouch
3655939Snsouch/* Set to one when set_sizetype has been called.  */
3755939Snsouchstatic int sizetype_set;
3855939Snsouch
3955939Snsouch/* List of types created before set_sizetype has been called.  We do not
4055939Snsouch   make this a GGC root since we want these nodes to be reclaimed.  */
4155939Snsouchstatic tree early_type_list;
4255939Snsouch
4355939Snsouch/* Data type for the expressions representing sizes of data types.
4455939Snsouch   It is the first integer type laid out.  */
4555939Snsouchtree sizetype_tab[(int) TYPE_KIND_LAST];
4655939Snsouch
4755939Snsouch/* If nonzero, this is an upper limit on alignment of structure fields.
4855939Snsouch   The value is measured in bits.  */
4955939Snsouchunsigned int maximum_field_alignment;
5055939Snsouch
5155939Snsouch/* If nonzero, the alignment of a bitstring or (power-)set value, in bits.
5255939Snsouch   May be overridden by front-ends.  */
5355939Snsouchunsigned int set_alignment = 0;
5455939Snsouch
5555939Snsouch/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
5655939Snsouch   allocated in Pmode, not ptr_mode.   Set only by internal_reference_types
5755939Snsouch   called only by a front end.  */
5855939Snsouchstatic int reference_types_internal = 0;
5955939Snsouch
6055939Snsouchstatic void finalize_record_size	PARAMS ((record_layout_info));
6155939Snsouchstatic void finalize_type_size		PARAMS ((tree));
6255939Snsouchstatic void place_union_field		PARAMS ((record_layout_info, tree));
6355939Snsouchstatic unsigned int update_alignment_for_field
6455939Snsouch                                        PARAMS ((record_layout_info, tree,
6555939Snsouch						 unsigned int));
6655939Snsouchextern void debug_rli			PARAMS ((record_layout_info));
6755939Snsouch
6855939Snsouch/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded.  */
6955939Snsouch
7055939Snsouchstatic GTY(()) tree pending_sizes;
7155939Snsouch
7255939Snsouch/* Nonzero means cannot safely call expand_expr now,
7355939Snsouch   so put variable sizes onto `pending_sizes' instead.  */
7455939Snsouch
7555939Snsouchint immediate_size_expand;
7655939Snsouch
7755939Snsouch/* Show that REFERENCE_TYPES are internal and should be Pmode.  Called only
7855939Snsouch   by front end.  */
7955939Snsouch
8055939Snsouchvoid
8155939Snsouchinternal_reference_types ()
8255939Snsouch{
8355939Snsouch  reference_types_internal = 1;
84}
85
86/* Get a list of all the objects put on the pending sizes list.  */
87
88tree
89get_pending_sizes ()
90{
91  tree chain = pending_sizes;
92  tree t;
93
94  /* Put each SAVE_EXPR into the current function.  */
95  for (t = chain; t; t = TREE_CHAIN (t))
96    SAVE_EXPR_CONTEXT (TREE_VALUE (t)) = current_function_decl;
97
98  pending_sizes = 0;
99  return chain;
100}
101
102/* Return nonzero if EXPR is present on the pending sizes list.  */
103
104int
105is_pending_size (expr)
106     tree expr;
107{
108  tree t;
109
110  for (t = pending_sizes; t; t = TREE_CHAIN (t))
111    if (TREE_VALUE (t) == expr)
112      return 1;
113  return 0;
114}
115
116/* Add EXPR to the pending sizes list.  */
117
118void
119put_pending_size (expr)
120     tree expr;
121{
122  /* Strip any simple arithmetic from EXPR to see if it has an underlying
123     SAVE_EXPR.  */
124  while (TREE_CODE_CLASS (TREE_CODE (expr)) == '1'
125	 || (TREE_CODE_CLASS (TREE_CODE (expr)) == '2'
126	    && TREE_CONSTANT (TREE_OPERAND (expr, 1))))
127    expr = TREE_OPERAND (expr, 0);
128
129  if (TREE_CODE (expr) == SAVE_EXPR)
130    pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
131}
132
133/* Put a chain of objects into the pending sizes list, which must be
134   empty.  */
135
136void
137put_pending_sizes (chain)
138     tree chain;
139{
140  if (pending_sizes)
141    abort ();
142
143  pending_sizes = chain;
144}
145
146/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
147   to serve as the actual size-expression for a type or decl.  */
148
149tree
150variable_size (size)
151     tree size;
152{
153  /* If the language-processor is to take responsibility for variable-sized
154     items (e.g., languages which have elaboration procedures like Ada),
155     just return SIZE unchanged.  Likewise for self-referential sizes and
156     constant sizes.  */
157  if (TREE_CONSTANT (size)
158      || (*lang_hooks.decls.global_bindings_p) () < 0
159      || contains_placeholder_p (size))
160    return size;
161
162  size = save_expr (size);
163
164  /* If an array with a variable number of elements is declared, and
165     the elements require destruction, we will emit a cleanup for the
166     array.  That cleanup is run both on normal exit from the block
167     and in the exception-handler for the block.  Normally, when code
168     is used in both ordinary code and in an exception handler it is
169     `unsaved', i.e., all SAVE_EXPRs are recalculated.  However, we do
170     not wish to do that here; the array-size is the same in both
171     places.  */
172  if (TREE_CODE (size) == SAVE_EXPR)
173    SAVE_EXPR_PERSISTENT_P (size) = 1;
174
175  if ((*lang_hooks.decls.global_bindings_p) ())
176    {
177      if (TREE_CONSTANT (size))
178	error ("type size can't be explicitly evaluated");
179      else
180	error ("variable-size type declared outside of any function");
181
182      return size_one_node;
183    }
184
185  if (immediate_size_expand)
186    /* NULL_RTX is not defined; neither is the rtx type.
187       Also, we would like to pass const0_rtx here, but don't have it.  */
188    expand_expr (size, expand_expr (integer_zero_node, NULL_RTX, VOIDmode, 0),
189		 VOIDmode, 0);
190  else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
191    /* The front-end doesn't want us to keep a list of the expressions
192       that determine sizes for variable size objects.  */
193    ;
194  else
195    put_pending_size (size);
196
197  return size;
198}
199
200#ifndef MAX_FIXED_MODE_SIZE
201#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
202#endif
203
204/* Return the machine mode to use for a nonscalar of SIZE bits.
205   The mode must be in class CLASS, and have exactly that many bits.
206   If LIMIT is nonzero, modes of wider than MAX_FIXED_MODE_SIZE will not
207   be used.  */
208
209enum machine_mode
210mode_for_size (size, class, limit)
211     unsigned int size;
212     enum mode_class class;
213     int limit;
214{
215  enum machine_mode mode;
216
217  if (limit && size > MAX_FIXED_MODE_SIZE)
218    return BLKmode;
219
220  /* Get the first mode which has this size, in the specified class.  */
221  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
222       mode = GET_MODE_WIDER_MODE (mode))
223    if (GET_MODE_BITSIZE (mode) == size)
224      return mode;
225
226  return BLKmode;
227}
228
229/* Similar, except passed a tree node.  */
230
231enum machine_mode
232mode_for_size_tree (size, class, limit)
233     tree size;
234     enum mode_class class;
235     int limit;
236{
237  if (TREE_CODE (size) != INTEGER_CST
238      /* What we really want to say here is that the size can fit in a
239	 host integer, but we know there's no way we'd find a mode for
240	 this many bits, so there's no point in doing the precise test.  */
241      || compare_tree_int (size, 1000) > 0)
242    return BLKmode;
243  else
244    return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
245}
246
247/* Similar, but never return BLKmode; return the narrowest mode that
248   contains at least the requested number of bits.  */
249
250enum machine_mode
251smallest_mode_for_size (size, class)
252     unsigned int size;
253     enum mode_class class;
254{
255  enum machine_mode mode;
256
257  /* Get the first mode which has at least this size, in the
258     specified class.  */
259  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
260       mode = GET_MODE_WIDER_MODE (mode))
261    if (GET_MODE_BITSIZE (mode) >= size)
262      return mode;
263
264  abort ();
265}
266
267/* Find an integer mode of the exact same size, or BLKmode on failure.  */
268
269enum machine_mode
270int_mode_for_mode (mode)
271     enum machine_mode mode;
272{
273  switch (GET_MODE_CLASS (mode))
274    {
275    case MODE_INT:
276    case MODE_PARTIAL_INT:
277      break;
278
279    case MODE_COMPLEX_INT:
280    case MODE_COMPLEX_FLOAT:
281    case MODE_FLOAT:
282    case MODE_VECTOR_INT:
283    case MODE_VECTOR_FLOAT:
284      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
285      break;
286
287    case MODE_RANDOM:
288      if (mode == BLKmode)
289	break;
290
291      /* ... fall through ...  */
292
293    case MODE_CC:
294    default:
295      abort ();
296    }
297
298  return mode;
299}
300
301/* Return the alignment of MODE. This will be bounded by 1 and
302   BIGGEST_ALIGNMENT.  */
303
304unsigned int
305get_mode_alignment (mode)
306     enum machine_mode mode;
307{
308  unsigned int alignment;
309
310  if (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
311      || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT)
312    alignment = GET_MODE_UNIT_SIZE (mode);
313  else
314    alignment = GET_MODE_SIZE (mode);
315
316  /* Extract the LSB of the size.  */
317  alignment = alignment & -alignment;
318  alignment *= BITS_PER_UNIT;
319
320  alignment = MIN (BIGGEST_ALIGNMENT, MAX (1, alignment));
321  return alignment;
322}
323
324/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
325   This can only be applied to objects of a sizetype.  */
326
327tree
328round_up (value, divisor)
329     tree value;
330     int divisor;
331{
332  tree arg = size_int_type (divisor, TREE_TYPE (value));
333
334  return size_binop (MULT_EXPR, size_binop (CEIL_DIV_EXPR, value, arg), arg);
335}
336
337/* Likewise, but round down.  */
338
339tree
340round_down (value, divisor)
341     tree value;
342     int divisor;
343{
344  tree arg = size_int_type (divisor, TREE_TYPE (value));
345
346  return size_binop (MULT_EXPR, size_binop (FLOOR_DIV_EXPR, value, arg), arg);
347}
348
349/* Set the size, mode and alignment of a ..._DECL node.
350   TYPE_DECL does need this for C++.
351   Note that LABEL_DECL and CONST_DECL nodes do not need this,
352   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
353   Don't call layout_decl for them.
354
355   KNOWN_ALIGN is the amount of alignment we can assume this
356   decl has with no special effort.  It is relevant only for FIELD_DECLs
357   and depends on the previous fields.
358   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
359   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
360   the record will be aligned to suit.  */
361
362void
363layout_decl (decl, known_align)
364     tree decl;
365     unsigned int known_align;
366{
367  tree type = TREE_TYPE (decl);
368  enum tree_code code = TREE_CODE (decl);
369  rtx rtl = NULL_RTX;
370
371  if (code == CONST_DECL)
372    return;
373  else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
374	   && code != TYPE_DECL && code != FIELD_DECL)
375    abort ();
376
377  rtl = DECL_RTL_IF_SET (decl);
378
379  if (type == error_mark_node)
380    type = void_type_node;
381
382  /* Usually the size and mode come from the data type without change,
383     however, the front-end may set the explicit width of the field, so its
384     size may not be the same as the size of its type.  This happens with
385     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
386     also happens with other fields.  For example, the C++ front-end creates
387     zero-sized fields corresponding to empty base classes, and depends on
388     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
389     size in bytes from the size in bits.  If we have already set the mode,
390     don't set it again since we can be called twice for FIELD_DECLs.  */
391
392  TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
393  if (DECL_MODE (decl) == VOIDmode)
394    DECL_MODE (decl) = TYPE_MODE (type);
395
396  if (DECL_SIZE (decl) == 0)
397    {
398      DECL_SIZE (decl) = TYPE_SIZE (type);
399      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
400    }
401  else
402    DECL_SIZE_UNIT (decl)
403      = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
404				       bitsize_unit_node));
405
406  /* Force alignment required for the data type.
407     But if the decl itself wants greater alignment, don't override that.
408     Likewise, if the decl is packed, don't override it.  */
409  if (! (code == FIELD_DECL && DECL_BIT_FIELD (decl))
410      && (DECL_ALIGN (decl) == 0
411	  || (! (code == FIELD_DECL && DECL_PACKED (decl))
412	      && TYPE_ALIGN (type) > DECL_ALIGN (decl))))
413    {
414      DECL_ALIGN (decl) = TYPE_ALIGN (type);
415      DECL_USER_ALIGN (decl) = 0;
416    }
417
418  /* For fields, set the bit field type and update the alignment.  */
419  if (code == FIELD_DECL)
420    {
421      DECL_BIT_FIELD_TYPE (decl) = DECL_BIT_FIELD (decl) ? type : 0;
422      if (maximum_field_alignment != 0)
423	DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
424
425      /* If the field is of variable size, we can't misalign it since we
426	 have no way to make a temporary to align the result.  But this
427	 isn't an issue if the decl is not addressable.  Likewise if it
428	 is of unknown size.  */
429      else if (DECL_PACKED (decl)
430	       && (DECL_NONADDRESSABLE_P (decl)
431		   || DECL_SIZE_UNIT (decl) == 0
432		   || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
433	{
434	  DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
435	  DECL_USER_ALIGN (decl) = 0;
436	}
437    }
438
439  /* See if we can use an ordinary integer mode for a bit-field.
440     Conditions are: a fixed size that is correct for another mode
441     and occupying a complete byte or bytes on proper boundary.  */
442  if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
443      && TYPE_SIZE (type) != 0
444      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
445      && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
446    {
447      enum machine_mode xmode
448	= mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
449
450      if (xmode != BLKmode && known_align >= GET_MODE_ALIGNMENT (xmode))
451	{
452	  DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
453				   DECL_ALIGN (decl));
454	  DECL_MODE (decl) = xmode;
455	  DECL_BIT_FIELD (decl) = 0;
456	}
457    }
458
459  /* Turn off DECL_BIT_FIELD if we won't need it set.  */
460  if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
461      && TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
462      && known_align >= TYPE_ALIGN (type)
463      && DECL_ALIGN (decl) >= TYPE_ALIGN (type)
464      && DECL_SIZE_UNIT (decl) != 0)
465    DECL_BIT_FIELD (decl) = 0;
466
467  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
468  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
469    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
470  if (DECL_SIZE_UNIT (decl) != 0
471      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
472    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
473
474  /* If requested, warn about definitions of large data objects.  */
475  if (warn_larger_than
476      && (code == VAR_DECL || code == PARM_DECL)
477      && ! DECL_EXTERNAL (decl))
478    {
479      tree size = DECL_SIZE_UNIT (decl);
480
481      if (size != 0 && TREE_CODE (size) == INTEGER_CST
482	  && compare_tree_int (size, larger_than_size) > 0)
483	{
484	  unsigned int size_as_int = TREE_INT_CST_LOW (size);
485
486	  if (compare_tree_int (size, size_as_int) == 0)
487	    warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
488	  else
489	    warning_with_decl (decl, "size of `%s' is larger than %d bytes",
490			       larger_than_size);
491	}
492    }
493
494  /* If the RTL was already set, update its mode and mem attributes.  */
495  if (rtl)
496    {
497      PUT_MODE (rtl, DECL_MODE (decl));
498      SET_DECL_RTL (decl, 0);
499      set_mem_attributes (rtl, decl, 1);
500      SET_DECL_RTL (decl, rtl);
501    }
502}
503
504/* Hook for a front-end function that can modify the record layout as needed
505   immediately before it is finalized.  */
506
507void (*lang_adjust_rli) PARAMS ((record_layout_info)) = 0;
508
509void
510set_lang_adjust_rli (f)
511     void (*f) PARAMS ((record_layout_info));
512{
513  lang_adjust_rli = f;
514}
515
516/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
517   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
518   is to be passed to all other layout functions for this record.  It is the
519   responsibility of the caller to call `free' for the storage returned.
520   Note that garbage collection is not permitted until we finish laying
521   out the record.  */
522
523record_layout_info
524start_record_layout (t)
525     tree t;
526{
527  record_layout_info rli
528    = (record_layout_info) xmalloc (sizeof (struct record_layout_info_s));
529
530  rli->t = t;
531
532  /* If the type has a minimum specified alignment (via an attribute
533     declaration, for example) use it -- otherwise, start with a
534     one-byte alignment.  */
535  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
536  rli->unpacked_align = rli->unpadded_align = rli->record_align;
537  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
538
539#ifdef STRUCTURE_SIZE_BOUNDARY
540  /* Packed structures don't need to have minimum size.  */
541  if (! TYPE_PACKED (t))
542    rli->record_align = MAX (rli->record_align, STRUCTURE_SIZE_BOUNDARY);
543#endif
544
545  rli->offset = size_zero_node;
546  rli->bitpos = bitsize_zero_node;
547  rli->prev_field = 0;
548  rli->pending_statics = 0;
549  rli->packed_maybe_necessary = 0;
550
551  return rli;
552}
553
554/* These four routines perform computations that convert between
555   the offset/bitpos forms and byte and bit offsets.  */
556
557tree
558bit_from_pos (offset, bitpos)
559     tree offset, bitpos;
560{
561  return size_binop (PLUS_EXPR, bitpos,
562		     size_binop (MULT_EXPR, convert (bitsizetype, offset),
563				 bitsize_unit_node));
564}
565
566tree
567byte_from_pos (offset, bitpos)
568     tree offset, bitpos;
569{
570  return size_binop (PLUS_EXPR, offset,
571		     convert (sizetype,
572			      size_binop (TRUNC_DIV_EXPR, bitpos,
573					  bitsize_unit_node)));
574}
575
576void
577pos_from_byte (poffset, pbitpos, off_align, pos)
578     tree *poffset, *pbitpos;
579     unsigned int off_align;
580     tree pos;
581{
582  *poffset
583    = size_binop (MULT_EXPR,
584		  convert (sizetype,
585			   size_binop (FLOOR_DIV_EXPR, pos,
586				       bitsize_int (off_align
587						    / BITS_PER_UNIT))),
588		  size_int (off_align / BITS_PER_UNIT));
589  *pbitpos = size_binop (MULT_EXPR,
590			 size_binop (FLOOR_MOD_EXPR, pos,
591				     bitsize_int (off_align / BITS_PER_UNIT)),
592			 bitsize_unit_node);
593}
594
595void
596pos_from_bit (poffset, pbitpos, off_align, pos)
597     tree *poffset, *pbitpos;
598     unsigned int off_align;
599     tree pos;
600{
601  *poffset = size_binop (MULT_EXPR,
602			 convert (sizetype,
603				  size_binop (FLOOR_DIV_EXPR, pos,
604					      bitsize_int (off_align))),
605			 size_int (off_align / BITS_PER_UNIT));
606  *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
607}
608
609/* Given a pointer to bit and byte offsets and an offset alignment,
610   normalize the offsets so they are within the alignment.  */
611
612void
613normalize_offset (poffset, pbitpos, off_align)
614     tree *poffset, *pbitpos;
615     unsigned int off_align;
616{
617  /* If the bit position is now larger than it should be, adjust it
618     downwards.  */
619  if (compare_tree_int (*pbitpos, off_align) >= 0)
620    {
621      tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
622				      bitsize_int (off_align));
623
624      *poffset
625	= size_binop (PLUS_EXPR, *poffset,
626		      size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
627				  size_int (off_align / BITS_PER_UNIT)));
628
629      *pbitpos
630	= size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
631    }
632}
633
634/* Print debugging information about the information in RLI.  */
635
636void
637debug_rli (rli)
638     record_layout_info rli;
639{
640  print_node_brief (stderr, "type", rli->t, 0);
641  print_node_brief (stderr, "\noffset", rli->offset, 0);
642  print_node_brief (stderr, " bitpos", rli->bitpos, 0);
643
644  fprintf (stderr, "\naligns: rec = %u, unpack = %u, unpad = %u, off = %u\n",
645	   rli->record_align, rli->unpacked_align, rli->unpadded_align,
646	   rli->offset_align);
647  if (rli->packed_maybe_necessary)
648    fprintf (stderr, "packed may be necessary\n");
649
650  if (rli->pending_statics)
651    {
652      fprintf (stderr, "pending statics:\n");
653      debug_tree (rli->pending_statics);
654    }
655}
656
657/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
658   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */
659
660void
661normalize_rli (rli)
662     record_layout_info rli;
663{
664  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
665}
666
667/* Returns the size in bytes allocated so far.  */
668
669tree
670rli_size_unit_so_far (rli)
671     record_layout_info rli;
672{
673  return byte_from_pos (rli->offset, rli->bitpos);
674}
675
676/* Returns the size in bits allocated so far.  */
677
678tree
679rli_size_so_far (rli)
680     record_layout_info rli;
681{
682  return bit_from_pos (rli->offset, rli->bitpos);
683}
684
685/* FIELD is about to be added to RLI->T.  The alignment (in bits) of
686   the next available location is given by KNOWN_ALIGN.  Update the
687   variable alignment fields in RLI, and return the alignment to give
688   the FIELD.  */
689
690static unsigned int
691update_alignment_for_field (rli, field, known_align)
692     record_layout_info rli;
693     tree field;
694     unsigned int known_align;
695{
696  /* The alignment required for FIELD.  */
697  unsigned int desired_align;
698  /* The type of this field.  */
699  tree type = TREE_TYPE (field);
700  /* True if the field was explicitly aligned by the user.  */
701  bool user_align;
702
703  /* Lay out the field so we know what alignment it needs.  For a
704     packed field, use the alignment as specified, disregarding what
705     the type would want.  */
706  desired_align = DECL_ALIGN (field);
707  user_align = DECL_USER_ALIGN (field);
708  layout_decl (field, known_align);
709  if (! DECL_PACKED (field))
710    {
711      desired_align = DECL_ALIGN (field);
712      user_align = DECL_USER_ALIGN (field);
713    }
714
715  /* Some targets (i.e. i386, VMS) limit struct field alignment
716     to a lower boundary than alignment of variables unless
717     it was overridden by attribute aligned.  */
718#ifdef BIGGEST_FIELD_ALIGNMENT
719  if (!user_align)
720    desired_align
721      = MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
722#endif
723
724#ifdef ADJUST_FIELD_ALIGN
725  if (!user_align)
726    desired_align = ADJUST_FIELD_ALIGN (field, desired_align);
727#endif
728
729  /* Record must have at least as much alignment as any field.
730     Otherwise, the alignment of the field within the record is
731     meaningless.  */
732  if ((* targetm.ms_bitfield_layout_p) (rli->t)
733      && type != error_mark_node
734      && DECL_BIT_FIELD_TYPE (field)
735      && ! integer_zerop (TYPE_SIZE (type)))
736    {
737      /* Here, the alignment of the underlying type of a bitfield can
738	 affect the alignment of a record; even a zero-sized field
739	 can do this.  The alignment should be to the alignment of
740	 the type, except that for zero-size bitfields this only
741	 applies if there was an immediately prior, nonzero-size
742	 bitfield.  (That's the way it is, experimentally.) */
743      if (! integer_zerop (DECL_SIZE (field))
744 	  ? ! DECL_PACKED (field)
745 	  : (rli->prev_field
746 	     && DECL_BIT_FIELD_TYPE (rli->prev_field)
747 	     && ! integer_zerop (DECL_SIZE (rli->prev_field))))
748	{
749	  unsigned int type_align = TYPE_ALIGN (type);
750	  type_align = MAX (type_align, desired_align);
751	  if (maximum_field_alignment != 0)
752	    type_align = MIN (type_align, maximum_field_alignment);
753	  rli->record_align = MAX (rli->record_align, type_align);
754	  rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
755	  rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
756	}
757      else
758	desired_align = 1;
759    }
760  else
761#ifdef PCC_BITFIELD_TYPE_MATTERS
762  if (PCC_BITFIELD_TYPE_MATTERS && type != error_mark_node
763      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
764      && DECL_BIT_FIELD_TYPE (field)
765      && ! integer_zerop (TYPE_SIZE (type)))
766    {
767      /* For these machines, a zero-length field does not
768	 affect the alignment of the structure as a whole.
769	 It does, however, affect the alignment of the next field
770	 within the structure.  */
771      if (! integer_zerop (DECL_SIZE (field)))
772	rli->record_align = MAX (rli->record_align, desired_align);
773      else if (! DECL_PACKED (field) && !user_align)
774	desired_align = TYPE_ALIGN (type);
775
776      /* A named bit field of declared type `int'
777	 forces the entire structure to have `int' alignment.  */
778      if (DECL_NAME (field) != 0)
779	{
780	  unsigned int type_align = TYPE_ALIGN (type);
781
782#ifdef ADJUST_FIELD_ALIGN
783	  if (! TYPE_USER_ALIGN (type))
784	    type_align = ADJUST_FIELD_ALIGN (field, type_align);
785#endif
786
787	  if (maximum_field_alignment != 0)
788	    type_align = MIN (type_align, maximum_field_alignment);
789	  else if (DECL_PACKED (field))
790	    type_align = MIN (type_align, BITS_PER_UNIT);
791
792	  rli->record_align = MAX (rli->record_align, type_align);
793	  rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
794	  if (warn_packed)
795	    rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
796	  user_align |= TYPE_USER_ALIGN (type);
797	}
798    }
799  else
800#endif
801    {
802      rli->record_align = MAX (rli->record_align, desired_align);
803      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
804      rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
805    }
806
807  TYPE_USER_ALIGN (rli->t) |= user_align;
808
809  DECL_ALIGN (field) = desired_align;
810
811  return desired_align;
812}
813
814/* Called from place_field to handle unions.  */
815
816static void
817place_union_field (rli, field)
818     record_layout_info rli;
819     tree field;
820{
821  update_alignment_for_field (rli, field, /*known_align=*/0);
822
823  DECL_FIELD_OFFSET (field) = size_zero_node;
824  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
825  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
826
827  /* We assume the union's size will be a multiple of a byte so we don't
828     bother with BITPOS.  */
829  if (TREE_CODE (rli->t) == UNION_TYPE)
830    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
831  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
832    rli->offset = fold (build (COND_EXPR, sizetype,
833			       DECL_QUALIFIER (field),
834			       DECL_SIZE_UNIT (field), rli->offset));
835}
836
837/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
838   is a FIELD_DECL to be added after those fields already present in
839   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
840   callers that desire that behavior must manually perform that step.)  */
841
842void
843place_field (rli, field)
844     record_layout_info rli;
845     tree field;
846{
847  /* The alignment required for FIELD.  */
848  unsigned int desired_align;
849  /* The alignment FIELD would have if we just dropped it into the
850     record as it presently stands.  */
851  unsigned int known_align;
852  unsigned int actual_align;
853  /* The type of this field.  */
854  tree type = TREE_TYPE (field);
855
856  if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
857      return;
858
859  /* If FIELD is static, then treat it like a separate variable, not
860     really like a structure field.  If it is a FUNCTION_DECL, it's a
861     method.  In both cases, all we do is lay out the decl, and we do
862     it *after* the record is laid out.  */
863  if (TREE_CODE (field) == VAR_DECL)
864    {
865      rli->pending_statics = tree_cons (NULL_TREE, field,
866					rli->pending_statics);
867      return;
868    }
869
870  /* Enumerators and enum types which are local to this class need not
871     be laid out.  Likewise for initialized constant fields.  */
872  else if (TREE_CODE (field) != FIELD_DECL)
873    return;
874
875  /* Unions are laid out very differently than records, so split
876     that code off to another function.  */
877  else if (TREE_CODE (rli->t) != RECORD_TYPE)
878    {
879      place_union_field (rli, field);
880      return;
881    }
882
883  /* Work out the known alignment so far.  Note that A & (-A) is the
884     value of the least-significant bit in A that is one.  */
885  if (! integer_zerop (rli->bitpos))
886    known_align = (tree_low_cst (rli->bitpos, 1)
887		   & - tree_low_cst (rli->bitpos, 1));
888  else if (integer_zerop (rli->offset))
889    known_align = BIGGEST_ALIGNMENT;
890  else if (host_integerp (rli->offset, 1))
891    known_align = (BITS_PER_UNIT
892		   * (tree_low_cst (rli->offset, 1)
893		      & - tree_low_cst (rli->offset, 1)));
894  else
895    known_align = rli->offset_align;
896
897  desired_align = update_alignment_for_field (rli, field, known_align);
898
899  if (warn_packed && DECL_PACKED (field))
900    {
901      if (known_align > TYPE_ALIGN (type))
902	{
903	  if (TYPE_ALIGN (type) > desired_align)
904	    {
905	      if (STRICT_ALIGNMENT)
906		warning_with_decl (field, "packed attribute causes inefficient alignment for `%s'");
907	      else
908		warning_with_decl (field, "packed attribute is unnecessary for `%s'");
909	    }
910	}
911      else
912	rli->packed_maybe_necessary = 1;
913    }
914
915  /* Does this field automatically have alignment it needs by virtue
916     of the fields that precede it and the record's own alignment?  */
917  if (known_align < desired_align)
918    {
919      /* No, we need to skip space before this field.
920	 Bump the cumulative size to multiple of field alignment.  */
921
922      if (warn_padded)
923	warning_with_decl (field, "padding struct to align `%s'");
924
925      /* If the alignment is still within offset_align, just align
926	 the bit position.  */
927      if (desired_align < rli->offset_align)
928	rli->bitpos = round_up (rli->bitpos, desired_align);
929      else
930	{
931	  /* First adjust OFFSET by the partial bits, then align.  */
932	  rli->offset
933	    = size_binop (PLUS_EXPR, rli->offset,
934			  convert (sizetype,
935				   size_binop (CEIL_DIV_EXPR, rli->bitpos,
936					       bitsize_unit_node)));
937	  rli->bitpos = bitsize_zero_node;
938
939	  rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
940	}
941
942      if (! TREE_CONSTANT (rli->offset))
943	rli->offset_align = desired_align;
944
945    }
946
947  /* Handle compatibility with PCC.  Note that if the record has any
948     variable-sized fields, we need not worry about compatibility.  */
949#ifdef PCC_BITFIELD_TYPE_MATTERS
950  if (PCC_BITFIELD_TYPE_MATTERS
951      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
952      && TREE_CODE (field) == FIELD_DECL
953      && type != error_mark_node
954      && DECL_BIT_FIELD (field)
955      && ! DECL_PACKED (field)
956      && maximum_field_alignment == 0
957      && ! integer_zerop (DECL_SIZE (field))
958      && host_integerp (DECL_SIZE (field), 1)
959      && host_integerp (rli->offset, 1)
960      && host_integerp (TYPE_SIZE (type), 1))
961    {
962      unsigned int type_align = TYPE_ALIGN (type);
963      tree dsize = DECL_SIZE (field);
964      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
965      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
966      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
967
968#ifdef ADJUST_FIELD_ALIGN
969      if (! TYPE_USER_ALIGN (type))
970	type_align = ADJUST_FIELD_ALIGN (field, type_align);
971#endif
972
973      /* A bit field may not span more units of alignment of its type
974	 than its type itself.  Advance to next boundary if necessary.  */
975      if ((((offset * BITS_PER_UNIT + bit_offset + field_size +
976	     type_align - 1)
977	    / type_align)
978	   - (offset * BITS_PER_UNIT + bit_offset) / type_align)
979	  > tree_low_cst (TYPE_SIZE (type), 1) / type_align)
980	rli->bitpos = round_up (rli->bitpos, type_align);
981
982      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
983    }
984#endif
985
986#ifdef BITFIELD_NBYTES_LIMITED
987  if (BITFIELD_NBYTES_LIMITED
988      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
989      && TREE_CODE (field) == FIELD_DECL
990      && type != error_mark_node
991      && DECL_BIT_FIELD_TYPE (field)
992      && ! DECL_PACKED (field)
993      && ! integer_zerop (DECL_SIZE (field))
994      && host_integerp (DECL_SIZE (field), 1)
995      && host_integerp (rli->offset, 1)
996      && host_integerp (TYPE_SIZE (type), 1))
997    {
998      unsigned int type_align = TYPE_ALIGN (type);
999      tree dsize = DECL_SIZE (field);
1000      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
1001      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
1002      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
1003
1004#ifdef ADJUST_FIELD_ALIGN
1005      if (! TYPE_USER_ALIGN (type))
1006	type_align = ADJUST_FIELD_ALIGN (field, type_align);
1007#endif
1008
1009      if (maximum_field_alignment != 0)
1010	type_align = MIN (type_align, maximum_field_alignment);
1011      /* ??? This test is opposite the test in the containing if
1012	 statement, so this code is unreachable currently.  */
1013      else if (DECL_PACKED (field))
1014	type_align = MIN (type_align, BITS_PER_UNIT);
1015
1016      /* A bit field may not span the unit of alignment of its type.
1017	 Advance to next boundary if necessary.  */
1018      /* ??? This code should match the code above for the
1019	 PCC_BITFIELD_TYPE_MATTERS case.  */
1020      if ((offset * BITS_PER_UNIT + bit_offset) / type_align
1021	  != ((offset * BITS_PER_UNIT + bit_offset + field_size - 1)
1022	      / type_align))
1023	rli->bitpos = round_up (rli->bitpos, type_align);
1024
1025      TYPE_USER_ALIGN (rli->t) |= TYPE_USER_ALIGN (type);
1026    }
1027#endif
1028
1029  /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
1030     A subtlety:
1031	When a bit field is inserted into a packed record, the whole
1032	size of the underlying type is used by one or more same-size
1033	adjacent bitfields.  (That is, if its long:3, 32 bits is
1034	used in the record, and any additional adjacent long bitfields are
1035	packed into the same chunk of 32 bits. However, if the size
1036	changes, a new field of that size is allocated.)  In an unpacked
	record, this is the same as using alignment, but not equivalent
1038	when packing.
1039
     Note: for compatibility, we use the type size, not the type alignment
1041     to determine alignment, since that matches the documentation */
1042
1043  if ((* targetm.ms_bitfield_layout_p) (rli->t)
1044       && ((DECL_BIT_FIELD_TYPE (field) && ! DECL_PACKED (field))
1045 	  || (rli->prev_field && ! DECL_PACKED (rli->prev_field))))
1046    {
1047      /* At this point, either the prior or current are bitfields,
1048	 (possibly both), and we're dealing with MS packing.  */
1049      tree prev_saved = rli->prev_field;
1050
1051      /* Is the prior field a bitfield?  If so, handle "runs" of same
1052	 type size fields.  */
1053      if (rli->prev_field /* necessarily a bitfield if it exists.  */)
1054	{
1055	  /* If both are bitfields, nonzero, and the same size, this is
1056	     the middle of a run.  Zero declared size fields are special
1057	     and handled as "end of run". (Note: it's nonzero declared
1058	     size, but equal type sizes!) (Since we know that both
1059	     the current and previous fields are bitfields by the
1060	     time we check it, DECL_SIZE must be present for both.) */
1061	  if (DECL_BIT_FIELD_TYPE (field)
1062	      && !integer_zerop (DECL_SIZE (field))
1063	      && !integer_zerop (DECL_SIZE (rli->prev_field))
1064	      && simple_cst_equal (TYPE_SIZE (type),
1065		   TYPE_SIZE (TREE_TYPE (rli->prev_field))) )
1066	    {
1067	      /* We're in the middle of a run of equal type size fields; make
1068		 sure we realign if we run out of bits.  (Not decl size,
1069		 type size!) */
1070	      int bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
1071	      tree type_size = TYPE_SIZE(TREE_TYPE(rli->prev_field));
1072
1073	      if (rli->remaining_in_alignment < bitsize)
1074		{
1075		  /* out of bits; bump up to next 'word'.  */
1076		  rli->offset = DECL_FIELD_OFFSET (rli->prev_field);
1077		  rli->bitpos = size_binop (PLUS_EXPR,
1078				      type_size,
1079				      DECL_FIELD_BIT_OFFSET(rli->prev_field));
1080		  rli->prev_field = field;
1081		  rli->remaining_in_alignment = TREE_INT_CST_LOW (type_size);
1082		}
1083	      rli->remaining_in_alignment -= bitsize;
1084	    }
1085	  else
1086	    {
1087	      /* End of a run: if leaving a run of bitfields of the same type
1088		 size, we have to "use up" the rest of the bits of the type
1089		 size.
1090
1091		 Compute the new position as the sum of the size for the prior
1092		 type and where we first started working on that type.
1093		 Note: since the beginning of the field was aligned then
1094		 of course the end will be too.  No round needed.  */
1095
1096	      if (!integer_zerop (DECL_SIZE (rli->prev_field)))
1097		{
1098		  tree type_size = TYPE_SIZE(TREE_TYPE(rli->prev_field));
1099		  rli->bitpos = size_binop (PLUS_EXPR,
1100				      type_size,
1101				      DECL_FIELD_BIT_OFFSET(rli->prev_field));
1102		}
1103	      else
1104		{
1105		  /* We "use up" size zero fields; the code below should behave
1106		     as if the prior field was not a bitfield.  */
1107		  prev_saved = NULL;
1108		}
1109
1110	      /* Cause a new bitfield to be captured, either this time (if
1111		 currently a bitfield) or next time we see one.  */
1112	      if (!DECL_BIT_FIELD_TYPE(field)
1113		 || integer_zerop (DECL_SIZE (field)))
1114		{
1115		  rli->prev_field = NULL;
1116		}
1117	    }
1118	  normalize_rli (rli);
1119        }
1120
1121      /* If we're starting a new run of same size type bitfields
1122	 (or a run of non-bitfields), set up the "first of the run"
1123	 fields.
1124
1125	 That is, if the current field is not a bitfield, or if there
1126	 was a prior bitfield the type sizes differ, or if there wasn't
1127	 a prior bitfield the size of the current field is nonzero.
1128
1129	 Note: we must be sure to test ONLY the type size if there was
1130	 a prior bitfield and ONLY for the current field being zero if
1131	 there wasn't.  */
1132
1133      if (!DECL_BIT_FIELD_TYPE (field)
1134	  || ( prev_saved != NULL
1135	       ? !simple_cst_equal (TYPE_SIZE (type),
1136	              TYPE_SIZE (TREE_TYPE (prev_saved)))
1137	       : !integer_zerop (DECL_SIZE (field)) ))
1138	{
1139	  unsigned int type_align = 8;  /* Never below 8 for compatability */
1140
1141	  /* (When not a bitfield), we could be seeing a flex array (with
1142	     no DECL_SIZE).  Since we won't be using remaining_in_alignment
1143	     until we see a bitfield (and come by here again) we just skip
1144	     calculating it.  */
1145
1146	  if (DECL_SIZE (field) != NULL)
1147	      rli->remaining_in_alignment
1148		  = TREE_INT_CST_LOW (TYPE_SIZE(TREE_TYPE(field)))
1149		    - TREE_INT_CST_LOW (DECL_SIZE (field));
1150
1151	  /* Now align (conventionally) for the new type.  */
1152	  if (!DECL_PACKED(field))
1153	      type_align = MAX(TYPE_ALIGN (type), type_align);
1154
1155	  if (prev_saved
1156	      && DECL_BIT_FIELD_TYPE (prev_saved)
1157	      /* If the previous bit-field is zero-sized, we've already
1158		 accounted for its alignment needs (or ignored it, if
1159		 appropriate) while placing it.  */
1160	      && ! integer_zerop (DECL_SIZE (prev_saved)))
1161	    type_align = MAX (type_align,
1162			      TYPE_ALIGN (TREE_TYPE (prev_saved)));
1163
1164	  if (maximum_field_alignment != 0)
1165	    type_align = MIN (type_align, maximum_field_alignment);
1166
1167	  rli->bitpos = round_up (rli->bitpos, type_align);
1168          /* If we really aligned, don't allow subsequent bitfields
1169	     to undo that.  */
1170	  rli->prev_field = NULL;
1171	}
1172    }
1173
1174  /* Offset so far becomes the position of this field after normalizing.  */
1175  normalize_rli (rli);
1176  DECL_FIELD_OFFSET (field) = rli->offset;
1177  DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1178  SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1179
1180  /* If this field ended up more aligned than we thought it would be (we
1181     approximate this by seeing if its position changed), lay out the field
1182     again; perhaps we can use an integral mode for it now.  */
1183  if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1184    actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1185		    & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1186  else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1187    actual_align = BIGGEST_ALIGNMENT;
1188  else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
1189    actual_align = (BITS_PER_UNIT
1190		   * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1191		      & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1192  else
1193    actual_align = DECL_OFFSET_ALIGN (field);
1194
1195  if (known_align != actual_align)
1196    layout_decl (field, actual_align);
1197
1198  /* Only the MS bitfields use this.  */
1199  if (rli->prev_field == NULL && DECL_BIT_FIELD_TYPE(field))
1200      rli->prev_field = field;
1201
1202  /* Now add size of this field to the size of the record.  If the size is
1203     not constant, treat the field as being a multiple of bytes and just
1204     adjust the offset, resetting the bit position.  Otherwise, apportion the
1205     size amongst the bit position and offset.  First handle the case of an
1206     unspecified size, which can happen when we have an invalid nested struct
1207     definition, such as struct j { struct j { int i; } }.  The error message
1208     is printed in finish_struct.  */
1209  if (DECL_SIZE (field) == 0)
1210    /* Do nothing.  */;
1211  else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
1212	   || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
1213    {
1214      rli->offset
1215	= size_binop (PLUS_EXPR, rli->offset,
1216		      convert (sizetype,
1217			       size_binop (CEIL_DIV_EXPR, rli->bitpos,
1218					   bitsize_unit_node)));
1219      rli->offset
1220	= size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1221      rli->bitpos = bitsize_zero_node;
1222      rli->offset_align = MIN (rli->offset_align, DECL_ALIGN (field));
1223    }
1224  else
1225    {
1226      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1227      normalize_rli (rli);
1228    }
1229}
1230
1231/* Assuming that all the fields have been laid out, this function uses
1232   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
   indicated by RLI.  */
1234
static void
finalize_record_size (rli)
     record_layout_info rli;
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
					  rli->record_align);
#else
  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    /* A trailing partial byte of bit-field still occupies storage, so
       count it as one more whole byte in the byte-granular size.  */
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Record the un-rounded size in the binfo node.  But first we check
     the size of TYPE_BINFO to make sure that BINFO_SIZE is available.  */
  if (TYPE_BINFO (rli->t) && TREE_VEC_LENGTH (TYPE_BINFO (rli->t)) > 6)
    {
      TYPE_BINFO_SIZE (rli->t) = unpadded_size;
      TYPE_BINFO_SIZE_UNIT (rli->t) = unpadded_size_unit;
    }

    /* Round the size up to be a multiple of the required alignment */
#ifdef ROUND_TYPE_SIZE
  TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
					TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t)
    = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
			    TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#else
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
				      TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#endif

  /* If rounding changed a constant size, the record carries tail
     padding; warn under -Wpadded.  */
  if (warn_padded && TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
    warning ("padding struct size to alignment boundary");

  /* With -Wpacked, see whether the `packed' attribute actually bought
     anything: recompute the size the record would have had at its
     unpacked alignment and warn if it comes out identical.  */
  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
	= ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

#ifdef ROUND_TYPE_SIZE
      unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
				       rli->unpacked_align);
#else
      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
#endif

      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
	{
	  /* Packing made no difference to the layout; clear the flag
	     and tell the user the attribute is useless (or harmful on
	     strict-alignment targets).  */
	  TYPE_PACKED (rli->t) = 0;

	  if (TYPE_NAME (rli->t))
	    {
	      const char *name;

	      /* TYPE_NAME may be either an IDENTIFIER_NODE or a
		 TYPE_DECL; fetch the printable name accordingly.  */
	      if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
		name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
	      else
		name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));

	      if (STRICT_ALIGNMENT)
		warning ("packed attribute causes inefficient alignment for `%s'", name);
	      else
		warning ("packed attribute is unnecessary for `%s'", name);
	    }
	  else
	    {
	      if (STRICT_ALIGNMENT)
		warning ("packed attribute causes inefficient alignment");
	      else
		warning ("packed attribute is unnecessary");
	    }
	}
    }
}
1336
1337/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).  */
1338
void
compute_record_mode (type)
     tree type;
{
  tree field;
  enum machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  TYPE_MODE (type) = BLKmode;

  /* A record whose size is not a compile-time constant can never fit
     in a register.  */
  if (! host_integerp (TYPE_SIZE (type), 1))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    {
      unsigned HOST_WIDE_INT bitpos;

      /* Skip everything that is not an actual field (e.g. a static
	 member or a type declared inside the record).  */
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      /* Give up (i.e. stay BLKmode) for erroneous fields, genuinely
	 BLKmode fields, and fields whose position or size is not a
	 compile-time constant.  */
      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
	  || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
	      && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
	  || ! host_integerp (bit_position (field), 1)
	  || DECL_SIZE (field) == 0
	  || ! host_integerp (DECL_SIZE (field), 1))
	return;

      bitpos = int_bit_position (field);

      /* Must be BLKmode if any field crosses a word boundary,
	 since extract_bit_field can't handle that in registers.  */
      if (bitpos / BITS_PER_WORD
	  != ((tree_low_cst (DECL_SIZE (field), 1) + bitpos - 1)
	      / BITS_PER_WORD)
	  /* But there is no problem if the field is entire words.  */
	  && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
	return;

      /* If this field is the whole struct, remember its mode so
	 that, say, we can put a double in a class into a DF
	 register instead of forcing it to live in the stack.  */
      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
	mode = DECL_MODE (field);

#ifdef MEMBER_TYPE_FORCES_BLK
      /* With some targets, eg. c4x, it is sub-optimal
	 to access an aligned BLKmode structure as a scalar.  */

      if (MEMBER_TYPE_FORCES_BLK (field, mode))
	return;
#endif /* MEMBER_TYPE_FORCES_BLK  */
    }

  /* If we only have one real field, use its mode.  This only applies to
     RECORD_TYPE.  This does not apply to unions.  */
  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
    TYPE_MODE (type) = mode;
  else
    /* Otherwise try to find an integer mode that spans the whole
       record; mode_for_size_tree yields BLKmode if none fits.  */
    TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (TYPE_MODE (type) != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
	    || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
    {
      /* If this is the only reason this type is BLKmode, then
	 don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      TYPE_MODE (type) = BLKmode;
    }
}
1419
1420/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1421   out.  */
1422
static void
finalize_type_size (type)
     tree type;
{
  /* Normally, use the alignment corresponding to the mode chosen.
     However, where strict alignment is not required, avoid
     over-aligning structures, since most compilers do not do this
     alignment.  */

  if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
      && (STRICT_ALIGNMENT
	  || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
	      && TREE_CODE (type) != QUAL_UNION_TYPE
	      && TREE_CODE (type) != ARRAY_TYPE)))
    {
      TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
      /* The alignment now comes from the mode, not from the user.  */
      TYPE_USER_ALIGN (type) = 0;
    }

  /* Do machine-dependent extra alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (type)
    = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
#endif

  /* If we failed to find a simple way to calculate the unit size
     of the type, find it by division.  */
  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
       result will fit in sizetype.  We will get more efficient code using
       sizetype, so we force a conversion.  */
    TYPE_SIZE_UNIT (type)
      = convert (sizetype,
		 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
			     bitsize_unit_node));

  /* Round both the bit-size and the byte-size up to the alignment.  */
  if (TYPE_SIZE (type) != 0)
    {
#ifdef ROUND_TYPE_SIZE
      TYPE_SIZE (type)
	= ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
				TYPE_ALIGN (type) / BITS_PER_UNIT);
#else
      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
#endif
    }

  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
  if (TYPE_SIZE_UNIT (type) != 0
      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));

  /* Also layout any other variants of the type.  */
  if (TYPE_NEXT_VARIANT (type)
      || type != TYPE_MAIN_VARIANT (type))
    {
      tree variant;
      /* Record layout info of this variant.  */
      tree size = TYPE_SIZE (type);
      tree size_unit = TYPE_SIZE_UNIT (type);
      unsigned int align = TYPE_ALIGN (type);
      unsigned int user_align = TYPE_USER_ALIGN (type);
      enum machine_mode mode = TYPE_MODE (type);

      /* Copy it into all variants.  The walk starts from the main
	 variant so every member of the variant chain is updated, even
	 when TYPE itself is not the main variant.  */
      for (variant = TYPE_MAIN_VARIANT (type);
	   variant != 0;
	   variant = TYPE_NEXT_VARIANT (variant))
	{
	  TYPE_SIZE (variant) = size;
	  TYPE_SIZE_UNIT (variant) = size_unit;
	  TYPE_ALIGN (variant) = align;
	  TYPE_USER_ALIGN (variant) = user_align;
	  TYPE_MODE (variant) = mode;
	}
    }
}
1506
1507/* Do all of the work required to layout the type indicated by RLI,
1508   once the fields have been laid out.  This function will call `free'
1509   for RLI, unless FREE_P is false.  Passing a value other than false
1510   for FREE_P is bad practice; this option only exists to support the
1511   G++ 3.2 ABI.  */
1512
1513void
1514finish_record_layout (rli, free_p)
1515     record_layout_info rli;
1516     int free_p;
1517{
1518  /* Compute the final size.  */
1519  finalize_record_size (rli);
1520
1521  /* Compute the TYPE_MODE for the record.  */
1522  compute_record_mode (rli->t);
1523
1524  /* Perform any last tweaks to the TYPE_SIZE, etc.  */
1525  finalize_type_size (rli->t);
1526
1527  /* Lay out any static members.  This is done now because their type
1528     may use the record's type.  */
1529  while (rli->pending_statics)
1530    {
1531      layout_decl (TREE_VALUE (rli->pending_statics), 0);
1532      rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1533    }
1534
1535  /* Clean up.  */
1536  if (free_p)
1537    free (rli);
1538}
1539
1540/* Calculate the mode, size, and alignment for TYPE.
1541   For an array type, calculate the element separation as well.
1542   Record TYPE on the chain of permanent or temporary types
1543   so that dbxout will find out about it.
1544
1545   TYPE_SIZE of a type is nonzero if the type has been laid out already.
1546   layout_type does nothing on such a type.
1547
1548   If the type is incomplete, its TYPE_SIZE remains zero.  */
1549
1550void
1551layout_type (type)
1552     tree type;
1553{
1554  if (type == 0)
1555    abort ();
1556
1557  /* Do nothing if type has been laid out before.  */
1558  if (TYPE_SIZE (type))
1559    return;
1560
1561  switch (TREE_CODE (type))
1562    {
1563    case LANG_TYPE:
1564      /* This kind of type is the responsibility
1565	 of the language-specific code.  */
1566      abort ();
1567
1568    case BOOLEAN_TYPE:  /* Used for Java, Pascal, and Chill.  */
1569      if (TYPE_PRECISION (type) == 0)
1570	TYPE_PRECISION (type) = 1; /* default to one byte/boolean.  */
1571
1572      /* ... fall through ...  */
1573
1574    case INTEGER_TYPE:
1575    case ENUMERAL_TYPE:
1576    case CHAR_TYPE:
1577      if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1578	  && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1579	TREE_UNSIGNED (type) = 1;
1580
1581      TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1582						 MODE_INT);
1583      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1584      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1585      break;
1586
1587    case REAL_TYPE:
1588      TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1589      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1590      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1591      break;
1592
1593    case COMPLEX_TYPE:
1594      TREE_UNSIGNED (type) = TREE_UNSIGNED (TREE_TYPE (type));
1595      TYPE_MODE (type)
1596	= mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1597			 (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1598			  ? MODE_COMPLEX_INT : MODE_COMPLEX_FLOAT),
1599			 0);
1600      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1601      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1602      break;
1603
1604    case VECTOR_TYPE:
1605      {
1606	tree subtype;
1607
1608	subtype = TREE_TYPE (type);
1609	TREE_UNSIGNED (type) = TREE_UNSIGNED (subtype);
1610	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1611	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1612      }
1613      break;
1614
1615    case VOID_TYPE:
1616      /* This is an incomplete type and so doesn't have a size.  */
1617      TYPE_ALIGN (type) = 1;
1618      TYPE_USER_ALIGN (type) = 0;
1619      TYPE_MODE (type) = VOIDmode;
1620      break;
1621
1622    case OFFSET_TYPE:
1623      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1624      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1625      /* A pointer might be MODE_PARTIAL_INT,
1626	 but ptrdiff_t must be integral.  */
1627      TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
1628      break;
1629
1630    case FUNCTION_TYPE:
1631    case METHOD_TYPE:
1632      TYPE_MODE (type) = mode_for_size (2 * POINTER_SIZE, MODE_INT, 0);
1633      TYPE_SIZE (type) = bitsize_int (2 * POINTER_SIZE);
1634      TYPE_SIZE_UNIT (type) = size_int ((2 * POINTER_SIZE) / BITS_PER_UNIT);
1635      break;
1636
1637    case POINTER_TYPE:
1638    case REFERENCE_TYPE:
1639      {
1640	int nbits = ((TREE_CODE (type) == REFERENCE_TYPE
1641		      && reference_types_internal)
1642		     ? GET_MODE_BITSIZE (Pmode) : POINTER_SIZE);
1643
1644	TYPE_MODE (type) = nbits == POINTER_SIZE ? ptr_mode : Pmode;
1645	TYPE_SIZE (type) = bitsize_int (nbits);
1646	TYPE_SIZE_UNIT (type) = size_int (nbits / BITS_PER_UNIT);
1647	TREE_UNSIGNED (type) = 1;
1648	TYPE_PRECISION (type) = nbits;
1649      }
1650      break;
1651
1652    case ARRAY_TYPE:
1653      {
1654	tree index = TYPE_DOMAIN (type);
1655	tree element = TREE_TYPE (type);
1656
1657	build_pointer_type (element);
1658
1659	/* We need to know both bounds in order to compute the size.  */
1660	if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1661	    && TYPE_SIZE (element))
1662	  {
1663	    tree ub = TYPE_MAX_VALUE (index);
1664	    tree lb = TYPE_MIN_VALUE (index);
1665	    tree length;
1666	    tree element_size;
1667
1668	    /* The initial subtraction should happen in the original type so
1669	       that (possible) negative values are handled appropriately.  */
1670	    length = size_binop (PLUS_EXPR, size_one_node,
1671				 convert (sizetype,
1672					  fold (build (MINUS_EXPR,
1673						       TREE_TYPE (lb),
1674						       ub, lb))));
1675
1676	    /* Special handling for arrays of bits (for Chill).  */
1677	    element_size = TYPE_SIZE (element);
1678	    if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1679		&& (integer_zerop (TYPE_MAX_VALUE (element))
1680		    || integer_onep (TYPE_MAX_VALUE (element)))
1681		&& host_integerp (TYPE_MIN_VALUE (element), 1))
1682	      {
1683		HOST_WIDE_INT maxvalue
1684		  = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1685		HOST_WIDE_INT minvalue
1686		  = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1687
1688		if (maxvalue - minvalue == 1
1689		    && (maxvalue == 1 || maxvalue == 0))
1690		  element_size = integer_one_node;
1691	      }
1692
1693	    TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1694					   convert (bitsizetype, length));
1695
1696	    /* If we know the size of the element, calculate the total
1697	       size directly, rather than do some division thing below.
1698	       This optimization helps Fortran assumed-size arrays
1699	       (where the size of the array is determined at runtime)
1700	       substantially.
1701	       Note that we can't do this in the case where the size of
1702	       the elements is one bit since TYPE_SIZE_UNIT cannot be
1703	       set correctly in that case.  */
1704	    if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1705	      TYPE_SIZE_UNIT (type)
1706		= size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
1707	  }
1708
1709	/* Now round the alignment and size,
1710	   using machine-dependent criteria if any.  */
1711
1712#ifdef ROUND_TYPE_ALIGN
1713	TYPE_ALIGN (type)
1714	  = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1715#else
1716	TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1717#endif
1718	TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
1719
1720#ifdef ROUND_TYPE_SIZE
1721	if (TYPE_SIZE (type) != 0)
1722	  {
1723	    tree tmp
1724	      = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
1725
1726	    /* If the rounding changed the size of the type, remove any
1727	       pre-calculated TYPE_SIZE_UNIT.  */
1728	    if (simple_cst_equal (TYPE_SIZE (type), tmp) != 1)
1729	      TYPE_SIZE_UNIT (type) = NULL;
1730
1731	    TYPE_SIZE (type) = tmp;
1732	  }
1733#endif
1734
1735	TYPE_MODE (type) = BLKmode;
1736	if (TYPE_SIZE (type) != 0
1737#ifdef MEMBER_TYPE_FORCES_BLK
1738	    && ! MEMBER_TYPE_FORCES_BLK (type, VOIDmode)
1739#endif
1740	    /* BLKmode elements force BLKmode aggregate;
1741	       else extract/store fields may lose.  */
1742	    && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1743		|| TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1744	  {
1745	    /* One-element arrays get the component type's mode.  */
1746	    if (simple_cst_equal (TYPE_SIZE (type),
1747				  TYPE_SIZE (TREE_TYPE (type))))
1748	      TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1749	    else
1750	      TYPE_MODE (type)
1751		= mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1752
1753	    if (TYPE_MODE (type) != BLKmode
1754		&& STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1755		&& TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
1756		&& TYPE_MODE (type) != BLKmode)
1757	      {
1758		TYPE_NO_FORCE_BLK (type) = 1;
1759		TYPE_MODE (type) = BLKmode;
1760	      }
1761	  }
1762	break;
1763      }
1764
1765    case RECORD_TYPE:
1766    case UNION_TYPE:
1767    case QUAL_UNION_TYPE:
1768      {
1769	tree field;
1770	record_layout_info rli;
1771
1772	/* Initialize the layout information.  */
1773	rli = start_record_layout (type);
1774
1775	/* If this is a QUAL_UNION_TYPE, we want to process the fields
1776	   in the reverse order in building the COND_EXPR that denotes
1777	   its size.  We reverse them again later.  */
1778	if (TREE_CODE (type) == QUAL_UNION_TYPE)
1779	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1780
1781	/* Place all the fields.  */
1782	for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1783	  place_field (rli, field);
1784
1785	if (TREE_CODE (type) == QUAL_UNION_TYPE)
1786	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1787
1788	if (lang_adjust_rli)
1789	  (*lang_adjust_rli) (rli);
1790
1791	/* Finish laying out the record.  */
1792	finish_record_layout (rli, /*free_p=*/true);
1793      }
1794      break;
1795
1796    case SET_TYPE:  /* Used by Chill and Pascal.  */
1797      if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
1798	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
1799	abort ();
1800      else
1801	{
1802#ifndef SET_WORD_SIZE
1803#define SET_WORD_SIZE BITS_PER_WORD
1804#endif
1805	  unsigned int alignment
1806	    = set_alignment ? set_alignment : SET_WORD_SIZE;
1807	  int size_in_bits
1808	    = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
1809	       - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
1810	  int rounded_size
1811	    = ((size_in_bits + alignment - 1) / alignment) * alignment;
1812
1813	  if (rounded_size > (int) alignment)
1814	    TYPE_MODE (type) = BLKmode;
1815	  else
1816	    TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);
1817
1818	  TYPE_SIZE (type) = bitsize_int (rounded_size);
1819	  TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
1820	  TYPE_ALIGN (type) = alignment;
1821	  TYPE_USER_ALIGN (type) = 0;
1822	  TYPE_PRECISION (type) = size_in_bits;
1823	}
1824      break;
1825
1826    case FILE_TYPE:
1827      /* The size may vary in different languages, so the language front end
1828	 should fill in the size.  */
1829      TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
1830      TYPE_USER_ALIGN (type) = 0;
1831      TYPE_MODE  (type) = BLKmode;
1832      break;
1833
1834    default:
1835      abort ();
1836    }
1837
1838  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
1839     records and unions, finish_record_layout already called this
1840     function.  */
1841  if (TREE_CODE (type) != RECORD_TYPE
1842      && TREE_CODE (type) != UNION_TYPE
1843      && TREE_CODE (type) != QUAL_UNION_TYPE)
1844    finalize_type_size (type);
1845
1846  /* If this type is created before sizetype has been permanently set,
1847     record it so set_sizetype can fix it up.  */
1848  if (! sizetype_set)
1849    early_type_list = tree_cons (NULL_TREE, type, early_type_list);
1850
1851  /* If an alias set has been set for this aggregate when it was incomplete,
1852     force it into alias set 0.
1853     This is too conservative, but we cannot call record_component_aliases
1854     here because some frontends still change the aggregates after
1855     layout_type.  */
1856  if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1857    TYPE_ALIAS_SET (type) = 0;
1858}
1859
1860/* Create and return a type for signed integers of PRECISION bits.  */
1861
1862tree
1863make_signed_type (precision)
1864     int precision;
1865{
1866  tree type = make_node (INTEGER_TYPE);
1867
1868  TYPE_PRECISION (type) = precision;
1869
1870  fixup_signed_type (type);
1871  return type;
1872}
1873
1874/* Create and return a type for unsigned integers of PRECISION bits.  */
1875
1876tree
1877make_unsigned_type (precision)
1878     int precision;
1879{
1880  tree type = make_node (INTEGER_TYPE);
1881
1882  TYPE_PRECISION (type) = precision;
1883
1884  fixup_unsigned_type (type);
1885  return type;
1886}
1887
1888/* Initialize sizetype and bitsizetype to a reasonable and temporary
1889   value to enable integer types to be created.  */
1890
void
initialize_sizetypes ()
{
  /* A single temporary node serves as the template for both sizetype
     and bitsizetype until set_sizetype installs the real ones.  */
  tree t = make_node (INTEGER_TYPE);

  /* Set this so we do something reasonable for the build_int_2 calls
     below.  */
  integer_type_node = t;

  /* Give the placeholder a plausible SImode layout so that sizes of
     types created before set_sizetype runs can still be represented.  */
  TYPE_MODE (t) = SImode;
  TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
  TYPE_USER_ALIGN (t) = 0;
  TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
  TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
  TREE_UNSIGNED (t) = 1;
  TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
  TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
  TYPE_IS_SIZETYPE (t) = 1;

  /* 1000 avoids problems with possible overflow and is certainly
     larger than any size value we'd want to be storing.  */
  TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);

  /* These two must be different nodes because of the caching done in
     size_int_wide.  */
  sizetype = t;
  bitsizetype = copy_node (t);
  /* Drop the temporary alias now that build_int_2 no longer needs it;
     the front end will install the real integer_type_node later.  */
  integer_type_node = 0;
}
1920
1921/* Set sizetype to TYPE, and initialize *sizetype accordingly.
1922   Also update the type of any standard type's sizes made so far.  */
1923
void
set_sizetype (type)
     tree type;
{
  int oprecision = TYPE_PRECISION (type);
  /* The *bitsizetype types use a precision that avoids overflows when
     calculating signed sizes / offsets in bits.  However, when
     cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
     precision.  */
  int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
		       2 * HOST_BITS_PER_WIDE_INT);
  unsigned int i;
  tree t;

  /* This function must run exactly once per compilation.  */
  if (sizetype_set)
    abort ();

  /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE.  */
  sizetype = copy_node (type);
  TYPE_DOMAIN (sizetype) = type;
  TYPE_IS_SIZETYPE (sizetype) = 1;
  /* bitsizetype is built from scratch with the wider precision computed
     above; the fixup_* call below derives its extreme values.  */
  bitsizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (bitsizetype) = TYPE_NAME (type);
  TYPE_PRECISION (bitsizetype) = precision;
  TYPE_IS_SIZETYPE (bitsizetype) = 1;

  if (TREE_UNSIGNED (type))
    fixup_unsigned_type (bitsizetype);
  else
    fixup_signed_type (bitsizetype);

  layout_type (bitsizetype);

  /* Populate the signed/unsigned companion types; whichever signedness
     TYPE has supplies the pair already built, the other pair is made
     fresh at the matching precisions.  */
  if (TREE_UNSIGNED (type))
    {
      usizetype = sizetype;
      ubitsizetype = bitsizetype;
      ssizetype = copy_node (make_signed_type (oprecision));
      sbitsizetype = copy_node (make_signed_type (precision));
    }
  else
    {
      ssizetype = sizetype;
      sbitsizetype = bitsizetype;
      usizetype = copy_node (make_unsigned_type (oprecision));
      ubitsizetype = copy_node (make_unsigned_type (precision));
    }

  TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");

  /* Mark each entry as a sizetype and its own main variant, and clear
     any pointer or reference types to it.  */
  for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
    {
      TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
      TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
      TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
      TYPE_POINTER_TO (sizetype_tab[i]) = 0;
      TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
    }

  /* Go down each of the types we already made and set the proper type
     for the sizes in them.  */
  for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
    {
      /* Only INTEGER_TYPEs are expected on this list (they are the only
	 types laid out before sizetype exists).  */
      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE)
	abort ();

      TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
      TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
    }

  early_type_list = 0;
  sizetype_set = 1;
}
1998
1999/* Set the extreme values of TYPE based on its precision in bits,
2000   then lay it out.  Used when make_signed_type won't do
2001   because the tree code is not INTEGER_TYPE.
2002   E.g. for Pascal, when the -fsigned-char option is given.  */
2003
void
fixup_signed_type (type)
     tree type;
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants greater than
     2 * HOST_BITS_PER_WIDE_INT, still we need the types
     as they are used by i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  /* Minimum is -2**(precision-1), built as a (low, high) pair of
     HOST_WIDE_INT words: when the precision fits in one word, the low
     word holds the sign-extended value and the high word is all ones;
     otherwise the low word is zero and the high word carries the
     shifted sign bit.  */
  TYPE_MIN_VALUE (type)
    = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
		    ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
		   (((HOST_WIDE_INT) (-1)
		     << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
			 ? precision - HOST_BITS_PER_WIDE_INT - 1
			 : 0))));
  /* Maximum is 2**(precision-1) - 1, encoded the same two-word way.  */
  TYPE_MAX_VALUE (type)
    = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
		    ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
		   (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
		    ? (((HOST_WIDE_INT) 1
			<< (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
		    : 0));

  TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
  TREE_TYPE (TYPE_MAX_VALUE (type)) = type;

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
2037
2038/* Set the extreme values of TYPE based on its precision in bits,
2039   then lay it out.  This is used both in `make_unsigned_type'
2040   and for enumeral types.  */
2041
void
fixup_unsigned_type (type)
     tree type;
{
  int precision = TYPE_PRECISION (type);

  /* We cannot properly represent constants greater than
     2 * HOST_BITS_PER_WIDE_INT, still we need the types
     as they are used by i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  TYPE_MIN_VALUE (type) = build_int_2 (0, 0);
  /* Maximum is 2**precision - 1, built as a (low, high) pair of
     HOST_WIDE_INT words; the >= test on the low word avoids an
     undefined full-width shift when precision equals the word size.  */
  TYPE_MAX_VALUE (type)
    = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
		   ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
		   precision - HOST_BITS_PER_WIDE_INT > 0
		   ? ((unsigned HOST_WIDE_INT) ~0
		      >> (HOST_BITS_PER_WIDE_INT
			  - (precision - HOST_BITS_PER_WIDE_INT)))
		   : 0);
  TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
  TREE_TYPE (TYPE_MAX_VALUE (type)) = type;

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
2069
2070/* Find the best machine mode to use when referencing a bit field of length
2071   BITSIZE bits starting at BITPOS.
2072
2073   The underlying object is known to be aligned to a boundary of ALIGN bits.
2074   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
2075   larger than LARGEST_MODE (usually SImode).
2076
2077   If no mode meets all these conditions, we return VOIDmode.  Otherwise, if
2078   VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
2079   mode meeting these conditions.
2080
2081   Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
2082   the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
2083   all the conditions.  */
2084
2085enum machine_mode
2086get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
2087     int bitsize, bitpos;
2088     unsigned int align;
2089     enum machine_mode largest_mode;
2090     int volatilep;
2091{
2092  enum machine_mode mode;
2093  unsigned int unit = 0;
2094
2095  /* Find the narrowest integer mode that contains the bit field.  */
2096  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2097       mode = GET_MODE_WIDER_MODE (mode))
2098    {
2099      unit = GET_MODE_BITSIZE (mode);
2100      if ((bitpos % unit) + bitsize <= unit)
2101	break;
2102    }
2103
2104  if (mode == VOIDmode
2105      /* It is tempting to omit the following line
2106	 if STRICT_ALIGNMENT is true.
2107	 But that is incorrect, since if the bitfield uses part of 3 bytes
2108	 and we use a 4-byte mode, we could get a spurious segv
2109	 if the extra 4th byte is past the end of memory.
2110	 (Though at least one Unix compiler ignores this problem:
2111	 that on the Sequent 386 machine.  */
2112      || MIN (unit, BIGGEST_ALIGNMENT) > align
2113      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
2114    return VOIDmode;
2115
2116  if (SLOW_BYTE_ACCESS && ! volatilep)
2117    {
2118      enum machine_mode wide_mode = VOIDmode, tmode;
2119
2120      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
2121	   tmode = GET_MODE_WIDER_MODE (tmode))
2122	{
2123	  unit = GET_MODE_BITSIZE (tmode);
2124	  if (bitpos / unit == (bitpos + bitsize - 1) / unit
2125	      && unit <= BITS_PER_WORD
2126	      && unit <= MIN (align, BIGGEST_ALIGNMENT)
2127	      && (largest_mode == VOIDmode
2128		  || unit <= GET_MODE_BITSIZE (largest_mode)))
2129	    wide_mode = tmode;
2130	}
2131
2132      if (wide_mode != VOIDmode)
2133	return wide_mode;
2134    }
2135
2136  return mode;
2137}
2138
2139#include "gt-stor-layout.h"
2140