stor-layout.c revision 96489
1/* C-compiler utilities for types and variables storage layout
2   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998,
3   1999, 2000, 2001, 2002 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it under
8the terms of the GNU General Public License as published by the Free
9Software Foundation; either version 2, or (at your option) any later
10version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING.  If not, write to the Free
19Software Foundation, 59 Temple Place - Suite 330, Boston, MA
2002111-1307, USA.  */
21
22
23#include "config.h"
24#include "system.h"
25#include "tree.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "flags.h"
29#include "function.h"
30#include "expr.h"
31#include "toplev.h"
32#include "ggc.h"
33#include "target.h"
34
35/* Set to one when set_sizetype has been called.  */
36static int sizetype_set;
37
38/* List of types created before set_sizetype has been called.  We do not
39   make this a GGC root since we want these nodes to be reclaimed.  */
40static tree early_type_list;
41
42/* Data type for the expressions representing sizes of data types.
43   It is the first integer type laid out.  */
44tree sizetype_tab[(int) TYPE_KIND_LAST];
45
46/* If nonzero, this is an upper limit on alignment of structure fields.
47   The value is measured in bits.  */
48unsigned int maximum_field_alignment;
49
50/* If non-zero, the alignment of a bitstring or (power-)set value, in bits.
51   May be overridden by front-ends.  */
52unsigned int set_alignment = 0;
53
54/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
55   allocated in Pmode, not ptr_mode.  Set only by internal_reference_types,
56   which is called only by a front end.  */
57static int reference_types_internal = 0;
58
59static void finalize_record_size	PARAMS ((record_layout_info));
60static void finalize_type_size		PARAMS ((tree));
61static void place_union_field		PARAMS ((record_layout_info, tree));
62extern void debug_rli			PARAMS ((record_layout_info));
63
64/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded.  */
65
66static tree pending_sizes;
67
68/* Nonzero means cannot safely call expand_expr now,
69   so put variable sizes onto `pending_sizes' instead.  */
70
71int immediate_size_expand;
72
73/* Show that REFERENCE_TYPES are internal and should be Pmode.  Called only
74   by front end.  */
75
76void
77internal_reference_types ()
78{
79  reference_types_internal = 1;
80}
81
82/* Get a list of all the objects put on the pending sizes list.  */
83
84tree
85get_pending_sizes ()
86{
87  tree chain = pending_sizes;
88  tree t;
89
90  /* Put each SAVE_EXPR into the current function.  */
91  for (t = chain; t; t = TREE_CHAIN (t))
92    SAVE_EXPR_CONTEXT (TREE_VALUE (t)) = current_function_decl;
93
94  pending_sizes = 0;
95  return chain;
96}
97
98/* Return non-zero if EXPR is present on the pending sizes list.  */
99
100int
101is_pending_size (expr)
102     tree expr;
103{
104  tree t;
105
106  for (t = pending_sizes; t; t = TREE_CHAIN (t))
107    if (TREE_VALUE (t) == expr)
108      return 1;
109  return 0;
110}
111
112/* Add EXPR to the pending sizes list.  */
113
114void
115put_pending_size (expr)
116     tree expr;
117{
118  /* Strip any simple arithmetic from EXPR to see if it has an underlying
119     SAVE_EXPR.  */
120  while (TREE_CODE_CLASS (TREE_CODE (expr)) == '1'
121	 || (TREE_CODE_CLASS (TREE_CODE (expr)) == '2'
122	    && TREE_CONSTANT (TREE_OPERAND (expr, 1))))
123    expr = TREE_OPERAND (expr, 0);
124
125  if (TREE_CODE (expr) == SAVE_EXPR)
126    pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
127}
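
/* Illustrative example: because the loop above strips simple arithmetic,
   passing a size such as MULT_EXPR <SAVE_EXPR <n>, 4> records the
   underlying SAVE_EXPR <n> on pending_sizes, while an expression with no
   SAVE_EXPR anywhere in that chain is silently ignored.  */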
128
129/* Put a chain of objects into the pending sizes list, which must be
130   empty.  */
131
132void
133put_pending_sizes (chain)
134     tree chain;
135{
136  if (pending_sizes)
137    abort ();
138
139  pending_sizes = chain;
140}
141
142/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
143   to serve as the actual size-expression for a type or decl.  */
144
145tree
146variable_size (size)
147     tree size;
148{
149  /* If the language-processor is to take responsibility for variable-sized
150     items (e.g., languages which have elaboration procedures like Ada),
151     just return SIZE unchanged.  Likewise for self-referential sizes and
152     constant sizes.  */
153  if (TREE_CONSTANT (size)
154      || global_bindings_p () < 0 || contains_placeholder_p (size))
155    return size;
156
157  size = save_expr (size);
158
159  /* If an array with a variable number of elements is declared, and
160     the elements require destruction, we will emit a cleanup for the
161     array.  That cleanup is run both on normal exit from the block
162     and in the exception-handler for the block.  Normally, when code
163     is used in both ordinary code and in an exception handler it is
164     `unsaved', i.e., all SAVE_EXPRs are recalculated.  However, we do
165     not wish to do that here; the array-size is the same in both
166     places.  */
167  if (TREE_CODE (size) == SAVE_EXPR)
168    SAVE_EXPR_PERSISTENT_P (size) = 1;
169
170  if (global_bindings_p ())
171    {
172      if (TREE_CONSTANT (size))
173	error ("type size can't be explicitly evaluated");
174      else
175	error ("variable-size type declared outside of any function");
176
177      return size_one_node;
178    }
179
180  if (immediate_size_expand)
181    /* NULL_RTX is not defined; neither is the rtx type.
182       Also, we would like to pass const0_rtx here, but don't have it.  */
183    expand_expr (size, expand_expr (integer_zero_node, NULL_RTX, VOIDmode, 0),
184		 VOIDmode, 0);
185  else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
186    /* The front-end doesn't want us to keep a list of the expressions
187       that determine sizes for variable size objects.  */
188    ;
189  else
190    put_pending_size (size);
191
192  return size;
193}
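
/* Illustrative sketch (typical C front-end behavior): for a block-scope
   declaration such as `char buf[n]', the size expression built from `n'
   is wrapped here in a persistent SAVE_EXPR so it is evaluated only once,
   and, unless immediate_size_expand is set or the front end asked us not
   to via x_dont_save_pending_sizes_p, it is queued with put_pending_size
   for later retrieval through get_pending_sizes.  */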
194
195#ifndef MAX_FIXED_MODE_SIZE
196#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
197#endif
198
199/* Return the machine mode to use for a nonscalar of SIZE bits.
200   The mode must be in class CLASS, and have exactly that many bits.
201   If LIMIT is nonzero, modes wider than MAX_FIXED_MODE_SIZE will not
202   be used.  */
203
204enum machine_mode
205mode_for_size (size, class, limit)
206     unsigned int size;
207     enum mode_class class;
208     int limit;
209{
210  enum machine_mode mode;
211
212  if (limit && size > MAX_FIXED_MODE_SIZE)
213    return BLKmode;
214
215  /* Get the first mode which has this size, in the specified class.  */
216  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
217       mode = GET_MODE_WIDER_MODE (mode))
218    if (GET_MODE_BITSIZE (mode) == size)
219      return mode;
220
221  return BLKmode;
222}
223
224/* Similar, except passed a tree node.  */
225
226enum machine_mode
227mode_for_size_tree (size, class, limit)
228     tree size;
229     enum mode_class class;
230     int limit;
231{
232  if (TREE_CODE (size) != INTEGER_CST
233      /* What we really want to say here is that the size can fit in a
234	 host integer, but we know there's no way we'd find a mode for
235	 this many bits, so there's no point in doing the precise test.  */
236      || compare_tree_int (size, 1000) > 0)
237    return BLKmode;
238  else
239    return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
240}
241
242/* Similar, but never return BLKmode; return the narrowest mode that
243   contains at least the requested number of bits.  */
244
245enum machine_mode
246smallest_mode_for_size (size, class)
247     unsigned int size;
248     enum mode_class class;
249{
250  enum machine_mode mode;
251
252  /* Get the first mode which has at least this size, in the
253     specified class.  */
254  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
255       mode = GET_MODE_WIDER_MODE (mode))
256    if (GET_MODE_BITSIZE (mode) >= size)
257      return mode;
258
259  abort ();
260}
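
/* Illustrative examples, assuming a typical target where QImode, HImode
   and SImode are 8, 16 and 32 bits wide:

     mode_for_size (32, MODE_INT, 0)        => SImode   (exact match)
     mode_for_size (24, MODE_INT, 0)        => BLKmode  (no 24-bit mode)
     smallest_mode_for_size (17, MODE_INT)  => SImode   (narrowest that fits)

   The results depend entirely on the target's mode definitions.  */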
261
262/* Find an integer mode of the exact same size, or BLKmode on failure.  */
263
264enum machine_mode
265int_mode_for_mode (mode)
266     enum machine_mode mode;
267{
268  switch (GET_MODE_CLASS (mode))
269    {
270    case MODE_INT:
271    case MODE_PARTIAL_INT:
272      break;
273
274    case MODE_COMPLEX_INT:
275    case MODE_COMPLEX_FLOAT:
276    case MODE_FLOAT:
277    case MODE_VECTOR_INT:
278    case MODE_VECTOR_FLOAT:
279      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
280      break;
281
282    case MODE_RANDOM:
283      if (mode == BLKmode)
284        break;
285
286      /* ... fall through ...  */
287
288    case MODE_CC:
289    default:
290      abort ();
291    }
292
293  return mode;
294}
295
296/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
297   This can only be applied to objects of a sizetype.  */
298
299tree
300round_up (value, divisor)
301     tree value;
302     int divisor;
303{
304  tree arg = size_int_type (divisor, TREE_TYPE (value));
305
306  return size_binop (MULT_EXPR, size_binop (CEIL_DIV_EXPR, value, arg), arg);
307}
308
309/* Likewise, but round down.  */
310
311tree
312round_down (value, divisor)
313     tree value;
314     int divisor;
315{
316  tree arg = size_int_type (divisor, TREE_TYPE (value));
317
318  return size_binop (MULT_EXPR, size_binop (FLOOR_DIV_EXPR, value, arg), arg);
319}
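
/* Worked example: with VALUE a sizetype constant 13 and DIVISOR 4,
   round_up yields CEIL_DIV (13, 4) * 4 = 16 and round_down yields
   FLOOR_DIV (13, 4) * 4 = 12, both expressed in TREE_TYPE (value).  */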
320
321/* Set the size, mode and alignment of a ..._DECL node.
322   TYPE_DECL does need this for C++.
323   Note that LABEL_DECL and CONST_DECL nodes do not need this,
324   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
325   Don't call layout_decl for them.
326
327   KNOWN_ALIGN is the amount of alignment we can assume this
328   decl has with no special effort.  It is relevant only for FIELD_DECLs
329   and depends on the previous fields.
330   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
331   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
332   the record will be aligned to suit.  */
333
334void
335layout_decl (decl, known_align)
336     tree decl;
337     unsigned int known_align;
338{
339  tree type = TREE_TYPE (decl);
340  enum tree_code code = TREE_CODE (decl);
341
342  if (code == CONST_DECL)
343    return;
344  else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
345	   && code != TYPE_DECL && code != FIELD_DECL)
346    abort ();
347
348  if (type == error_mark_node)
349    type = void_type_node;
350
351  /* Usually the size and mode come from the data type without change;
352     however, the front-end may set the explicit width of the field, so its
353     size may not be the same as the size of its type.  This happens with
354     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
355     also happens with other fields.  For example, the C++ front-end creates
356     zero-sized fields corresponding to empty base classes, and depends on
357     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
358     size in bytes from the size in bits.  If we have already set the mode,
359     don't set it again since we can be called twice for FIELD_DECLs.  */
360
361  TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
362  if (DECL_MODE (decl) == VOIDmode)
363    DECL_MODE (decl) = TYPE_MODE (type);
364
365  if (DECL_SIZE (decl) == 0)
366    {
367      DECL_SIZE (decl) = TYPE_SIZE (type);
368      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
369    }
370  else
371    DECL_SIZE_UNIT (decl)
372      = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
373				       bitsize_unit_node));
374
375  /* Force alignment required for the data type.
376     But if the decl itself wants greater alignment, don't override that.
377     Likewise, if the decl is packed, don't override it.  */
378  if (! (code == FIELD_DECL && DECL_BIT_FIELD (decl))
379      && (DECL_ALIGN (decl) == 0
380	  || (! (code == FIELD_DECL && DECL_PACKED (decl))
381	      && TYPE_ALIGN (type) > DECL_ALIGN (decl))))
382    {
383      DECL_ALIGN (decl) = TYPE_ALIGN (type);
384      DECL_USER_ALIGN (decl) = 0;
385    }
386
387  /* For fields, set the bit field type and update the alignment.  */
388  if (code == FIELD_DECL)
389    {
390      DECL_BIT_FIELD_TYPE (decl) = DECL_BIT_FIELD (decl) ? type : 0;
391      if (maximum_field_alignment != 0)
392	DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
393
394      /* If the field is of variable size, we can't misalign it since we
395	 have no way to make a temporary to align the result.  But this
396	 isn't an issue if the decl is not addressable.  Likewise if it
397	 is of unknown size.  */
398      else if (DECL_PACKED (decl)
399	       && (DECL_NONADDRESSABLE_P (decl)
400		   || DECL_SIZE_UNIT (decl) == 0
401		   || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
402	{
403	  DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
404	  DECL_USER_ALIGN (decl) = 0;
405	}
406    }
407
408  /* See if we can use an ordinary integer mode for a bit-field.
409     Conditions are: a fixed size that is correct for another mode
410     and occupying a complete byte or bytes on a proper boundary.  */
411  if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
412      && TYPE_SIZE (type) != 0
413      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
414      && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
415    {
416      enum machine_mode xmode
417	= mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
418
419      if (xmode != BLKmode && known_align >= GET_MODE_ALIGNMENT (xmode))
420	{
421	  DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
422				   DECL_ALIGN (decl));
423	  DECL_MODE (decl) = xmode;
424	  DECL_BIT_FIELD (decl) = 0;
425	}
426    }
427
428  /* Turn off DECL_BIT_FIELD if we won't need it set.  */
429  if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
430      && TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
431      && known_align >= TYPE_ALIGN (type)
432      && DECL_ALIGN (decl) >= TYPE_ALIGN (type)
433      && DECL_SIZE_UNIT (decl) != 0)
434    DECL_BIT_FIELD (decl) = 0;
435
436  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
437  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
438    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
439  if (DECL_SIZE_UNIT (decl) != 0
440      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
441    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
442
443  /* If requested, warn about definitions of large data objects.  */
444  if (warn_larger_than
445      && (code == VAR_DECL || code == PARM_DECL)
446      && ! DECL_EXTERNAL (decl))
447    {
448      tree size = DECL_SIZE_UNIT (decl);
449
450      if (size != 0 && TREE_CODE (size) == INTEGER_CST
451	  && compare_tree_int (size, larger_than_size) > 0)
452	{
453	  unsigned int size_as_int = TREE_INT_CST_LOW (size);
454
455	  if (compare_tree_int (size, size_as_int) == 0)
456	    warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
457	  else
458	    warning_with_decl (decl, "size of `%s' is larger than %d bytes",
459			       larger_than_size);
460	}
461    }
462}
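
/* Illustrative sketch, assuming an 8-bit byte and a 32-bit `int': for a
   field declared `int f : 3;' the front end sets DECL_SIZE to 3 bits
   before calling layout_decl, so DECL_SIZE_UNIT becomes CEIL (3 / 8) = 1
   and DECL_BIT_FIELD_TYPE records the declared `int' type.  Only when the
   field's size matches an ordinary integer mode and KNOWN_ALIGN suffices
   (e.g. a suitably aligned 8-bit field and QImode) is DECL_BIT_FIELD
   cleared and the field given that mode.  */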
463
464/* Hook for a front-end function that can modify the record layout as needed
465   immediately before it is finalized.  */
466
467void (*lang_adjust_rli) PARAMS ((record_layout_info)) = 0;
468
469void
470set_lang_adjust_rli (f)
471     void (*f) PARAMS ((record_layout_info));
472{
473  lang_adjust_rli = f;
474}
475
476/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
477   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
478   is to be passed to all other layout functions for this record.  It is the
479   responsibility of the caller to call `free' for the storage returned.
480   Note that garbage collection is not permitted until we finish laying
481   out the record.  */
482
483record_layout_info
484start_record_layout (t)
485     tree t;
486{
487  record_layout_info rli
488    = (record_layout_info) xmalloc (sizeof (struct record_layout_info_s));
489
490  rli->t = t;
491
492  /* If the type has a minimum specified alignment (via an attribute
493     declaration, for example) use it -- otherwise, start with a
494     one-byte alignment.  */
495  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
496  rli->unpacked_align = rli->unpadded_align = rli->record_align;
497  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
498
499#ifdef STRUCTURE_SIZE_BOUNDARY
500  /* Packed structures don't need to have minimum size.  */
501  if (! TYPE_PACKED (t))
502    rli->record_align = MAX (rli->record_align, STRUCTURE_SIZE_BOUNDARY);
503#endif
504
505  rli->offset = size_zero_node;
506  rli->bitpos = bitsize_zero_node;
507  rli->prev_field = 0;
508  rli->pending_statics = 0;
509  rli->packed_maybe_necessary = 0;
510
511  return rli;
512}
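
/* A minimal usage sketch, mirroring what layout_type does for
   RECORD_TYPE further below:

     record_layout_info rli = start_record_layout (t);
     for (field = TYPE_FIELDS (t); field; field = TREE_CHAIN (field))
       place_field (rli, field);
     finish_record_layout (rli);

   finish_record_layout frees RLI, and garbage collection must not run
   while the layout is in progress.  */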
513
514/* These four routines perform computations that convert between
515   the offset/bitpos forms and byte and bit offsets.  */
516
517tree
518bit_from_pos (offset, bitpos)
519     tree offset, bitpos;
520{
521  return size_binop (PLUS_EXPR, bitpos,
522		     size_binop (MULT_EXPR, convert (bitsizetype, offset),
523				 bitsize_unit_node));
524}
525
526tree
527byte_from_pos (offset, bitpos)
528     tree offset, bitpos;
529{
530  return size_binop (PLUS_EXPR, offset,
531		     convert (sizetype,
532			      size_binop (TRUNC_DIV_EXPR, bitpos,
533					  bitsize_unit_node)));
534}
535
536void
537pos_from_byte (poffset, pbitpos, off_align, pos)
538     tree *poffset, *pbitpos;
539     unsigned int off_align;
540     tree pos;
541{
542  *poffset
543    = size_binop (MULT_EXPR,
544		  convert (sizetype,
545			   size_binop (FLOOR_DIV_EXPR, pos,
546				       bitsize_int (off_align
547						    / BITS_PER_UNIT))),
548		  size_int (off_align / BITS_PER_UNIT));
549  *pbitpos = size_binop (MULT_EXPR,
550			 size_binop (FLOOR_MOD_EXPR, pos,
551				     bitsize_int (off_align / BITS_PER_UNIT)),
552			 bitsize_unit_node);
553}
554
555void
556pos_from_bit (poffset, pbitpos, off_align, pos)
557     tree *poffset, *pbitpos;
558     unsigned int off_align;
559     tree pos;
560{
561  *poffset = size_binop (MULT_EXPR,
562			 convert (sizetype,
563				  size_binop (FLOOR_DIV_EXPR, pos,
564					      bitsize_int (off_align))),
565			 size_int (off_align / BITS_PER_UNIT));
566  *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
567}
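
/* Worked example, assuming BITS_PER_UNIT is 8 and OFF_ALIGN is 32:

     bit_from_pos  (offset 3, bitpos 5)   => 3 * 8 + 5 = 29 bits
     byte_from_pos (offset 3, bitpos 5)   => 3 + 5 / 8 = 3 bytes
     pos_from_bit  (..., 32, 70 bits)     => offset 8 bytes, bitpos 6 bits

   That is, the byte offset is kept a multiple of OFF_ALIGN / BITS_PER_UNIT
   and the remainder lives in the bit position.  */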
568
569/* Given a pointer to bit and byte offsets and an offset alignment,
570   normalize the offsets so they are within the alignment.  */
571
572void
573normalize_offset (poffset, pbitpos, off_align)
574     tree *poffset, *pbitpos;
575     unsigned int off_align;
576{
577  /* If the bit position is now larger than it should be, adjust it
578     downwards.  */
579  if (compare_tree_int (*pbitpos, off_align) >= 0)
580    {
581      tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
582				      bitsize_int (off_align));
583
584      *poffset
585	= size_binop (PLUS_EXPR, *poffset,
586		      size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
587				  size_int (off_align / BITS_PER_UNIT)));
588
589      *pbitpos
590	= size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
591    }
592}
593
594/* Print debugging information about the information in RLI.  */
595
596void
597debug_rli (rli)
598     record_layout_info rli;
599{
600  print_node_brief (stderr, "type", rli->t, 0);
601  print_node_brief (stderr, "\noffset", rli->offset, 0);
602  print_node_brief (stderr, " bitpos", rli->bitpos, 0);
603
604  fprintf (stderr, "\naligns: rec = %u, unpack = %u, unpad = %u, off = %u\n",
605	   rli->record_align, rli->unpacked_align, rli->unpadded_align,
606	   rli->offset_align);
607  if (rli->packed_maybe_necessary)
608    fprintf (stderr, "packed may be necessary\n");
609
610  if (rli->pending_statics)
611    {
612      fprintf (stderr, "pending statics:\n");
613      debug_tree (rli->pending_statics);
614    }
615}
616
617/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
618   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */
619
620void
621normalize_rli (rli)
622     record_layout_info rli;
623{
624  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
625}
626
627/* Returns the size in bytes allocated so far.  */
628
629tree
630rli_size_unit_so_far (rli)
631     record_layout_info rli;
632{
633  return byte_from_pos (rli->offset, rli->bitpos);
634}
635
636/* Returns the size in bits allocated so far.  */
637
638tree
639rli_size_so_far (rli)
640     record_layout_info rli;
641{
642  return bit_from_pos (rli->offset, rli->bitpos);
643}
644
645/* Called from place_field to handle unions.  */
646
647static void
648place_union_field (rli, field)
649     record_layout_info rli;
650     tree field;
651{
652  unsigned int desired_align;
653
654  layout_decl (field, 0);
655
656  DECL_FIELD_OFFSET (field) = size_zero_node;
657  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
658  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
659
660  desired_align = DECL_ALIGN (field);
661
662#ifdef BIGGEST_FIELD_ALIGNMENT
663  /* Some targets (e.g. i386) limit union field alignment
664     to a lower boundary than the alignment of variables unless
665     it was overridden by attribute aligned.  */
666  if (! DECL_USER_ALIGN (field))
667    desired_align =
668      MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
669#endif
670
671#ifdef ADJUST_FIELD_ALIGN
672  desired_align = ADJUST_FIELD_ALIGN (field, desired_align);
673#endif
674
675  TYPE_USER_ALIGN (rli->t) |= DECL_USER_ALIGN (field);
676
677  /* Union must be at least as aligned as any field requires.  */
678  rli->record_align = MAX (rli->record_align, desired_align);
679  rli->unpadded_align = MAX (rli->unpadded_align, desired_align);
680
681#ifdef PCC_BITFIELD_TYPE_MATTERS
682  /* On the m88000, a bit field of declared type `int' forces the
683     entire union to have `int' alignment.  */
684  if (PCC_BITFIELD_TYPE_MATTERS && DECL_BIT_FIELD_TYPE (field))
685    {
686      rli->record_align = MAX (rli->record_align,
687			       TYPE_ALIGN (TREE_TYPE (field)));
688      rli->unpadded_align = MAX (rli->unpadded_align,
689				 TYPE_ALIGN (TREE_TYPE (field)));
690    }
691#endif
692
693  /* We assume the union's size will be a multiple of a byte so we don't
694     bother with BITPOS.  */
695  if (TREE_CODE (rli->t) == UNION_TYPE)
696    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
697  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
698    rli->offset = fold (build (COND_EXPR, sizetype,
699			       DECL_QUALIFIER (field),
700			       DECL_SIZE_UNIT (field), rli->offset));
701}
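
/* Illustrative sketch, assuming a typical target where `char' is 1 byte
   and `double' is 8 bytes: for `union u { char c; double d; };' every
   field is placed at offset 0 and bit position 0, the union's alignment
   is raised to the strictest field alignment, and rli->offset ends up as
   the MAX of the field sizes, i.e. 8 bytes.  */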
702
703/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
704   is a FIELD_DECL to be added after those fields already present in
705   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
706   callers that desire that behavior must manually perform that step.)  */
707
708void
709place_field (rli, field)
710     record_layout_info rli;
711     tree field;
712{
713  /* The alignment required for FIELD.  */
714  unsigned int desired_align;
715  /* The alignment FIELD would have if we just dropped it into the
716     record as it presently stands.  */
717  unsigned int known_align;
718  unsigned int actual_align;
719  unsigned int user_align;
720  /* The type of this field.  */
721  tree type = TREE_TYPE (field);
722
723  if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
724      return;
725
726  /* If FIELD is static, then treat it like a separate variable, not
727     really like a structure field.  If it is a FUNCTION_DECL, it's a
728     method.  In both cases, all we do is lay out the decl, and we do
729     it *after* the record is laid out.  */
730  if (TREE_CODE (field) == VAR_DECL)
731    {
732      rli->pending_statics = tree_cons (NULL_TREE, field,
733					rli->pending_statics);
734      return;
735    }
736
737  /* Enumerators and enum types which are local to this class need not
738     be laid out.  Likewise for initialized constant fields.  */
739  else if (TREE_CODE (field) != FIELD_DECL)
740    return;
741
742  /* Unions are laid out very differently than records, so split
743     that code off to another function.  */
744  else if (TREE_CODE (rli->t) != RECORD_TYPE)
745    {
746      place_union_field (rli, field);
747      return;
748    }
749
750  /* Work out the known alignment so far.  Note that A & (-A) is the
751     value of the least-significant bit in A that is one.  */
752  if (! integer_zerop (rli->bitpos))
753    known_align = (tree_low_cst (rli->bitpos, 1)
754		   & - tree_low_cst (rli->bitpos, 1));
755  else if (integer_zerop (rli->offset))
756    known_align = BIGGEST_ALIGNMENT;
757  else if (host_integerp (rli->offset, 1))
758    known_align = (BITS_PER_UNIT
759		   * (tree_low_cst (rli->offset, 1)
760		      & - tree_low_cst (rli->offset, 1)));
761  else
762    known_align = rli->offset_align;
763
764  /* Lay out the field so we know what alignment it needs.  For a
765     packed field, use the alignment as specified, disregarding what
766     the type would want.  */
767  desired_align = DECL_ALIGN (field);
768  user_align = DECL_USER_ALIGN (field);
769  layout_decl (field, known_align);
770  if (! DECL_PACKED (field))
771    {
772      desired_align = DECL_ALIGN (field);
773      user_align = DECL_USER_ALIGN (field);
774    }
775
776  /* Some targets (e.g. i386, VMS) limit struct field alignment
777     to a lower boundary than the alignment of variables unless
778     it was overridden by attribute aligned.  */
779#ifdef BIGGEST_FIELD_ALIGNMENT
780  if (! user_align)
781    desired_align
782      = MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
783#endif
784
785#ifdef ADJUST_FIELD_ALIGN
786  desired_align = ADJUST_FIELD_ALIGN (field, desired_align);
787#endif
788
789  /* Record must have at least as much alignment as any field.
790     Otherwise, the alignment of the field within the record is
791     meaningless.  */
792  if ((* targetm.ms_bitfield_layout_p) (rli->t)
793      && type != error_mark_node
794      && DECL_BIT_FIELD_TYPE (field)
795      && ! integer_zerop (TYPE_SIZE (type))
796      && integer_zerop (DECL_SIZE (field)))
797    {
798      if (rli->prev_field
799	  && DECL_BIT_FIELD_TYPE (rli->prev_field)
800	  && ! integer_zerop (DECL_SIZE (rli->prev_field)))
801	{
802	  rli->record_align = MAX (rli->record_align, desired_align);
803	  rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
804	}
805      else
806	desired_align = 1;
807    }
808  else
809#ifdef PCC_BITFIELD_TYPE_MATTERS
810  if (PCC_BITFIELD_TYPE_MATTERS && type != error_mark_node
811      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
812      && DECL_BIT_FIELD_TYPE (field)
813      && ! integer_zerop (TYPE_SIZE (type)))
814    {
815      /* For these machines, a zero-length field does not
816	 affect the alignment of the structure as a whole.
817	 It does, however, affect the alignment of the next field
818	 within the structure.  */
819      if (! integer_zerop (DECL_SIZE (field)))
820	rli->record_align = MAX (rli->record_align, desired_align);
821      else if (! DECL_PACKED (field))
822	desired_align = TYPE_ALIGN (type);
823
824      /* A named bit field of declared type `int'
825	 forces the entire structure to have `int' alignment.  */
826      if (DECL_NAME (field) != 0)
827	{
828	  unsigned int type_align = TYPE_ALIGN (type);
829
830	  if (maximum_field_alignment != 0)
831	    type_align = MIN (type_align, maximum_field_alignment);
832	  else if (DECL_PACKED (field))
833	    type_align = MIN (type_align, BITS_PER_UNIT);
834
835	  rli->record_align = MAX (rli->record_align, type_align);
836	  rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
837	  if (warn_packed)
838	    rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
839	}
840    }
841  else
842#endif
843    {
844      rli->record_align = MAX (rli->record_align, desired_align);
845      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
846      rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
847    }
848
849  if (warn_packed && DECL_PACKED (field))
850    {
851      if (known_align > TYPE_ALIGN (type))
852	{
853	  if (TYPE_ALIGN (type) > desired_align)
854	    {
855	      if (STRICT_ALIGNMENT)
856		warning_with_decl (field, "packed attribute causes inefficient alignment for `%s'");
857	      else
858		warning_with_decl (field, "packed attribute is unnecessary for `%s'");
859	    }
860	}
861      else
862	rli->packed_maybe_necessary = 1;
863    }
864
865  /* Does this field automatically have alignment it needs by virtue
866     of the fields that precede it and the record's own alignment?  */
867  if (known_align < desired_align)
868    {
869      /* No, we need to skip space before this field.
870	 Bump the cumulative size to multiple of field alignment.  */
871
872      if (warn_padded)
873	warning_with_decl (field, "padding struct to align `%s'");
874
875      /* If the alignment is still within offset_align, just align
876	 the bit position.  */
877      if (desired_align < rli->offset_align)
878	rli->bitpos = round_up (rli->bitpos, desired_align);
879      else
880	{
881	  /* First adjust OFFSET by the partial bits, then align.  */
882	  rli->offset
883	    = size_binop (PLUS_EXPR, rli->offset,
884			  convert (sizetype,
885				   size_binop (CEIL_DIV_EXPR, rli->bitpos,
886					       bitsize_unit_node)));
887	  rli->bitpos = bitsize_zero_node;
888
889	  rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
890	}
891
892      if (! TREE_CONSTANT (rli->offset))
893	rli->offset_align = desired_align;
894
895    }
896
897  /* Handle compatibility with PCC.  Note that if the record has any
898     variable-sized fields, we need not worry about compatibility.  */
899#ifdef PCC_BITFIELD_TYPE_MATTERS
900  if (PCC_BITFIELD_TYPE_MATTERS
901      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
902      && TREE_CODE (field) == FIELD_DECL
903      && type != error_mark_node
904      && DECL_BIT_FIELD (field)
905      && ! DECL_PACKED (field)
906      && maximum_field_alignment == 0
907      && ! integer_zerop (DECL_SIZE (field))
908      && host_integerp (DECL_SIZE (field), 1)
909      && host_integerp (rli->offset, 1)
910      && host_integerp (TYPE_SIZE (type), 1))
911    {
912      unsigned int type_align = TYPE_ALIGN (type);
913      tree dsize = DECL_SIZE (field);
914      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
915      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
916      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
917
918      /* A bit field may not span more units of alignment of its type
919	 than its type itself.  Advance to next boundary if necessary.  */
920      if ((((offset * BITS_PER_UNIT + bit_offset + field_size +
921	     type_align - 1)
922	    / type_align)
923	   - (offset * BITS_PER_UNIT + bit_offset) / type_align)
924	  > tree_low_cst (TYPE_SIZE (type), 1) / type_align)
925	rli->bitpos = round_up (rli->bitpos, type_align);
926    }
927#endif
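
  /* Worked example of the rule above, assuming a 32-bit `int' with
     type_align 32: a field `int f : 8;' about to start at bit 28 would
     occupy bits 28..35 and thus touch two 32-bit units, which is more
     than a 32-bit `int' itself may span, so rli->bitpos is rounded up to
     the next 32-bit boundary.  */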
928
929#ifdef BITFIELD_NBYTES_LIMITED
930  if (BITFIELD_NBYTES_LIMITED
931      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
932      && TREE_CODE (field) == FIELD_DECL
933      && type != error_mark_node
934      && DECL_BIT_FIELD_TYPE (field)
935      && ! DECL_PACKED (field)
936      && ! integer_zerop (DECL_SIZE (field))
937      && host_integerp (DECL_SIZE (field), 1)
938      && host_integerp (rli->offset, 1)
939      && host_integerp (TYPE_SIZE (type), 1))
940    {
941      unsigned int type_align = TYPE_ALIGN (type);
942      tree dsize = DECL_SIZE (field);
943      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
944      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
945      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);
946
947      if (maximum_field_alignment != 0)
948	type_align = MIN (type_align, maximum_field_alignment);
949      /* ??? This test is opposite the test in the containing if
950	 statement, so this code is unreachable currently.  */
951      else if (DECL_PACKED (field))
952	type_align = MIN (type_align, BITS_PER_UNIT);
953
954      /* A bit field may not span the unit of alignment of its type.
955	 Advance to next boundary if necessary.  */
956      /* ??? This code should match the code above for the
957	 PCC_BITFIELD_TYPE_MATTERS case.  */
958      if ((offset * BITS_PER_UNIT + bit_offset) / type_align
959	  != ((offset * BITS_PER_UNIT + bit_offset + field_size - 1)
960	      / type_align))
961	rli->bitpos = round_up (rli->bitpos, type_align);
962    }
963#endif
964
965  /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.  */
966  if ((* targetm.ms_bitfield_layout_p) (rli->t)
967      && TREE_CODE (field) == FIELD_DECL
968      && type != error_mark_node
969      && ! DECL_PACKED (field)
970      && rli->prev_field
971      && DECL_SIZE (field)
972      && host_integerp (DECL_SIZE (field), 1)
973      && DECL_SIZE (rli->prev_field)
974      && host_integerp (DECL_SIZE (rli->prev_field), 1)
975      && host_integerp (rli->offset, 1)
976      && host_integerp (TYPE_SIZE (type), 1)
977      && host_integerp (TYPE_SIZE (TREE_TYPE (rli->prev_field)), 1)
978      && ((DECL_BIT_FIELD_TYPE (rli->prev_field)
979	   && ! integer_zerop (DECL_SIZE (rli->prev_field)))
980	  || (DECL_BIT_FIELD_TYPE (field)
981	      && ! integer_zerop (DECL_SIZE (field))))
982      && (! simple_cst_equal (TYPE_SIZE (type),
983			      TYPE_SIZE (TREE_TYPE (rli->prev_field)))
984	  /* If the previous field was a zero-sized bit-field, either
985	     it was ignored, in which case we must ensure the proper
986	     alignment of this field here, or it already forced the
987	     alignment of this field, in which case forcing the
988	     alignment again is harmless.  So, do it in both cases.  */
989	  || (DECL_BIT_FIELD_TYPE (rli->prev_field)
990	      && integer_zerop (DECL_SIZE (rli->prev_field)))))
991    {
992      unsigned int type_align = TYPE_ALIGN (type);
993
994      if (rli->prev_field
995	  && DECL_BIT_FIELD_TYPE (rli->prev_field)
996	  /* If the previous bit-field is zero-sized, we've already
997	     accounted for its alignment needs (or ignored it, if
998	     appropriate) while placing it.  */
999	  && ! integer_zerop (DECL_SIZE (rli->prev_field)))
1000	type_align = MAX (type_align,
1001			  TYPE_ALIGN (TREE_TYPE (rli->prev_field)));
1002
1003      if (maximum_field_alignment != 0)
1004	type_align = MIN (type_align, maximum_field_alignment);
1005
1006      rli->bitpos = round_up (rli->bitpos, type_align);
1007    }
1008
1009  /* Offset so far becomes the position of this field after normalizing.  */
1010  normalize_rli (rli);
1011  DECL_FIELD_OFFSET (field) = rli->offset;
1012  DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
1013  SET_DECL_OFFSET_ALIGN (field, rli->offset_align);
1014
1015  TYPE_USER_ALIGN (rli->t) |= user_align;
1016
1017  /* If this field ended up more aligned than we thought it would be (we
1018     approximate this by seeing if its position changed), lay out the field
1019     again; perhaps we can use an integral mode for it now.  */
1020  if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
1021    actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
1022		    & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1));
1023  else if (integer_zerop (DECL_FIELD_OFFSET (field)))
1024    actual_align = BIGGEST_ALIGNMENT;
1025  else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
1026    actual_align = (BITS_PER_UNIT
1027		   * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
1028		      & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
1029  else
1030    actual_align = DECL_OFFSET_ALIGN (field);
1031
1032  if (known_align != actual_align)
1033    layout_decl (field, actual_align);
1034
1035  rli->prev_field = field;
1036
1037  /* Now add size of this field to the size of the record.  If the size is
1038     not constant, treat the field as being a multiple of bytes and just
1039     adjust the offset, resetting the bit position.  Otherwise, apportion the
1040     size amongst the bit position and offset.  First handle the case of an
1041     unspecified size, which can happen when we have an invalid nested struct
1042     definition, such as struct j { struct j { int i; } }.  The error message
1043     is printed in finish_struct.  */
1044  if (DECL_SIZE (field) == 0)
1045    /* Do nothing.  */;
1046  else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
1047	   || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
1048    {
1049      rli->offset
1050	= size_binop (PLUS_EXPR, rli->offset,
1051		      convert (sizetype,
1052			       size_binop (CEIL_DIV_EXPR, rli->bitpos,
1053					   bitsize_unit_node)));
1054      rli->offset
1055	= size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
1056      rli->bitpos = bitsize_zero_node;
1057      rli->offset_align = MIN (rli->offset_align, DECL_ALIGN (field));
1058    }
1059  else
1060    {
1061      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
1062      normalize_rli (rli);
1063    }
1064}
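
/* Illustrative sketch, assuming a typical 32-bit target: laying out
   `struct s { char c; int i; };', place_field puts `c' at offset 0; for
   `i' the known alignment is only 8 bits while 32 are desired, so
   rli->bitpos is rounded up from 8 to 32, and after normalize_rli the
   field lands at byte offset 4 with the record alignment raised to 32.  */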
1065
1066/* Assuming that all the fields have been laid out, this function uses
1067   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
1068   indicated by RLI.  */
1069
1070static void
1071finalize_record_size (rli)
1072     record_layout_info rli;
1073{
1074  tree unpadded_size, unpadded_size_unit;
1075
1076  /* Now we want just byte and bit offsets, so set the offset alignment
1077     to be a byte and then normalize.  */
1078  rli->offset_align = BITS_PER_UNIT;
1079  normalize_rli (rli);
1080
1081  /* Determine the desired alignment.  */
1082#ifdef ROUND_TYPE_ALIGN
1083  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
1084					  rli->record_align);
1085#else
1086  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
1087#endif
1088
1089  /* Compute the size so far.  Be sure to allow for extra bits in the
1090     size in bytes.  We have guaranteed above that it will be no more
1091     than a single byte.  */
1092  unpadded_size = rli_size_so_far (rli);
1093  unpadded_size_unit = rli_size_unit_so_far (rli);
1094  if (! integer_zerop (rli->bitpos))
1095    unpadded_size_unit
1096      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);
1097
1098  /* Record the un-rounded size in the binfo node.  But first we check
1099     the size of TYPE_BINFO to make sure that BINFO_SIZE is available.  */
1100  if (TYPE_BINFO (rli->t) && TREE_VEC_LENGTH (TYPE_BINFO (rli->t)) > 6)
1101    {
1102      TYPE_BINFO_SIZE (rli->t) = unpadded_size;
1103      TYPE_BINFO_SIZE_UNIT (rli->t) = unpadded_size_unit;
1104    }
1105
1106  /* Round the size up to be a multiple of the required alignment.  */
1107#ifdef ROUND_TYPE_SIZE
1108  TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
1109					TYPE_ALIGN (rli->t));
1110  TYPE_SIZE_UNIT (rli->t)
1111    = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
1112			    TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
1113#else
1114  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
1115  TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
1116				      TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
1117#endif
1118
1119  if (warn_padded && TREE_CONSTANT (unpadded_size)
1120      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
1121    warning ("padding struct size to alignment boundary");
1122
1123  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
1124      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
1125      && TREE_CONSTANT (unpadded_size))
1126    {
1127      tree unpacked_size;
1128
1129#ifdef ROUND_TYPE_ALIGN
1130      rli->unpacked_align
1131	= ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
1132#else
1133      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
1134#endif
1135
1136#ifdef ROUND_TYPE_SIZE
1137      unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
1138				       rli->unpacked_align);
1139#else
1140      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
1141#endif
1142
1143      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
1144	{
1145	  TYPE_PACKED (rli->t) = 0;
1146
1147	  if (TYPE_NAME (rli->t))
1148	    {
1149	      const char *name;
1150
1151	      if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
1152		name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
1153	      else
1154		name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));
1155
1156	      if (STRICT_ALIGNMENT)
1157		warning ("packed attribute causes inefficient alignment for `%s'", name);
1158	      else
1159		warning ("packed attribute is unnecessary for `%s'", name);
1160	    }
1161	  else
1162	    {
1163	      if (STRICT_ALIGNMENT)
1164		warning ("packed attribute causes inefficient alignment");
1165	      else
1166		warning ("packed attribute is unnecessary");
1167	    }
1168	}
1169    }
1170}
1171
1172/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).  */
1173
1174void
1175compute_record_mode (type)
1176     tree type;
1177{
1178  tree field;
1179  enum machine_mode mode = VOIDmode;
1180
1181  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
1182     However, if possible, we use a mode that fits in a register
1183     instead, in order to allow for better optimization down the
1184     line.  */
1185  TYPE_MODE (type) = BLKmode;
1186
1187  if (! host_integerp (TYPE_SIZE (type), 1))
1188    return;
1189
1190  /* A record which has any BLKmode members must itself be
1191     BLKmode; it can't go in a register, unless the member is
1192     BLKmode only because it isn't aligned.  */
1193  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1194    {
1195      unsigned HOST_WIDE_INT bitpos;
1196
1197      if (TREE_CODE (field) != FIELD_DECL)
1198	continue;
1199
1200      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
1201	  || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
1202	      && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
1203	  || ! host_integerp (bit_position (field), 1)
1204	  || DECL_SIZE (field) == 0
1205	  || ! host_integerp (DECL_SIZE (field), 1))
1206	return;
1207
1208      bitpos = int_bit_position (field);
1209
1210      /* Must be BLKmode if any field crosses a word boundary,
1211	 since extract_bit_field can't handle that in registers.  */
1212      if (bitpos / BITS_PER_WORD
1213	  != ((tree_low_cst (DECL_SIZE (field), 1) + bitpos - 1)
1214	      / BITS_PER_WORD)
1215	  /* But there is no problem if the field consists of entire words.  */
1216	  && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
1217	return;
1218
1219      /* If this field is the whole struct, remember its mode so
1220	 that, say, we can put a double in a class into a DF
1221	 register instead of forcing it to live in the stack.  */
1222      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
1223	mode = DECL_MODE (field);
1224
1225#ifdef MEMBER_TYPE_FORCES_BLK
1226      /* With some targets, e.g. c4x, it is sub-optimal
1227	 to access an aligned BLKmode structure as a scalar.  */
1228
1229      /* On ia64-*-hpux we need to ensure that we don't change the
1230	 mode of a structure containing a single field or else we
1231	 will pass it incorrectly.  Since a structure with a single
1232	 field causes mode to get set above we can't allow the
1233	 check for mode == VOIDmode in this case.  Perhaps
1234	 MEMBER_TYPE_FORCES_BLK should be extended to include mode
1235	 as an argument and the check could be put in there for c4x.  */
1236
1237      if ((mode == VOIDmode || FUNCTION_ARG_REG_LITTLE_ENDIAN)
1238	  && MEMBER_TYPE_FORCES_BLK (field))
1239	return;
1240#endif /* MEMBER_TYPE_FORCES_BLK  */
1241    }
1242
1243  /* If we only have one real field, use its mode.  This only applies to
1244     RECORD_TYPEs, not to unions.  */
1245  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
1246    TYPE_MODE (type) = mode;
1247  else
1248    TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1249
1250  /* If structure's known alignment is less than what the scalar
1251     mode would need, and it matters, then stick with BLKmode.  */
1252  if (TYPE_MODE (type) != BLKmode
1253      && STRICT_ALIGNMENT
1254      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
1255	    || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
1256    {
1257      /* If this is the only reason this type is BLKmode, then
1258	 don't force containing types to be BLKmode.  */
1259      TYPE_NO_FORCE_BLK (type) = 1;
1260      TYPE_MODE (type) = BLKmode;
1261    }
1262}
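
/* Illustrative sketch, assuming a typical 32-bit target: a type such as
   `struct { int x; };' gets SImode because its single field spans the
   whole structure, whereas a structure with a genuine BLKmode member, or
   one whose field straddles a word boundary without covering whole words,
   keeps BLKmode and therefore lives in memory rather than a register.  */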
1263
1264/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1265   out.  */
1266
1267static void
1268finalize_type_size (type)
1269     tree type;
1270{
1271  /* Normally, use the alignment corresponding to the mode chosen.
1272     However, where strict alignment is not required, avoid
1273     over-aligning structures, since most compilers do not do this
1274     alignment.  */
1275
1276  if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
1277      && (STRICT_ALIGNMENT
1278	  || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
1279	      && TREE_CODE (type) != QUAL_UNION_TYPE
1280	      && TREE_CODE (type) != ARRAY_TYPE)))
1281    {
1282      TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
1283      TYPE_USER_ALIGN (type) = 0;
1284    }
1285
1286  /* Do machine-dependent extra alignment.  */
1287#ifdef ROUND_TYPE_ALIGN
1288  TYPE_ALIGN (type)
1289    = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
1290#endif
1291
1292  /* If we failed to find a simple way to calculate the unit size
1293     of the type, find it by division.  */
1294  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
1295    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
1296       result will fit in sizetype.  We will get more efficient code using
1297       sizetype, so we force a conversion.  */
1298    TYPE_SIZE_UNIT (type)
1299      = convert (sizetype,
1300		 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
1301			     bitsize_unit_node));
1302
1303  if (TYPE_SIZE (type) != 0)
1304    {
1305#ifdef ROUND_TYPE_SIZE
1306      TYPE_SIZE (type)
1307	= ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
1308      TYPE_SIZE_UNIT (type)
1309	= ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
1310				TYPE_ALIGN (type) / BITS_PER_UNIT);
1311#else
1312      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
1313      TYPE_SIZE_UNIT (type)
1314	= round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
1315#endif
1316    }
1317
1318  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
1319  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
1320    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
1321  if (TYPE_SIZE_UNIT (type) != 0
1322      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
1323    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));
1324
1325  /* Also layout any other variants of the type.  */
1326  if (TYPE_NEXT_VARIANT (type)
1327      || type != TYPE_MAIN_VARIANT (type))
1328    {
1329      tree variant;
1330      /* Record layout info of this variant.  */
1331      tree size = TYPE_SIZE (type);
1332      tree size_unit = TYPE_SIZE_UNIT (type);
1333      unsigned int align = TYPE_ALIGN (type);
1334      unsigned int user_align = TYPE_USER_ALIGN (type);
1335      enum machine_mode mode = TYPE_MODE (type);
1336
1337      /* Copy it into all variants.  */
1338      for (variant = TYPE_MAIN_VARIANT (type);
1339	   variant != 0;
1340	   variant = TYPE_NEXT_VARIANT (variant))
1341	{
1342	  TYPE_SIZE (variant) = size;
1343	  TYPE_SIZE_UNIT (variant) = size_unit;
1344	  TYPE_ALIGN (variant) = align;
1345	  TYPE_USER_ALIGN (variant) = user_align;
1346	  TYPE_MODE (variant) = mode;
1347	}
1348    }
1349}
1350
1351/* Do all of the work required to layout the type indicated by RLI,
1352   once the fields have been laid out.  This function will call `free'
1353   for RLI.  */
1354
1355void
1356finish_record_layout (rli)
1357     record_layout_info rli;
1358{
1359  /* Compute the final size.  */
1360  finalize_record_size (rli);
1361
1362  /* Compute the TYPE_MODE for the record.  */
1363  compute_record_mode (rli->t);
1364
1365  /* Perform any last tweaks to the TYPE_SIZE, etc.  */
1366  finalize_type_size (rli->t);
1367
1368  /* Lay out any static members.  This is done now because their type
1369     may use the record's type.  */
1370  while (rli->pending_statics)
1371    {
1372      layout_decl (TREE_VALUE (rli->pending_statics), 0);
1373      rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1374    }
1375
1376  /* Clean up.  */
1377  free (rli);
1378}
1379
1380/* Calculate the mode, size, and alignment for TYPE.
1381   For an array type, calculate the element separation as well.
1382   Record TYPE on the chain of permanent or temporary types
1383   so that dbxout will find out about it.
1384
1385   TYPE_SIZE of a type is nonzero if the type has been laid out already.
1386   layout_type does nothing on such a type.
1387
1388   If the type is incomplete, its TYPE_SIZE remains zero.  */
1389
1390void
1391layout_type (type)
1392     tree type;
1393{
1394  if (type == 0)
1395    abort ();
1396
1397  /* Do nothing if type has been laid out before.  */
1398  if (TYPE_SIZE (type))
1399    return;
1400
1401  switch (TREE_CODE (type))
1402    {
1403    case LANG_TYPE:
1404      /* This kind of type is the responsibility
1405	 of the language-specific code.  */
1406      abort ();
1407
1408    case BOOLEAN_TYPE:  /* Used for Java, Pascal, and Chill.  */
1409      if (TYPE_PRECISION (type) == 0)
1410	TYPE_PRECISION (type) = 1; /* default to one byte/boolean.  */
1411
1412      /* ... fall through ...  */
1413
1414    case INTEGER_TYPE:
1415    case ENUMERAL_TYPE:
1416    case CHAR_TYPE:
1417      if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1418	  && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1419	TREE_UNSIGNED (type) = 1;
1420
1421      TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1422						 MODE_INT);
1423      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1424      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1425      break;
1426
1427    case REAL_TYPE:
1428      TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1429      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1430      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1431      break;
1432
1433    case COMPLEX_TYPE:
1434      TREE_UNSIGNED (type) = TREE_UNSIGNED (TREE_TYPE (type));
1435      TYPE_MODE (type)
1436	= mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1437			 (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1438			  ? MODE_COMPLEX_INT : MODE_COMPLEX_FLOAT),
1439			 0);
1440      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1441      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1442      break;
1443
1444    case VECTOR_TYPE:
1445      {
1446	tree subtype;
1447
1448	subtype = TREE_TYPE (type);
1449	TREE_UNSIGNED (type) = TREE_UNSIGNED (subtype);
1450	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1451	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1452      }
1453      break;
1454
1455    case VOID_TYPE:
1456      /* This is an incomplete type and so doesn't have a size.  */
1457      TYPE_ALIGN (type) = 1;
1458      TYPE_USER_ALIGN (type) = 0;
1459      TYPE_MODE (type) = VOIDmode;
1460      break;
1461
1462    case OFFSET_TYPE:
1463      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1464      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1465      /* A pointer might be MODE_PARTIAL_INT,
1466	 but ptrdiff_t must be integral.  */
1467      TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
1468      break;
1469
1470    case FUNCTION_TYPE:
1471    case METHOD_TYPE:
1472      TYPE_MODE (type) = mode_for_size (2 * POINTER_SIZE, MODE_INT, 0);
1473      TYPE_SIZE (type) = bitsize_int (2 * POINTER_SIZE);
1474      TYPE_SIZE_UNIT (type) = size_int ((2 * POINTER_SIZE) / BITS_PER_UNIT);
1475      break;
1476
1477    case POINTER_TYPE:
1478    case REFERENCE_TYPE:
1479      {
1480	int nbits = ((TREE_CODE (type) == REFERENCE_TYPE
1481		      && reference_types_internal)
1482		     ? GET_MODE_BITSIZE (Pmode) : POINTER_SIZE);
1483
1484	TYPE_MODE (type) = nbits == POINTER_SIZE ? ptr_mode : Pmode;
1485	TYPE_SIZE (type) = bitsize_int (nbits);
1486	TYPE_SIZE_UNIT (type) = size_int (nbits / BITS_PER_UNIT);
1487	TREE_UNSIGNED (type) = 1;
1488	TYPE_PRECISION (type) = nbits;
1489      }
1490      break;
1491
1492    case ARRAY_TYPE:
1493      {
1494	tree index = TYPE_DOMAIN (type);
1495	tree element = TREE_TYPE (type);
1496
1497	build_pointer_type (element);
1498
1499	/* We need to know both bounds in order to compute the size.  */
1500	if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1501	    && TYPE_SIZE (element))
1502	  {
1503	    tree ub = TYPE_MAX_VALUE (index);
1504	    tree lb = TYPE_MIN_VALUE (index);
1505	    tree length;
1506	    tree element_size;
1507
1508	    /* The initial subtraction should happen in the original type so
1509	       that (possible) negative values are handled appropriately.  */
1510	    length = size_binop (PLUS_EXPR, size_one_node,
1511				 convert (sizetype,
1512					  fold (build (MINUS_EXPR,
1513						       TREE_TYPE (lb),
1514						       ub, lb))));
1515
1516	    /* Special handling for arrays of bits (for Chill).  */
1517	    element_size = TYPE_SIZE (element);
1518	    if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1519		&& (integer_zerop (TYPE_MAX_VALUE (element))
1520		    || integer_onep (TYPE_MAX_VALUE (element)))
1521		&& host_integerp (TYPE_MIN_VALUE (element), 1))
1522	      {
1523		HOST_WIDE_INT maxvalue
1524		  = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1525		HOST_WIDE_INT minvalue
1526		  = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1527
1528		if (maxvalue - minvalue == 1
1529		    && (maxvalue == 1 || maxvalue == 0))
1530		  element_size = integer_one_node;
1531	      }
1532
1533	    TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1534					   convert (bitsizetype, length));
1535
1536	    /* If we know the size of the element, calculate the total
1537	       size directly, rather than do some division thing below.
1538	       This optimization helps Fortran assumed-size arrays
1539	       (where the size of the array is determined at runtime)
1540	       substantially.
1541	       Note that we can't do this in the case where the size of
1542	       the elements is one bit since TYPE_SIZE_UNIT cannot be
1543	       set correctly in that case.  */
1544	    if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1545	      TYPE_SIZE_UNIT (type)
1546		= size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
1547	  }
1548
1549	/* Now round the alignment and size,
1550	   using machine-dependent criteria if any.  */
1551
1552#ifdef ROUND_TYPE_ALIGN
1553	TYPE_ALIGN (type)
1554	  = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1555#else
1556	TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1557#endif
1558	TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
1559
1560#ifdef ROUND_TYPE_SIZE
1561	if (TYPE_SIZE (type) != 0)
1562	  {
1563	    tree tmp
1564	      = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
1565
1566	    /* If the rounding changed the size of the type, remove any
1567	       pre-calculated TYPE_SIZE_UNIT.  */
1568	    if (simple_cst_equal (TYPE_SIZE (type), tmp) != 1)
1569	      TYPE_SIZE_UNIT (type) = NULL;
1570
1571	    TYPE_SIZE (type) = tmp;
1572	  }
1573#endif
1574
1575	TYPE_MODE (type) = BLKmode;
1576	if (TYPE_SIZE (type) != 0
1577#ifdef MEMBER_TYPE_FORCES_BLK
1578	    && ! MEMBER_TYPE_FORCES_BLK (type)
1579#endif
1580	    /* BLKmode elements force BLKmode aggregate;
1581	       else extract/store fields may lose.  */
1582	    && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1583		|| TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1584	  {
1585	    /* One-element arrays get the component type's mode.  */
1586	    if (simple_cst_equal (TYPE_SIZE (type),
1587				  TYPE_SIZE (TREE_TYPE (type))))
1588	      TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1589	    else
1590	      TYPE_MODE (type)
1591		= mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1592
1593	    if (TYPE_MODE (type) != BLKmode
1594		&& STRICT_ALIGNMENT
1595		&& TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1596		&& TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
1597	      {
1598		TYPE_NO_FORCE_BLK (type) = 1;
1599		TYPE_MODE (type) = BLKmode;
1600	      }
1601	  }
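	/* Illustrative note (assumptions about a typical 32-bit target, not
	   from this file): in the code above, an array of a single 32-bit
	   int takes the int's own mode because the sizes match, while a
	   two-element version gets whatever 64-bit integer mode
	   mode_for_size_tree finds, unless the strict-alignment check
	   forces it back to BLKmode.  */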
1602	break;
1603      }
1604
1605    case RECORD_TYPE:
1606    case UNION_TYPE:
1607    case QUAL_UNION_TYPE:
1608      {
1609	tree field;
1610	record_layout_info rli;
1611
1612	/* Initialize the layout information.  */
1613	rli = start_record_layout (type);
1614
1615	/* If this is a QUAL_UNION_TYPE, we want to process the fields
1616	   in the reverse order in building the COND_EXPR that denotes
1617	   its size.  We reverse them again later.  */
1618	if (TREE_CODE (type) == QUAL_UNION_TYPE)
1619	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1620
1621	/* Place all the fields.  */
1622	for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1623	  place_field (rli, field);
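	/* Illustrative example (not from the original source, assuming a
	   byte-addressed target): for a C struct holding a char followed
	   by a 4-byte, 4-byte-aligned int, place_field puts the char at
	   bit offset 0 and the int at bit offset 32, leaving three bytes
	   of padding between them.  */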
1624
1625	if (TREE_CODE (type) == QUAL_UNION_TYPE)
1626	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1627
1628	if (lang_adjust_rli)
1629	  (*lang_adjust_rli) (rli);
1630
1631	/* Finish laying out the record.  */
1632	finish_record_layout (rli);
1633      }
1634      break;
1635
1636    case SET_TYPE:  /* Used by Chill and Pascal.  */
1637      if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
1638	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
1639	abort ();
1640      else
1641	{
1642#ifndef SET_WORD_SIZE
1643#define SET_WORD_SIZE BITS_PER_WORD
1644#endif
1645	  unsigned int alignment
1646	    = set_alignment ? set_alignment : SET_WORD_SIZE;
1647	  int size_in_bits
1648	    = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
1649	       - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
1650	  int rounded_size
1651	    = ((size_in_bits + alignment - 1) / alignment) * alignment;
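	  /* Illustrative arithmetic (not from the original source): with
	     set_alignment left at zero and SET_WORD_SIZE of 32, a domain
	     of [0..99] gives size_in_bits of 100 and rounded_size of 128,
	     which exceeds the alignment, so the set gets BLKmode below.  */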
1652
1653	  if (rounded_size > (int) alignment)
1654	    TYPE_MODE (type) = BLKmode;
1655	  else
1656	    TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);
1657
1658	  TYPE_SIZE (type) = bitsize_int (rounded_size);
1659	  TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
1660	  TYPE_ALIGN (type) = alignment;
1661	  TYPE_USER_ALIGN (type) = 0;
1662	  TYPE_PRECISION (type) = size_in_bits;
1663	}
1664      break;
1665
1666    case FILE_TYPE:
1667      /* The size may vary in different languages, so the language front end
1668	 should fill in the size.  */
1669      TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
1670      TYPE_USER_ALIGN (type) = 0;
1671      TYPE_MODE (type) = BLKmode;
1672      break;
1673
1674    default:
1675      abort ();
1676    }
1677
1678  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
1679     records and unions, finish_record_layout already called this
1680     function.  */
1681  if (TREE_CODE (type) != RECORD_TYPE
1682      && TREE_CODE (type) != UNION_TYPE
1683      && TREE_CODE (type) != QUAL_UNION_TYPE)
1684    finalize_type_size (type);
1685
1686  /* If this type is created before sizetype has been permanently set,
1687     record it so set_sizetype can fix it up.  */
1688  if (! sizetype_set)
1689    early_type_list = tree_cons (NULL_TREE, type, early_type_list);
1690
1691  /* If an alias set has been set for this aggregate when it was incomplete,
1692     force it into alias set 0.
1693     This is too conservative, but we cannot call record_component_aliases
1694     here because some frontends still change the aggregates after
1695     layout_type.  */
1696  if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1697    TYPE_ALIAS_SET (type) = 0;
1698}
1699
1700/* Create and return a type for signed integers of PRECISION bits.  */
1701
1702tree
1703make_signed_type (precision)
1704     int precision;
1705{
1706  tree type = make_node (INTEGER_TYPE);
1707
1708  TYPE_PRECISION (type) = precision;
1709
1710  fixup_signed_type (type);
1711  return type;
1712}
1713
1714/* Create and return a type for unsigned integers of PRECISION bits.  */
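/* Illustrative usage (an assumption, not taken from this file): a front
   end that needs a 16-bit unsigned type could write

     tree u16 = make_unsigned_type (16);

   after which fixup_unsigned_type has set the bounds to [0, 65535] and
   laid the type out.  */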
1715
1716tree
1717make_unsigned_type (precision)
1718     int precision;
1719{
1720  tree type = make_node (INTEGER_TYPE);
1721
1722  TYPE_PRECISION (type) = precision;
1723
1724  fixup_unsigned_type (type);
1725  return type;
1726}
1727
1728/* Initialize sizetype and bitsizetype to a reasonable and temporary
1729   value to enable integer types to be created.  */
1730
1731void
1732initialize_sizetypes ()
1733{
1734  tree t = make_node (INTEGER_TYPE);
1735
1736  /* Set this so we do something reasonable for the build_int_2 calls
1737     below.  */
1738  integer_type_node = t;
1739
1740  TYPE_MODE (t) = SImode;
1741  TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
1742  TYPE_USER_ALIGN (t) = 0;
1743  TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
1744  TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
1745  TREE_UNSIGNED (t) = 1;
1746  TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
1747  TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
1748  TYPE_IS_SIZETYPE (t) = 1;
1749
1750  /* 1000 avoids problems with possible overflow and is certainly
1751     larger than any size value we'd want to be storing.  */
1752  TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);
1753
1754  /* These two must be different nodes because of the caching done in
1755     size_int_wide.  */
1756  sizetype = t;
1757  bitsizetype = copy_node (t);
1758  integer_type_node = 0;
1759}
1760
1761/* Set sizetype to TYPE, and initialize the related *sizetype variants
1762   accordingly.  Also update the type of any type sizes made so far.  */
1763
1764void
1765set_sizetype (type)
1766     tree type;
1767{
1768  int oprecision = TYPE_PRECISION (type);
1769  /* The *bitsizetype types use a precision that avoids overflows when
1770     calculating signed sizes / offsets in bits.  However, when
1771     cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
1772     precision.  */
1773  int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
1774		       2 * HOST_BITS_PER_WIDE_INT);
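  /* Illustrative arithmetic (not from the original source): with a 32-bit
     sizetype and BITS_PER_UNIT_LOG of 3, bitsizetype is given
     32 + 3 + 1 == 36 bits of precision, well under the
     2 * HOST_BITS_PER_WIDE_INT cap on typical hosts.  */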
1775  unsigned int i;
1776  tree t;
1777
1778  if (sizetype_set)
1779    abort ();
1780
1781  /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE.  */
1782  sizetype = copy_node (type);
1783  TYPE_DOMAIN (sizetype) = type;
1784  TYPE_IS_SIZETYPE (sizetype) = 1;
1785  bitsizetype = make_node (INTEGER_TYPE);
1786  TYPE_NAME (bitsizetype) = TYPE_NAME (type);
1787  TYPE_PRECISION (bitsizetype) = precision;
1788  TYPE_IS_SIZETYPE (bitsizetype) = 1;
1789
1790  if (TREE_UNSIGNED (type))
1791    fixup_unsigned_type (bitsizetype);
1792  else
1793    fixup_signed_type (bitsizetype);
1794
1795  layout_type (bitsizetype);
1796
1797  if (TREE_UNSIGNED (type))
1798    {
1799      usizetype = sizetype;
1800      ubitsizetype = bitsizetype;
1801      ssizetype = copy_node (make_signed_type (oprecision));
1802      sbitsizetype = copy_node (make_signed_type (precision));
1803    }
1804  else
1805    {
1806      ssizetype = sizetype;
1807      sbitsizetype = bitsizetype;
1808      usizetype = copy_node (make_unsigned_type (oprecision));
1809      ubitsizetype = copy_node (make_unsigned_type (precision));
1810    }
1811
1812  TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");
1813
1814  /* Show that each is a sizetype and a main variant with no pointers to it.  */
1815  for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
1816    {
1817      TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
1818      TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
1819      TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
1820      TYPE_POINTER_TO (sizetype_tab[i]) = 0;
1821      TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
1822    }
1823
1824  ggc_add_tree_root ((tree *) &sizetype_tab,
1825		     sizeof sizetype_tab / sizeof (tree));
1826
1827  /* Go down each of the types we already made and set the proper type
1828     for the sizes in them.  */
1829  for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
1830    {
1831      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE)
1832	abort ();
1833
1834      TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
1835      TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
1836    }
1837
1838  early_type_list = 0;
1839  sizetype_set = 1;
1840}
1841
1842/* Set the extreme values of TYPE based on its precision in bits,
1843   then lay it out.  Used when make_signed_type won't do
1844   because the tree code is not INTEGER_TYPE.
1845   E.g. for Pascal, when the -fsigned-char option is given.  */
1846
1847void
1848fixup_signed_type (type)
1849     tree type;
1850{
1851  int precision = TYPE_PRECISION (type);
1852
1853  /* We cannot properly represent constants wider than
1854     2 * HOST_BITS_PER_WIDE_INT bits, but we still need the types,
1855     as they are used by the i386 vector extensions and friends.  */
1856  if (precision > HOST_BITS_PER_WIDE_INT * 2)
1857    precision = HOST_BITS_PER_WIDE_INT * 2;
1858
1859  TYPE_MIN_VALUE (type)
1860    = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
1861		    ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
1862		   (((HOST_WIDE_INT) (-1)
1863		     << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1864			 ? precision - HOST_BITS_PER_WIDE_INT - 1
1865			 : 0))));
1866  TYPE_MAX_VALUE (type)
1867    = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
1868		    ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
1869		   (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
1870		    ? (((HOST_WIDE_INT) 1
1871			<< (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
1872		    : 0));
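  /* Illustrative example (not from the original source): for a precision
     of 8, on hosts where HOST_WIDE_INT is wider than 8 bits, the bounds
     computed above are -128 and 127.  */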
1873
1874  TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
1875  TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
1876
1877  /* Lay out the type: set its alignment, size, etc.  */
1878  layout_type (type);
1879}
1880
1881/* Set the extreme values of TYPE based on its precision in bits,
1882   then lay it out.  This is used both in `make_unsigned_type'
1883   and for enumeral types.  */
1884
1885void
1886fixup_unsigned_type (type)
1887     tree type;
1888{
1889  int precision = TYPE_PRECISION (type);
1890
1891  /* We cannot properly represent constants wider than
1892     2 * HOST_BITS_PER_WIDE_INT bits, but we still need the types,
1893     as they are used by the i386 vector extensions and friends.  */
1894  if (precision > HOST_BITS_PER_WIDE_INT * 2)
1895    precision = HOST_BITS_PER_WIDE_INT * 2;
1896
1897  TYPE_MIN_VALUE (type) = build_int_2 (0, 0);
1898  TYPE_MAX_VALUE (type)
1899    = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
1900		   ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
1901		   precision - HOST_BITS_PER_WIDE_INT > 0
1902		   ? ((unsigned HOST_WIDE_INT) ~0
1903		      >> (HOST_BITS_PER_WIDE_INT
1904			  - (precision - HOST_BITS_PER_WIDE_INT)))
1905		   : 0);
1906  TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
1907  TREE_TYPE (TYPE_MAX_VALUE (type)) = type;
1908
1909  /* Lay out the type: set its alignment, size, etc.  */
1910  layout_type (type);
1911}
1912
1913/* Find the best machine mode to use when referencing a bit field of length
1914   BITSIZE bits starting at BITPOS.
1915
1916   The underlying object is known to be aligned to a boundary of ALIGN bits.
1917   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
1918   larger than LARGEST_MODE (usually SImode).
1919
1920   If no mode meets all these conditions, we return VOIDmode.  Otherwise, if
1921   VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
1922   mode meeting these conditions.
1923
1924   Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
1925   the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
1926   all the conditions.  */
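/* Illustrative example (assumptions about a typical byte-addressed target,
   not from this file): with 8-bit QImode and 16-bit HImode available, a
   10-bit field starting at bit position 3 in an object aligned to 32 bits
   does not fit in a single 8-bit unit (3 % 8 + 10 == 13), but does fit in
   a 16-bit unit, so HImode is returned when VOLATILEP is true or
   SLOW_BYTE_ACCESS is false and no LARGEST_MODE restriction applies.  */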
1927
1928enum machine_mode
1929get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
1930     int bitsize, bitpos;
1931     unsigned int align;
1932     enum machine_mode largest_mode;
1933     int volatilep;
1934{
1935  enum machine_mode mode;
1936  unsigned int unit = 0;
1937
1938  /* Find the narrowest integer mode that contains the bit field.  */
1939  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1940       mode = GET_MODE_WIDER_MODE (mode))
1941    {
1942      unit = GET_MODE_BITSIZE (mode);
1943      if ((bitpos % unit) + bitsize <= unit)
1944	break;
1945    }
1946
1947  if (mode == VOIDmode
1948      /* It is tempting to omit the following line
1949	 if STRICT_ALIGNMENT is true.
1950	 But that is incorrect, since if the bit field uses part of 3 bytes
1951	 and we use a 4-byte mode, we could get a spurious segv
1952	 if the extra 4th byte is past the end of memory.
1953	 (Though at least one Unix compiler ignores this problem:
1954	 the one on the Sequent 386 machine.)  */
1955      || MIN (unit, BIGGEST_ALIGNMENT) > align
1956      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
1957    return VOIDmode;
1958
1959  if (SLOW_BYTE_ACCESS && ! volatilep)
1960    {
1961      enum machine_mode wide_mode = VOIDmode, tmode;
1962
1963      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
1964	   tmode = GET_MODE_WIDER_MODE (tmode))
1965	{
1966	  unit = GET_MODE_BITSIZE (tmode);
1967	  if (bitpos / unit == (bitpos + bitsize - 1) / unit
1968	      && unit <= BITS_PER_WORD
1969	      && unit <= MIN (align, BIGGEST_ALIGNMENT)
1970	      && (largest_mode == VOIDmode
1971		  || unit <= GET_MODE_BITSIZE (largest_mode)))
1972	    wide_mode = tmode;
1973	}
1974
1975      if (wide_mode != VOIDmode)
1976	return wide_mode;
1977    }
1978
1979  return mode;
1980}
1981
1982/* This function is run once to initialize stor-layout.c.  */
1983
1984void
1985init_stor_layout_once ()
1986{
1987  ggc_add_tree_root (&pending_sizes, 1);
1988}
1989