/* stor-layout.c revision 96263 */
/* C-compiler utilities for types and variables storage layout
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1996, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

221541Srgrimes
#include "config.h"
#include "system.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "toplev.h"
#include "ggc.h"
#include "target.h"
34116182Sobrien
35106369Srwatson/* Set to one when set_sizetype has been called.  */
36106369Srwatsonstatic int sizetype_set;
371541Srgrimes
3848274Speter/* List of types created before set_sizetype has been called.  We do not
3976166Smarkm   make this a GGC root since we want these nodes to be reclaimed.  */
4076166Smarkmstatic tree early_type_list;
4112221Sbde
421541Srgrimes/* Data type for the expressions representing sizes of data types.
433308Sphk   It is the first integer type laid out.  */
441541Srgrimestree sizetype_tab[(int) TYPE_KIND_LAST];
45106369Srwatson
46140483Sps/* If nonzero, this is an upper limit on alignment of structure fields.
4725583Speter   The value is measured in bits.  */
481541Srgrimesunsigned int maximum_field_alignment;
4925656Speter
50151576Sdavidxu/* If non-zero, the alignment of a bitstring or (power-)set value, in bits.
5158377Sphk   May be overridden by front-ends.  */
521541Srgrimesunsigned int set_alignment = 0;
5376166Smarkm
5426335Speter/* Nonzero if all REFERENCE_TYPEs are internal and hence should be
5526335Speter   allocated in Pmode, not ptr_mode.   Set only by internal_reference_types
561541Srgrimes   called only by a front end.  */
57151576Sdavidxustatic int reference_types_internal = 0;
58151576Sdavidxu
59110299Sphkstatic void finalize_record_size	PARAMS ((record_layout_info));
60110299Sphkstatic void finalize_type_size		PARAMS ((tree));
619369Sdgstatic void place_union_field		PARAMS ((record_layout_info, tree));
62151576Sdavidxuextern void debug_rli			PARAMS ((record_layout_info));
63151576Sdavidxu
64151576Sdavidxu/* SAVE_EXPRs for sizes of types and decls, waiting to be expanded.  */
658876Srgrimes
661541Srgrimesstatic tree pending_sizes;
671541Srgrimes
681541Srgrimes/* Nonzero means cannot safely call expand_expr now,
691541Srgrimes   so put variable sizes onto `pending_sizes' instead.  */
701541Srgrimes
711541Srgrimesint immediate_size_expand;
721541Srgrimes
731541Srgrimes/* Show that REFERENCE_TYPES are internal and should be Pmode.  Called only
741541Srgrimes   by front end.  */
7594343Sjhb
7692723Salfredvoid
7792723Salfredinternal_reference_types ()
7813016Sbde{
79151576Sdavidxu  reference_types_internal = 1;
80151576Sdavidxu}
81151576Sdavidxu
82151576Sdavidxu/* Get a list of all the objects put on the pending sizes list.  */
83151576Sdavidxu
84151576Sdavidxutree
85151576Sdavidxuget_pending_sizes ()
86151576Sdavidxu{
87151576Sdavidxu  tree chain = pending_sizes;
88151576Sdavidxu  tree t;
89151576Sdavidxu
90151576Sdavidxu  /* Put each SAVE_EXPR into the current function.  */
91151869Sdavidxu  for (t = chain; t; t = TREE_CHAIN (t))
92151576Sdavidxu    SAVE_EXPR_CONTEXT (TREE_VALUE (t)) = current_function_decl;
93151576Sdavidxu
94151576Sdavidxu  pending_sizes = 0;
95151576Sdavidxu  return chain;
96151576Sdavidxu}
97151576Sdavidxu
98151576Sdavidxu/* Return non-zero if EXPR is present on the pending sizes list.  */
99151576Sdavidxu
100151869Sdavidxuint
101151576Sdavidxuis_pending_size (expr)
102151576Sdavidxu     tree expr;
103151576Sdavidxu{
104151576Sdavidxu  tree t;
105151576Sdavidxu
106151576Sdavidxu  for (t = pending_sizes; t; t = TREE_CHAIN (t))
107151576Sdavidxu    if (TREE_VALUE (t) == expr)
10830739Sphk      return 1;
10930739Sphk  return 0;
11030739Sphk}
11130739Sphk
11230739Sphk/* Add EXPR to the pending sizes list.  */
11330739Sphk
11492723Salfredvoid
11530739Sphkput_pending_size (expr)
11625583Speter     tree expr;
117102074Sphk{
11825583Speter  /* Strip any simple arithmetic from EXPR to see if it has an underlying
11945433Snsayer     SAVE_EXPR.  */
12045438Snsayer  while (TREE_CODE_CLASS (TREE_CODE (expr)) == '1'
12133690Sphk	 || (TREE_CODE_CLASS (TREE_CODE (expr)) == '2'
12225583Speter	    && TREE_CONSTANT (TREE_OPERAND (expr, 1))))
12325583Speter    expr = TREE_OPERAND (expr, 0);
12425656Speter
12533818Sbde  if (TREE_CODE (expr) == SAVE_EXPR)
12635029Sphk    pending_sizes = tree_cons (NULL_TREE, expr, pending_sizes);
12735029Sphk}
12825583Speter
12925583Speter/* Put a chain of objects into the pending sizes list, which must be
13033818Sbde   empty.  */
13145433Snsayer
13245433Snsayervoid
13345433Snsayerput_pending_sizes (chain)
13445433Snsayer     tree chain;
13545438Snsayer{
13645438Snsayer  if (pending_sizes)
13745438Snsayer    abort ();
13845438Snsayer
13925583Speter  pending_sizes = chain;
14094343Sjhb}
14145433Snsayer
14245437Smjacob/* Given a size SIZE that may not be a constant, return a SAVE_EXPR
14345438Snsayer   to serve as the actual size-expression for a type or decl.  */
14445437Smjacob
14545437Smjacobtree
14645437Smjacobvariable_size (size)
14745437Smjacob     tree size;
14845437Smjacob{
14945437Smjacob  /* If the language-processor is to take responsibility for variable-sized
15045437Smjacob     items (e.g., languages which have elaboration procedures like Ada),
15145433Snsayer     just return SIZE unchanged.  Likewise for self-referential sizes and
15245433Snsayer     constant sizes.  */
15345437Smjacob  if (TREE_CONSTANT (size)
15445438Snsayer      || global_bindings_p () < 0 || contains_placeholder_p (size))
15545438Snsayer    return size;
15645438Snsayer
15745438Snsayer  size = save_expr (size);
15845438Snsayer
15945438Snsayer  /* If an array with a variable number of elements is declared, and
16045438Snsayer     the elements require destruction, we will emit a cleanup for the
16145438Snsayer     array.  That cleanup is run both on normal exit from the block
16245438Snsayer     and in the exception-handler for the block.  Normally, when code
16345433Snsayer     is used in both ordinary code and in an exception handler it is
16433818Sbde     `unsaved', i.e., all SAVE_EXPRs are recalculated.  However, we do
16533818Sbde     not wish to do that here; the array-size is the same in both
16633690Sphk     places.  */
16733690Sphk  if (TREE_CODE (size) == SAVE_EXPR)
16894343Sjhb    SAVE_EXPR_PERSISTENT_P (size) = 1;
16958377Sphk
17025583Speter  if (global_bindings_p ())
17125583Speter    {
17225583Speter      if (TREE_CONSTANT (size))
17325583Speter	error ("type size can't be explicitly evaluated");
17494343Sjhb      else
17525583Speter	error ("variable-size type declared outside of any function");
17625583Speter
17725583Speter      return size_one_node;
17812221Sbde    }
17925583Speter
18025583Speter  if (immediate_size_expand)
18125583Speter    /* NULL_RTX is not defined; neither is the rtx type.
18225583Speter       Also, we would like to pass const0_rtx here, but don't have it.  */
18325583Speter    expand_expr (size, expand_expr (integer_zero_node, NULL_RTX, VOIDmode, 0),
18425656Speter		 VOIDmode, 0);
18582746Sdillon  else if (cfun != 0 && cfun->x_dont_save_pending_sizes_p)
18682746Sdillon    /* The front-end doesn't want us to keep a list of the expressions
18782746Sdillon       that determine sizes for variable size objects.  */
18825583Speter    ;
18925583Speter  else
190102074Sphk    put_pending_size (size);
19125583Speter
19225583Speter  return size;
193151357Sps}
194151357Sps
/* Widest size, in bits, for which a non-BLK mode will be chosen when
   LIMIT is requested.  Targets may pre-define this to override the
   DImode-width default.  */
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif
198151357Sps
199151357Sps/* Return the machine mode to use for a nonscalar of SIZE bits.
200151357Sps   The mode must be in class CLASS, and have exactly that many bits.
201151357Sps   If LIMIT is nonzero, modes of wider than MAX_FIXED_MODE_SIZE will not
202151357Sps   be used.  */
203151357Sps
204151357Spsenum machine_mode
205130884Skbyancmode_for_size (size, class, limit)
206136152Sjhb     unsigned int size;
20725583Speter     enum mode_class class;
208136152Sjhb     int limit;
209151357Sps{
210130654Skbyanc  enum machine_mode mode;
211151357Sps
212130654Skbyanc  if (limit && size > MAX_FIXED_MODE_SIZE)
213130654Skbyanc    return BLKmode;
214136152Sjhb
215136152Sjhb  /* Get the first mode which has this size, in the specified class.  */
216136152Sjhb  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
217151357Sps       mode = GET_MODE_WIDER_MODE (mode))
218130654Skbyanc    if (GET_MODE_BITSIZE (mode) == size)
219130654Skbyanc      return mode;
220136152Sjhb
221136152Sjhb  return BLKmode;
222136152Sjhb}
223130884Skbyanc
224151357Sps/* Similar, except passed a tree node.  */
225130654Skbyanc
226130884Skbyancenum machine_mode
227151357Spsmode_for_size_tree (size, class, limit)
228130884Skbyanc     tree size;
229130654Skbyanc     enum mode_class class;
230111315Sphk     int limit;
231130654Skbyanc{
232151357Sps  if (TREE_CODE (size) != INTEGER_CST
23325583Speter      /* What we really want to say here is that the size can fit in a
23425583Speter	 host integer, but we know there's no way we'd find a mode for
23525583Speter	 this many bits, so there's no point in doing the precise test.  */
23625583Speter      || compare_tree_int (size, 1000) > 0)
23725583Speter    return BLKmode;
23825583Speter  else
23925583Speter    return mode_for_size (TREE_INT_CST_LOW (size), class, limit);
24025583Speter}
24125656Speter
24282746Sdillon/* Similar, but never return BLKmode; return the narrowest mode that
24382746Sdillon   contains at least the requested number of bits.  */
24482746Sdillon
24525583Speterenum machine_mode
24625583Spetersmallest_mode_for_size (size, class)
247102074Sphk     unsigned int size;
24825583Speter     enum mode_class class;
24925583Speter{
25025583Speter  enum machine_mode mode;
25125583Speter
252151357Sps  /* Get the first mode which has at least this size, in the
253151357Sps     specified class.  */
254151357Sps  for (mode = GET_CLASS_NARROWEST_MODE (class); mode != VOIDmode;
255151357Sps       mode = GET_MODE_WIDER_MODE (mode))
256151357Sps    if (GET_MODE_BITSIZE (mode) >= size)
257151357Sps      return mode;
258151357Sps
259151357Sps  abort ();
260151357Sps}
261151357Sps
262151357Sps/* Find an integer mode of the exact same size, or BLKmode on failure.  */
263106369Srwatson
264106369Srwatsonenum machine_mode
265106369Srwatsonint_mode_for_mode (mode)
266106369Srwatson     enum machine_mode mode;
267106369Srwatson{
26893593Sjhb  switch (GET_MODE_CLASS (mode))
26994343Sjhb    {
270151357Sps    case MODE_INT:
27194343Sjhb    case MODE_PARTIAL_INT:
272151357Sps      break;
27394343Sjhb
27434901Sphk    case MODE_COMPLEX_INT:
275151357Sps    case MODE_COMPLEX_FLOAT:
27694343Sjhb    case MODE_FLOAT:
27782746Sdillon    case MODE_VECTOR_INT:
27825583Speter    case MODE_VECTOR_FLOAT:
27925583Speter      mode = mode_for_size (GET_MODE_BITSIZE (mode), MODE_INT, 0);
28025583Speter      break;
28125583Speter
28225583Speter    case MODE_RANDOM:
28325583Speter      if (mode == BLKmode)
28425583Speter        break;
28525583Speter
28625656Speter      /* ... fall through ...  */
28725583Speter
288102074Sphk    case MODE_CC:
28925583Speter    default:
29025583Speter      abort ();
291151357Sps    }
29225583Speter
293151357Sps  return mode;
294151357Sps}
295151357Sps
296151357Sps/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
297151357Sps   This can only be applied to objects of a sizetype.  */
298151357Sps
299151357Spstree
300151357Spsround_up (value, divisor)
301151357Sps     tree value;
302151357Sps     int divisor;
303151357Sps{
304151357Sps  tree arg = size_int_type (divisor, TREE_TYPE (value));
305151357Sps
306151357Sps  return size_binop (MULT_EXPR, size_binop (CEIL_DIV_EXPR, value, arg), arg);
307151357Sps}
308130654Skbyanc
309130654Skbyanc/* Likewise, but round down.  */
310103964Sbde
311103964Sbdetree
312103964Sbderound_down (value, divisor)
313103964Sbde     tree value;
314103964Sbde     int divisor;
315151357Sps{
316130654Skbyanc  tree arg = size_int_type (divisor, TREE_TYPE (value));
317130654Skbyanc
318130654Skbyanc  return size_binop (MULT_EXPR, size_binop (FLOOR_DIV_EXPR, value, arg), arg);
319130654Skbyanc}
320151357Sps
321130654Skbyanc/* Set the size, mode and alignment of a ..._DECL node.
322130654Skbyanc   TYPE_DECL does need this for C++.
323130654Skbyanc   Note that LABEL_DECL and CONST_DECL nodes do not need this,
324130654Skbyanc   and FUNCTION_DECL nodes have them set up in a special (and simple) way.
325151357Sps   Don't call layout_decl for them.
32625583Speter
32725583Speter   KNOWN_ALIGN is the amount of alignment we can assume this
32826335Speter   decl has with no special effort.  It is relevant only for FIELD_DECLs
32925656Speter   and depends on the previous fields.
330140481Sps   All that matters about KNOWN_ALIGN is which powers of 2 divide it.
331140481Sps   If KNOWN_ALIGN is 0, it means, "as much alignment as you like":
33225583Speter   the record will be aligned to suit.  */
33335045Sphk
33435042Sphkvoid
33535042Sphklayout_decl (decl, known_align)
33625583Speter     tree decl;
33728773Sbde     unsigned int known_align;
33825656Speter{
33943301Sdillon  tree type = TREE_TYPE (decl);
34028773Sbde  enum tree_code code = TREE_CODE (decl);
34136119Sphk
34235029Sphk  if (code == CONST_DECL)
34335042Sphk    return;
34435042Sphk  else if (code != VAR_DECL && code != PARM_DECL && code != RESULT_DECL
34535042Sphk	   && code != TYPE_DECL && code != FIELD_DECL)
34635042Sphk    abort ();
34736119Sphk
34835042Sphk  if (type == error_mark_node)
34935042Sphk    type = void_type_node;
35035042Sphk
35135042Sphk  /* Usually the size and mode come from the data type without change,
35235042Sphk     however, the front-end may set the explicit width of the field, so its
35335042Sphk     size may not be the same as the size of its type.  This happens with
35435042Sphk     bitfields, of course (an `int' bitfield may be only 2 bits, say), but it
35535042Sphk     also happens with other fields.  For example, the C++ front-end creates
35635042Sphk     zero-sized fields corresponding to empty base classes, and depends on
35735042Sphk     layout_type setting DECL_FIELD_BITPOS correctly for the field.  Set the
35835042Sphk     size in bytes from the size in bits.  If we have already set the mode,
35935029Sphk     don't set it again since we can be called twice for FIELD_DECLs.  */
36035042Sphk
36135045Sphk  TREE_UNSIGNED (decl) = TREE_UNSIGNED (type);
36235045Sphk  if (DECL_MODE (decl) == VOIDmode)
36335045Sphk    DECL_MODE (decl) = TYPE_MODE (type);
36426335Speter
36526335Speter  if (DECL_SIZE (decl) == 0)
36625583Speter    {
36726335Speter      DECL_SIZE (decl) = TYPE_SIZE (type);
36826335Speter      DECL_SIZE_UNIT (decl) = TYPE_SIZE_UNIT (type);
36926335Speter    }
37026335Speter  else
37126335Speter    DECL_SIZE_UNIT (decl)
37226335Speter      = convert (sizetype, size_binop (CEIL_DIV_EXPR, DECL_SIZE (decl),
37326335Speter				       bitsize_unit_node));
37482746Sdillon
37582746Sdillon  /* Force alignment required for the data type.
37682746Sdillon     But if the decl itself wants greater alignment, don't override that.
37726335Speter     Likewise, if the decl is packed, don't override it.  */
37826335Speter  if (! (code == FIELD_DECL && DECL_BIT_FIELD (decl))
379102074Sphk      && (DECL_ALIGN (decl) == 0
38026335Speter	  || (! (code == FIELD_DECL && DECL_PACKED (decl))
38126335Speter	      && TYPE_ALIGN (type) > DECL_ALIGN (decl))))
38282746Sdillon    {
38326335Speter      DECL_ALIGN (decl) = TYPE_ALIGN (type);
384107849Salfred      DECL_USER_ALIGN (decl) = 0;
38526335Speter    }
38626335Speter
38782746Sdillon  /* For fields, set the bit field type and update the alignment.  */
388109521Salfred  if (code == FIELD_DECL)
389109521Salfred    {
390109521Salfred      DECL_BIT_FIELD_TYPE (decl) = DECL_BIT_FIELD (decl) ? type : 0;
391140481Sps      if (maximum_field_alignment != 0)
392107849Salfred	DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), maximum_field_alignment);
39382746Sdillon
39482746Sdillon      /* If the field is of variable size, we can't misalign it since we
395107849Salfred	 have no way to make a temporary to align the result.  But this
396109521Salfred	 isn't an issue if the decl is not addressable.  Likewise if it
39782746Sdillon	 is of unknown size.  */
39825583Speter      else if (DECL_PACKED (decl)
39925656Speter	       && (DECL_NONADDRESSABLE_P (decl)
40025583Speter		   || DECL_SIZE_UNIT (decl) == 0
40125583Speter		   || TREE_CODE (DECL_SIZE_UNIT (decl)) == INTEGER_CST))
40226335Speter	{
4031541Srgrimes	  DECL_ALIGN (decl) = MIN (DECL_ALIGN (decl), BITS_PER_UNIT);
4041541Srgrimes	  DECL_USER_ALIGN (decl) = 0;
4051541Srgrimes	}
4061541Srgrimes    }
40712221Sbde
40882746Sdillon  /* See if we can use an ordinary integer mode for a bit-field.
40982746Sdillon     Conditions are: a fixed size that is correct for another mode
41082746Sdillon     and occupying a complete byte or bytes on proper boundary.  */
4111541Srgrimes  if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
4121549Srgrimes      && TYPE_SIZE (type) != 0
413102074Sphk      && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4141541Srgrimes      && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT)
4151541Srgrimes    {
416110286Stjr      enum machine_mode xmode
4171541Srgrimes	= mode_for_size_tree (DECL_SIZE (decl), MODE_INT, 1);
4181541Srgrimes
4191541Srgrimes      if (xmode != BLKmode && known_align >= GET_MODE_ALIGNMENT (xmode))
4201541Srgrimes	{
42199012Salfred	  DECL_ALIGN (decl) = MAX (GET_MODE_ALIGNMENT (xmode),
4221541Srgrimes				   DECL_ALIGN (decl));
42390836Sphk	  DECL_MODE (decl) = xmode;
424110299Sphk	  DECL_BIT_FIELD (decl) = 0;
425110299Sphk	}
426110286Stjr    }
42782746Sdillon
4281541Srgrimes  /* Turn off DECL_BIT_FIELD if we won't need it set.  */
4291541Srgrimes  if (code == FIELD_DECL && DECL_BIT_FIELD (decl)
4301541Srgrimes      && TYPE_MODE (type) == BLKmode && DECL_MODE (decl) == BLKmode
43112221Sbde      && known_align >= TYPE_ALIGN (type)
4321541Srgrimes      && DECL_ALIGN (decl) >= TYPE_ALIGN (type)
4331541Srgrimes      && DECL_SIZE_UNIT (decl) != 0)
4341541Srgrimes    DECL_BIT_FIELD (decl) = 0;
4351541Srgrimes
43612221Sbde  /* Evaluate nonconstant size only once, either now or as soon as safe.  */
43782746Sdillon  if (DECL_SIZE (decl) != 0 && TREE_CODE (DECL_SIZE (decl)) != INTEGER_CST)
43882746Sdillon    DECL_SIZE (decl) = variable_size (DECL_SIZE (decl));
43982746Sdillon  if (DECL_SIZE_UNIT (decl) != 0
4401541Srgrimes      && TREE_CODE (DECL_SIZE_UNIT (decl)) != INTEGER_CST)
4411549Srgrimes    DECL_SIZE_UNIT (decl) = variable_size (DECL_SIZE_UNIT (decl));
442102074Sphk
4431541Srgrimes  /* If requested, warn about definitions of large data objects.  */
444144445Sjhb  if (warn_larger_than
445144445Sjhb      && (code == VAR_DECL || code == PARM_DECL)
446144445Sjhb      && ! DECL_EXTERNAL (decl))
4471541Srgrimes    {
448144445Sjhb      tree size = DECL_SIZE_UNIT (decl);
449144445Sjhb
450144445Sjhb      if (size != 0 && TREE_CODE (size) == INTEGER_CST
451144445Sjhb	  && compare_tree_int (size, larger_than_size) > 0)
452144445Sjhb	{
453144445Sjhb	  unsigned int size_as_int = TREE_INT_CST_LOW (size);
454144445Sjhb
455144445Sjhb	  if (compare_tree_int (size, size_as_int) == 0)
456144445Sjhb	    warning_with_decl (decl, "size of `%s' is %d bytes", size_as_int);
457144445Sjhb	  else
458144445Sjhb	    warning_with_decl (decl, "size of `%s' is larger than %d bytes",
459144445Sjhb			       larger_than_size);
460144445Sjhb	}
461144445Sjhb    }
462144445Sjhb}
463144445Sjhb
464144445Sjhb/* Hook for a front-end function that can modify the record layout as needed
465144445Sjhb   immediately before it is finalized.  */
466144445Sjhb
467144445Sjhbvoid (*lang_adjust_rli) PARAMS ((record_layout_info)) = 0;
468144445Sjhb
469144445Sjhbvoid
470106369Srwatsonset_lang_adjust_rli (f)
471106369Srwatson     void (*f) PARAMS ((record_layout_info));
472106369Srwatson{
473106369Srwatson  lang_adjust_rli = f;
474106369Srwatson}
475144445Sjhb
476144445Sjhb/* Begin laying out type T, which may be a RECORD_TYPE, UNION_TYPE, or
47794343Sjhb   QUAL_UNION_TYPE.  Return a pointer to a struct record_layout_info which
4781541Srgrimes   is to be passed to all other layout functions for this record.  It is the
479144445Sjhb   responsibility of the caller to call `free' for the storage returned.
480144445Sjhb   Note that garbage collection is not permitted until we finish laying
48194343Sjhb   out the record.  */
482144445Sjhb
48325656Speterrecord_layout_info
484144445Sjhbstart_record_layout (t)
485144445Sjhb     tree t;
486144445Sjhb{
48782746Sdillon  record_layout_info rli
48882746Sdillon    = (record_layout_info) xmalloc (sizeof (struct record_layout_info_s));
4891541Srgrimes
490144445Sjhb  rli->t = t;
49182746Sdillon
4921541Srgrimes  /* If the type has a minimum specified alignment (via an attribute
4931541Srgrimes     declaration, for example) use it -- otherwise, start with a
4941541Srgrimes     one-byte alignment.  */
4951541Srgrimes  rli->record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (t));
4961541Srgrimes  rli->unpacked_align = rli->unpadded_align = rli->record_align;
4971541Srgrimes  rli->offset_align = MAX (rli->record_align, BIGGEST_ALIGNMENT);
4981541Srgrimes
4991541Srgrimes#ifdef STRUCTURE_SIZE_BOUNDARY
5001541Srgrimes  /* Packed structures don't need to have minimum size.  */
5011541Srgrimes  if (! TYPE_PACKED (t))
5021541Srgrimes    rli->record_align = MAX (rli->record_align, STRUCTURE_SIZE_BOUNDARY);
5031541Srgrimes#endif
5041541Srgrimes
5051541Srgrimes  rli->offset = size_zero_node;
5061541Srgrimes  rli->bitpos = bitsize_zero_node;
5071541Srgrimes  rli->prev_field = 0;
5081541Srgrimes  rli->pending_statics = 0;
5091541Srgrimes  rli->packed_maybe_necessary = 0;
5101541Srgrimes
5111541Srgrimes  return rli;
51212221Sbde}
5131541Srgrimes
5141541Srgrimes/* These four routines perform computations that convert between
5151541Srgrimes   the offset/bitpos forms and byte and bit offsets.  */
5161541Srgrimes
51712221Sbdetree
51882746Sdillonbit_from_pos (offset, bitpos)
51982746Sdillon     tree offset, bitpos;
52082746Sdillon{
5211549Srgrimes  return size_binop (PLUS_EXPR, bitpos,
522102074Sphk		     size_binop (MULT_EXPR, convert (bitsizetype, offset),
5231541Srgrimes				 bitsize_unit_node));
524141470Sjhb}
525140832Ssobomax
526140832Ssobomaxtree
527140832Ssobomaxbyte_from_pos (offset, bitpos)
528140832Ssobomax     tree offset, bitpos;
529140832Ssobomax{
530140832Ssobomax  return size_binop (PLUS_EXPR, offset,
531140832Ssobomax		     convert (sizetype,
532140832Ssobomax			      size_binop (TRUNC_DIV_EXPR, bitpos,
533140832Ssobomax					  bitsize_unit_node)));
534140832Ssobomax}
535140832Ssobomax
53683366Sjulianvoid
53734961Sphkpos_from_byte (poffset, pbitpos, off_align, pos)
5381541Srgrimes     tree *poffset, *pbitpos;
539140832Ssobomax     unsigned int off_align;
5401541Srgrimes     tree pos;
54182746Sdillon{
542140832Ssobomax  *poffset
5431541Srgrimes    = size_binop (MULT_EXPR,
54436128Sbde		  convert (sizetype,
5451541Srgrimes			   size_binop (FLOOR_DIV_EXPR, pos,
5461541Srgrimes				       bitsize_int (off_align
5471541Srgrimes						    / BITS_PER_UNIT))),
5481541Srgrimes		  size_int (off_align / BITS_PER_UNIT));
549111034Stjr  *pbitpos = size_binop (MULT_EXPR,
550140832Ssobomax			 size_binop (FLOOR_MOD_EXPR, pos,
551111034Stjr				     bitsize_int (off_align / BITS_PER_UNIT)),
552140832Ssobomax			 bitsize_unit_node);
55336119Sphk}
554140832Ssobomax
555140832Ssobomaxvoid
5561541Srgrimespos_from_bit (poffset, pbitpos, off_align, pos)
557140832Ssobomax     tree *poffset, *pbitpos;
55834961Sphk     unsigned int off_align;
55982746Sdillon     tree pos;
560111034Stjr{
561140832Ssobomax  *poffset = size_binop (MULT_EXPR,
562111034Stjr			 convert (sizetype,
56382746Sdillon				  size_binop (FLOOR_DIV_EXPR, pos,
564140832Ssobomax					      bitsize_int (off_align))),
5651541Srgrimes			 size_int (off_align / BITS_PER_UNIT));
5661541Srgrimes  *pbitpos = size_binop (FLOOR_MOD_EXPR, pos, bitsize_int (off_align));
56712221Sbde}
5681541Srgrimes
5691541Srgrimes/* Given a pointer to bit and byte offsets and an offset alignment,
5701541Srgrimes   normalize the offsets so they are within the alignment.  */
5711541Srgrimes
57212221Sbdevoid
573140832Ssobomaxnormalize_offset (poffset, pbitpos, off_align)
57482746Sdillon     tree *poffset, *pbitpos;
57582746Sdillon     unsigned int off_align;
57682746Sdillon{
5771549Srgrimes  /* If the bit position is now larger than it should be, adjust it
578102074Sphk     downwards.  */
5791541Srgrimes  if (compare_tree_int (*pbitpos, off_align) >= 0)
580141470Sjhb    {
581140832Ssobomax      tree extra_aligns = size_binop (FLOOR_DIV_EXPR, *pbitpos,
5821541Srgrimes				      bitsize_int (off_align));
583111034Stjr
584111034Stjr      *poffset
585111034Stjr	= size_binop (PLUS_EXPR, *poffset,
586111034Stjr		      size_binop (MULT_EXPR, convert (sizetype, extra_aligns),
587111034Stjr				  size_int (off_align / BITS_PER_UNIT)));
588111034Stjr
5891541Srgrimes      *pbitpos
590140832Ssobomax	= size_binop (FLOOR_MOD_EXPR, *pbitpos, bitsize_int (off_align));
591140832Ssobomax    }
592140832Ssobomax}
593140832Ssobomax
594140832Ssobomax/* Print debugging information about the information in RLI.  */
595140832Ssobomax
596140832Ssobomaxvoid
597141470Sjhbdebug_rli (rli)
598141470Sjhb     record_layout_info rli;
599140832Ssobomax{
600140832Ssobomax  print_node_brief (stderr, "type", rli->t, 0);
601140832Ssobomax  print_node_brief (stderr, "\noffset", rli->offset, 0);
602140832Ssobomax  print_node_brief (stderr, " bitpos", rli->bitpos, 0);
603141483Sjhb
604141483Sjhb  fprintf (stderr, "\naligns: rec = %u, unpack = %u, unpad = %u, off = %u\n",
605141483Sjhb	   rli->record_align, rli->unpacked_align, rli->unpadded_align,
606140832Ssobomax	   rli->offset_align);
607111034Stjr  if (rli->packed_maybe_necessary)
608140832Ssobomax    fprintf (stderr, "packed may be necessary\n");
609111034Stjr
610140832Ssobomax  if (rli->pending_statics)
611140832Ssobomax    {
612140832Ssobomax      fprintf (stderr, "pending statics:\n");
613140832Ssobomax      debug_tree (rli->pending_statics);
61482746Sdillon    }
615140832Ssobomax}
616111034Stjr
61735058Sphk/* Given an RLI with a possibly-incremented BITPOS, adjust OFFSET and
61869286Sjake   BITPOS if necessary to keep BITPOS below OFFSET_ALIGN.  */
619114980Sjhb
620140832Ssobomaxvoid
621140832Ssobomaxnormalize_rli (rli)
62269286Sjake     record_layout_info rli;
623140832Ssobomax{
624114980Sjhb  normalize_offset (&rli->offset, &rli->bitpos, rli->offset_align);
625140832Ssobomax}
626140832Ssobomax
627111034Stjr/* Returns the size in bytes allocated so far.  */
628140832Ssobomax
629140832Ssobomaxtree
630140832Ssobomaxrli_size_unit_so_far (rli)
631111034Stjr     record_layout_info rli;
632140832Ssobomax{
633111034Stjr  return byte_from_pos (rli->offset, rli->bitpos);
63482746Sdillon}
635111034Stjr
636140832Ssobomax/* Returns the size in bits allocated so far.  */
637140832Ssobomax
638111034Stjrtree
63982746Sdillonrli_size_so_far (rli)
640140832Ssobomax     record_layout_info rli;
6411541Srgrimes{
6421541Srgrimes  return bit_from_pos (rli->offset, rli->bitpos);
6431541Srgrimes}
6441541Srgrimes
6451541Srgrimes/* Called from place_field to handle unions.  */
6461541Srgrimes
6471541Srgrimesstatic void
6481541Srgrimesplace_union_field (rli, field)
6491541Srgrimes     record_layout_info rli;
65036127Sbde     tree field;
6519327Sbde{
6529327Sbde  unsigned int desired_align;
6539327Sbde
6541541Srgrimes  layout_decl (field, 0);
6551541Srgrimes
656102074Sphk  DECL_FIELD_OFFSET (field) = size_zero_node;
6571541Srgrimes  DECL_FIELD_BIT_OFFSET (field) = bitsize_zero_node;
658102074Sphk  SET_DECL_OFFSET_ALIGN (field, BIGGEST_ALIGNMENT);
65935044Sphk
6601541Srgrimes  desired_align = DECL_ALIGN (field);
6611541Srgrimes
66273916Sjhb#ifdef BIGGEST_FIELD_ALIGNMENT
6631541Srgrimes  /* Some targets (i.e. i386) limit union field alignment
66435058Sphk     to a lower boundary than alignment of variables unless
66535058Sphk     it was overridden by attribute aligned.  */
666116123Sjhb  if (! DECL_USER_ALIGN (field))
667116123Sjhb    desired_align =
66873916Sjhb      MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
6691541Srgrimes#endif
6701541Srgrimes
6711541Srgrimes#ifdef ADJUST_FIELD_ALIGN
6721541Srgrimes  desired_align = ADJUST_FIELD_ALIGN (field, desired_align);
6731541Srgrimes#endif
67436119Sphk
67535058Sphk  TYPE_USER_ALIGN (rli->t) |= DECL_USER_ALIGN (field);
67635044Sphk
67735044Sphk  /* Union must be at least as aligned as any field requires.  */
67869286Sjake  rli->record_align = MAX (rli->record_align, desired_align);
67969286Sjake  rli->unpadded_align = MAX (rli->unpadded_align, desired_align);
68073916Sjhb
6811541Srgrimes#ifdef PCC_BITFIELD_TYPE_MATTERS
6821541Srgrimes  /* On the m88000, a bit field of declare type `int' forces the
6831541Srgrimes     entire union to have `int' alignment.  */
68473916Sjhb  if (PCC_BITFIELD_TYPE_MATTERS && DECL_BIT_FIELD_TYPE (field))
6851541Srgrimes    {
6861541Srgrimes      rli->record_align = MAX (rli->record_align,
6871541Srgrimes			       TYPE_ALIGN (TREE_TYPE (field)));
6881541Srgrimes      rli->unpadded_align = MAX (rli->unpadded_align,
6891541Srgrimes				 TYPE_ALIGN (TREE_TYPE (field)));
6901541Srgrimes    }
6911541Srgrimes#endif
6921541Srgrimes
6931549Srgrimes  /* We assume the union's size will be a multiple of a byte so we don't
694102074Sphk     bother with BITPOS.  */
6951541Srgrimes  if (TREE_CODE (rli->t) == UNION_TYPE)
6961541Srgrimes    rli->offset = size_binop (MAX_EXPR, rli->offset, DECL_SIZE_UNIT (field));
697151576Sdavidxu  else if (TREE_CODE (rli->t) == QUAL_UNION_TYPE)
6981541Srgrimes    rli->offset = fold (build (COND_EXPR, sizetype,
6991541Srgrimes			       DECL_QUALIFIER (field),
7001541Srgrimes			       DECL_SIZE_UNIT (field), rli->offset));
7011541Srgrimes}
7021541Srgrimes
7031541Srgrimes/* RLI contains information about the layout of a RECORD_TYPE.  FIELD
7041541Srgrimes   is a FIELD_DECL to be added after those fields already present in
7051541Srgrimes   T.  (FIELD is not actually added to the TYPE_FIELDS list here;
7061541Srgrimes   callers that desire that behavior must manually perform that step.)  */
7071541Srgrimes
void
place_field (rli, field)
     record_layout_info rli;
     tree field;
{
  /* The alignment required for FIELD.  */
  unsigned int desired_align;
  /* The alignment FIELD would have if we just dropped it into the
     record as it presently stands.  */
  unsigned int known_align;
  /* The alignment FIELD actually ends up with, recomputed after the
     field has been positioned.  */
  unsigned int actual_align;
  /* Nonzero if the field's alignment came from a user `aligned'
     attribute rather than from its type.  */
  unsigned int user_align;
  /* The type of this field.  */
  tree type = TREE_TYPE (field);

  /* Erroneous fields have already been diagnosed; ignore them here.  */
  if (TREE_CODE (field) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
      return;

  /* If FIELD is static, then treat it like a separate variable, not
     really like a structure field.  If it is a FUNCTION_DECL, it's a
     method.  In both cases, all we do is lay out the decl, and we do
     it *after* the record is laid out.  */
  if (TREE_CODE (field) == VAR_DECL)
    {
      rli->pending_statics = tree_cons (NULL_TREE, field,
					rli->pending_statics);
      return;
    }

  /* Enumerators and enum types which are local to this class need not
     be laid out.  Likewise for initialized constant fields.  */
  else if (TREE_CODE (field) != FIELD_DECL)
    return;

  /* Unions are laid out very differently than records, so split
     that code off to another function.  */
  else if (TREE_CODE (rli->t) != RECORD_TYPE)
    {
      place_union_field (rli, field);
      return;
    }

  /* Work out the known alignment so far.  Note that A & (-A) is the
     value of the least-significant bit in A that is one.  */
  if (! integer_zerop (rli->bitpos))
    known_align = (tree_low_cst (rli->bitpos, 1)
		   & - tree_low_cst (rli->bitpos, 1));
  else if (integer_zerop (rli->offset))
    /* At offset zero, the field gets whatever alignment the whole
       record eventually gets, so assume the maximum.  */
    known_align = BIGGEST_ALIGNMENT;
  else if (host_integerp (rli->offset, 1))
    known_align = (BITS_PER_UNIT
		   * (tree_low_cst (rli->offset, 1)
		      & - tree_low_cst (rli->offset, 1)));
  else
    known_align = rli->offset_align;

  /* Lay out the field so we know what alignment it needs.  For a
     packed field, use the alignment as specified, disregarding what
     the type would want.  */
  desired_align = DECL_ALIGN (field);
  user_align = DECL_USER_ALIGN (field);
  layout_decl (field, known_align);
  if (! DECL_PACKED (field))
    {
      /* layout_decl may have raised DECL_ALIGN; pick up the result
	 unless the field is packed, in which case the pre-layout
	 values captured above stand.  */
      desired_align = DECL_ALIGN (field);
      user_align = DECL_USER_ALIGN (field);
    }

  /* Some targets (i.e. i386, VMS) limit struct field alignment
     to a lower boundary than alignment of variables unless
     it was overridden by attribute aligned.  */
#ifdef BIGGEST_FIELD_ALIGNMENT
  if (! user_align)
    desired_align
      = MIN (desired_align, (unsigned) BIGGEST_FIELD_ALIGNMENT);
#endif

#ifdef ADJUST_FIELD_ALIGN
  desired_align = ADJUST_FIELD_ALIGN (field, desired_align);
#endif

  /* Record must have at least as much alignment as any field.
     Otherwise, the alignment of the field within the record is
     meaningless.  */
  if ((* targetm.ms_bitfield_layout_p) (rli->t)
      && type != error_mark_node
      && DECL_BIT_FIELD_TYPE (field)
      && ! integer_zerop (TYPE_SIZE (type))
      && integer_zerop (DECL_SIZE (field)))
    {
      /* MS bit-field layout: a zero-width bit-field contributes its
	 type's alignment to the record only when it directly follows
	 a nonzero-width bit-field; otherwise it is ignored for
	 alignment purposes (desired_align drops to 1).  */
      if (rli->prev_field
	  && DECL_BIT_FIELD_TYPE (rli->prev_field)
	  && ! integer_zerop (DECL_SIZE (rli->prev_field)))
	{
	  rli->record_align = MAX (rli->record_align, desired_align);
	  rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	}
      else
	desired_align = 1;
    }
  else
#ifdef PCC_BITFIELD_TYPE_MATTERS
  if (PCC_BITFIELD_TYPE_MATTERS && type != error_mark_node
      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
      && DECL_BIT_FIELD_TYPE (field)
      && ! integer_zerop (TYPE_SIZE (type)))
    {
      /* For these machines, a zero-length field does not
	 affect the alignment of the structure as a whole.
	 It does, however, affect the alignment of the next field
	 within the structure.  */
      if (! integer_zerop (DECL_SIZE (field)))
	rli->record_align = MAX (rli->record_align, desired_align);
      else if (! DECL_PACKED (field))
	desired_align = TYPE_ALIGN (type);

      /* A named bit field of declared type `int'
	 forces the entire structure to have `int' alignment.  */
      if (DECL_NAME (field) != 0)
	{
	  unsigned int type_align = TYPE_ALIGN (type);

	  if (maximum_field_alignment != 0)
	    type_align = MIN (type_align, maximum_field_alignment);
	  else if (DECL_PACKED (field))
	    type_align = MIN (type_align, BITS_PER_UNIT);

	  rli->record_align = MAX (rli->record_align, type_align);
	  rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
	  if (warn_packed)
	    rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
	}
    }
  else
#endif
    {
      /* Ordinary field: the record needs at least the field's
	 alignment, and we track the alignments the record would have
	 had without packing/padding for the -Wpacked/-Wpadded
	 diagnostics.  */
      rli->record_align = MAX (rli->record_align, desired_align);
      rli->unpacked_align = MAX (rli->unpacked_align, TYPE_ALIGN (type));
      rli->unpadded_align = MAX (rli->unpadded_align, DECL_ALIGN (field));
    }

  /* Diagnose `packed' attributes that cannot help: if the field's
     position already guarantees more alignment than its type needs,
     packing it changed nothing.  */
  if (warn_packed && DECL_PACKED (field))
    {
      if (known_align > TYPE_ALIGN (type))
	{
	  if (TYPE_ALIGN (type) > desired_align)
	    {
	      if (STRICT_ALIGNMENT)
		warning_with_decl (field, "packed attribute causes inefficient alignment for `%s'");
	      else
		warning_with_decl (field, "packed attribute is unnecessary for `%s'");
	    }
	}
      else
	rli->packed_maybe_necessary = 1;
    }

  /* Does this field automatically have alignment it needs by virtue
     of the fields that precede it and the record's own alignment?  */
  if (known_align < desired_align)
    {
      /* No, we need to skip space before this field.
	 Bump the cumulative size to multiple of field alignment.  */

      if (warn_padded)
	warning_with_decl (field, "padding struct to align `%s'");

      /* If the alignment is still within offset_align, just align
	 the bit position.  */
      if (desired_align < rli->offset_align)
	rli->bitpos = round_up (rli->bitpos, desired_align);
      else
	{
	  /* First adjust OFFSET by the partial bits, then align.  */
	  rli->offset
	    = size_binop (PLUS_EXPR, rli->offset,
			  convert (sizetype,
				   size_binop (CEIL_DIV_EXPR, rli->bitpos,
					       bitsize_unit_node)));
	  rli->bitpos = bitsize_zero_node;

	  rli->offset = round_up (rli->offset, desired_align / BITS_PER_UNIT);
	}

      /* A variable offset is now only known modulo the alignment we
	 just applied.  */
      if (! TREE_CONSTANT (rli->offset))
	rli->offset_align = desired_align;

    }

  /* Handle compatibility with PCC.  Note that if the record has any
     variable-sized fields, we need not worry about compatibility.  */
#ifdef PCC_BITFIELD_TYPE_MATTERS
  if (PCC_BITFIELD_TYPE_MATTERS
      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD (field)
      && ! DECL_PACKED (field)
      && maximum_field_alignment == 0
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

      /* A bit field may not span more units of alignment of its type
	 than its type itself.  Advance to next boundary if necessary.  */
      if ((((offset * BITS_PER_UNIT + bit_offset + field_size +
	     type_align - 1)
	    / type_align)
	   - (offset * BITS_PER_UNIT + bit_offset) / type_align)
	  > tree_low_cst (TYPE_SIZE (type), 1) / type_align)
	rli->bitpos = round_up (rli->bitpos, type_align);
    }
#endif

#ifdef BITFIELD_NBYTES_LIMITED
  if (BITFIELD_NBYTES_LIMITED
      && ! (* targetm.ms_bitfield_layout_p) (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && DECL_BIT_FIELD_TYPE (field)
      && ! DECL_PACKED (field)
      && ! integer_zerop (DECL_SIZE (field))
      && host_integerp (DECL_SIZE (field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1))
    {
      unsigned int type_align = TYPE_ALIGN (type);
      tree dsize = DECL_SIZE (field);
      HOST_WIDE_INT field_size = tree_low_cst (dsize, 1);
      HOST_WIDE_INT offset = tree_low_cst (rli->offset, 0);
      HOST_WIDE_INT bit_offset = tree_low_cst (rli->bitpos, 0);

      if (maximum_field_alignment != 0)
	type_align = MIN (type_align, maximum_field_alignment);
      /* ??? This test is opposite the test in the containing if
	 statement, so this code is unreachable currently.  */
      else if (DECL_PACKED (field))
	type_align = MIN (type_align, BITS_PER_UNIT);

      /* A bit field may not span the unit of alignment of its type.
	 Advance to next boundary if necessary.  */
      /* ??? This code should match the code above for the
	 PCC_BITFIELD_TYPE_MATTERS case.  */
      if ((offset * BITS_PER_UNIT + bit_offset) / type_align
	  != ((offset * BITS_PER_UNIT + bit_offset + field_size - 1)
	      / type_align))
	rli->bitpos = round_up (rli->bitpos, type_align);
    }
#endif

  /* See the docs for TARGET_MS_BITFIELD_LAYOUT_P for details.
     When the type of a bit-field run changes (or a zero-width
     bit-field intervenes), MS layout starts the new field on the
     stricter of the two type alignments.  */
  if ((* targetm.ms_bitfield_layout_p) (rli->t)
      && TREE_CODE (field) == FIELD_DECL
      && type != error_mark_node
      && ! DECL_PACKED (field)
      && rli->prev_field
      && DECL_SIZE (field)
      && host_integerp (DECL_SIZE (field), 1)
      && DECL_SIZE (rli->prev_field)
      && host_integerp (DECL_SIZE (rli->prev_field), 1)
      && host_integerp (rli->offset, 1)
      && host_integerp (TYPE_SIZE (type), 1)
      && host_integerp (TYPE_SIZE (TREE_TYPE (rli->prev_field)), 1)
      && ((DECL_BIT_FIELD_TYPE (rli->prev_field)
	   && ! integer_zerop (DECL_SIZE (rli->prev_field)))
	  || (DECL_BIT_FIELD_TYPE (field)
	      && ! integer_zerop (DECL_SIZE (field))))
      && (! simple_cst_equal (TYPE_SIZE (type),
			      TYPE_SIZE (TREE_TYPE (rli->prev_field)))
	  /* If the previous field was a zero-sized bit-field, either
	     it was ignored, in which case we must ensure the proper
	     alignment of this field here, or it already forced the
	     alignment of this field, in which case forcing the
	     alignment again is harmless.  So, do it in both cases.  */
	  || (DECL_BIT_FIELD_TYPE (rli->prev_field)
	      && integer_zerop (DECL_SIZE (rli->prev_field)))))
    {
      unsigned int type_align = TYPE_ALIGN (type);

      if (rli->prev_field
	  && DECL_BIT_FIELD_TYPE (rli->prev_field)
	  /* If the previous bit-field is zero-sized, we've already
	     accounted for its alignment needs (or ignored it, if
	     appropriate) while placing it.  */
	  && ! integer_zerop (DECL_SIZE (rli->prev_field)))
	type_align = MAX (type_align,
			  TYPE_ALIGN (TREE_TYPE (rli->prev_field)));

      if (maximum_field_alignment != 0)
	type_align = MIN (type_align, maximum_field_alignment);

      rli->bitpos = round_up (rli->bitpos, type_align);
    }

  /* Offset so far becomes the position of this field after normalizing.  */
  normalize_rli (rli);
  DECL_FIELD_OFFSET (field) = rli->offset;
  DECL_FIELD_BIT_OFFSET (field) = rli->bitpos;
  SET_DECL_OFFSET_ALIGN (field, rli->offset_align);

  /* A user-specified field alignment makes the whole record
     user-aligned.  */
  TYPE_USER_ALIGN (rli->t) |= user_align;

  /* If this field ended up more aligned than we thought it would be (we
     approximate this by seeing if its position changed), lay out the field
     again; perhaps we can use an integral mode for it now.  */
  if (! integer_zerop (DECL_FIELD_BIT_OFFSET (field)))
    actual_align = (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
		    & - tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1))
;
  else if (integer_zerop (DECL_FIELD_OFFSET (field)))
    actual_align = BIGGEST_ALIGNMENT;
  else if (host_integerp (DECL_FIELD_OFFSET (field), 1))
    actual_align = (BITS_PER_UNIT
		   * (tree_low_cst (DECL_FIELD_OFFSET (field), 1)
		      & - tree_low_cst (DECL_FIELD_OFFSET (field), 1)));
  else
    actual_align = DECL_OFFSET_ALIGN (field);

  if (known_align != actual_align)
    layout_decl (field, actual_align);

  /* Remember this field for the adjacent-bit-field heuristics above.  */
  rli->prev_field = field;

  /* Now add size of this field to the size of the record.  If the size is
     not constant, treat the field as being a multiple of bytes and just
     adjust the offset, resetting the bit position.  Otherwise, apportion the
     size amongst the bit position and offset.  First handle the case of an
     unspecified size, which can happen when we have an invalid nested struct
     definition, such as struct j { struct j { int i; } }.  The error message
     is printed in finish_struct.  */
  if (DECL_SIZE (field) == 0)
    /* Do nothing.  */;
  else if (TREE_CODE (DECL_SIZE_UNIT (field)) != INTEGER_CST
	   || TREE_CONSTANT_OVERFLOW (DECL_SIZE_UNIT (field)))
    {
      rli->offset
	= size_binop (PLUS_EXPR, rli->offset,
		      convert (sizetype,
			       size_binop (CEIL_DIV_EXPR, rli->bitpos,
					   bitsize_unit_node)));
      rli->offset
	= size_binop (PLUS_EXPR, rli->offset, DECL_SIZE_UNIT (field));
      rli->bitpos = bitsize_zero_node;
      rli->offset_align = MIN (rli->offset_align, DECL_ALIGN (field));
    }
  else
    {
      rli->bitpos = size_binop (PLUS_EXPR, rli->bitpos, DECL_SIZE (field));
      normalize_rli (rli);
    }
}
1065151576Sdavidxu
/* Assuming that all the fields have been laid out, this function uses
   RLI to compute the final TYPE_SIZE, TYPE_ALIGN, etc. for the type
   indicated by RLI.  */

static void
finalize_record_size (rli)
     record_layout_info rli;
{
  tree unpadded_size, unpadded_size_unit;

  /* Now we want just byte and bit offsets, so set the offset alignment
     to be a byte and then normalize.  */
  rli->offset_align = BITS_PER_UNIT;
  normalize_rli (rli);

  /* Determine the desired alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (rli->t) = ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t),
					  rli->record_align);
#else
  TYPE_ALIGN (rli->t) = MAX (TYPE_ALIGN (rli->t), rli->record_align);
#endif

  /* Compute the size so far.  Be sure to allow for extra bits in the
     size in bytes.  We have guaranteed above that it will be no more
     than a single byte.  */
  unpadded_size = rli_size_so_far (rli);
  unpadded_size_unit = rli_size_unit_so_far (rli);
  if (! integer_zerop (rli->bitpos))
    unpadded_size_unit
      = size_binop (PLUS_EXPR, unpadded_size_unit, size_one_node);

  /* Record the un-rounded size in the binfo node.  But first we check
     the size of TYPE_BINFO to make sure that BINFO_SIZE is available.  */
  if (TYPE_BINFO (rli->t) && TREE_VEC_LENGTH (TYPE_BINFO (rli->t)) > 6)
    {
      TYPE_BINFO_SIZE (rli->t) = unpadded_size;
      TYPE_BINFO_SIZE_UNIT (rli->t) = unpadded_size_unit;
    }

    /* Round the size up to be a multiple of the required alignment.  */
#ifdef ROUND_TYPE_SIZE
  TYPE_SIZE (rli->t) = ROUND_TYPE_SIZE (rli->t, unpadded_size,
					TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t)
    = ROUND_TYPE_SIZE_UNIT (rli->t, unpadded_size_unit,
			    TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#else
  TYPE_SIZE (rli->t) = round_up (unpadded_size, TYPE_ALIGN (rli->t));
  TYPE_SIZE_UNIT (rli->t) = round_up (unpadded_size_unit,
				      TYPE_ALIGN (rli->t) / BITS_PER_UNIT);
#endif

  if (warn_padded && TREE_CONSTANT (unpadded_size)
      && simple_cst_equal (unpadded_size, TYPE_SIZE (rli->t)) == 0)
    warning ("padding struct size to alignment boundary");

  /* With -Wpacked, if the `packed' attribute made no difference to
     the (constant) size of the record, tell the user it was
     unnecessary (or even harmful on strict-alignment targets), and
     clear TYPE_PACKED since packing accomplished nothing.  */
  if (warn_packed && TREE_CODE (rli->t) == RECORD_TYPE
      && TYPE_PACKED (rli->t) && ! rli->packed_maybe_necessary
      && TREE_CONSTANT (unpadded_size))
    {
      tree unpacked_size;

#ifdef ROUND_TYPE_ALIGN
      rli->unpacked_align
	= ROUND_TYPE_ALIGN (rli->t, TYPE_ALIGN (rli->t), rli->unpacked_align);
#else
      rli->unpacked_align = MAX (TYPE_ALIGN (rli->t), rli->unpacked_align);
#endif

#ifdef ROUND_TYPE_SIZE
      unpacked_size = ROUND_TYPE_SIZE (rli->t, TYPE_SIZE (rli->t),
				       rli->unpacked_align);
#else
      unpacked_size = round_up (TYPE_SIZE (rli->t), rli->unpacked_align);
#endif

      if (simple_cst_equal (unpacked_size, TYPE_SIZE (rli->t)))
	{
	  TYPE_PACKED (rli->t) = 0;

	  if (TYPE_NAME (rli->t))
	    {
	      const char *name;

	      if (TREE_CODE (TYPE_NAME (rli->t)) == IDENTIFIER_NODE)
		name = IDENTIFIER_POINTER (TYPE_NAME (rli->t));
	      else
		name = IDENTIFIER_POINTER (DECL_NAME (TYPE_NAME (rli->t)));

	      if (STRICT_ALIGNMENT)
		warning ("packed attribute causes inefficient alignment for `%s'", name);
	      else
		warning ("packed attribute is unnecessary for `%s'", name);
	    }
	  else
	    {
	      if (STRICT_ALIGNMENT)
		warning ("packed attribute causes inefficient alignment");
	      else
		warning ("packed attribute is unnecessary");
	    }
	}
    }
}
1171151576Sdavidxu
1172151576Sdavidxu/* Compute the TYPE_MODE for the TYPE (which is a RECORD_TYPE).  */
1173151576Sdavidxu
void
compute_record_mode (type)
     tree type;
{
  tree field;
  enum machine_mode mode = VOIDmode;

  /* Most RECORD_TYPEs have BLKmode, so we start off assuming that.
     However, if possible, we use a mode that fits in a register
     instead, in order to allow for better optimization down the
     line.  */
  TYPE_MODE (type) = BLKmode;

  /* A record whose size is not a compile-time constant can never fit
     in a register; leave it BLKmode.  */
  if (! host_integerp (TYPE_SIZE (type), 1))
    return;

  /* A record which has any BLKmode members must itself be
     BLKmode; it can't go in a register.  Unless the member is
     BLKmode only because it isn't aligned.  */
  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    {
      unsigned HOST_WIDE_INT bitpos;

      /* Skip enumerators, methods, etc. -- only FIELD_DECLs occupy
	 storage.  */
      if (TREE_CODE (field) != FIELD_DECL)
	continue;

      if (TREE_CODE (TREE_TYPE (field)) == ERROR_MARK
	  || (TYPE_MODE (TREE_TYPE (field)) == BLKmode
	      && ! TYPE_NO_FORCE_BLK (TREE_TYPE (field)))
	  || ! host_integerp (bit_position (field), 1)
	  || DECL_SIZE (field) == 0
	  || ! host_integerp (DECL_SIZE (field), 1))
	return;

      bitpos = int_bit_position (field);

      /* Must be BLKmode if any field crosses a word boundary,
	 since extract_bit_field can't handle that in registers.  */
      if (bitpos / BITS_PER_WORD
	  != ((tree_low_cst (DECL_SIZE (field), 1) + bitpos - 1)
	      / BITS_PER_WORD)
	  /* But there is no problem if the field is entire words.  */
	  && tree_low_cst (DECL_SIZE (field), 1) % BITS_PER_WORD != 0)
	return;

      /* If this field is the whole struct, remember its mode so
	 that, say, we can put a double in a class into a DF
	 register instead of forcing it to live in the stack.  */
      if (simple_cst_equal (TYPE_SIZE (type), DECL_SIZE (field)))
	mode = DECL_MODE (field);

#ifdef MEMBER_TYPE_FORCES_BLK
      /* With some targets, eg. c4x, it is sub-optimal
	 to access an aligned BLKmode structure as a scalar.  */

      /* On ia64-*-hpux we need to ensure that we don't change the
	 mode of a structure containing a single field or else we
	 will pass it incorrectly.  Since a structure with a single
	 field causes mode to get set above we can't allow the
	 check for mode == VOIDmode in this case.  Perhaps
	 MEMBER_TYPE_FORCES_BLK should be extended to include mode
	 as an argument and the check could be put in there for c4x.  */

      if ((mode == VOIDmode || FUNCTION_ARG_REG_LITTLE_ENDIAN)
	  && MEMBER_TYPE_FORCES_BLK (field))
	return;
#endif /* MEMBER_TYPE_FORCES_BLK  */
    }

  /* If we only have one real field; use its mode.  This only applies to
     RECORD_TYPE.  This does not apply to unions.  */
  if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode)
    TYPE_MODE (type) = mode;
  else
    TYPE_MODE (type) = mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);

  /* If structure's known alignment is less than what the scalar
     mode would need, and it matters, then stick with BLKmode.  */
  if (TYPE_MODE (type) != BLKmode
      && STRICT_ALIGNMENT
      && ! (TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT
	    || TYPE_ALIGN (type) >= GET_MODE_ALIGNMENT (TYPE_MODE (type))))
    {
      /* If this is the only reason this type is BLKmode, then
	 don't force containing types to be BLKmode.  */
      TYPE_NO_FORCE_BLK (type) = 1;
      TYPE_MODE (type) = BLKmode;
    }
}
1263151576Sdavidxu
1264151576Sdavidxu/* Compute TYPE_SIZE and TYPE_ALIGN for TYPE, once it has been laid
1265151869Sdavidxu   out.  */
1266151869Sdavidxu
static void
finalize_type_size (type)
     tree type;
{
  /* Normally, use the alignment corresponding to the mode chosen.
     However, where strict alignment is not required, avoid
     over-aligning structures, since most compilers do not do this
     alignment.  */

  if (TYPE_MODE (type) != BLKmode && TYPE_MODE (type) != VOIDmode
      && (STRICT_ALIGNMENT
	  || (TREE_CODE (type) != RECORD_TYPE && TREE_CODE (type) != UNION_TYPE
	      && TREE_CODE (type) != QUAL_UNION_TYPE
	      && TREE_CODE (type) != ARRAY_TYPE)))
    {
      /* Replace the alignment (and drop any user-alignment flag)
	 with the mode's natural alignment.  */
      TYPE_ALIGN (type) = GET_MODE_ALIGNMENT (TYPE_MODE (type));
      TYPE_USER_ALIGN (type) = 0;
    }

  /* Do machine-dependent extra alignment.  */
#ifdef ROUND_TYPE_ALIGN
  TYPE_ALIGN (type)
    = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (type), BITS_PER_UNIT);
#endif

  /* If we failed to find a simple way to calculate the unit size
     of the type, find it by division.  */
  if (TYPE_SIZE_UNIT (type) == 0 && TYPE_SIZE (type) != 0)
    /* TYPE_SIZE (type) is computed in bitsizetype.  After the division, the
       result will fit in sizetype.  We will get more efficient code using
       sizetype, so we force a conversion.  */
    TYPE_SIZE_UNIT (type)
      = convert (sizetype,
		 size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (type),
			     bitsize_unit_node));

  /* Round both the bit size and the byte size up to the type's
     alignment.  */
  if (TYPE_SIZE (type) != 0)
    {
#ifdef ROUND_TYPE_SIZE
      TYPE_SIZE (type)
	= ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= ROUND_TYPE_SIZE_UNIT (type, TYPE_SIZE_UNIT (type),
				TYPE_ALIGN (type) / BITS_PER_UNIT);
#else
      TYPE_SIZE (type) = round_up (TYPE_SIZE (type), TYPE_ALIGN (type));
      TYPE_SIZE_UNIT (type)
	= round_up (TYPE_SIZE_UNIT (type), TYPE_ALIGN (type) / BITS_PER_UNIT);
#endif
    }

  /* Evaluate nonconstant sizes only once, either now or as soon as safe.  */
  if (TYPE_SIZE (type) != 0 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
    TYPE_SIZE (type) = variable_size (TYPE_SIZE (type));
  if (TYPE_SIZE_UNIT (type) != 0
      && TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)
    TYPE_SIZE_UNIT (type) = variable_size (TYPE_SIZE_UNIT (type));

  /* Also layout any other variants of the type.  All variants of a
     type share the same size, alignment, and mode.  */
  if (TYPE_NEXT_VARIANT (type)
      || type != TYPE_MAIN_VARIANT (type))
    {
      tree variant;
      /* Record layout info of this variant.  */
      tree size = TYPE_SIZE (type);
      tree size_unit = TYPE_SIZE_UNIT (type);
      unsigned int align = TYPE_ALIGN (type);
      unsigned int user_align = TYPE_USER_ALIGN (type);
      enum machine_mode mode = TYPE_MODE (type);

      /* Copy it into all variants.  */
      for (variant = TYPE_MAIN_VARIANT (type);
	   variant != 0;
	   variant = TYPE_NEXT_VARIANT (variant))
	{
	  TYPE_SIZE (variant) = size;
	  TYPE_SIZE_UNIT (variant) = size_unit;
	  TYPE_ALIGN (variant) = align;
	  TYPE_USER_ALIGN (variant) = user_align;
	  TYPE_MODE (variant) = mode;
	}
    }
}
1350151576Sdavidxu
1351151576Sdavidxu/* Do all of the work required to layout the type indicated by RLI,
1352151576Sdavidxu   once the fields have been laid out.  This function will call `free'
1353151576Sdavidxu   for RLI.  */
1354151576Sdavidxu
1355151585Sdavidxuvoid
1356151576Sdavidxufinish_record_layout (rli)
1357151576Sdavidxu     record_layout_info rli;
1358151576Sdavidxu{
1359151576Sdavidxu  /* Compute the final size.  */
1360151576Sdavidxu  finalize_record_size (rli);
1361151576Sdavidxu
1362151576Sdavidxu  /* Compute the TYPE_MODE for the record.  */
1363151576Sdavidxu  compute_record_mode (rli->t);
1364151576Sdavidxu
1365151576Sdavidxu  /* Perform any last tweaks to the TYPE_SIZE, etc.  */
1366151576Sdavidxu  finalize_type_size (rli->t);
1367151576Sdavidxu
1368151576Sdavidxu  /* Lay out any static members.  This is done now because their type
1369151576Sdavidxu     may use the record's type.  */
1370151869Sdavidxu  while (rli->pending_statics)
1371151869Sdavidxu    {
1372151576Sdavidxu      layout_decl (TREE_VALUE (rli->pending_statics), 0);
1373151576Sdavidxu      rli->pending_statics = TREE_CHAIN (rli->pending_statics);
1374151576Sdavidxu    }
1375151576Sdavidxu
1376151576Sdavidxu  /* Clean up.  */
1377151576Sdavidxu  free (rli);
1378151869Sdavidxu}
1379151576Sdavidxu
1380151869Sdavidxu/* Calculate the mode, size, and alignment for TYPE.
1381151869Sdavidxu   For an array type, calculate the element separation as well.
1382151869Sdavidxu   Record TYPE on the chain of permanent or temporary types
1383151869Sdavidxu   so that dbxout will find out about it.
1384151869Sdavidxu
1385151576Sdavidxu   TYPE_SIZE of a type is nonzero if the type has been laid out already.
1386151869Sdavidxu   layout_type does nothing on such a type.
1387151869Sdavidxu
1388151576Sdavidxu   If the type is incomplete, its TYPE_SIZE remains zero.  */
1389151576Sdavidxu
1390151576Sdavidxuvoid
1391151869Sdavidxulayout_type (type)
1392151576Sdavidxu     tree type;
1393151869Sdavidxu{
1394151869Sdavidxu  if (type == 0)
1395151869Sdavidxu    abort ();
1396151869Sdavidxu
1397151869Sdavidxu  /* Do nothing if type has been laid out before.  */
1398151576Sdavidxu  if (TYPE_SIZE (type))
1399151576Sdavidxu    return;
1400151576Sdavidxu
1401151576Sdavidxu  switch (TREE_CODE (type))
1402151576Sdavidxu    {
1403151869Sdavidxu    case LANG_TYPE:
1404151869Sdavidxu      /* This kind of type is the responsibility
1405151869Sdavidxu	 of the language-specific code.  */
1406151869Sdavidxu      abort ();
1407151869Sdavidxu
1408151576Sdavidxu    case BOOLEAN_TYPE:  /* Used for Java, Pascal, and Chill.  */
1409151576Sdavidxu      if (TYPE_PRECISION (type) == 0)
1410151576Sdavidxu	TYPE_PRECISION (type) = 1; /* default to one byte/boolean.  */
1411151576Sdavidxu
1412151576Sdavidxu      /* ... fall through ...  */
1413151576Sdavidxu
1414151576Sdavidxu    case INTEGER_TYPE:
1415151576Sdavidxu    case ENUMERAL_TYPE:
1416151993Sdavidxu    case CHAR_TYPE:
1417151576Sdavidxu      if (TREE_CODE (TYPE_MIN_VALUE (type)) == INTEGER_CST
1418151869Sdavidxu	  && tree_int_cst_sgn (TYPE_MIN_VALUE (type)) >= 0)
1419151869Sdavidxu	TREE_UNSIGNED (type) = 1;
1420151576Sdavidxu
1421151993Sdavidxu      TYPE_MODE (type) = smallest_mode_for_size (TYPE_PRECISION (type),
1422151993Sdavidxu						 MODE_INT);
1423151993Sdavidxu      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1424151993Sdavidxu      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1425151993Sdavidxu      break;
1426151993Sdavidxu
1427151993Sdavidxu    case REAL_TYPE:
1428151869Sdavidxu      TYPE_MODE (type) = mode_for_size (TYPE_PRECISION (type), MODE_FLOAT, 0);
1429151993Sdavidxu      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1430151869Sdavidxu      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1431151869Sdavidxu      break;
1432151869Sdavidxu
1433151869Sdavidxu    case COMPLEX_TYPE:
1434151869Sdavidxu      TREE_UNSIGNED (type) = TREE_UNSIGNED (TREE_TYPE (type));
1435151869Sdavidxu      TYPE_MODE (type)
1436151869Sdavidxu	= mode_for_size (2 * TYPE_PRECISION (TREE_TYPE (type)),
1437151993Sdavidxu			 (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
1438151993Sdavidxu			  ? MODE_COMPLEX_INT : MODE_COMPLEX_FLOAT),
1439151576Sdavidxu			 0);
1440151576Sdavidxu      TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1441151576Sdavidxu      TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1442151576Sdavidxu      break;
1443151576Sdavidxu
1444151576Sdavidxu    case VECTOR_TYPE:
1445151576Sdavidxu      {
1446151576Sdavidxu	tree subtype;
1447151585Sdavidxu
1448151585Sdavidxu	subtype = TREE_TYPE (type);
1449151576Sdavidxu	TREE_UNSIGNED (type) = TREE_UNSIGNED (subtype);
1450151585Sdavidxu	TYPE_SIZE (type) = bitsize_int (GET_MODE_BITSIZE (TYPE_MODE (type)));
1451151585Sdavidxu	TYPE_SIZE_UNIT (type) = size_int (GET_MODE_SIZE (TYPE_MODE (type)));
1452151585Sdavidxu      }
1453151585Sdavidxu      break;
1454151585Sdavidxu
1455151585Sdavidxu    case VOID_TYPE:
1456151576Sdavidxu      /* This is an incomplete type and so doesn't have a size.  */
1457151585Sdavidxu      TYPE_ALIGN (type) = 1;
1458151585Sdavidxu      TYPE_USER_ALIGN (type) = 0;
1459151576Sdavidxu      TYPE_MODE (type) = VOIDmode;
1460151585Sdavidxu      break;
1461151585Sdavidxu
1462151576Sdavidxu    case OFFSET_TYPE:
1463151585Sdavidxu      TYPE_SIZE (type) = bitsize_int (POINTER_SIZE);
1464151576Sdavidxu      TYPE_SIZE_UNIT (type) = size_int (POINTER_SIZE / BITS_PER_UNIT);
1465151576Sdavidxu      /* A pointer might be MODE_PARTIAL_INT,
1466151576Sdavidxu	 but ptrdiff_t must be integral.  */
1467151576Sdavidxu      TYPE_MODE (type) = mode_for_size (POINTER_SIZE, MODE_INT, 0);
1468151576Sdavidxu      break;
1469151576Sdavidxu
1470151576Sdavidxu    case FUNCTION_TYPE:
1471151576Sdavidxu    case METHOD_TYPE:
1472151576Sdavidxu      TYPE_MODE (type) = mode_for_size (2 * POINTER_SIZE, MODE_INT, 0);
1473151576Sdavidxu      TYPE_SIZE (type) = bitsize_int (2 * POINTER_SIZE);
1474151576Sdavidxu      TYPE_SIZE_UNIT (type) = size_int ((2 * POINTER_SIZE) / BITS_PER_UNIT);
1475151585Sdavidxu      break;
1476151585Sdavidxu
1477151576Sdavidxu    case POINTER_TYPE:
1478151576Sdavidxu    case REFERENCE_TYPE:
1479151576Sdavidxu      {
1480151576Sdavidxu	int nbits = ((TREE_CODE (type) == REFERENCE_TYPE
1481151576Sdavidxu		      && reference_types_internal)
1482151576Sdavidxu		     ? GET_MODE_BITSIZE (Pmode) : POINTER_SIZE);
1483151576Sdavidxu
1484151576Sdavidxu	TYPE_MODE (type) = nbits == POINTER_SIZE ? ptr_mode : Pmode;
1485151576Sdavidxu	TYPE_SIZE (type) = bitsize_int (nbits);
1486151576Sdavidxu	TYPE_SIZE_UNIT (type) = size_int (nbits / BITS_PER_UNIT);
1487151576Sdavidxu	TREE_UNSIGNED (type) = 1;
1488151576Sdavidxu	TYPE_PRECISION (type) = nbits;
1489151576Sdavidxu      }
1490151576Sdavidxu      break;
1491151576Sdavidxu
1492151576Sdavidxu    case ARRAY_TYPE:
1493151576Sdavidxu      {
1494151576Sdavidxu	tree index = TYPE_DOMAIN (type);
1495151576Sdavidxu	tree element = TREE_TYPE (type);
1496151576Sdavidxu
1497151576Sdavidxu	build_pointer_type (element);
1498151576Sdavidxu
1499151576Sdavidxu	/* We need to know both bounds in order to compute the size.  */
1500151576Sdavidxu	if (index && TYPE_MAX_VALUE (index) && TYPE_MIN_VALUE (index)
1501151576Sdavidxu	    && TYPE_SIZE (element))
1502151576Sdavidxu	  {
1503151576Sdavidxu	    tree ub = TYPE_MAX_VALUE (index);
1504151585Sdavidxu	    tree lb = TYPE_MIN_VALUE (index);
1505151585Sdavidxu	    tree length;
1506151576Sdavidxu	    tree element_size;
1507151576Sdavidxu
1508151576Sdavidxu	    /* The initial subtraction should happen in the original type so
1509	       that (possible) negative values are handled appropriately.  */
1510	    length = size_binop (PLUS_EXPR, size_one_node,
1511				 convert (sizetype,
1512					  fold (build (MINUS_EXPR,
1513						       TREE_TYPE (lb),
1514						       ub, lb))));
1515
1516	    /* Special handling for arrays of bits (for Chill).  */
1517	    element_size = TYPE_SIZE (element);
1518	    if (TYPE_PACKED (type) && INTEGRAL_TYPE_P (element)
1519		&& (integer_zerop (TYPE_MAX_VALUE (element))
1520		    || integer_onep (TYPE_MAX_VALUE (element)))
1521		&& host_integerp (TYPE_MIN_VALUE (element), 1))
1522	      {
1523		HOST_WIDE_INT maxvalue
1524		  = tree_low_cst (TYPE_MAX_VALUE (element), 1);
1525		HOST_WIDE_INT minvalue
1526		  = tree_low_cst (TYPE_MIN_VALUE (element), 1);
1527
1528		if (maxvalue - minvalue == 1
1529		    && (maxvalue == 1 || maxvalue == 0))
1530		  element_size = integer_one_node;
1531	      }
1532
1533	    TYPE_SIZE (type) = size_binop (MULT_EXPR, element_size,
1534					   convert (bitsizetype, length));
1535
1536	    /* If we know the size of the element, calculate the total
1537	       size directly, rather than do some division thing below.
1538	       This optimization helps Fortran assumed-size arrays
1539	       (where the size of the array is determined at runtime)
1540	       substantially.
1541	       Note that we can't do this in the case where the size of
1542	       the elements is one bit since TYPE_SIZE_UNIT cannot be
1543	       set correctly in that case.  */
1544	    if (TYPE_SIZE_UNIT (element) != 0 && ! integer_onep (element_size))
1545	      TYPE_SIZE_UNIT (type)
1546		= size_binop (MULT_EXPR, TYPE_SIZE_UNIT (element), length);
1547	  }
1548
1549	/* Now round the alignment and size,
1550	   using machine-dependent criteria if any.  */
1551
1552#ifdef ROUND_TYPE_ALIGN
1553	TYPE_ALIGN (type)
1554	  = ROUND_TYPE_ALIGN (type, TYPE_ALIGN (element), BITS_PER_UNIT);
1555#else
1556	TYPE_ALIGN (type) = MAX (TYPE_ALIGN (element), BITS_PER_UNIT);
1557#endif
1558	TYPE_USER_ALIGN (type) = TYPE_USER_ALIGN (element);
1559
1560#ifdef ROUND_TYPE_SIZE
1561	if (TYPE_SIZE (type) != 0)
1562	  {
1563	    tree tmp
1564	      = ROUND_TYPE_SIZE (type, TYPE_SIZE (type), TYPE_ALIGN (type));
1565
1566	    /* If the rounding changed the size of the type, remove any
1567	       pre-calculated TYPE_SIZE_UNIT.  */
1568	    if (simple_cst_equal (TYPE_SIZE (type), tmp) != 1)
1569	      TYPE_SIZE_UNIT (type) = NULL;
1570
1571	    TYPE_SIZE (type) = tmp;
1572	  }
1573#endif
1574
1575	TYPE_MODE (type) = BLKmode;
1576	if (TYPE_SIZE (type) != 0
1577#ifdef MEMBER_TYPE_FORCES_BLK
1578	    && ! MEMBER_TYPE_FORCES_BLK (type)
1579#endif
1580	    /* BLKmode elements force BLKmode aggregate;
1581	       else extract/store fields may lose.  */
1582	    && (TYPE_MODE (TREE_TYPE (type)) != BLKmode
1583		|| TYPE_NO_FORCE_BLK (TREE_TYPE (type))))
1584	  {
1585	    /* One-element arrays get the component type's mode.  */
1586	    if (simple_cst_equal (TYPE_SIZE (type),
1587				  TYPE_SIZE (TREE_TYPE (type))))
1588	      TYPE_MODE (type) = TYPE_MODE (TREE_TYPE (type));
1589	    else
1590	      TYPE_MODE (type)
1591		= mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1);
1592
1593	    if (TYPE_MODE (type) != BLKmode
1594		&& STRICT_ALIGNMENT && TYPE_ALIGN (type) < BIGGEST_ALIGNMENT
1595		&& TYPE_ALIGN (type) < GET_MODE_ALIGNMENT (TYPE_MODE (type))
1596		&& TYPE_MODE (type) != BLKmode)
1597	      {
1598		TYPE_NO_FORCE_BLK (type) = 1;
1599		TYPE_MODE (type) = BLKmode;
1600	      }
1601	  }
1602	break;
1603      }
1604
1605    case RECORD_TYPE:
1606    case UNION_TYPE:
1607    case QUAL_UNION_TYPE:
1608      {
1609	tree field;
1610	record_layout_info rli;
1611
1612	/* Initialize the layout information.  */
1613	rli = start_record_layout (type);
1614
1615	/* If this is a QUAL_UNION_TYPE, we want to process the fields
1616	   in the reverse order in building the COND_EXPR that denotes
1617	   its size.  We reverse them again later.  */
1618	if (TREE_CODE (type) == QUAL_UNION_TYPE)
1619	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1620
1621	/* Place all the fields.  */
1622	for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1623	  place_field (rli, field);
1624
1625	if (TREE_CODE (type) == QUAL_UNION_TYPE)
1626	  TYPE_FIELDS (type) = nreverse (TYPE_FIELDS (type));
1627
1628	if (lang_adjust_rli)
1629	  (*lang_adjust_rli) (rli);
1630
1631	/* Finish laying out the record.  */
1632	finish_record_layout (rli);
1633      }
1634      break;
1635
1636    case SET_TYPE:  /* Used by Chill and Pascal.  */
1637      if (TREE_CODE (TYPE_MAX_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST
1638	  || TREE_CODE (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) != INTEGER_CST)
1639	abort ();
1640      else
1641	{
1642#ifndef SET_WORD_SIZE
1643#define SET_WORD_SIZE BITS_PER_WORD
1644#endif
1645	  unsigned int alignment
1646	    = set_alignment ? set_alignment : SET_WORD_SIZE;
1647	  int size_in_bits
1648	    = (TREE_INT_CST_LOW (TYPE_MAX_VALUE (TYPE_DOMAIN (type)))
1649	       - TREE_INT_CST_LOW (TYPE_MIN_VALUE (TYPE_DOMAIN (type))) + 1);
1650	  int rounded_size
1651	    = ((size_in_bits + alignment - 1) / alignment) * alignment;
1652
1653	  if (rounded_size > (int) alignment)
1654	    TYPE_MODE (type) = BLKmode;
1655	  else
1656	    TYPE_MODE (type) = mode_for_size (alignment, MODE_INT, 1);
1657
1658	  TYPE_SIZE (type) = bitsize_int (rounded_size);
1659	  TYPE_SIZE_UNIT (type) = size_int (rounded_size / BITS_PER_UNIT);
1660	  TYPE_ALIGN (type) = alignment;
1661	  TYPE_USER_ALIGN (type) = 0;
1662	  TYPE_PRECISION (type) = size_in_bits;
1663	}
1664      break;
1665
1666    case FILE_TYPE:
1667      /* The size may vary in different languages, so the language front end
1668	 should fill in the size.  */
1669      TYPE_ALIGN (type) = BIGGEST_ALIGNMENT;
1670      TYPE_USER_ALIGN (type) = 0;
1671      TYPE_MODE  (type) = BLKmode;
1672      break;
1673
1674    default:
1675      abort ();
1676    }
1677
1678  /* Compute the final TYPE_SIZE, TYPE_ALIGN, etc. for TYPE.  For
1679     records and unions, finish_record_layout already called this
1680     function.  */
1681  if (TREE_CODE (type) != RECORD_TYPE
1682      && TREE_CODE (type) != UNION_TYPE
1683      && TREE_CODE (type) != QUAL_UNION_TYPE)
1684    finalize_type_size (type);
1685
1686  /* If this type is created before sizetype has been permanently set,
1687     record it so set_sizetype can fix it up.  */
1688  if (! sizetype_set)
1689    early_type_list = tree_cons (NULL_TREE, type, early_type_list);
1690
1691  /* If an alias set has been set for this aggregate when it was incomplete,
1692     force it into alias set 0.
1693     This is too conservative, but we cannot call record_component_aliases
1694     here because some frontends still change the aggregates after
1695     layout_type.  */
1696  if (AGGREGATE_TYPE_P (type) && TYPE_ALIAS_SET_KNOWN_P (type))
1697    TYPE_ALIAS_SET (type) = 0;
1698}
1699
1700/* Create and return a type for signed integers of PRECISION bits.  */
1701
1702tree
1703make_signed_type (precision)
1704     int precision;
1705{
1706  tree type = make_node (INTEGER_TYPE);
1707
1708  TYPE_PRECISION (type) = precision;
1709
1710  fixup_signed_type (type);
1711  return type;
1712}
1713
1714/* Create and return a type for unsigned integers of PRECISION bits.  */
1715
1716tree
1717make_unsigned_type (precision)
1718     int precision;
1719{
1720  tree type = make_node (INTEGER_TYPE);
1721
1722  TYPE_PRECISION (type) = precision;
1723
1724  fixup_unsigned_type (type);
1725  return type;
1726}
1727
1728/* Initialize sizetype and bitsizetype to a reasonable and temporary
1729   value to enable integer types to be created.  */
1730
1731void
1732initialize_sizetypes ()
1733{
1734  tree t = make_node (INTEGER_TYPE);
1735
1736  /* Set this so we do something reasonable for the build_int_2 calls
1737     below.  */
1738  integer_type_node = t;
1739
1740  TYPE_MODE (t) = SImode;
1741  TYPE_ALIGN (t) = GET_MODE_ALIGNMENT (SImode);
1742  TYPE_USER_ALIGN (t) = 0;
1743  TYPE_SIZE (t) = build_int_2 (GET_MODE_BITSIZE (SImode), 0);
1744  TYPE_SIZE_UNIT (t) = build_int_2 (GET_MODE_SIZE (SImode), 0);
1745  TREE_UNSIGNED (t) = 1;
1746  TYPE_PRECISION (t) = GET_MODE_BITSIZE (SImode);
1747  TYPE_MIN_VALUE (t) = build_int_2 (0, 0);
1748  TYPE_IS_SIZETYPE (t) = 1;
1749
1750  /* 1000 avoids problems with possible overflow and is certainly
1751     larger than any size value we'd want to be storing.  */
1752  TYPE_MAX_VALUE (t) = build_int_2 (1000, 0);
1753
1754  /* These two must be different nodes because of the caching done in
1755     size_int_wide.  */
1756  sizetype = t;
1757  bitsizetype = copy_node (t);
1758  integer_type_node = 0;
1759}
1760
/* Set sizetype to TYPE, and initialize *sizetype accordingly.
   Also update the type of any standard type's sizes made so far.

   This permanently replaces the temporary sizetype/bitsizetype set up
   by initialize_sizetypes; it may be called only once (sizetype_set
   guards against a second call).  */

void
set_sizetype (type)
     tree type;
{
  int oprecision = TYPE_PRECISION (type);
  /* The *bitsizetype types use a precision that avoids overflows when
     calculating signed sizes / offsets in bits.  However, when
     cross-compiling from a 32 bit to a 64 bit host, we are limited to 64 bit
     precision.  */
  int precision = MIN (oprecision + BITS_PER_UNIT_LOG + 1,
		       2 * HOST_BITS_PER_WIDE_INT);
  unsigned int i;
  tree t;

  if (sizetype_set)
    abort ();

  /* Make copies of nodes since we'll be setting TYPE_IS_SIZETYPE.  */
  sizetype = copy_node (type);
  TYPE_DOMAIN (sizetype) = type;
  TYPE_IS_SIZETYPE (sizetype) = 1;
  /* bitsizetype is built from scratch at the wider bit precision; its
     extreme values are filled in by the fixup call below and then it
     is laid out like any other integer type.  */
  bitsizetype = make_node (INTEGER_TYPE);
  TYPE_NAME (bitsizetype) = TYPE_NAME (type);
  TYPE_PRECISION (bitsizetype) = precision;
  TYPE_IS_SIZETYPE (bitsizetype) = 1;

  if (TREE_UNSIGNED (type))
    fixup_unsigned_type (bitsizetype);
  else
    fixup_signed_type (bitsizetype);

  layout_type (bitsizetype);

  /* Build the opposite-signedness variants; the pair matching TYPE's
     signedness simply aliases the nodes created above.  */
  if (TREE_UNSIGNED (type))
    {
      usizetype = sizetype;
      ubitsizetype = bitsizetype;
      ssizetype = copy_node (make_signed_type (oprecision));
      sbitsizetype = copy_node (make_signed_type (precision));
    }
  else
    {
      ssizetype = sizetype;
      sbitsizetype = bitsizetype;
      usizetype = copy_node (make_unsigned_type (oprecision));
      ubitsizetype = copy_node (make_unsigned_type (precision));
    }

  TYPE_NAME (bitsizetype) = get_identifier ("bit_size_type");

  /* Show is a sizetype, is a main type, and has no pointers to it.  */
  for (i = 0; i < ARRAY_SIZE (sizetype_tab); i++)
    {
      TYPE_IS_SIZETYPE (sizetype_tab[i]) = 1;
      TYPE_MAIN_VARIANT (sizetype_tab[i]) = sizetype_tab[i];
      TYPE_NEXT_VARIANT (sizetype_tab[i]) = 0;
      TYPE_POINTER_TO (sizetype_tab[i]) = 0;
      TYPE_REFERENCE_TO (sizetype_tab[i]) = 0;
    }

  /* Register the whole table with the garbage collector so the nodes
     are not reclaimed.  */
  ggc_add_tree_root ((tree *) &sizetype_tab,
		     sizeof sizetype_tab / sizeof (tree));

  /* Go down each of the types we already made and set the proper type
     for the sizes in them.  */
  for (t = early_type_list; t != 0; t = TREE_CHAIN (t))
    {
      /* NOTE(review): only INTEGER_TYPEs are expected here — presumably
	 nothing else can be laid out before set_sizetype runs; anything
	 else is a front-end bug, hence the abort.  */
      if (TREE_CODE (TREE_VALUE (t)) != INTEGER_TYPE)
	abort ();

      TREE_TYPE (TYPE_SIZE (TREE_VALUE (t))) = bitsizetype;
      TREE_TYPE (TYPE_SIZE_UNIT (TREE_VALUE (t))) = sizetype;
    }

  early_type_list = 0;
  sizetype_set = 1;
}
1841
/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  Used when make_signed_type won't do
   because the tree code is not INTEGER_TYPE.
   E.g. for Pascal, when the -fsigned-char option is given.  */

void
fixup_signed_type (type)
     tree type;
{
  int precision = TYPE_PRECISION (type);

  /* We can not represent properly constants greater then
     2 * HOST_BITS_PER_WIDE_INT, still we need the types
     as they are used by i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  /* Build -2**(precision-1) as a two-word constant: the first argument
     of build_int_2 is the low HOST_WIDE_INT, the second the high word.
     When the precision fits in one word the whole value lives in the
     low word and the high word is all ones (sign extension).  */
  TYPE_MIN_VALUE (type)
    = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
		    ? 0 : (HOST_WIDE_INT) (-1) << (precision - 1)),
		   (((HOST_WIDE_INT) (-1)
		     << (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
			 ? precision - HOST_BITS_PER_WIDE_INT - 1
			 : 0))));
  /* Likewise 2**(precision-1) - 1 split into low and high words.  */
  TYPE_MAX_VALUE (type)
    = build_int_2 ((precision - HOST_BITS_PER_WIDE_INT > 0
		    ? -1 : ((HOST_WIDE_INT) 1 << (precision - 1)) - 1),
		   (precision - HOST_BITS_PER_WIDE_INT - 1 > 0
		    ? (((HOST_WIDE_INT) 1
			<< (precision - HOST_BITS_PER_WIDE_INT - 1))) - 1
		    : 0));

  /* The bounds must be typed as TYPE itself.  */
  TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
  TREE_TYPE (TYPE_MAX_VALUE (type)) = type;

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
1880
/* Set the extreme values of TYPE based on its precision in bits,
   then lay it out.  This is used both in `make_unsigned_type'
   and for enumeral types.  */

void
fixup_unsigned_type (type)
     tree type;
{
  int precision = TYPE_PRECISION (type);

  /* We can not represent properly constants greater then
     2 * HOST_BITS_PER_WIDE_INT, still we need the types
     as they are used by i386 vector extensions and friends.  */
  if (precision > HOST_BITS_PER_WIDE_INT * 2)
    precision = HOST_BITS_PER_WIDE_INT * 2;

  TYPE_MIN_VALUE (type) = build_int_2 (0, 0);
  /* Build 2**precision - 1 as a two-word constant: build_int_2 takes
     the low HOST_WIDE_INT first, the high word second.  A precision of
     a full word or more makes the low word all ones.  */
  TYPE_MAX_VALUE (type)
    = build_int_2 (precision - HOST_BITS_PER_WIDE_INT >= 0
		   ? -1 : ((HOST_WIDE_INT) 1 << precision) - 1,
		   precision - HOST_BITS_PER_WIDE_INT > 0
		   ? ((unsigned HOST_WIDE_INT) ~0
		      >> (HOST_BITS_PER_WIDE_INT
			  - (precision - HOST_BITS_PER_WIDE_INT)))
		   : 0);
  /* The bounds must be typed as TYPE itself.  */
  TREE_TYPE (TYPE_MIN_VALUE (type)) = type;
  TREE_TYPE (TYPE_MAX_VALUE (type)) = type;

  /* Lay out the type: set its alignment, size, etc.  */
  layout_type (type);
}
1912
1913/* Find the best machine mode to use when referencing a bit field of length
1914   BITSIZE bits starting at BITPOS.
1915
1916   The underlying object is known to be aligned to a boundary of ALIGN bits.
1917   If LARGEST_MODE is not VOIDmode, it means that we should not use a mode
1918   larger than LARGEST_MODE (usually SImode).
1919
1920   If no mode meets all these conditions, we return VOIDmode.  Otherwise, if
1921   VOLATILEP is true or SLOW_BYTE_ACCESS is false, we return the smallest
1922   mode meeting these conditions.
1923
1924   Otherwise (VOLATILEP is false and SLOW_BYTE_ACCESS is true), we return
1925   the largest mode (but a mode no wider than UNITS_PER_WORD) that meets
1926   all the conditions.  */
1927
1928enum machine_mode
1929get_best_mode (bitsize, bitpos, align, largest_mode, volatilep)
1930     int bitsize, bitpos;
1931     unsigned int align;
1932     enum machine_mode largest_mode;
1933     int volatilep;
1934{
1935  enum machine_mode mode;
1936  unsigned int unit = 0;
1937
1938  /* Find the narrowest integer mode that contains the bit field.  */
1939  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1940       mode = GET_MODE_WIDER_MODE (mode))
1941    {
1942      unit = GET_MODE_BITSIZE (mode);
1943      if ((bitpos % unit) + bitsize <= unit)
1944	break;
1945    }
1946
1947  if (mode == VOIDmode
1948      /* It is tempting to omit the following line
1949	 if STRICT_ALIGNMENT is true.
1950	 But that is incorrect, since if the bitfield uses part of 3 bytes
1951	 and we use a 4-byte mode, we could get a spurious segv
1952	 if the extra 4th byte is past the end of memory.
1953	 (Though at least one Unix compiler ignores this problem:
1954	 that on the Sequent 386 machine.  */
1955      || MIN (unit, BIGGEST_ALIGNMENT) > align
1956      || (largest_mode != VOIDmode && unit > GET_MODE_BITSIZE (largest_mode)))
1957    return VOIDmode;
1958
1959  if (SLOW_BYTE_ACCESS && ! volatilep)
1960    {
1961      enum machine_mode wide_mode = VOIDmode, tmode;
1962
1963      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT); tmode != VOIDmode;
1964	   tmode = GET_MODE_WIDER_MODE (tmode))
1965	{
1966	  unit = GET_MODE_BITSIZE (tmode);
1967	  if (bitpos / unit == (bitpos + bitsize - 1) / unit
1968	      && unit <= BITS_PER_WORD
1969	      && unit <= MIN (align, BIGGEST_ALIGNMENT)
1970	      && (largest_mode == VOIDmode
1971		  || unit <= GET_MODE_BITSIZE (largest_mode)))
1972	    wide_mode = tmode;
1973	}
1974
1975      if (wide_mode != VOIDmode)
1976	return wide_mode;
1977    }
1978
1979  return mode;
1980}
1981
/* This function is run once to initialize stor-layout.c.  */

void
init_stor_layout_once ()
{
  /* Protect the pending_sizes chain from garbage collection.  */
  ggc_add_tree_root (&pending_sizes, 1);
}
1989